Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-25 18:54:30 +00:00.
fix(ollama.py): raise async errors
This commit is contained in:
parent
9eb487efb3
commit
5936664a16
1 changed file with 8 additions and 5 deletions
|
@ -1572,11 +1572,14 @@ def client(original_function):
|
|||
|
||||
def post_call_processing(original_response, model):
    """Run user-defined post-call rules against a completion response.

    For completion/acompletion call types, extracts the assistant message
    content from *original_response* and hands it to
    ``rules_obj.post_call_rules`` so registered rules can inspect it.
    A ``None`` response is skipped entirely — presumably the error was
    already raised on the async path (TODO confirm against caller).

    Args:
        original_response: Raw response object from the underlying call,
            or ``None``. Assumed to be subscriptable like an OpenAI-style
            dict (``response['choices'][0]['message']['content']``) for
            (a)completion call types.
        model: Model name string forwarded to the rule callbacks.

    Raises:
        Exception: anything raised while extracting content or running
            the rules is re-raised with its original traceback intact.
    """
    try:
        # Guard clause: nothing to inspect when there is no response.
        if original_response is None:
            return
        call_type = original_function.__name__
        # Post-call rules only apply to text (a)completion responses.
        if call_type in (CallTypes.completion.value, CallTypes.acompletion.value):
            model_response = original_response['choices'][0]['message']['content']
            ### POST-CALL RULES ###
            rules_obj.post_call_rules(input=model_response, model=model)
    except Exception:
        # Bare `raise` preserves the active exception and its traceback;
        # the original `raise e` rebinds the name for no benefit.
        raise
|
||||
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue