forked from phoenix/litellm-mirror
fix(utils.py): fix recreating model response object when stream usage is true
parent e112379d2f
commit b2e46086dd
3 changed files with 88 additions and 17 deletions
@@ -573,6 +573,8 @@ class ModelResponse(OpenAIObject):
                     _new_choice = choice  # type: ignore
+                elif isinstance(choice, dict):
+                    _new_choice = Choices(**choice)  # type: ignore
                 else:
                     _new_choice = choice
                 new_choices.append(_new_choice)
             choices = new_choices
         else:
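For context, the hunk above sits inside the loop that normalizes the choices list when a ModelResponse is recreated (for example from serialized data after a streamed response with usage). Below is a minimal, self-contained sketch of that pattern; the Choices class and normalize_choices helper here are simplified stand-ins for illustration, not litellm's actual implementation.

from typing import Any, Dict, List, Union


class Choices:
    """Simplified stand-in for litellm's Choices object (hypothetical)."""

    def __init__(self, index: int = 0, message: Union[Dict[str, Any], None] = None, **kwargs: Any) -> None:
        self.index = index
        self.message = message or {}
        # Store any extra provider-specific fields as attributes.
        for key, value in kwargs.items():
            setattr(self, key, value)


def normalize_choices(choices: List[Any]) -> List[Any]:
    """Rebuild a choices list so dict entries become Choices objects."""
    new_choices = []
    for choice in choices:
        if isinstance(choice, Choices):
            _new_choice = choice
        elif isinstance(choice, dict):
            # The pattern added in the hunk: recreate the typed object
            # from its serialized form instead of keeping a plain dict.
            _new_choice = Choices(**choice)
        else:
            _new_choice = choice
        new_choices.append(_new_choice)
    return new_choices


# A choice arriving as a plain dict (e.g. a response rebuilt from
# serialized data) is converted back into a Choices instance.
raw = [{"index": 0, "message": {"role": "assistant", "content": "hi"}}]
print(type(normalize_choices(raw)[0]).__name__)  # -> Choices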