fix(utils.py): handle finish reason logic
parent 9f5ba67f5d
commit 1ebc7bb3b7

1 changed file with 5 additions and 2 deletions
utils.py

@@ -10191,6 +10191,9 @@ class CustomStreamWrapper:
                     try:
                         if isinstance(choice, BaseModel):
                             choice_json = choice.model_dump()
+                            choice_json.pop(
+                                "finish_reason", None
+                            )  # for mistral etc. which return a value in their last chunk (not-openai compatible).
                             choices.append(StreamingChoices(**choice_json))
                     except Exception as e:
                         choices.append(StreamingChoices())
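For context, the pattern the added lines apply can be reproduced in isolation. The sketch below uses illustrative class names (ProviderChoice and StreamingChoicesSketch are not litellm classes) and assumes pydantic v2 for model_dump(): the provider's choice object is dumped to a dict, the provider-supplied finish_reason is dropped, and an OpenAI-style choice is rebuilt, so the stream wrapper alone decides when a finish reason is emitted.

# Minimal sketch of the pattern in the hunk above; hypothetical names, not
# litellm's actual classes. Assumes pydantic v2 (model_dump). Some providers
# (e.g. Mistral) put a finish_reason on their chunks in a non-OpenAI-compatible
# way, so it is stripped before the choice is rebuilt.
from typing import Optional

from pydantic import BaseModel


class ProviderChoice(BaseModel):
    # hypothetical shape of a provider's streaming choice
    index: int = 0
    delta: dict = {}
    finish_reason: Optional[str] = None


class StreamingChoicesSketch(BaseModel):
    # stand-in for litellm's StreamingChoices
    index: int = 0
    delta: dict = {}
    finish_reason: Optional[str] = None


def to_openai_choice(choice: ProviderChoice) -> StreamingChoicesSketch:
    try:
        if isinstance(choice, BaseModel):
            choice_json = choice.model_dump()
            # Drop the provider-supplied finish_reason; the wrapper sets it
            # itself once the stream is actually done.
            choice_json.pop("finish_reason", None)
            return StreamingChoicesSketch(**choice_json)
    except Exception:
        pass
    return StreamingChoicesSketch()  # safe fallback, as in the except branch above


converted = to_openai_choice(ProviderChoice(delta={"content": "Hi"}, finish_reason="stop"))
assert converted.finish_reason is None  # provider's premature finish_reason is gone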
@@ -10239,11 +10242,11 @@ class CustomStreamWrapper:
                         )
                         self.holding_chunk = ""
                     # if delta is None
-                    is_delta_empty = self.is_delta_empty(
+                    _is_delta_empty = self.is_delta_empty(
                         delta=model_response.choices[0].delta
                     )

-                    if is_delta_empty:
+                    if _is_delta_empty:
                         # get any function call arguments
                         model_response.choices[0].finish_reason = map_finish_reason(
                             finish_reason=self.received_finish_reason
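The renamed _is_delta_empty flag comes from self.is_delta_empty(...), whose body is outside this hunk. As a rough idea of what such a check looks like, here is an illustrative sketch, assuming a delta exposes content, function_call, and tool_calls attributes; this is not litellm's actual method.

# Illustrative sketch only; not CustomStreamWrapper.is_delta_empty from utils.py.
# A streaming delta is "empty" when it carries no text and no partial
# function/tool call, i.e. the chunk only signals metadata such as a finish reason.
def is_delta_empty_sketch(delta) -> bool:
    has_content = bool(getattr(delta, "content", None))
    has_function_call = getattr(delta, "function_call", None) is not None
    has_tool_calls = bool(getattr(delta, "tool_calls", None))
    return not (has_content or has_function_call or has_tool_calls)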
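map_finish_reason is likewise defined elsewhere; conceptually it normalizes provider-specific finish reasons to the OpenAI vocabulary before the wrapper stamps them on the final chunk. Below is a minimal stand-in with an illustrative, not exhaustive, mapping table.

# Minimal stand-in for map_finish_reason; the mapping table is illustrative,
# not litellm's full table. The point is normalization: whatever the provider
# reports, the final OpenAI-compatible chunk should carry "stop", "length",
# "tool_calls", etc.
_FINISH_REASON_MAP = {
    "stop_sequence": "stop",
    "eos": "stop",
    "max_tokens": "length",
    "tool_use": "tool_calls",
}


def map_finish_reason_sketch(finish_reason: str) -> str:
    # Unknown values pass through unchanged.
    return _FINISH_REASON_MAP.get(finish_reason, finish_reason)


# Mirrors the diff: only when the delta is empty AND a finish reason was
# received does the wrapper set choices[0].finish_reason to the mapped value.
assert map_finish_reason_sketch("stop_sequence") == "stop"
assert map_finish_reason_sketch("stop") == "stop"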