Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 11:14:04 +00:00)
fix(utils.py): don't send subsequent chunks if last chunk sent
prevents multiple empty finish chunks from being sent
commit 3a82ff2ef2
parent 0f432dfb4b
2 changed files with 30 additions and 0 deletions
litellm/utils.py

@@ -9531,6 +9531,9 @@ class CustomStreamWrapper:
                 else:
                     return
             elif self.received_finish_reason is not None:
+                if self.sent_last_chunk == True:
+                    raise StopIteration
+
                 # flush any remaining holding chunk
                 if len(self.holding_chunk) > 0:
                     if model_response.choices[0].delta.content is None:
@@ -9544,6 +9547,7 @@ class CustomStreamWrapper:
                 is_delta_empty = self.is_delta_empty(
                     delta=model_response.choices[0].delta
                 )
+
                 if is_delta_empty:
                     # get any function call arguments
                     model_response.choices[0].finish_reason = map_finish_reason(
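The guard added in the first hunk is the core of the fix: once the stream has emitted its final chunk, further iteration must stop rather than yield additional empty finish chunks. A minimal sketch of the same pattern, independent of litellm (the dict-shaped chunks and the StreamWrapper class below are illustrative, not litellm's actual ModelResponse API):

# Minimal sketch of the sent_last_chunk guard; the chunk format and
# this StreamWrapper class are assumptions for illustration, not
# litellm's actual API.
class StreamWrapper:
    def __init__(self, chunks):
        self._iter = iter(chunks)
        self.received_finish_reason = None
        self.sent_last_chunk = False

    def __iter__(self):
        return self

    def __next__(self):
        if self.received_finish_reason is not None:
            # The fix: once the final chunk has been sent, stop
            # instead of emitting another empty finish chunk.
            if self.sent_last_chunk:
                raise StopIteration
            self.sent_last_chunk = True
            return {"content": "", "finish_reason": self.received_finish_reason}
        chunk = next(self._iter)  # StopIteration propagates when exhausted
        if chunk.get("finish_reason") is not None:
            self.received_finish_reason = chunk["finish_reason"]
        return {"content": chunk.get("content", ""), "finish_reason": None}

Without the sent_last_chunk check, every call after the finish chunk would return another empty finish chunk instead of terminating the consumer's loop, which is exactly the symptom the commit message describes.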
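The context in the second hunk passes the stored finish reason through map_finish_reason before setting it on the response; in litellm this helper normalizes provider-specific finish reasons to OpenAI's vocabulary. The mapping table below is an illustrative sketch of that idea, not litellm's real table:

def map_finish_reason(finish_reason: str) -> str:
    # Illustrative normalization of provider-specific finish reasons
    # to OpenAI-style values; these entries are assumptions, not
    # litellm's actual mapping.
    mapping = {
        "stop_sequence": "stop",   # e.g. an Anthropic-style stop
        "eos_token": "stop",
        "max_tokens": "length",
        "MAX_TOKENS": "length",
    }
    return mapping.get(finish_reason, finish_reason)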