forked from phoenix/litellm-mirror
fix(utils.py): fix streaming to not return usage dict
Fixes https://github.com/BerriAI/litellm/issues/3237
parent 70c98617da
commit 48c2c3d78a
24 changed files with 107 additions and 83 deletions
@@ -104,7 +104,7 @@ def completion(
         completion_tokens=completion_tokens,
         total_tokens=prompt_tokens + completion_tokens,
     )
-    model_response.usage = usage
+    setattr(model_response, "usage", usage)
     return model_response


@@ -186,7 +186,7 @@ def batch_completions(
         completion_tokens=completion_tokens,
         total_tokens=prompt_tokens + completion_tokens,
     )
-    model_response.usage = usage
+    setattr(model_response, "usage", usage)
     final_outputs.append(model_response)
     return final_outputs
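Both hunks make the same one-line change: the usage object is attached to the response via setattr instead of a direct attribute assignment. The sketch below only illustrates that pattern; ModelResponse and Usage here are hypothetical stand-ins, not litellm's actual classes.

# Sketch of the pattern in this commit; ModelResponse and Usage are
# hypothetical stand-ins, not litellm's real types.
class Usage:
    def __init__(self, prompt_tokens, completion_tokens, total_tokens):
        self.prompt_tokens = prompt_tokens
        self.completion_tokens = completion_tokens
        self.total_tokens = total_tokens


class ModelResponse:
    pass


prompt_tokens, completion_tokens = 12, 30
usage = Usage(
    prompt_tokens=prompt_tokens,
    completion_tokens=completion_tokens,
    total_tokens=prompt_tokens + completion_tokens,
)

model_response = ModelResponse()
# Old form: model_response.usage = usage
# New form sets the same attribute dynamically:
setattr(model_response, "usage", usage)
assert model_response.usage.total_tokens == 42

For a plain Python object the two forms behave identically; the diff itself does not show why litellm prefers setattr here, so the sketch only demonstrates the mechanics of the change.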