Merge pull request #3267 from BerriAI/litellm_openai_streaming_fix

fix(utils.py): fix streaming to not return usage dict
This commit is contained in:
Krish Dholakia 2024-04-24 21:08:33 -07:00 committed by GitHub
commit 435a4b5ed4
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
25 changed files with 216 additions and 5301 deletions

View file

@@ -789,7 +789,7 @@ def completion(
completion_tokens=completion_tokens,
total_tokens=prompt_tokens + completion_tokens,
)
model_response.usage = usage
setattr(model_response, "usage", usage)
return model_response
except Exception as e:
raise VertexAIError(status_code=500, message=str(e))
@@ -997,7 +997,7 @@ async def async_completion(
completion_tokens=completion_tokens,
total_tokens=prompt_tokens + completion_tokens,
)
model_response.usage = usage
setattr(model_response, "usage", usage)
return model_response
except Exception as e:
raise VertexAIError(status_code=500, message=str(e))