fix(vertex_ai_llama3.py): Fix llama3 streaming issue

Closes https://github.com/BerriAI/litellm/issues/4885
This commit is contained in:
Krrish Dholakia 2024-07-25 22:30:55 -07:00
parent 0ce5a7962e
commit ce210ddaf6
2 changed files with 2 additions and 3 deletions

View file

@@ -103,7 +103,8 @@ class VertexAILlama3Config:
for param, value in non_default_params.items():
if param == "max_tokens":
optional_params["max_tokens"] = value
if param == "stream":
optional_params["stream"] = value
return optional_params