fix(gemini.py): support streaming

Krrish Dholakia 2024-01-19 19:26:23 -08:00
parent b59e67f099
commit b07677c6be
4 changed files with 67 additions and 13 deletions


@@ -1382,6 +1382,18 @@ def completion(
                 acompletion=acompletion,
                 custom_prompt_dict=custom_prompt_dict,
             )
+            if (
+                "stream" in optional_params
+                and optional_params["stream"] == True
+                and acompletion == False
+            ):
+                response = CustomStreamWrapper(
+                    iter(model_response),
+                    model,
+                    custom_llm_provider="gemini",
+                    logging_obj=logging,
+                )
+                return response
             response = model_response
         elif custom_llm_provider == "vertex_ai":
             vertex_ai_project = litellm.vertex_project or get_secret("VERTEXAI_PROJECT")
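The added block wraps the raw generator returned by the gemini handler in CustomStreamWrapper when stream=True is passed on a synchronous call, so callers get the usual chunked interface instead of the provider's raw iterator. A minimal usage sketch of that path, assuming the "gemini/gemini-pro" model alias, the GEMINI_API_KEY environment variable, and the OpenAI-style delta chunk shape (none of these are taken from this diff):

# Hedged sketch: stream a Gemini completion through litellm.
# "gemini/gemini-pro" and GEMINI_API_KEY are assumptions, not part of this commit.
import os
import litellm

os.environ["GEMINI_API_KEY"] = "your-api-key"  # assumption: key is read from the environment

response = litellm.completion(
    model="gemini/gemini-pro",
    messages=[{"role": "user", "content": "Write a haiku about streaming."}],
    stream=True,  # takes the CustomStreamWrapper branch added above
)

# The wrapper iterates OpenAI-style chunks; delta.content may be None on some chunks.
for chunk in response:
    piece = chunk.choices[0].delta.content or ""
    print(piece, end="", flush=True)

Note that the new branch only applies to synchronous calls (acompletion == False); the async streaming path is handled separately.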