fix(vertex_ai.py): add support for real async streaming + completion calls

This commit is contained in:
Krrish Dholakia 2023-12-13 11:53:55 -08:00
parent 07015843ac
commit 69c29f8f86
5 changed files with 134 additions and 49 deletions

View file

@@ -1157,7 +1157,7 @@ def completion(
acompletion=acompletion
)
-    if "stream" in optional_params and optional_params["stream"] == True:
+    if "stream" in optional_params and optional_params["stream"] == True and acompletion == False:
response = CustomStreamWrapper(
model_response, model, custom_llm_provider="vertex_ai", logging_obj=logging
)