From 200e1232e23a07d5cd7a34b34ef42c4e68f45751 Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Tue, 29 Aug 2023 09:48:52 -0700
Subject: [PATCH] vertex ai fix

---
 litellm/main.py | 8 ++------
 1 file changed, 2 insertions(+), 6 deletions(-)

diff --git a/litellm/main.py b/litellm/main.py
index 354c5591a9..6cdce68f35 100644
--- a/litellm/main.py
+++ b/litellm/main.py
@@ -612,8 +612,6 @@ def completion(
             }
             response = model_response
         elif model in litellm.vertex_chat_models:
-            # import vertexai/if it fails then pip install vertexai# import cohere/if it fails then pip install cohere
-            install_and_import("vertexai")
             import vertexai
             from vertexai.preview.language_models import ChatModel, InputOutputTextPair

@@ -637,12 +635,10 @@ def completion(
             )

             ## RESPONSE OBJECT
-            model_response["choices"][0]["message"]["content"] = completion_response
+            model_response["choices"][0]["message"]["content"] = str(completion_response)
             model_response["created"] = time.time()
             model_response["model"] = model
         elif model in litellm.vertex_text_models:
-            # import vertexai/if it fails then pip install vertexai# import cohere/if it fails then pip install cohere
-            install_and_import("vertexai")
             import vertexai
             from vertexai.language_models import TextGenerationModel

@@ -663,7 +659,7 @@ def completion(
                 input=prompt, api_key=None, original_response=completion_response
             )
             ## RESPONSE OBJECT
-            model_response["choices"][0]["message"]["content"] = completion_response
+            model_response["choices"][0]["message"]["content"] = str(completion_response)
             model_response["created"] = time.time()
             model_response["model"] = model
             response = model_response
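
Usage note: the Vertex AI SDK returns response objects from chat.send_message() and TextGenerationModel.predict() rather than plain strings, so the str() cast above keeps the OpenAI-style "content" field a string. Below is a minimal sketch of the call path this patch affects, assuming vertexai is installed, GCP application-default credentials are configured, and that "chat-bison" is available in litellm.vertex_chat_models; the project id and location values are illustrative placeholders.

import litellm

# Hypothetical GCP settings; substitute your own project and region.
litellm.vertex_project = "my-gcp-project"
litellm.vertex_location = "us-central1"

response = litellm.completion(
    model="chat-bison",  # a model listed in litellm.vertex_chat_models
    messages=[{"role": "user", "content": "Hello from Vertex AI"}],
)

# With the str(...) cast applied in completion(), this field is plain text
# instead of the SDK's response object.
content = response["choices"][0]["message"]["content"]
assert isinstance(content, str)
print(content)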