From 65b07bcb8c737152f26afa6b51cb3919a05007c3 Mon Sep 17 00:00:00 2001 From: Rajan Paneru Date: Fri, 10 May 2024 22:12:32 +0930 Subject: [PATCH] Preserving the Pydantic Message Object The following statement replaces the Pydantic Message object and initializes it with the dict model_response["choices"][0]["message"] = response_json["message"] We need to make sure message is always a litellm.Message object As a fix, based on the code of the ollama.py file, I am updating just the content instead of the entire object for both sync and async functions --- litellm/llms/ollama_chat.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/litellm/llms/ollama_chat.py b/litellm/llms/ollama_chat.py index 866761905..d1ff4953f 100644 --- a/litellm/llms/ollama_chat.py +++ b/litellm/llms/ollama_chat.py @@ -300,7 +300,7 @@ def get_ollama_response( model_response["choices"][0]["message"] = message model_response["choices"][0]["finish_reason"] = "tool_calls" else: - model_response["choices"][0]["message"] = response_json["message"] + model_response["choices"][0]["message"]["content"] = response_json["message"]["content"] model_response["created"] = int(time.time()) model_response["model"] = "ollama/" + model prompt_tokens = response_json.get("prompt_eval_count", litellm.token_counter(messages=messages)) # type: ignore @@ -484,7 +484,7 @@ async def ollama_acompletion( model_response["choices"][0]["message"] = message model_response["choices"][0]["finish_reason"] = "tool_calls" else: - model_response["choices"][0]["message"] = response_json["message"] + model_response["choices"][0]["message"]["content"] = response_json["message"]["content"] model_response["created"] = int(time.time()) model_response["model"] = "ollama_chat/" + data["model"]