From 61185aa12c5daeef2b54be6dabbaf0011e6a20b8 Mon Sep 17 00:00:00 2001
From: Krrish Dholakia
Date: Wed, 29 Nov 2023 16:49:59 -0800
Subject: [PATCH] fix(main.py): fix null finish reason issue for ollama

---
 litellm/main.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/litellm/main.py b/litellm/main.py
index baec460d58..03807b9d7d 100644
--- a/litellm/main.py
+++ b/litellm/main.py
@@ -369,7 +369,6 @@ def completion(
                 model
             ]  # update the model to the actual value if an alias has been passed in
         model_response = ModelResponse()
-
         if kwargs.get('azure', False) == True: # don't remove flag check, to remain backwards compatible for repos like Codium
             custom_llm_provider="azure"
         if deployment_id != None: # azure llms
@@ -1263,6 +1262,7 @@ def completion(
                 response_string+=chunk['content']
 
             ## RESPONSE OBJECT
+            model_response["choices"][0]["finish_reason"] = "stop"
             model_response["choices"][0]["message"]["content"] = response_string
             model_response["created"] = int(time.time())
             model_response["model"] = "ollama/" + model
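
For reference, a minimal sketch (not part of the patch) of how the change surfaces to callers: after this fix, a non-streaming Ollama completion should report finish_reason == "stop" instead of None. The local api_base URL and the "llama2" model name below are illustrative assumptions only.

import litellm

# Assumes a local Ollama server on its default port with a pulled model;
# both the api_base and the model name here are assumptions for illustration.
response = litellm.completion(
    model="ollama/llama2",
    messages=[{"role": "user", "content": "Say hello"}],
    api_base="http://localhost:11434",
)

# Before this patch, finish_reason on the Ollama response object could be
# None; with the added line it is populated with "stop".
print(response["choices"][0]["finish_reason"])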