From 1a7ffbe7b883a8d9ee668fc2346050a42016ff04 Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Wed, 11 Oct 2023 16:59:59 -0700
Subject: [PATCH] (fix) Ollama use new streaming format

---
 litellm/main.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/litellm/main.py b/litellm/main.py
index 353438ea57..aebb273ce8 100644
--- a/litellm/main.py
+++ b/litellm/main.py
@@ -1040,7 +1040,7 @@ def completion(
         else:
             response_string = ""
             for chunk in generator:
-                response_string+=chunk['choices'][0]['delta']['content']
+                response_string+=chunk['content']
 
             ## RESPONSE OBJECT
             model_response["choices"][0]["message"]["content"] = response_string
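
The hunk above suggests that the Ollama streaming generator now yields plain dicts whose text sits directly under a 'content' key, rather than OpenAI-style chunks nested as chunk['choices'][0]['delta']['content']. The sketch below is a minimal, self-contained illustration of that accumulation step under this assumption; collect_stream and the stand-in generator are hypothetical names, not part of the litellm API.

# Minimal sketch, assuming each streamed chunk is shaped like {"content": "..."}.
def collect_stream(generator):
    """Concatenate the text of every streamed chunk into one string."""
    response_string = ""
    for chunk in generator:
        # New format: read the chunk text directly from 'content'.
        response_string += chunk["content"]
    return response_string

# Hypothetical usage with a stand-in generator (not a real Ollama call):
fake_stream = iter([{"content": "Hello"}, {"content": ", "}, {"content": "world"}])
print(collect_stream(fake_stream))  # -> "Hello, world"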