From 1feb6317f670cc7b1609af6f2bbcdb2c822a701b Mon Sep 17 00:00:00 2001
From: Mariusz Woloszyn
Date: Wed, 13 Dec 2023 21:42:35 +0100
Subject: [PATCH] Fix #1119, no content when streaming.

---
 litellm/llms/ollama.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/litellm/llms/ollama.py b/litellm/llms/ollama.py
index b1bf77f0e..ad2ae03b8 100644
--- a/litellm/llms/ollama.py
+++ b/litellm/llms/ollama.py
@@ -221,6 +221,7 @@ async def ollama_async_streaming(url, data, model_response, encoding, logging_obj


 async def ollama_acompletion(url, data, model_response, encoding, logging_obj):
+    data["stream"] = False
     try:
         timeout = aiohttp.ClientTimeout(total=600)  # 10 minutes
         async with aiohttp.ClientSession(timeout=timeout) as session:
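
Context for the change above: a minimal sketch, not part of the patch, of why forcing data["stream"] = False matters in the non-streaming async path. When the stream flag is left on, Ollama's /api/generate endpoint replies with newline-delimited JSON chunks, so reading the body as a single JSON object yields no usable content. The helper name, endpoint URL, and payload below are illustrative assumptions; only the stream flag and the 10-minute timeout mirror the patched function.

# Hypothetical standalone sketch; not litellm code.
import asyncio
import aiohttp


async def fetch_ollama_completion(url: str, data: dict) -> str:
    # With "stream": True, Ollama returns newline-delimited JSON chunks,
    # so a single resp.json() would not produce the full completion.
    # Forcing non-streaming mode yields one JSON object with "response".
    data["stream"] = False
    timeout = aiohttp.ClientTimeout(total=600)  # 10 minutes, as in ollama_acompletion
    async with aiohttp.ClientSession(timeout=timeout) as session:
        async with session.post(url, json=data) as resp:
            resp.raise_for_status()
            body = await resp.json()
            return body["response"]


if __name__ == "__main__":
    # Illustrative usage against a local Ollama server; model and prompt are placeholders.
    text = asyncio.run(
        fetch_ollama_completion(
            "http://localhost:11434/api/generate",
            {"model": "llama2", "prompt": "Hello"},
        )
    )
    print(text)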