From 751d57379dd19b7e83e042569b01bdcbad1c8631 Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Tue, 26 Dec 2023 19:56:32 +0530
Subject: [PATCH] (fix) support ollama_chat for acompletion

---
 litellm/main.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/litellm/main.py b/litellm/main.py
index 00d86ca20..d0cebaffd 100644
--- a/litellm/main.py
+++ b/litellm/main.py
@@ -196,6 +196,7 @@ async def acompletion(*args, **kwargs):
         or custom_llm_provider == "text-completion-openai"
         or custom_llm_provider == "huggingface"
         or custom_llm_provider == "ollama"
+        or custom_llm_provider == "ollama_chat"
         or custom_llm_provider == "vertex_ai"
     ):  # currently implemented aiohttp calls for just azure and openai, soon all.
         if kwargs.get("stream", False):
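
A minimal usage sketch (not part of the patch) of the call path this change enables: with "ollama_chat" added to the provider check, acompletion takes the natively async branch noted in the diff comment. The model name below is illustrative and assumes a local Ollama server with that model pulled.

```python
import asyncio
import litellm

async def main():
    # After this patch, the "ollama_chat" provider is handled in the
    # async-implemented branch of acompletion rather than falling through.
    response = await litellm.acompletion(
        model="ollama_chat/llama2",  # hypothetical model; assumes `ollama pull llama2`
        messages=[{"role": "user", "content": "Hello!"}],
    )
    print(response.choices[0].message.content)

asyncio.run(main())
```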