diff --git a/litellm/llms/ollama_chat.py b/litellm/llms/ollama_chat.py
index cfcc883fa..16d234f7a 100644
--- a/litellm/llms/ollama_chat.py
+++ b/litellm/llms/ollama_chat.py
@@ -263,7 +263,7 @@ async def ollama_async_streaming(url, data, model_response, encoding, logging_ob
     streamwrapper = litellm.CustomStreamWrapper(
         completion_stream=response.aiter_lines(),
         model=data["model"],
-        custom_llm_provider="ollama",
+        custom_llm_provider="ollama_chat",
         logging_obj=logging_obj,
     )
     async for transformed_chunk in streamwrapper: