diff --git a/litellm/llms/ollama_chat.py b/litellm/llms/ollama_chat.py
index cebdbb74ab..11e08fb725 100644
--- a/litellm/llms/ollama_chat.py
+++ b/litellm/llms/ollama_chat.py
@@ -173,8 +173,9 @@ class OllamaChatConfig:
             litellm.add_function_to_prompt = (
                 True  # so that main.py adds the function call to the prompt
             )
-            optional_params["functions_unsupported_model"] = non_default_params.pop(
+            optional_params["functions_unsupported_model"] = non_default_params.get(
                 "functions"
             )
         non_default_params.pop("tool_choice", None)  # causes ollama requests to hang
+        non_default_params.pop("functions", None)  # causes ollama requests to hang
         return optional_params