diff --git a/litellm/llms/ollama_chat.py b/litellm/llms/ollama_chat.py
index 8378a95ff..cebdbb74a 100644
--- a/litellm/llms/ollama_chat.py
+++ b/litellm/llms/ollama_chat.py
@@ -134,6 +134,7 @@ class OllamaChatConfig:
             "tools",
             "tool_choice",
             "functions",
+            "response_format",
         ]

     def map_openai_params(self, non_default_params: dict, optional_params: dict):
@@ -150,6 +151,8 @@ class OllamaChatConfig:
                 optional_params["repeat_penalty"] = param
             if param == "stop":
                 optional_params["stop"] = value
+            if param == "response_format" and value["type"] == "json_object":
+                optional_params["format"] = "json"
             ### FUNCTION CALLING LOGIC ###
             if param == "tools":
                 # ollama actually supports json output
diff --git a/litellm/utils.py b/litellm/utils.py
index bcef061d6..a8c003181 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -4860,6 +4860,8 @@ def get_optional_params(
             optional_params["repeat_penalty"] = frequency_penalty
         if stop is not None:
             optional_params["stop"] = stop
+        if response_format is not None and response_format["type"] == "json_object":
+            optional_params["format"] = "json"
     elif custom_llm_provider == "ollama_chat":
         supported_params = litellm.OllamaChatConfig().get_supported_openai_params()

@@ -5320,6 +5322,7 @@ def get_supported_openai_params(model: str, custom_llm_provider: str):
             "temperature",
             "frequency_penalty",
             "stop",
+            "response_format",
         ]
     elif custom_llm_provider == "nlp_cloud":
         return [
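
The patch adds `response_format` to the supported OpenAI params for both the `ollama` and `ollama_chat` providers, and maps an OpenAI-style `{"type": "json_object"}` value onto Ollama's native `format: "json"` option. A minimal sketch of how the new mapping can be exercised, first at the unit level through `OllamaChatConfig.map_openai_params` (whose signature appears in the diff above), then end to end via `litellm.completion`; the model name `llama2`, the prompt, and the assumption of a local Ollama server with that model pulled are illustrative, not part of the patch:

```python
import litellm

# Unit-level check of the new branch in OllamaChatConfig.map_openai_params:
# an OpenAI-style response_format of {"type": "json_object"} should be
# translated into Ollama's native format="json" option.
optional_params: dict = {}
litellm.OllamaChatConfig().map_openai_params(
    non_default_params={"response_format": {"type": "json_object"}},
    optional_params=optional_params,
)
assert optional_params["format"] == "json"

# End-to-end call through the patched "ollama" branch of get_optional_params.
# Assumes an Ollama server on its default port with `llama2` already pulled;
# the model name and prompt are placeholders.
response = litellm.completion(
    model="ollama/llama2",
    messages=[
        {
            "role": "user",
            "content": "Reply with a JSON object containing a 'colors' array.",
        }
    ],
    response_format={"type": "json_object"},  # now forwarded as format="json"
)
print(response.choices[0].message.content)
```

Note that the new branch only matches `{"type": "json_object"}`; any other `response_format` value falls through unmapped, since the patch translates nothing else into Ollama's `format` option.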