fix(utils.py): support response_format param for ollama

https://github.com/BerriAI/litellm/issues/2580
This commit is contained in:
Krrish Dholakia 2024-03-19 21:07:20 -07:00
parent d6624bf6c3
commit 524c244dd9
2 changed files with 6 additions and 0 deletions

View file

@@ -134,6 +134,7 @@ class OllamaChatConfig:
            "tools",
            "tool_choice",
            "functions",
+           "response_format",
        ]

    def map_openai_params(self, non_default_params: dict, optional_params: dict):
@@ -150,6 +151,8 @@ class OllamaChatConfig:
                optional_params["repeat_penalty"] = param
            if param == "stop":
                optional_params["stop"] = value
+           if param == "response_format" and value["type"] == "json_object":
+               optional_params["format"] = "json"
            ### FUNCTION CALLING LOGIC ###
            if param == "tools":
                # ollama actually supports json output

View file

@@ -4860,6 +4860,8 @@ def get_optional_params(
            optional_params["repeat_penalty"] = frequency_penalty
        if stop is not None:
            optional_params["stop"] = stop
+       if response_format is not None and response_format["type"] == "json_object":
+           optional_params["format"] = "json"
    elif custom_llm_provider == "ollama_chat":
        supported_params = litellm.OllamaChatConfig().get_supported_openai_params()
@@ -5320,6 +5322,7 @@ def get_supported_openai_params(model: str, custom_llm_provider: str):
            "temperature",
            "frequency_penalty",
            "stop",
+           "response_format",
        ]
    elif custom_llm_provider == "nlp_cloud":
        return [