forked from phoenix/litellm-mirror
fix(utils.py): support response_format param for ollama
https://github.com/BerriAI/litellm/issues/2580
parent: d6624bf6c3
commit: 524c244dd9
2 changed files with 6 additions and 0 deletions
@@ -4860,6 +4860,8 @@ def get_optional_params(
             optional_params["repeat_penalty"] = frequency_penalty
         if stop is not None:
             optional_params["stop"] = stop
+        if response_format is not None and response_format["type"] == "json_object":
+            optional_params["format"] = "json"
     elif custom_llm_provider == "ollama_chat":
         supported_params = litellm.OllamaChatConfig().get_supported_openai_params()
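With this hunk, an OpenAI-style JSON-mode request against an Ollama model should be translated into Ollama's format="json" option. A minimal sketch of such a call (the model name and prompt are illustrative, not taken from this commit):

import litellm

# Hypothetical request: response_format={"type": "json_object"} is the
# OpenAI-style parameter; this patch maps it to Ollama's format="json".
response = litellm.completion(
    model="ollama/llama2",  # illustrative model name
    messages=[{"role": "user", "content": "Reply with a JSON object containing a 'city' key."}],
    response_format={"type": "json_object"},
)
print(response.choices[0].message.content)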
@@ -5320,6 +5322,7 @@ def get_supported_openai_params(model: str, custom_llm_provider: str):
             "temperature",
             "frequency_penalty",
             "stop",
+            "response_format",
         ]
     elif custom_llm_provider == "nlp_cloud":
         return [
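The second hunk adds "response_format" to the list returned for the ollama provider, so get_supported_openai_params should now report it. A quick sanity check, assuming the function is importable from litellm.utils, where the hunk header places it:

from litellm.utils import get_supported_openai_params

# After this commit, "response_format" should be listed for the ollama provider
# (the model name is illustrative).
supported = get_supported_openai_params(model="ollama/llama2", custom_llm_provider="ollama")
assert "response_format" in supported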