Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 10:44:24 +00:00)
fix: add missing seed parameter to ollama input
The current Ollama interface does not allow a seed to be passed, even though seed is supported per https://github.com/ollama/ollama/blob/main/docs/api.md#parameters and https://github.com/ollama/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values. This resolves that by adding handling for the seed parameter.
This commit is contained in:
  parent 9f189ac91c
  commit d3921a3d28

3 changed files with 28 additions and 10 deletions
@@ -5752,6 +5752,8 @@ def get_optional_params(
             optional_params["stream"] = stream
         if temperature is not None:
             optional_params["temperature"] = temperature
+        if seed is not None:
+            optional_params["seed"] = seed
         if top_p is not None:
             optional_params["top_p"] = top_p
         if frequency_penalty is not None:
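With this change, a seed passed through the OpenAI-style API is forwarded to Ollama, making sampling repeatable. A minimal usage sketch in Python, assuming a local Ollama server at its default endpoint with a pulled model ("llama2" here is illustrative, not part of the diff):

import litellm

# Assumes an Ollama server at http://localhost:11434 serving "llama2";
# both the endpoint and the model name are assumptions.
response = litellm.completion(
    model="ollama/llama2",
    messages=[{"role": "user", "content": "Say hello."}],
    seed=42,          # now picked up by get_optional_params and sent to Ollama
    temperature=0.0,  # fixed seed + fixed temperature => repeatable output
)
print(response.choices[0].message.content)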
@@ -6404,6 +6406,8 @@ def get_supported_openai_params(
             return ["stream", "temperature", "max_tokens"]
         elif model.startswith("mistral"):
             return ["max_tokens", "temperature", "stop", "top_p", "stream"]
+    elif custom_llm_provider == "ollama":
+        return litellm.OllamaConfig().get_supported_openai_params()
     elif custom_llm_provider == "ollama_chat":
         return litellm.OllamaChatConfig().get_supported_openai_params()
     elif custom_llm_provider == "anthropic":
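This hunk switches the "ollama" branch from a hard-coded parameter list (removed in the next hunk) to delegating to OllamaConfig, mirroring the existing "ollama_chat" branch. The third changed file, likely litellm/llms/ollama.py, is not shown in this extract; the sketch below is a hypothetical reconstruction of the delegation pattern, with the list inferred from the removed hard-coded one plus the new seed:

class OllamaConfig:
    # Hypothetical sketch; the real OllamaConfig in litellm also carries
    # default option fields. The parameter list here is inferred, not quoted.
    def get_supported_openai_params(self) -> list:
        return [
            "max_tokens",
            "stream",
            "top_p",
            "temperature",
            "seed",              # the parameter this commit adds
            "frequency_penalty",
            "stop",
            "response_format",
        ]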
@@ -6573,16 +6577,6 @@ def get_supported_openai_params(
         ]
     elif custom_llm_provider == "cloudflare":
         return ["max_tokens", "stream"]
-    elif custom_llm_provider == "ollama":
-        return [
-            "max_tokens",
-            "stream",
-            "top_p",
-            "temperature",
-            "frequency_penalty",
-            "stop",
-            "response_format",
-        ]
     elif custom_llm_provider == "nlp_cloud":
         return [
             "max_tokens",
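After this commit, callers can discover that seed is a supported Ollama parameter through the same helper used for every other provider. A usage sketch, assuming get_supported_openai_params is importable from the top-level litellm package as it is in this version:

import litellm

# "llama2" is an illustrative model name.
params = litellm.get_supported_openai_params(
    model="llama2", custom_llm_provider="ollama"
)
print("seed" in params)  # expected: True after this commit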