forked from phoenix/litellm-mirror
fix(utils.py): fix non_default_param pop error for ollama
This commit is contained in:
parent 04bbd0649f
commit 6795f0447a
1 changed file with 5 additions and 1 deletion
@@ -2397,7 +2397,11 @@ def get_optional_params( # use the openai defaults
             # ollama actually supports json output
             optional_params["format"] = "json"
             litellm.add_function_to_prompt = True # so that main.py adds the function call to the prompt
-            optional_params["functions_unsupported_model"] = non_default_params.pop("tools", non_default_params.pop("functions"))
+            if "tools" in non_default_params:
+                optional_params["functions_unsupported_model"] = non_default_params.pop("tools")
+                non_default_params.pop("tool_choice", None) # causes ollama requests to hang
+            elif "functions" in non_default_params:
+                optional_params["functions_unsupported_model"] = non_default_params.pop("functions")
         elif custom_llm_provider == "anyscale" and model == "mistralai/Mistral-7B-Instruct-v0.1": # anyscale just supports function calling with mistral
             pass
         elif litellm.add_function_to_prompt: # if user opts to add it to prompt instead
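
Why the old line failed: Python evaluates call arguments eagerly, so in non_default_params.pop("tools", non_default_params.pop("functions")) the inner pop("functions") runs before the outer pop, and raises KeyError whenever a request passes "tools" without "functions". Below is a minimal sketch of the failure and of the fixed branching; the dict contents are hypothetical and only illustrate the shapes involved.

    # Hypothetical request params: "tools" present, "functions" absent.
    non_default_params = {"tools": [{"type": "function"}], "tool_choice": "auto"}
    optional_params = {}

    try:
        # Old behavior: the default expression is evaluated unconditionally,
        # so pop("functions") raises before pop("tools") ever runs.
        optional_params["functions_unsupported_model"] = non_default_params.pop(
            "tools", non_default_params.pop("functions")
        )
    except KeyError as err:
        print("old line raises KeyError:", err)  # old line raises KeyError: 'functions'

    # Fixed behavior: only pop keys that are actually present.
    if "tools" in non_default_params:
        optional_params["functions_unsupported_model"] = non_default_params.pop("tools")
        non_default_params.pop("tool_choice", None)  # causes ollama requests to hang
    elif "functions" in non_default_params:
        optional_params["functions_unsupported_model"] = non_default_params.pop("functions")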