mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-26 11:14:04 +00:00
fix(ollama_chat.py): don't pop from dictionary while iterating through it
This commit is contained in:
parent
93a1a865f0
commit
dfcc0c9ff0
1 changed file with 2 additions and 1 deletion
|
@@ -173,10 +173,11 @@ class OllamaChatConfig:
|
||||||
litellm.add_function_to_prompt = (
|
litellm.add_function_to_prompt = (
|
||||||
True # so that main.py adds the function call to the prompt
|
True # so that main.py adds the function call to the prompt
|
||||||
)
|
)
|
||||||
optional_params["functions_unsupported_model"] = non_default_params.pop(
|
optional_params["functions_unsupported_model"] = non_default_params.get(
|
||||||
"functions"
|
"functions"
|
||||||
)
|
)
|
||||||
non_default_params.pop("tool_choice", None) # causes ollama requests to hang
|
non_default_params.pop("tool_choice", None) # causes ollama requests to hang
|
||||||
|
non_default_params.pop("functions", None) # causes ollama requests to hang
|
||||||
return optional_params
|
return optional_params
|
||||||
|
|
||||||
|
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue