mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-25 10:44:24 +00:00
raise exception if function call passed to provider that doesn't support it
parent e26152436f
commit a7ee0bcc8c
2 changed files with 35 additions and 3 deletions
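
The diff below introduces a defaults-diffing pattern: snapshot every argument with locals(), compare the snapshot against a table of the function's own defaults, and keep only what the caller actually overrode. A minimal standalone sketch of that pattern (function and parameter names here are illustrative, not litellm's):

def get_params(temperature=1, top_p=1, stream=False, functions=None):
    # locals() is called first, so the snapshot contains exactly the arguments
    passed_params = locals()
    default_params = {"temperature": 1, "top_p": 1, "stream": False, "functions": None}
    # keep only parameters the caller passed with non-default values
    return {k: v for k, v in passed_params.items() if v != default_params[k]}

print(get_params(temperature=0.2, functions=[{"name": "get_weather"}]))
# -> {'temperature': 0.2, 'functions': [{'name': 'get_weather'}]}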
@@ -950,8 +950,40 @@ def get_optional_params( # use the openai defaults
     return_full_text=False,
     task=None,
 ):
+    # retrieve all parameters passed to the function
+    passed_params = locals()
+    default_params = {
+        "functions":[],
+        "function_call":"",
+        "temperature":1,
+        "top_p":1,
+        "n":1,
+        "stream":False,
+        "stop":None,
+        "max_tokens":float("inf"),
+        "presence_penalty":0,
+        "frequency_penalty":0,
+        "logit_bias":{},
+        "num_beams":1,
+        "remove_input":False, # for nlp_cloud
+        "user":"",
+        "deployment_id":None,
+        "model":None,
+        "custom_llm_provider":"",
+        "top_k":40,
+        "return_full_text":False,
+        "task":None,
+    }
+    # filter out those parameters that were passed with non-default values
+    non_default_params = {k: v for k, v in passed_params.items() if v != default_params[k]}
+
+    ## raise exception if function calling passed in for a provider that doesn't support it
+    if "functions" in non_default_params or "function_call" in non_default_params:
+        if custom_llm_provider != "openai" and custom_llm_provider != "text-completion-openai" and custom_llm_provider != "azure":
+            raise ValueError("LiteLLM.Exception: Function calling is not supported by this provider")
+
     optional_params = {}
-    if model in litellm.anthropic_models:
+    if custom_llm_provider == "anthropic":
         # handle anthropic params
         if stream:
             optional_params["stream"] = stream
@@ -964,7 +996,7 @@ def get_optional_params( # use the openai defaults
         if max_tokens != float("inf"):
             optional_params["max_tokens_to_sample"] = max_tokens
         return optional_params
-    elif model in litellm.cohere_models:
+    elif custom_llm_provider == "cohere":
         # handle cohere params
         if stream:
             optional_params["stream"] = stream
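With this change, passing functions or function_call for any provider other than openai, text-completion-openai, or azure fails fast with a ValueError instead of being silently dropped. A hedged usage sketch (it assumes claude-instant-1 resolves to custom_llm_provider == "anthropic", that the check fires before any network call so no API key is needed, and that the error surfaces as a plain ValueError):

import litellm

try:
    litellm.completion(
        model="claude-instant-1",
        messages=[{"role": "user", "content": "What is the weather in Boston?"}],
        functions=[{"name": "get_current_weather", "parameters": {"type": "object", "properties": {}}}],
    )
except ValueError as e:
    print(e)  # LiteLLM.Exception: Function calling is not supported by this provider
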
pyproject.toml:

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.807"
+version = "0.1.808"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"