diff --git a/litellm/utils.py b/litellm/utils.py
index 66df7c503..103f854b6 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -2406,7 +2406,9 @@ def get_optional_params(
         elif k == "hf_model_name" and custom_llm_provider != "sagemaker":
             continue
         elif (
-            k.startswith("vertex_") and custom_llm_provider != "vertex_ai" and custom_llm_provider != "vertex_ai_beta"
+            k.startswith("vertex_")
+            and custom_llm_provider != "vertex_ai"
+            and custom_llm_provider != "vertex_ai_beta"
         ):  # allow dynamically setting vertex ai init logic
             continue
         passed_params[k] = v
@@ -3871,6 +3873,10 @@ def get_supported_openai_params(
             "top_p",
             "stop",
             "seed",
+            "tools",
+            "tool_choice",
+            "functions",
+            "function_call",
         ]
     elif custom_llm_provider == "huggingface":
         return litellm.HuggingfaceConfig().get_supported_openai_params()