diff --git a/litellm/main.py b/litellm/main.py
index c6774c9f50..82fa65eefa 100644
--- a/litellm/main.py
+++ b/litellm/main.py
@@ -837,6 +837,7 @@ def completion(  # type: ignore # noqa: PLR0915
         Optional[ProviderSpecificHeader], kwargs.get("provider_specific_header", None)
     )
     headers = kwargs.get("headers", None) or extra_headers
+
     ensure_alternating_roles: Optional[bool] = kwargs.get(
         "ensure_alternating_roles", None
     )
@@ -848,6 +849,8 @@ def completion(  # type: ignore # noqa: PLR0915
     )
     if headers is None:
         headers = {}
+    if extra_headers is not None:
+        headers.update(extra_headers)
     num_retries = kwargs.get(
         "num_retries", None
     )  ## alt. param for 'max_retries'. Use this to pass retries w/ instructor.
@@ -1052,14 +1055,9 @@ def completion(  # type: ignore # noqa: PLR0915
             api_version=api_version,
             parallel_tool_calls=parallel_tool_calls,
             messages=messages,
-            extra_headers=extra_headers,
             **non_default_params,
         )
 
-        extra_headers = optional_params.pop("extra_headers", None)
-        if extra_headers is not None:
-            headers.update(extra_headers)
-
         if litellm.add_function_to_prompt and optional_params.get(
             "functions_unsupported_model", None
         ):  # if user opts to add it to prompt, when API doesn't support function calling
diff --git a/litellm/utils.py b/litellm/utils.py
index a878802ed3..205f8928a7 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -2584,25 +2584,6 @@ def _remove_unsupported_params(
     return non_default_params
 
 
-def get_clean_extra_headers(extra_headers: dict, custom_llm_provider: str) -> dict:
-    """
-    For `anthropic-beta` headers, ensure provider is anthropic.
-
-    Vertex AI raises an exception if `anthropic-beta` is passed in.
-    """
-    if litellm.filter_invalid_headers is not True:  # allow user to opt out of filtering
-        return extra_headers
-    clean_extra_headers = {}
-    for k, v in extra_headers.items():
-        if k in ANTHROPIC_API_ONLY_HEADERS and custom_llm_provider != "anthropic":
-            verbose_logger.debug(
-                f"Provider {custom_llm_provider} does not support {k} header. Dropping from request, to prevent errors."
-            )  # Switching between anthropic api and vertex ai anthropic fails when anthropic-beta is passed in. Welcome feedback on this.
-        else:
-            clean_extra_headers[k] = v
-    return clean_extra_headers
-
-
 def get_optional_params(  # noqa: PLR0915
     # use the openai defaults
     # https://platform.openai.com/docs/api-reference/chat/create
@@ -2741,12 +2722,6 @@ def get_optional_params(  # noqa: PLR0915
         )
     }
 
-    ## Supports anthropic headers
-    if extra_headers is not None:
-        extra_headers = get_clean_extra_headers(
-            extra_headers=extra_headers, custom_llm_provider=custom_llm_provider
-        )
-
     ## raise exception if function calling passed in for a provider that doesn't support it
     if (
         "functions" in non_default_params
@@ -3516,12 +3491,6 @@ def get_optional_params(  # noqa: PLR0915
     for k in passed_params.keys():
         if k not in default_params.keys():
             optional_params[k] = passed_params[k]
-    if extra_headers is not None:
-        optional_params.setdefault("extra_headers", {})
-        optional_params["extra_headers"] = {
-            **optional_params["extra_headers"],
-            **extra_headers,
-        }
     print_verbose(f"Final returned optional params: {optional_params}")
     return optional_params
 
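
Net effect of the diff, as a minimal standalone sketch (not litellm code): `extra_headers` is now merged into `headers` directly inside `completion()`, instead of being threaded through `get_optional_params()` and popped back out, and the `get_clean_extra_headers()` / `litellm.filter_invalid_headers` filtering of `anthropic-beta` headers is removed entirely. The helper below is hypothetical and only illustrates the resulting merge order; the name `resolve_request_headers` is not part of litellm's API.

```python
# Hypothetical sketch of the header flow after this change; mirrors the lines
# added to completion() above but is not litellm code.
from typing import Optional


def resolve_request_headers(
    headers: Optional[dict] = None, extra_headers: Optional[dict] = None
) -> dict:
    # headers = kwargs.get("headers", None) or extra_headers
    merged = headers or extra_headers
    if merged is None:
        merged = {}
    else:
        merged = dict(merged)  # copy so the caller's dict is not mutated in this sketch
    # if extra_headers is not None: headers.update(extra_headers)
    if extra_headers is not None:
        merged.update(extra_headers)
    return merged


# Example: an anthropic-beta header is now forwarded as-is; the removed
# get_clean_extra_headers() filter no longer drops it for non-anthropic providers.
print(resolve_request_headers(extra_headers={"anthropic-beta": "prompt-caching-2024-07-31"}))
# {'anthropic-beta': 'prompt-caching-2024-07-31'}
```

Under this flow, an explicit `headers` kwarg is the base mapping and any key also present in `extra_headers` overrides it, which is the precedence the added `headers.update(extra_headers)` line gives.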