diff --git a/litellm/proxy/anthropic_endpoints/endpoints.py b/litellm/proxy/anthropic_endpoints/endpoints.py
index a3956ef274..78078b93f8 100644
--- a/litellm/proxy/anthropic_endpoints/endpoints.py
+++ b/litellm/proxy/anthropic_endpoints/endpoints.py
@@ -14,6 +14,7 @@ import litellm
 from litellm._logging import verbose_proxy_logger
 from litellm.proxy._types import *
 from litellm.proxy.auth.user_api_key_auth import user_api_key_auth
+from litellm.proxy.common_request_processing import ProxyBaseLLMRequestProcessing
 from litellm.proxy.common_utils.http_parsing_utils import _read_request_body
 from litellm.proxy.litellm_pre_call_utils import add_litellm_data_to_request
 from litellm.proxy.utils import ProxyLogging
@@ -89,7 +90,6 @@ async def anthropic_response(  # noqa: PLR0915
     """
     from litellm.proxy.proxy_server import (
         general_settings,
-        get_custom_headers,
         llm_router,
         proxy_config,
         proxy_logging_obj,
@@ -205,7 +205,7 @@ async def anthropic_response(  # noqa: PLR0915
         verbose_proxy_logger.debug("final response: %s", response)
 
         fastapi_response.headers.update(
-            get_custom_headers(
+            ProxyBaseLLMRequestProcessing.get_custom_headers(
                 user_api_key_dict=user_api_key_dict,
                 model_id=model_id,
                 cache_key=cache_key,