From ecaad0bff0b74c65ab428c480e4e49b3b1f5f617 Mon Sep 17 00:00:00 2001
From: Ishaan Jaff
Date: Wed, 12 Mar 2025 17:47:11 -0700
Subject: [PATCH] _handle_llm_api_exception

---
 litellm/proxy/common_request_processing.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/litellm/proxy/common_request_processing.py b/litellm/proxy/common_request_processing.py
index 00613e5d76..02663f4555 100644
--- a/litellm/proxy/common_request_processing.py
+++ b/litellm/proxy/common_request_processing.py
@@ -291,7 +291,7 @@ class ProxyBaseLLMRequestProcessing:
     ):
         """Raises ProxyException (OpenAI API compatible) if an exception is raised"""
         verbose_proxy_logger.exception(
-            f"litellm.proxy.proxy_server.chat_completion(): Exception occured - {str(e)}"
+            f"litellm.proxy.proxy_server._handle_llm_api_exception(): Exception occured - {str(e)}"
         )
         await proxy_logging_obj.post_call_failure_hook(
             user_api_key_dict=user_api_key_dict, original_exception=e, request_data=data
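
For context, below is a minimal, hypothetical sketch of the handler this one-line patch touches. Only the docstring, the verbose_proxy_logger.exception() call, and the post_call_failure_hook() invocation come from the hunk above; the method signature, the import path, and the trailing re-raise are assumptions made for illustration, not part of the patch.

    # Hypothetical sketch, not the real litellm implementation.
    from litellm._logging import verbose_proxy_logger  # assumed import path


    class ProxyBaseLLMRequestProcessing:
        @staticmethod
        async def _handle_llm_api_exception(
            e: Exception,
            user_api_key_dict,
            proxy_logging_obj,
            data: dict,
        ):
            """Raises ProxyException (OpenAI API compatible) if an exception is raised"""
            # After this patch, the log message names the function that actually
            # emits it (_handle_llm_api_exception) instead of chat_completion().
            verbose_proxy_logger.exception(
                f"litellm.proxy.proxy_server._handle_llm_api_exception(): Exception occured - {str(e)}"
            )
            # Run the failure hooks (logging/alerting callbacks) before surfacing the error.
            await proxy_logging_obj.post_call_failure_hook(
                user_api_key_dict=user_api_key_dict, original_exception=e, request_data=data
            )
            # Assumption: the real handler converts this into an OpenAI-compatible
            # ProxyException, per the docstring; a plain re-raise stands in for that here.
            raise e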