_handle_llm_api_exception

Ishaan Jaff 2025-03-12 17:47:11 -07:00
parent 2ca2ecacb5
commit 8689a25396


@@ -291,7 +291,7 @@ class ProxyBaseLLMRequestProcessing:
):
"""Raises ProxyException (OpenAI API compatible) if an exception is raised"""
verbose_proxy_logger.exception(
f"litellm.proxy.proxy_server.chat_completion(): Exception occured - {str(e)}"
f"litellm.proxy.proxy_server._handle_llm_api_exception(): Exception occured - {str(e)}"
)
await proxy_logging_obj.post_call_failure_hook(
user_api_key_dict=user_api_key_dict, original_exception=e, request_data=data
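
For context, the method touched by this hunk logs the exception, runs the proxy's failure hooks, and re-raises the error in an OpenAI-compatible shape; the commit simply corrects the logged method name from `chat_completion()` to `_handle_llm_api_exception()`. The sketch below illustrates that general pattern; the `ProxyException` fields and the `post_call_failure_hook` signature shown here are assumptions for illustration, not litellm's exact API.

```python
# Illustrative sketch only -- class fields and hook signature are assumed,
# not copied from litellm's actual implementation.
import logging

logger = logging.getLogger("proxy")


class ProxyException(Exception):
    """OpenAI-API-compatible error wrapper (hypothetical shape)."""

    def __init__(self, message: str, type: str, param: str | None, code: int):
        super().__init__(message)
        self.message, self.type, self.param, self.code = message, type, param, code


async def _handle_llm_api_exception(e, user_api_key_dict, proxy_logging_obj, data):
    # Log under the handler's own name so tracebacks point at the right place
    # (this is exactly what the commit above corrects).
    logger.exception(
        "litellm.proxy.proxy_server._handle_llm_api_exception(): Exception occurred - %s", e
    )
    # Let registered failure hooks (alerting, spend tracking, etc.) observe the error.
    await proxy_logging_obj.post_call_failure_hook(
        user_api_key_dict=user_api_key_dict, original_exception=e, request_data=data
    )
    # Re-raise in a shape OpenAI client libraries understand.
    raise ProxyException(
        message=str(e),
        type=getattr(e, "type", "internal_server_error"),
        param=getattr(e, "param", None),
        code=getattr(e, "code", 500),
    )
```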