From 1a11b151b7914d13476bc187f2398b8a3e3ab6ad Mon Sep 17 00:00:00 2001
From: Ishaan Jaff
Date: Wed, 15 May 2024 14:18:12 -0700
Subject: [PATCH] fix - show litellm debug info in errors

---
 litellm/proxy/proxy_server.py | 15 +++++++++++++--
 1 file changed, 13 insertions(+), 2 deletions(-)

diff --git a/litellm/proxy/proxy_server.py b/litellm/proxy/proxy_server.py
index aee75d54f4..145c281b0d 100644
--- a/litellm/proxy/proxy_server.py
+++ b/litellm/proxy/proxy_server.py
@@ -3857,8 +3857,11 @@ async def chat_completion(
         await proxy_logging_obj.post_call_failure_hook(
             user_api_key_dict=user_api_key_dict, original_exception=e, request_data=data
         )
+        litellm_debug_info = getattr(e, "litellm_debug_info", "")
         verbose_proxy_logger.debug(
-            f"\033[1;31mAn error occurred: {e}\n\n Debug this by setting `--debug`, e.g. `litellm --model gpt-3.5-turbo --debug`"
+            "\033[1;31mAn error occurred: %s %s\n\n Debug this by setting `--debug`, e.g. `litellm --model gpt-3.5-turbo --debug`",
+            e,
+            litellm_debug_info,
         )
         router_model_names = llm_router.model_names if llm_router is not None else []
         if user_debug:
@@ -4046,9 +4049,11 @@ async def completion(
     except Exception as e:
         data["litellm_status"] = "fail"  # used for alerting
         verbose_proxy_logger.debug("EXCEPTION RAISED IN PROXY MAIN.PY")
+        litellm_debug_info = getattr(e, "litellm_debug_info", "")
         verbose_proxy_logger.debug(
-            "\033[1;31mAn error occurred: %s\n\n Debug this by setting `--debug`, e.g. `litellm --model gpt-3.5-turbo --debug`",
+            "\033[1;31mAn error occurred: %s %s\n\n Debug this by setting `--debug`, e.g. `litellm --model gpt-3.5-turbo --debug`",
             e,
+            litellm_debug_info,
         )
         traceback.print_exc()
         error_traceback = traceback.format_exc()
@@ -4251,6 +4256,12 @@ async def embeddings(
         await proxy_logging_obj.post_call_failure_hook(
             user_api_key_dict=user_api_key_dict, original_exception=e, request_data=data
         )
+        litellm_debug_info = getattr(e, "litellm_debug_info", "")
+        verbose_proxy_logger.debug(
+            "\033[1;31mAn error occurred: %s %s\n\n Debug this by setting `--debug`, e.g. `litellm --model gpt-3.5-turbo --debug`",
+            e,
+            litellm_debug_info,
+        )
         traceback.print_exc()
         if isinstance(e, HTTPException):
             raise ProxyException(