From 71edb28d6a1d8fd73e33ff63b6078008f4e16ad4 Mon Sep 17 00:00:00 2001 From: Krrish Dholakia Date: Sat, 6 Jul 2024 11:14:41 -0700 Subject: [PATCH 1/2] fix(proxy_server.py): fix embedding model exception mapping --- litellm/proxy/proxy_server.py | 3 ++- litellm/proxy/utils.py | 5 +++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/litellm/proxy/proxy_server.py b/litellm/proxy/proxy_server.py index d633075b7..5011b64b3 100644 --- a/litellm/proxy/proxy_server.py +++ b/litellm/proxy/proxy_server.py @@ -3372,8 +3372,9 @@ async def embeddings( ) verbose_proxy_logger.debug(traceback.format_exc()) if isinstance(e, HTTPException): + message = get_error_message_str(e) raise ProxyException( - message=getattr(e, "message", str(e)), + message=message, type=getattr(e, "type", "None"), param=getattr(e, "param", "None"), code=getattr(e, "status_code", status.HTTP_400_BAD_REQUEST), diff --git a/litellm/proxy/utils.py b/litellm/proxy/utils.py index 32b74be7c..ba1a61080 100644 --- a/litellm/proxy/utils.py +++ b/litellm/proxy/utils.py @@ -2888,6 +2888,11 @@ def get_error_message_str(e: Exception) -> str: error_message = e.detail elif isinstance(e.detail, dict): error_message = json.dumps(e.detail) + elif hasattr(e, "message"): + if isinstance(e.message, str): + error_message = e.message + elif isinstance(e.message, dict): + error_message = json.dumps(e.message) else: error_message = str(e) else: From 85d723f5f87fff415ad867c6748a5b0c0aed7fdc Mon Sep 17 00:00:00 2001 From: Krrish Dholakia Date: Sat, 6 Jul 2024 12:36:05 -0700 Subject: [PATCH 2/2] fix(utils.py): fix openrouter exception handling --- litellm/utils.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/litellm/utils.py b/litellm/utils.py index d17ba8911..62386b1d2 100644 --- a/litellm/utils.py +++ b/litellm/utils.py @@ -7521,7 +7521,7 @@ def exception_type( if original_exception.status_code == 400: exception_mapping_worked = True raise BadRequestError( - 
message=f"{exception_provider} - {message}", + message=f"{exception_provider} - {error_str}", llm_provider=custom_llm_provider, model=model, response=original_exception.response, @@ -7530,7 +7530,7 @@ def exception_type( elif original_exception.status_code == 401: exception_mapping_worked = True raise AuthenticationError( - message=f"AuthenticationError: {exception_provider} - {message}", + message=f"AuthenticationError: {exception_provider} - {error_str}", llm_provider=custom_llm_provider, model=model, response=original_exception.response, @@ -7539,7 +7539,7 @@ def exception_type( elif original_exception.status_code == 404: exception_mapping_worked = True raise NotFoundError( - message=f"NotFoundError: {exception_provider} - {message}", + message=f"NotFoundError: {exception_provider} - {error_str}", model=model, llm_provider=custom_llm_provider, response=original_exception.response, @@ -7548,7 +7548,7 @@ def exception_type( elif original_exception.status_code == 408: exception_mapping_worked = True raise Timeout( - message=f"Timeout Error: {exception_provider} - {message}", + message=f"Timeout Error: {exception_provider} - {error_str}", model=model, llm_provider=custom_llm_provider, litellm_debug_info=extra_information, @@ -7556,7 +7556,7 @@ def exception_type( elif original_exception.status_code == 422: exception_mapping_worked = True raise BadRequestError( - message=f"BadRequestError: {exception_provider} - {message}", + message=f"BadRequestError: {exception_provider} - {error_str}", model=model, llm_provider=custom_llm_provider, response=original_exception.response, @@ -7565,7 +7565,7 @@ def exception_type( elif original_exception.status_code == 429: exception_mapping_worked = True raise RateLimitError( - message=f"RateLimitError: {exception_provider} - {message}", + message=f"RateLimitError: {exception_provider} - {error_str}", model=model, llm_provider=custom_llm_provider, response=original_exception.response, @@ -7574,7 +7574,7 @@ def exception_type( elif 
original_exception.status_code == 503: exception_mapping_worked = True raise ServiceUnavailableError( - message=f"ServiceUnavailableError: {exception_provider} - {message}", + message=f"ServiceUnavailableError: {exception_provider} - {error_str}", model=model, llm_provider=custom_llm_provider, response=original_exception.response, @@ -7583,7 +7583,7 @@ def exception_type( elif original_exception.status_code == 504: # gateway timeout error exception_mapping_worked = True raise Timeout( - message=f"Timeout Error: {exception_provider} - {message}", + message=f"Timeout Error: {exception_provider} - {error_str}", model=model, llm_provider=custom_llm_provider, litellm_debug_info=extra_information, @@ -7592,7 +7592,7 @@ def exception_type( exception_mapping_worked = True raise APIError( status_code=original_exception.status_code, - message=f"APIError: {exception_provider} - {message}", + message=f"APIError: {exception_provider} - {error_str}", llm_provider=custom_llm_provider, model=model, request=original_exception.request, @@ -7601,7 +7601,7 @@ def exception_type( else: # if no status code then it is an APIConnectionError: https://github.com/openai/openai-python#handling-errors raise APIConnectionError( - message=f"APIConnectionError: {exception_provider} - {message}", + message=f"APIConnectionError: {exception_provider} - {error_str}", llm_provider=custom_llm_provider, model=model, litellm_debug_info=extra_information,