From f499e8bb1d5692d1c719f57ae987dfeec13ae7ef Mon Sep 17 00:00:00 2001
From: Jack Collins <6640905+jackmpcollins@users.noreply.github.com>
Date: Mon, 13 Nov 2023 21:17:47 -0800
Subject: [PATCH 1/2] Add response and body to APIStatus init call

---
 litellm/exceptions.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/litellm/exceptions.py b/litellm/exceptions.py
index d96992d57f..4378bd6766 100644
--- a/litellm/exceptions.py
+++ b/litellm/exceptions.py
@@ -83,13 +83,15 @@ class ContextWindowExceededError(BadRequestError):  # type: ignore
         )  # Call the base class constructor with the parameters it needs
 
 class ServiceUnavailableError(APIStatusError):  # type: ignore
-    def __init__(self, message, llm_provider, model):
+    def __init__(self, message, llm_provider, model, response: httpx.Response):
         self.status_code = 503
         self.message = message
         self.llm_provider = llm_provider
         self.model = model
         super().__init__(
-            self.message
+            self.message,
+            response=response,
+            body=None
         )  # Call the base class constructor with the parameters it needs
 

From 223ec8933d904ff8b436940746a43ff3753c6da3 Mon Sep 17 00:00:00 2001
From: Jack Collins <6640905+jackmpcollins@users.noreply.github.com>
Date: Mon, 13 Nov 2023 21:20:40 -0800
Subject: [PATCH 2/2] Provide response to ServiceUnavailableError where needed

---
 litellm/utils.py | 24 ++++++++++++++++--------
 1 file changed, 16 insertions(+), 8 deletions(-)

diff --git a/litellm/utils.py b/litellm/utils.py
index b63f8645b7..fe6c84ad3c 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -3264,7 +3264,8 @@ def exception_type(
                     raise ServiceUnavailableError(
                         message=f"AnthropicException - {original_exception.message}",
                         llm_provider="anthropic",
-                        model=model
+                        model=model,
+                        response=original_exception.response
                     )
                 else:
                     exception_mapping_worked = True
                     raise APIError(
@@ -3346,7 +3347,8 @@ def exception_type(
                     raise ServiceUnavailableError(
                         message=f"ReplicateException - {original_exception.message}",
                         llm_provider="replicate",
-                        model=model
+                        model=model,
+                        response=original_exception.response
                     )
                 exception_mapping_worked = True
                 raise APIError(
@@ -3395,7 +3397,8 @@ def exception_type(
                     raise ServiceUnavailableError(
                         message=f"BedrockException - {original_exception.message}",
                         llm_provider="bedrock",
-                        model=model
+                        model=model,
+                        response=original_exception.response
                     )
                 elif original_exception.status_code == 401:
                     exception_mapping_worked = True
@@ -3477,7 +3480,8 @@ def exception_type(
                     raise ServiceUnavailableError(
                         message=f"CohereException - {original_exception.message}",
                         llm_provider="cohere",
-                        model=model
+                        model=model,
+                        response=original_exception.response
                     )
                 elif (
                     "CohereConnectionError" in exception_type
@@ -3502,7 +3506,8 @@ def exception_type(
                     raise ServiceUnavailableError(
                         message=f"CohereException - {original_exception.message}",
                         llm_provider="cohere",
-                        model=model
+                        model=model,
+                        response=original_exception.response
                     )
                 else:
                     if hasattr(original_exception, "status_code"):
@@ -3710,7 +3715,8 @@ def exception_type(
                     raise ServiceUnavailableError(
                         message=f"NLPCloudException - {original_exception.message}",
                         model=model,
-                        llm_provider="nlp_cloud"
+                        llm_provider="nlp_cloud",
+                        response=original_exception.response
                     )
                 else:
                     exception_mapping_worked = True
@@ -3847,7 +3853,8 @@ def exception_type(
                     raise ServiceUnavailableError(
                         message=f"AlephAlphaException - {original_exception.message}",
                         llm_provider="aleph_alpha",
-                        model=model
+                        model=model,
+                        response=original_exception.response
                     )
                 raise original_exception
             raise original_exception
@@ -3872,7 +3879,8 @@ def exception_type(
                 raise ServiceUnavailableError(
                     message=f"OllamaException: {original_exception}",
                     llm_provider="ollama",
-                    model=model
+                    model=model,
+                    response=original_exception.response
                 )
             elif "Invalid response object from API" in error_str:
                 exception_mapping_worked = True