diff --git a/dist/litellm-1.14.5.dev1-py3-none-any.whl b/dist/litellm-1.14.5.dev1-py3-none-any.whl
new file mode 100644
index 000000000..1555e6f68
Binary files /dev/null and b/dist/litellm-1.14.5.dev1-py3-none-any.whl differ
diff --git a/dist/litellm-1.14.5.dev1.tar.gz b/dist/litellm-1.14.5.dev1.tar.gz
new file mode 100644
index 000000000..06f47b0eb
Binary files /dev/null and b/dist/litellm-1.14.5.dev1.tar.gz differ
diff --git a/litellm/utils.py b/litellm/utils.py
index 120f55bf6..7c45e3c22 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -4858,29 +4858,30 @@ def exception_type(
                     llm_provider="together_ai",
                     response=original_exception.response
                 )
-            elif original_exception.status_code == 408:
+            if hasattr(original_exception, "status_code"):
+                if original_exception.status_code == 408:
+                    exception_mapping_worked = True
+                    raise Timeout(
+                        message=f"TogetherAIException - {original_exception.message}",
+                        model=model,
+                        llm_provider="together_ai",
+                        request=original_exception.request
+                    )
+                elif original_exception.status_code == 429:
+                    exception_mapping_worked = True
+                    raise RateLimitError(
+                        message=f"TogetherAIException - {original_exception.message}",
+                        llm_provider="together_ai",
+                        model=model,
+                        response=original_exception.response
+                    )
+                elif original_exception.status_code == 524:
                     exception_mapping_worked = True
                     raise Timeout(
-                        message=f"TogetherAIException - {original_exception.message}",
-                        model=model,
-                        llm_provider="together_ai",
-                        request=original_exception.request
-                    )
-            elif original_exception.status_code == 429:
-                exception_mapping_worked = True
-                raise RateLimitError(
                         message=f"TogetherAIException - {original_exception.message}",
                         llm_provider="together_ai",
                         model=model,
-                        response=original_exception.response
                     )
-            elif original_exception.status_code == 524:
-                exception_mapping_worked = True
-                raise Timeout(
-                    message=f"TogetherAIException - {original_exception.message}",
-                    llm_provider="together_ai",
-                    model=model,
-                )
             else:
                 exception_mapping_worked = True
                 raise APIError(
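
The hunk's intent is that not every exception surfaced on the Together AI path carries a status_code attribute (connection failures or JSON decode errors, for example), so an unguarded elif original_exception.status_code == ... chain can itself raise AttributeError and hide the real error. The snippet below is a minimal standalone sketch of that guard pattern under assumed names; map_together_error, ProviderTimeout, and ProviderRateLimit are illustrative stand-ins, not litellm's actual API.

# Sketch of the hasattr guard pattern introduced by the hunk above.
# All names here are hypothetical, not litellm's real classes or functions.

class ProviderTimeout(Exception):
    pass

class ProviderRateLimit(Exception):
    pass

def map_together_error(original_exception: Exception) -> Exception:
    # Only inspect status_code when the exception actually has one; otherwise
    # accessing it would raise AttributeError and mask the original problem.
    if hasattr(original_exception, "status_code"):
        if original_exception.status_code in (408, 524):
            return ProviderTimeout(f"TogetherAIException - {original_exception}")
        elif original_exception.status_code == 429:
            return ProviderRateLimit(f"TogetherAIException - {original_exception}")
    # No specific mapping applies; hand back the original exception unchanged.
    return original_exception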