From 000d6784457282f58b348ca66b27a8ebee0eddef Mon Sep 17 00:00:00 2001
From: Krrish Dholakia
Date: Fri, 21 Jun 2024 21:15:10 -0700
Subject: [PATCH] fix(utils.py): improve coverage for anthropic exception
 mapping

---
 litellm/tests/test_exceptions.py | 22 +++++++++++++++++++++-
 litellm/utils.py                 | 23 ++++++++++++++++-------
 2 files changed, 37 insertions(+), 8 deletions(-)

diff --git a/litellm/tests/test_exceptions.py b/litellm/tests/test_exceptions.py
index 28d742931..01e212973 100644
--- a/litellm/tests/test_exceptions.py
+++ b/litellm/tests/test_exceptions.py
@@ -670,7 +670,7 @@ def test_litellm_predibase_exception():
     # print(f"accuracy_score: {accuracy_score}")
 
 
-@pytest.mark.parametrize("provider", ["predibase", "vertex_ai_beta"])
+@pytest.mark.parametrize("provider", ["predibase", "vertex_ai_beta", "anthropic"])
 def test_exception_mapping(provider):
     """
     For predibase, run through a set of mock exceptions
@@ -712,3 +712,23 @@ def test_exception_mapping(provider):
         )
 
     pass
+
+
+def test_anthropic_tool_calling_exception():
+    """
+    Related - https://github.com/BerriAI/litellm/issues/4348
+    """
+    tools = [
+        {
+            "type": "function",
+            "function": {
+                "name": "get_current_weather",
+                "description": "Get the current weather in a given location",
+            },
+        }
+    ]
+    litellm.completion(
+        model="claude-3.5",
+        messages=[{"role": "user", "content": "Hey, how's it going?"}],
+        tools=tools,
+    )
diff --git a/litellm/utils.py b/litellm/utils.py
index 7a6420076..d08735080 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -5948,7 +5948,6 @@ def exception_type(
                             message=f"AnthropicException - {error_str}",
                             llm_provider="anthropic",
                             model=model,
-                            response=original_exception.response,
                         )
                     elif (
                         original_exception.status_code == 400
@@ -5959,7 +5958,13 @@
                             message=f"AnthropicException - {error_str}",
                             model=model,
                             llm_provider="anthropic",
-                            response=original_exception.response,
+                        )
+                    elif original_exception.status_code == 404:
+                        exception_mapping_worked = True
+                        raise NotFoundError(
+                            message=f"AnthropicException - {error_str}",
+                            model=model,
+                            llm_provider="anthropic",
                         )
                     elif original_exception.status_code == 408:
                         exception_mapping_worked = True
@@ -5974,16 +5979,20 @@
                             message=f"AnthropicException - {error_str}",
                             llm_provider="anthropic",
                             model=model,
-                            response=original_exception.response,
                         )
                     elif original_exception.status_code == 500:
                         exception_mapping_worked = True
-                        raise APIError(
-                            status_code=500,
-                            message=f"AnthropicException - {error_str}. Handle with `litellm.APIError`.",
+                        raise litellm.InternalServerError(
+                            message=f"AnthropicException - {error_str}. Handle with `litellm.InternalServerError`.",
+                            llm_provider="anthropic",
+                            model=model,
+                        )
+                    elif original_exception.status_code == 503:
+                        exception_mapping_worked = True
+                        raise litellm.ServiceUnavailableError(
+                            message=f"AnthropicException - {error_str}. Handle with `litellm.ServiceUnavailableError`.",
                             llm_provider="anthropic",
                             model=model,
-                            request=original_exception.request,
                         )
             elif custom_llm_provider == "replicate":
                 if "Incorrect authentication token" in error_str:
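
Note (illustrative sketch, not part of the patch): with Anthropic 404/500/503 responses now mapped to litellm.NotFoundError, litellm.InternalServerError, and litellm.ServiceUnavailableError, calling code can branch on those exception types instead of inspecting a generic APIError. The model name and the num_retries value below are assumptions chosen for the example, not values taken from this change.

    import litellm


    def call_claude(messages):
        """Call an Anthropic model and rely on litellm's mapped exception types."""
        try:
            return litellm.completion(
                model="claude-3-haiku-20240307",  # assumed example model name
                messages=messages,
            )
        except litellm.NotFoundError:
            # 404s from Anthropic now surface as NotFoundError (e.g. an unknown model id)
            raise
        except (litellm.InternalServerError, litellm.ServiceUnavailableError):
            # 500/503 now map to distinct server-side error types; treat them as retryable
            return litellm.completion(
                model="claude-3-haiku-20240307",  # assumed example model name
                messages=messages,
                num_retries=2,  # assumption: litellm's built-in retry knob for transient errors
            )


    # Example usage: call_claude([{"role": "user", "content": "ping"}])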