fix(utils.py): improve coverage for anthropic exception mapping

This commit is contained in:
Krrish Dholakia 2024-06-21 21:15:10 -07:00
parent 24addd8a4f
commit 000d678445
2 changed files with 37 additions and 8 deletions

View file

@@ -670,7 +670,7 @@ def test_litellm_predibase_exception():
# print(f"accuracy_score: {accuracy_score}")
@pytest.mark.parametrize("provider", ["predibase", "vertex_ai_beta"])
@pytest.mark.parametrize("provider", ["predibase", "vertex_ai_beta", "anthropic"])
def test_exception_mapping(provider):
"""
For predibase, run through a set of mock exceptions
@@ -712,3 +712,23 @@ def test_exception_mapping(provider):
)
pass
def test_anthropic_tool_calling_exception():
    """
    A tool definition missing the required 'parameters' field should surface
    as a mapped `litellm.BadRequestError` (Anthropic returns a 400 for it),
    not as a raw provider exception or an unhandled error.

    Related - https://github.com/BerriAI/litellm/issues/4348
    """
    tools = [
        {
            "type": "function",
            "function": {
                "name": "get_current_weather",
                "description": "Get the current weather in a given location",
                # NOTE: 'parameters' is intentionally omitted to trigger the
                # provider-side 400 that exception mapping must translate.
            },
        }
    ]
    try:
        litellm.completion(
            model="claude-3.5",
            messages=[{"role": "user", "content": "Hey, how's it going?"}],
            tools=tools,
        )
    except litellm.BadRequestError:
        # Expected: the malformed tool spec maps to BadRequestError.
        pass

View file

@@ -5948,7 +5948,6 @@ def exception_type(
message=f"AnthropicException - {error_str}",
llm_provider="anthropic",
model=model,
response=original_exception.response,
)
elif (
original_exception.status_code == 400
@@ -5959,7 +5958,13 @@ def exception_type(
message=f"AnthropicException - {error_str}",
model=model,
llm_provider="anthropic",
response=original_exception.response,
)
elif original_exception.status_code == 404:
exception_mapping_worked = True
raise NotFoundError(
message=f"AnthropicException - {error_str}",
model=model,
llm_provider="anthropic",
)
elif original_exception.status_code == 408:
exception_mapping_worked = True
@@ -5974,16 +5979,20 @@ def exception_type(
message=f"AnthropicException - {error_str}",
llm_provider="anthropic",
model=model,
response=original_exception.response,
)
elif original_exception.status_code == 500:
exception_mapping_worked = True
raise APIError(
status_code=500,
message=f"AnthropicException - {error_str}. Handle with `litellm.APIError`.",
raise litellm.InternalServerError(
message=f"AnthropicException - {error_str}. Handle with `litellm.InternalServerError`.",
llm_provider="anthropic",
model=model,
)
elif original_exception.status_code == 503:
exception_mapping_worked = True
raise litellm.ServiceUnavailableError(
message=f"AnthropicException - {error_str}. Handle with `litellm.ServiceUnavailableError`.",
llm_provider="anthropic",
model=model,
request=original_exception.request,
)
elif custom_llm_provider == "replicate":
if "Incorrect authentication token" in error_str: