Merge pull request #1765 from BerriAI/litellm_show_correct_provider_in_exception

[Feat] Show correct provider in exceptions - for Mistral API, PerplexityAPI
Ishaan Jaff 2024-02-02 08:51:57 -08:00 committed by GitHub
commit 01a69ea5a8
2 changed files with 81 additions and 25 deletions
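
For context, a minimal sketch of the user-visible change this PR makes. The bad key value is illustrative and mirrors the tests below; the message prefixes in the comments are paraphrased from the diff, not captured output:

```python
import os

import litellm
import openai

# Deliberately invalid key so the provider returns a 401 (the openai-python
# v1 SDK surfaces this as openai.AuthenticationError, which litellm subclasses).
os.environ["PERPLEXITYAI_API_KEY"] = "bad-key"

try:
    litellm.completion(
        model="perplexity/mistral-7b-instruct",
        messages=[{"role": "user", "content": "hello"}],
    )
except openai.AuthenticationError as e:
    # Before this commit the message began "OpenAIException - ..." for every
    # OpenAI-compatible provider; after it, the prefix names the actual
    # provider, e.g. "PerplexityException - ..." or "MistralException - ...".
    print(str(e))
```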


@@ -444,6 +444,52 @@ def test_content_policy_violation_error_streaming():
     asyncio.run(test_get_error())


+def test_completion_perplexity_exception():
+    try:
+        import openai
+
+        print("perplexity test\n\n")
+        litellm.set_verbose = True
+        ## Test perplexity call with a deliberately bad key
+        old_api_key = os.environ["PERPLEXITYAI_API_KEY"]
+        os.environ["PERPLEXITYAI_API_KEY"] = "good morning"
+        response = completion(
+            model="perplexity/mistral-7b-instruct",
+            messages=[{"role": "user", "content": "hello"}],
+        )
+        os.environ["PERPLEXITYAI_API_KEY"] = old_api_key
+        pytest.fail("Request should have failed - bad api key")
+    except openai.AuthenticationError as e:
+        os.environ["PERPLEXITYAI_API_KEY"] = old_api_key
+        print("exception: ", e)
+        assert "PerplexityException" in str(e)
+    except Exception as e:
+        pytest.fail(f"Error occurred: {e}")
+
+
+def test_completion_openai_api_key_exception():
+    try:
+        import openai
+
+        print("gpt-3.5 test\n\n")
+        litellm.set_verbose = True
+        ## Test OpenAI call with a deliberately bad key
+        old_api_key = os.environ["OPENAI_API_KEY"]
+        os.environ["OPENAI_API_KEY"] = "good morning"
+        response = completion(
+            model="gpt-3.5-turbo",
+            messages=[{"role": "user", "content": "hello"}],
+        )
+        os.environ["OPENAI_API_KEY"] = old_api_key
+        pytest.fail("Request should have failed - bad api key")
+    except openai.AuthenticationError as e:
+        os.environ["OPENAI_API_KEY"] = old_api_key
+        print("exception: ", e)
+        assert "OpenAIException" in str(e)
+    except Exception as e:
+        pytest.fail(f"Error occurred: {e}")
+
+
 # tesy_async_acompletion()
 # # test_invalid_request_error(model="command-nightly")
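
The file path for these tests is not shown in this view, so rather than guess a pytest invocation, here is a direct runner sketch; it assumes the module's own top-level imports (`litellm`, `completion`, `os`, `pytest`) are in scope, as they appear to be:

```python
# Direct runner for the two new tests, bypassing pytest's collector.
# Both tests clean up the environment variables they temporarily overwrite.
if __name__ == "__main__":
    test_completion_perplexity_exception()
    test_completion_openai_api_key_exception()
    print("exception-provider tests passed")
```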


@ -5906,14 +5906,24 @@ def exception_type(
or custom_llm_provider == "custom_openai" or custom_llm_provider == "custom_openai"
or custom_llm_provider in litellm.openai_compatible_providers or custom_llm_provider in litellm.openai_compatible_providers
): ):
# custom_llm_provider is openai, make it OpenAI
if custom_llm_provider == "openai":
exception_provider = "OpenAI" + "Exception"
else:
exception_provider = (
custom_llm_provider[0].upper()
+ custom_llm_provider[1:]
+ "Exception"
)
if ( if (
"This model's maximum context length is" in error_str "This model's maximum context length is" in error_str
or "Request too large" in error_str or "Request too large" in error_str
): ):
exception_mapping_worked = True exception_mapping_worked = True
raise ContextWindowExceededError( raise ContextWindowExceededError(
message=f"OpenAIException - {original_exception.message}", message=f"{exception_provider} - {original_exception.message}",
llm_provider="openai", llm_provider=custom_llm_provider,
model=model, model=model,
response=original_exception.response, response=original_exception.response,
) )
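
The derivation above only upper-cases the first character of the provider name, so e.g. "perplexity" becomes "PerplexityException"; only "openai" gets special-cased for its canonical casing. A standalone restatement for illustration (the function name `exception_provider_name` is hypothetical, not part of the diff):

```python
def exception_provider_name(custom_llm_provider: str) -> str:
    # Mirrors the branch added above: "openai" gets canonical casing,
    # every other provider is first-letter-capitalized and suffixed.
    if custom_llm_provider == "openai":
        return "OpenAIException"
    return custom_llm_provider[0].upper() + custom_llm_provider[1:] + "Exception"

assert exception_provider_name("perplexity") == "PerplexityException"
assert exception_provider_name("mistral") == "MistralException"
assert exception_provider_name("openai") == "OpenAIException"
```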
@@ -5923,8 +5933,8 @@ def exception_type(
                 ):
                     exception_mapping_worked = True
                     raise NotFoundError(
-                        message=f"OpenAIException - {original_exception.message}",
-                        llm_provider="openai",
+                        message=f"{exception_provider} - {original_exception.message}",
+                        llm_provider=custom_llm_provider,
                         model=model,
                         response=original_exception.response,
                     )
@@ -5934,8 +5944,8 @@ def exception_type(
                 ):
                     exception_mapping_worked = True
                     raise ContentPolicyViolationError(
-                        message=f"OpenAIException - {original_exception.message}",
-                        llm_provider="openai",
+                        message=f"{exception_provider} - {original_exception.message}",
+                        llm_provider=custom_llm_provider,
                         model=model,
                         response=original_exception.response,
                     )
@@ -5945,8 +5955,8 @@ def exception_type(
                 ):
                     exception_mapping_worked = True
                     raise BadRequestError(
-                        message=f"OpenAIException - {original_exception.message}",
-                        llm_provider="openai",
+                        message=f"{exception_provider} - {original_exception.message}",
+                        llm_provider=custom_llm_provider,
                         model=model,
                         response=original_exception.response,
                     )
@@ -5955,63 +5965,63 @@ def exception_type(
                 if original_exception.status_code == 401:
                     exception_mapping_worked = True
                     raise AuthenticationError(
-                        message=f"OpenAIException - {original_exception.message}",
-                        llm_provider="openai",
+                        message=f"{exception_provider} - {original_exception.message}",
+                        llm_provider=custom_llm_provider,
                         model=model,
                         response=original_exception.response,
                     )
                 elif original_exception.status_code == 404:
                     exception_mapping_worked = True
                     raise NotFoundError(
-                        message=f"OpenAIException - {original_exception.message}",
+                        message=f"{exception_provider} - {original_exception.message}",
                         model=model,
-                        llm_provider="openai",
+                        llm_provider=custom_llm_provider,
                         response=original_exception.response,
                     )
                 elif original_exception.status_code == 408:
                     exception_mapping_worked = True
                     raise Timeout(
-                        message=f"OpenAIException - {original_exception.message}",
+                        message=f"{exception_provider} - {original_exception.message}",
                         model=model,
-                        llm_provider="openai",
+                        llm_provider=custom_llm_provider,
                     )
                 elif original_exception.status_code == 422:
                     exception_mapping_worked = True
                     raise BadRequestError(
-                        message=f"OpenAIException - {original_exception.message}",
+                        message=f"{exception_provider} - {original_exception.message}",
                         model=model,
-                        llm_provider="openai",
+                        llm_provider=custom_llm_provider,
                         response=original_exception.response,
                     )
                 elif original_exception.status_code == 429:
                     exception_mapping_worked = True
                     raise RateLimitError(
-                        message=f"OpenAIException - {original_exception.message}",
+                        message=f"{exception_provider} - {original_exception.message}",
                         model=model,
-                        llm_provider="openai",
+                        llm_provider=custom_llm_provider,
                         response=original_exception.response,
                     )
                 elif original_exception.status_code == 503:
                     exception_mapping_worked = True
                     raise ServiceUnavailableError(
-                        message=f"OpenAIException - {original_exception.message}",
+                        message=f"{exception_provider} - {original_exception.message}",
                         model=model,
-                        llm_provider="openai",
+                        llm_provider=custom_llm_provider,
                         response=original_exception.response,
                     )
                 elif original_exception.status_code == 504:  # gateway timeout error
                     exception_mapping_worked = True
                     raise Timeout(
-                        message=f"OpenAIException - {original_exception.message}",
+                        message=f"{exception_provider} - {original_exception.message}",
                         model=model,
-                        llm_provider="openai",
+                        llm_provider=custom_llm_provider,
                     )
                 else:
                     exception_mapping_worked = True
                     raise APIError(
                         status_code=original_exception.status_code,
-                        message=f"OpenAIException - {original_exception.message}",
-                        llm_provider="openai",
+                        message=f"{exception_provider} - {original_exception.message}",
+                        llm_provider=custom_llm_provider,
                         model=model,
                         request=original_exception.request,
                     )
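
The hunk above is mechanical: every status-code branch swaps the hard-coded "OpenAIException"/"openai" pair for the derived provider name and the actual `custom_llm_provider`. Condensed, the mapping it preserves looks like this (the dict name is illustrative; the real code raises litellm's exception classes directly, not strings):

```python
# Status-code -> litellm exception class, as in the branches above.
STATUS_CODE_TO_EXCEPTION = {
    401: "AuthenticationError",
    404: "NotFoundError",
    408: "Timeout",
    422: "BadRequestError",
    429: "RateLimitError",
    503: "ServiceUnavailableError",
    504: "Timeout",  # gateway timeout
}
# Any other status code falls through to APIError with the original status code.
```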
@@ -7015,7 +7025,7 @@ def exception_type(
             ):  # deal with edge-case invalid request error bug in openai-python sdk
                 exception_mapping_worked = True
                 raise BadRequestError(
-                    message=f"OpenAIException: This can happen due to missing AZURE_API_VERSION: {str(original_exception)}",
+                    message=f"{exception_provider}: This can happen due to missing AZURE_API_VERSION: {str(original_exception)}",
                     model=model,
                     llm_provider=custom_llm_provider,
                     response=original_exception.response,