Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 03:04:13 +00:00

(feat) show correct provider in exceptions

parent 5fc6081b60
commit a55e0a9689

1 changed file with 35 additions and 25 deletions
@@ -5906,14 +5906,24 @@ def exception_type(
             or custom_llm_provider == "custom_openai"
             or custom_llm_provider in litellm.openai_compatible_providers
         ):
+            # custom_llm_provider is openai, make it OpenAI
+            if custom_llm_provider == "openai":
+                exception_provider = "OpenAI" + "Exception"
+            else:
+                exception_provider = (
+                    custom_llm_provider[0].upper()
+                    + custom_llm_provider[1:]
+                    + "Exception"
+                )
+
             if (
                 "This model's maximum context length is" in error_str
                 or "Request too large" in error_str
             ):
                 exception_mapping_worked = True
                 raise ContextWindowExceededError(
-                    message=f"OpenAIException - {original_exception.message}",
-                    llm_provider="openai",
+                    message=f"{exception_provider} - {original_exception.message}",
+                    llm_provider=custom_llm_provider,
                     model=model,
                     response=original_exception.response,
                 )
@@ -5923,8 +5933,8 @@ def exception_type(
             ):
                 exception_mapping_worked = True
                 raise NotFoundError(
-                    message=f"OpenAIException - {original_exception.message}",
-                    llm_provider="openai",
+                    message=f"{exception_provider} - {original_exception.message}",
+                    llm_provider=custom_llm_provider,
                     model=model,
                     response=original_exception.response,
                 )
@@ -5934,8 +5944,8 @@ def exception_type(
             ):
                 exception_mapping_worked = True
                 raise ContentPolicyViolationError(
-                    message=f"OpenAIException - {original_exception.message}",
-                    llm_provider="openai",
+                    message=f"{exception_provider} - {original_exception.message}",
+                    llm_provider=custom_llm_provider,
                     model=model,
                     response=original_exception.response,
                 )
@@ -5945,8 +5955,8 @@ def exception_type(
             ):
                 exception_mapping_worked = True
                 raise BadRequestError(
-                    message=f"OpenAIException - {original_exception.message}",
-                    llm_provider="openai",
+                    message=f"{exception_provider} - {original_exception.message}",
+                    llm_provider=custom_llm_provider,
                     model=model,
                     response=original_exception.response,
                 )
@@ -5955,63 +5965,63 @@ def exception_type(
                 if original_exception.status_code == 401:
                     exception_mapping_worked = True
                     raise AuthenticationError(
-                        message=f"OpenAIException - {original_exception.message}",
-                        llm_provider="openai",
+                        message=f"{exception_provider} - {original_exception.message}",
+                        llm_provider=custom_llm_provider,
                         model=model,
                         response=original_exception.response,
                     )
                 elif original_exception.status_code == 404:
                     exception_mapping_worked = True
                     raise NotFoundError(
-                        message=f"OpenAIException - {original_exception.message}",
+                        message=f"{exception_provider} - {original_exception.message}",
                         model=model,
-                        llm_provider="openai",
+                        llm_provider=custom_llm_provider,
                         response=original_exception.response,
                     )
                 elif original_exception.status_code == 408:
                     exception_mapping_worked = True
                     raise Timeout(
-                        message=f"OpenAIException - {original_exception.message}",
+                        message=f"{exception_provider} - {original_exception.message}",
                         model=model,
-                        llm_provider="openai",
+                        llm_provider=custom_llm_provider,
                     )
                 elif original_exception.status_code == 422:
                     exception_mapping_worked = True
                     raise BadRequestError(
-                        message=f"OpenAIException - {original_exception.message}",
+                        message=f"{exception_provider} - {original_exception.message}",
                         model=model,
-                        llm_provider="openai",
+                        llm_provider=custom_llm_provider,
                         response=original_exception.response,
                     )
                 elif original_exception.status_code == 429:
                     exception_mapping_worked = True
                     raise RateLimitError(
-                        message=f"OpenAIException - {original_exception.message}",
+                        message=f"{exception_provider} - {original_exception.message}",
                         model=model,
-                        llm_provider="openai",
+                        llm_provider=custom_llm_provider,
                         response=original_exception.response,
                     )
                 elif original_exception.status_code == 503:
                     exception_mapping_worked = True
                     raise ServiceUnavailableError(
-                        message=f"OpenAIException - {original_exception.message}",
+                        message=f"{exception_provider} - {original_exception.message}",
                        model=model,
-                        llm_provider="openai",
+                        llm_provider=custom_llm_provider,
                         response=original_exception.response,
                     )
                 elif original_exception.status_code == 504: # gateway timeout error
                     exception_mapping_worked = True
                     raise Timeout(
-                        message=f"OpenAIException - {original_exception.message}",
+                        message=f"{exception_provider} - {original_exception.message}",
                         model=model,
-                        llm_provider="openai",
+                        llm_provider=custom_llm_provider,
                     )
                 else:
                     exception_mapping_worked = True
                     raise APIError(
                         status_code=original_exception.status_code,
-                        message=f"OpenAIException - {original_exception.message}",
-                        llm_provider="openai",
+                        message=f"{exception_provider} - {original_exception.message}",
+                        llm_provider=custom_llm_provider,
                         model=model,
                         request=original_exception.request,
                     )
@@ -7015,7 +7025,7 @@ def exception_type(
             ): # deal with edge-case invalid request error bug in openai-python sdk
                 exception_mapping_worked = True
                 raise BadRequestError(
-                    message=f"OpenAIException: This can happen due to missing AZURE_API_VERSION: {str(original_exception)}",
+                    message=f"{exception_provider}: This can happen due to missing AZURE_API_VERSION: {str(original_exception)}",
                     model=model,
                     llm_provider=custom_llm_provider,
                     response=original_exception.response,
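
The substantive change is the exception_provider derivation added in the first hunk: instead of hardcoding "OpenAIException" and llm_provider="openai" for every OpenAI-compatible provider, the prefix is now built from custom_llm_provider. A minimal standalone sketch of that naming logic (the helper name and the sample provider strings below are illustrative, not part of the commit):

def exception_provider_name(custom_llm_provider: str) -> str:
    # "openai" keeps its canonical capitalization.
    if custom_llm_provider == "openai":
        return "OpenAI" + "Exception"
    # Other providers: capitalize the first character and append "Exception".
    return custom_llm_provider[0].upper() + custom_llm_provider[1:] + "Exception"

print(exception_provider_name("openai"))   # OpenAIException
print(exception_provider_name("mistral"))  # MistralException

So an error from an OpenAI-compatible provider such as a hypothetical "mistral" call now surfaces as "MistralException - ..." with llm_provider="mistral", rather than being mislabeled as "OpenAIException" with llm_provider="openai".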