Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 03:04:13 +00:00)
(feat) improve exceptions for OpenAI compatible -
parent fedb53771c
commit aae54e0305
1 changed file with 39 additions and 12 deletions
@@ -5907,6 +5907,11 @@ def exception_type(
     or custom_llm_provider in litellm.openai_compatible_providers
 ):
     # custom_llm_provider is openai, make it OpenAI
+    message = original_exception.message
+    if message is not None and isinstance(message, str):
+        message = message.replace("OPENAI", custom_llm_provider.upper())
+        message = message.replace("openai", custom_llm_provider)
+        message = message.replace("OpenAI", custom_llm_provider)
     if custom_llm_provider == "openai":
         exception_provider = "OpenAI" + "Exception"
     else:
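The block added in the hunk above rewrites the upstream OpenAI-SDK error text so that the raised exception names the actual OpenAI-compatible provider. A minimal standalone sketch of that substitution, using a made-up provider name and error string rather than anything from the commit:

# Minimal sketch of the message rewrite added above; "mistral" and the
# sample error text are illustrative assumptions, not taken from the diff.
def rebrand_openai_message(message, custom_llm_provider):
    if message is not None and isinstance(message, str):
        message = message.replace("OPENAI", custom_llm_provider.upper())
        message = message.replace("openai", custom_llm_provider)
        message = message.replace("OpenAI", custom_llm_provider)
    return message

print(rebrand_openai_message(
    "OpenAI error: set the OPENAI_API_KEY environment variable", "mistral"
))
# -> "mistral error: set the MISTRAL_API_KEY environment variable"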
@@ -5922,7 +5927,7 @@ def exception_type(
 ):
     exception_mapping_worked = True
     raise ContextWindowExceededError(
-        message=f"{exception_provider} - {original_exception.message}",
+        message=f"{exception_provider} - {message}",
         llm_provider=custom_llm_provider,
         model=model,
         response=original_exception.response,
@@ -5933,7 +5938,7 @@ def exception_type(
 ):
     exception_mapping_worked = True
     raise NotFoundError(
-        message=f"{exception_provider} - {original_exception.message}",
+        message=f"{exception_provider} - {message}",
         llm_provider=custom_llm_provider,
         model=model,
         response=original_exception.response,
@@ -5944,7 +5949,7 @@ def exception_type(
 ):
     exception_mapping_worked = True
     raise ContentPolicyViolationError(
-        message=f"{exception_provider} - {original_exception.message}",
+        message=f"{exception_provider} - {message}",
         llm_provider=custom_llm_provider,
         model=model,
         response=original_exception.response,
@@ -5955,7 +5960,18 @@ def exception_type(
 ):
     exception_mapping_worked = True
     raise BadRequestError(
-        message=f"{exception_provider} - {original_exception.message}",
+        message=f"{exception_provider} - {message}",
+        llm_provider=custom_llm_provider,
+        model=model,
+        response=original_exception.response,
+    )
+elif (
+    "The api_key client option must be set either by passing api_key to the client or by setting the OPENAI_API_KEY environment variable"
+    in error_str
+):
+    exception_mapping_worked = True
+    raise AuthenticationError(
+        message=f"{exception_provider} - {message}",
         llm_provider=custom_llm_provider,
         model=model,
         response=original_exception.response,
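The new elif branch above maps the OpenAI SDK's missing-api_key error onto litellm's AuthenticationError for OpenAI-compatible providers, using the rewritten, provider-branded message. A hedged usage sketch of what a caller might see after this change; the mistral model name is only an example of an OpenAI-compatible provider, and the exact message text depends on the provider and environment:

# Sketch only: assumes litellm is installed and no MISTRAL_API_KEY is set;
# "mistral/mistral-tiny" is an illustrative OpenAI-compatible model name.
import litellm

try:
    litellm.completion(
        model="mistral/mistral-tiny",
        messages=[{"role": "user", "content": "hi"}],
    )
except litellm.exceptions.AuthenticationError as err:
    # With this commit the message names the provider instead of OpenAI.
    print(type(err).__name__, "-", err)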
@@ -5965,7 +5981,7 @@ def exception_type(
 if original_exception.status_code == 401:
     exception_mapping_worked = True
     raise AuthenticationError(
-        message=f"{exception_provider} - {original_exception.message}",
+        message=f"{exception_provider} - {message}",
         llm_provider=custom_llm_provider,
         model=model,
         response=original_exception.response,
@@ -5973,7 +5989,7 @@ def exception_type(
 elif original_exception.status_code == 404:
     exception_mapping_worked = True
     raise NotFoundError(
-        message=f"{exception_provider} - {original_exception.message}",
+        message=f"{exception_provider} - {message}",
         model=model,
         llm_provider=custom_llm_provider,
         response=original_exception.response,
@@ -5981,14 +5997,14 @@ def exception_type(
 elif original_exception.status_code == 408:
     exception_mapping_worked = True
     raise Timeout(
-        message=f"{exception_provider} - {original_exception.message}",
+        message=f"{exception_provider} - {message}",
         model=model,
         llm_provider=custom_llm_provider,
     )
 elif original_exception.status_code == 422:
     exception_mapping_worked = True
     raise BadRequestError(
-        message=f"{exception_provider} - {original_exception.message}",
+        message=f"{exception_provider} - {message}",
         model=model,
         llm_provider=custom_llm_provider,
         response=original_exception.response,
@@ -5996,7 +6012,7 @@ def exception_type(
 elif original_exception.status_code == 429:
     exception_mapping_worked = True
     raise RateLimitError(
-        message=f"{exception_provider} - {original_exception.message}",
+        message=f"{exception_provider} - {message}",
         model=model,
         llm_provider=custom_llm_provider,
         response=original_exception.response,
@@ -6004,7 +6020,7 @@ def exception_type(
 elif original_exception.status_code == 503:
     exception_mapping_worked = True
     raise ServiceUnavailableError(
-        message=f"{exception_provider} - {original_exception.message}",
+        message=f"{exception_provider} - {message}",
         model=model,
         llm_provider=custom_llm_provider,
         response=original_exception.response,
@@ -6012,7 +6028,7 @@ def exception_type(
 elif original_exception.status_code == 504: # gateway timeout error
     exception_mapping_worked = True
     raise Timeout(
-        message=f"{exception_provider} - {original_exception.message}",
+        message=f"{exception_provider} - {message}",
         model=model,
         llm_provider=custom_llm_provider,
     )
@@ -6020,7 +6036,7 @@ def exception_type(
     exception_mapping_worked = True
     raise APIError(
         status_code=original_exception.status_code,
-        message=f"{exception_provider} - {original_exception.message}",
+        message=f"{exception_provider} - {message}",
         llm_provider=custom_llm_provider,
         model=model,
         request=original_exception.request,
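Taken together, the status-code hunks above leave the existing mapping in place and only swap in the rewritten message. For reference, the mapping exercised by this diff summarized as a dict; this is purely illustrative, the actual code is the if/elif chain shown above:

# Illustrative summary of the status_code -> litellm exception mapping
# touched by this diff; the real implementation is the if/elif chain above.
STATUS_CODE_TO_EXCEPTION = {
    401: "AuthenticationError",
    404: "NotFoundError",
    408: "Timeout",
    422: "BadRequestError",
    429: "RateLimitError",
    503: "ServiceUnavailableError",
    504: "Timeout",  # gateway timeout
}
# Any other status code falls through to APIError.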
@@ -6953,6 +6969,17 @@ def exception_type(
         model=model,
         response=original_exception.response,
     )
+elif (
+    "The api_key client option must be set either by passing api_key to the client or by setting"
+    in error_str
+):
+    exception_mapping_worked = True
+    raise AuthenticationError(
+        message=f"{exception_provider} - {original_exception.message}",
+        llm_provider=custom_llm_provider,
+        model=model,
+        response=original_exception.response,
+    )
 elif hasattr(original_exception, "status_code"):
     exception_mapping_worked = True
     if original_exception.status_code == 401: