forked from phoenix/litellm-mirror
Merge pull request #4282 from BerriAI/litellm_add_openrouter_exception_mapping
feat - add open router exception mapping
Commit f5ebc1a042
1 changed file with 94 additions and 0 deletions
@@ -7181,6 +7181,100 @@ def exception_type(
                         litellm_debug_info=extra_information,
                         request=httpx.Request(method="POST", url="https://openai.com/"),
                     )
+            if custom_llm_provider == "openrouter":
+                if hasattr(original_exception, "status_code"):
+                    exception_mapping_worked = True
+                    if original_exception.status_code == 400:
+                        exception_mapping_worked = True
+                        raise BadRequestError(
+                            message=f"{exception_provider} - {message}",
+                            llm_provider=custom_llm_provider,
+                            model=model,
+                            response=original_exception.response,
+                            litellm_debug_info=extra_information,
+                        )
+                    elif original_exception.status_code == 401:
+                        exception_mapping_worked = True
+                        raise AuthenticationError(
+                            message=f"AuthenticationError: {exception_provider} - {message}",
+                            llm_provider=custom_llm_provider,
+                            model=model,
+                            response=original_exception.response,
+                            litellm_debug_info=extra_information,
+                        )
+                    elif original_exception.status_code == 404:
+                        exception_mapping_worked = True
+                        raise NotFoundError(
+                            message=f"NotFoundError: {exception_provider} - {message}",
+                            model=model,
+                            llm_provider=custom_llm_provider,
+                            response=original_exception.response,
+                            litellm_debug_info=extra_information,
+                        )
+                    elif original_exception.status_code == 408:
+                        exception_mapping_worked = True
+                        raise Timeout(
+                            message=f"Timeout Error: {exception_provider} - {message}",
+                            model=model,
+                            llm_provider=custom_llm_provider,
+                            litellm_debug_info=extra_information,
+                        )
+                    elif original_exception.status_code == 422:
+                        exception_mapping_worked = True
+                        raise BadRequestError(
+                            message=f"BadRequestError: {exception_provider} - {message}",
+                            model=model,
+                            llm_provider=custom_llm_provider,
+                            response=original_exception.response,
+                            litellm_debug_info=extra_information,
+                        )
+                    elif original_exception.status_code == 429:
+                        exception_mapping_worked = True
+                        raise RateLimitError(
+                            message=f"RateLimitError: {exception_provider} - {message}",
+                            model=model,
+                            llm_provider=custom_llm_provider,
+                            response=original_exception.response,
+                            litellm_debug_info=extra_information,
+                        )
+                    elif original_exception.status_code == 503:
+                        exception_mapping_worked = True
+                        raise ServiceUnavailableError(
+                            message=f"ServiceUnavailableError: {exception_provider} - {message}",
+                            model=model,
+                            llm_provider=custom_llm_provider,
+                            response=original_exception.response,
+                            litellm_debug_info=extra_information,
+                        )
+                    elif original_exception.status_code == 504:  # gateway timeout error
+                        exception_mapping_worked = True
+                        raise Timeout(
+                            message=f"Timeout Error: {exception_provider} - {message}",
+                            model=model,
+                            llm_provider=custom_llm_provider,
+                            litellm_debug_info=extra_information,
+                        )
+                    else:
+                        exception_mapping_worked = True
+                        raise APIError(
+                            status_code=original_exception.status_code,
+                            message=f"APIError: {exception_provider} - {message}",
+                            llm_provider=custom_llm_provider,
+                            model=model,
+                            request=original_exception.request,
+                            litellm_debug_info=extra_information,
+                        )
+                else:
+                    # if no status code then it is an APIConnectionError: https://github.com/openai/openai-python#handling-errors
+                    raise APIConnectionError(
+                        message=f"APIConnectionError: {exception_provider} - {message}",
+                        llm_provider=custom_llm_provider,
+                        model=model,
+                        litellm_debug_info=extra_information,
+                        request=httpx.Request(
+                            method="POST", url="https://api.openai.com/v1/"
+                        ),
+                    )
             if (
                 "BadRequestError.__init__() missing 1 required positional argument: 'param'"
                 in str(original_exception)
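For reviewers, a minimal sketch of what this mapping means for callers. It is not part of the commit; the model name and API key below are placeholders, and it assumes litellm's package-level completion() entry point and exception exports:

import litellm

# Placeholder request against an OpenRouter-routed model.
try:
    litellm.completion(
        model="openrouter/openai/gpt-3.5-turbo",
        messages=[{"role": "user", "content": "hello"}],
        api_key="sk-or-invalid",  # deliberately bad key, for illustration only
    )
except litellm.AuthenticationError as e:
    # With this commit, an OpenRouter 401 is re-raised by exception_type
    # as litellm's AuthenticationError instead of a generic error.
    print(f"auth error from {e.llm_provider}: {e.message}")
except litellm.RateLimitError:
    # Likewise, a 429 maps to RateLimitError, so retry/backoff logic can
    # stay provider-agnostic.
    pass

Responses that carry no status code fall through to APIConnectionError, following the openai-python error-handling convention linked in the diff.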