feat - add open router exception mapping

Ishaan Jaff 2024-06-19 08:40:08 -07:00
parent 52bcf37079
commit 5982fd88f9


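This hunk adds a dedicated branch for `custom_llm_provider == "openrouter"` inside `exception_type`, so HTTP errors returned by OpenRouter are re-raised as litellm's typed exceptions (BadRequestError, AuthenticationError, NotFoundError, Timeout, RateLimitError, ServiceUnavailableError, APIError) rather than surfacing as raw provider errors. Below is a minimal sketch of how a caller is expected to see the mapped errors; it assumes litellm's public `completion()` API and top-level exception classes, and the model name and API key are placeholders.

# Minimal sketch of catching the mapped OpenRouter errors on the caller side.
# Assumes litellm's public completion() API and exception classes; the model
# name and API key are placeholders, not values from this commit.
import litellm

try:
    response = litellm.completion(
        model="openrouter/openai/gpt-3.5-turbo",  # any OpenRouter-routed model
        messages=[{"role": "user", "content": "hello"}],
        api_key="sk-or-...",  # placeholder key
    )
except litellm.AuthenticationError as e:
    # a 401 from OpenRouter is re-raised as AuthenticationError
    print(f"bad credentials: {e}")
except litellm.RateLimitError as e:
    # a 429 maps to RateLimitError, so generic retry/backoff logic applies
    print(f"rate limited: {e}")
except litellm.APIConnectionError as e:
    # responses without a status code fall through to APIConnectionError
    print(f"connection problem: {e}")

Catching litellm's own exception types keeps retry and fallback logic provider-agnostic, which is the point of the mapping.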
@@ -7167,6 +7167,100 @@ def exception_type(
                        litellm_debug_info=extra_information,
                        request=httpx.Request(method="POST", url="https://openai.com/"),
                    )
            if custom_llm_provider == "openrouter":
                if hasattr(original_exception, "status_code"):
                    exception_mapping_worked = True
                    if original_exception.status_code == 400:
                        exception_mapping_worked = True
                        raise BadRequestError(
                            message=f"{exception_provider} - {message}",
                            llm_provider=custom_llm_provider,
                            model=model,
                            response=original_exception.response,
                            litellm_debug_info=extra_information,
                        )
                    elif original_exception.status_code == 401:
                        exception_mapping_worked = True
                        raise AuthenticationError(
                            message=f"AuthenticationError: {exception_provider} - {message}",
                            llm_provider=custom_llm_provider,
                            model=model,
                            response=original_exception.response,
                            litellm_debug_info=extra_information,
                        )
                    elif original_exception.status_code == 404:
                        exception_mapping_worked = True
                        raise NotFoundError(
                            message=f"NotFoundError: {exception_provider} - {message}",
                            model=model,
                            llm_provider=custom_llm_provider,
                            response=original_exception.response,
                            litellm_debug_info=extra_information,
                        )
                    elif original_exception.status_code == 408:
                        exception_mapping_worked = True
                        raise Timeout(
                            message=f"Timeout Error: {exception_provider} - {message}",
                            model=model,
                            llm_provider=custom_llm_provider,
                            litellm_debug_info=extra_information,
                        )
                    elif original_exception.status_code == 422:
                        exception_mapping_worked = True
                        raise BadRequestError(
                            message=f"BadRequestError: {exception_provider} - {message}",
                            model=model,
                            llm_provider=custom_llm_provider,
                            response=original_exception.response,
                            litellm_debug_info=extra_information,
                        )
                    elif original_exception.status_code == 429:
                        exception_mapping_worked = True
                        raise RateLimitError(
                            message=f"RateLimitError: {exception_provider} - {message}",
                            model=model,
                            llm_provider=custom_llm_provider,
                            response=original_exception.response,
                            litellm_debug_info=extra_information,
                        )
                    elif original_exception.status_code == 503:
                        exception_mapping_worked = True
                        raise ServiceUnavailableError(
                            message=f"ServiceUnavailableError: {exception_provider} - {message}",
                            model=model,
                            llm_provider=custom_llm_provider,
                            response=original_exception.response,
                            litellm_debug_info=extra_information,
                        )
                    elif original_exception.status_code == 504:  # gateway timeout error
                        exception_mapping_worked = True
                        raise Timeout(
                            message=f"Timeout Error: {exception_provider} - {message}",
                            model=model,
                            llm_provider=custom_llm_provider,
                            litellm_debug_info=extra_information,
                        )
                    else:
                        exception_mapping_worked = True
                        raise APIError(
                            status_code=original_exception.status_code,
                            message=f"APIError: {exception_provider} - {message}",
                            llm_provider=custom_llm_provider,
                            model=model,
                            request=original_exception.request,
                            litellm_debug_info=extra_information,
                        )
                else:
                    # if no status code then it is an APIConnectionError: https://github.com/openai/openai-python#handling-errors
                    raise APIConnectionError(
                        message=f"APIConnectionError: {exception_provider} - {message}",
                        llm_provider=custom_llm_provider,
                        model=model,
                        litellm_debug_info=extra_information,
                        request=httpx.Request(
                            method="POST", url="https://api.openai.com/v1/"
                        ),
                    )
            if (
                "BadRequestError.__init__() missing 1 required positional argument: 'param'"
                in str(original_exception)