Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-25 10:44:24 +00:00.
(feat) add openai.NotFoundError
This commit is contained in: parent 4fd59c5847, commit 353433e5ce.
3 changed files with 28 additions and 3 deletions
|
@ -412,6 +412,7 @@ from .exceptions import (
|
|||
AuthenticationError,
|
||||
InvalidRequestError,
|
||||
BadRequestError,
|
||||
NotFoundError,
|
||||
RateLimitError,
|
||||
ServiceUnavailableError,
|
||||
OpenAIError,
|
||||
|
|
|
@ -12,6 +12,7 @@
|
|||
from openai import (
|
||||
AuthenticationError,
|
||||
BadRequestError,
|
||||
NotFoundError,
|
||||
RateLimitError,
|
||||
APIStatusError,
|
||||
OpenAIError,
|
||||
|
@ -36,6 +37,20 @@ class AuthenticationError(AuthenticationError): # type: ignore
|
|||
body=None
|
||||
) # Call the base class constructor with the parameters it needs
|
||||
|
||||
# raise when invalid models passed, example gpt-8
class NotFoundError(NotFoundError):  # type: ignore
    """404 error: the requested model could not be found.

    Wraps the imported openai ``NotFoundError`` (see this file's imports)
    so callers catching either exception type keep working, while adding
    litellm-specific context (``model``, ``llm_provider``).
    """

    def __init__(self, message, model, llm_provider, response: httpx.Response):
        # litellm-specific context, readable by exception handlers upstream.
        self.status_code = 404
        self.message = message
        self.model = model
        self.llm_provider = llm_provider
        # Forward the message to the base constructor; body is intentionally
        # None since we only carry the raw httpx response.
        super().__init__(self.message, response=response, body=None)
|
||||
|
||||
|
||||
class BadRequestError(BadRequestError): # type: ignore
|
||||
def __init__(self, message, model, llm_provider, response: httpx.Response):
|
||||
self.status_code = 400
|
||||
|
|
|
@ -46,6 +46,7 @@ from openai._models import BaseModel as OpenAIObject
|
|||
from .exceptions import (
|
||||
AuthenticationError,
|
||||
BadRequestError,
|
||||
NotFoundError,
|
||||
RateLimitError,
|
||||
ServiceUnavailableError,
|
||||
OpenAIError,
|
||||
|
@ -4169,6 +4170,14 @@ def exception_type(
|
|||
model=model,
|
||||
response=original_exception.response
|
||||
)
|
||||
elif "invalid_request_error" in error_str and "model_not_found" in error_str:
|
||||
exception_mapping_worked = True
|
||||
raise NotFoundError(
|
||||
message=f"OpenAIException - {original_exception.message}",
|
||||
llm_provider="openai",
|
||||
model=model,
|
||||
response=original_exception.response
|
||||
)
|
||||
elif "invalid_request_error" in error_str and "Incorrect API key provided" not in error_str:
|
||||
exception_mapping_worked = True
|
||||
raise BadRequestError(
|
||||
|
@ -4994,15 +5003,15 @@ def exception_type(
|
|||
model=model,
|
||||
response=original_exception.response
|
||||
)
|
||||
elif "invalid_request_error" in error_str:
|
||||
elif "DeploymentNotFound" in error_str:
|
||||
exception_mapping_worked = True
|
||||
raise BadRequestError(
|
||||
raise NotFoundError(
|
||||
message=f"AzureException - {original_exception.message}",
|
||||
llm_provider="azure",
|
||||
model=model,
|
||||
response=original_exception.response
|
||||
)
|
||||
elif "DeploymentNotFound" in error_str:
|
||||
elif "invalid_request_error" in error_str:
|
||||
exception_mapping_worked = True
|
||||
raise BadRequestError(
|
||||
message=f"AzureException - {original_exception.message}",
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue