mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-27 11:43:54 +00:00)
(v0) add ContentPolicyViolationError

commit 66b23ecbb5 (parent f49c87a872)
3 changed files with 28 additions and 0 deletions

litellm/__init__.py
@@ -544,6 +544,7 @@ from .exceptions import (
     ServiceUnavailableError,
     OpenAIError,
     ContextWindowExceededError,
+    ContentPolicyViolationError,
     BudgetExceededError,
     APIError,
     Timeout,
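
With this export in place, the new error resolves from the package root like the neighboring exception types. A minimal sanity-check sketch (hypothetical snippet; assumes an installed litellm build that includes this commit and that BadRequestError is re-exported from the same import block):

import litellm

# Both names are re-exported from litellm/exceptions.py via the package root.
assert hasattr(litellm, "ContentPolicyViolationError")
# Per the class definition in the next hunk, it subclasses BadRequestError.
assert issubclass(litellm.ContentPolicyViolationError, litellm.BadRequestError)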

litellm/exceptions.py
@@ -108,6 +108,21 @@ class ContextWindowExceededError(BadRequestError): # type: ignore
         ) # Call the base class constructor with the parameters it needs


+class ContentPolicyViolationError(BadRequestError): # type: ignore
+    # Error code: 400 - {'error': {'code': 'content_policy_violation', 'message': 'Your request was rejected as a result of our safety system. Image descriptions generated from your prompt may contain text that is not allowed by our safety system. If you believe this was done in error, your request may succeed if retried, or by adjusting your prompt.', 'param': None, 'type': 'invalid_request_error'}}
+    def __init__(self, message, model, llm_provider, response: httpx.Response):
+        self.status_code = 400
+        self.message = message
+        self.model = model
+        self.llm_provider = llm_provider
+        super().__init__(
+            message=self.message,
+            model=self.model, # type: ignore
+            llm_provider=self.llm_provider, # type: ignore
+            response=response,
+        ) # Call the base class constructor with the parameters it needs
+
+
 class ServiceUnavailableError(APIStatusError): # type: ignore
     def __init__(self, message, llm_provider, model, response: httpx.Response):
         self.status_code = 503
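
Downstream code can catch the new class like litellm's other 400-level errors. A minimal usage sketch (the model name and prompt are placeholders; assumes an installed litellm that includes this commit):

import litellm
from litellm import ContentPolicyViolationError

try:
    response = litellm.completion(
        model="gpt-3.5-turbo",  # placeholder model name
        messages=[{"role": "user", "content": "describe this image"}],
    )
except ContentPolicyViolationError as e:
    # status_code is hard-coded to 400 in __init__ above, so this is a
    # client-side error: adjust the prompt rather than retrying blindly.
    print(f"{e.llm_provider} rejected the request for {e.model}: {e.message}")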

litellm/utils.py
@@ -59,6 +59,7 @@ from .exceptions import (
     ServiceUnavailableError,
     OpenAIError,
     ContextWindowExceededError,
+    ContentPolicyViolationError,
     Timeout,
     APIConnectionError,
     APIError,

@@ -5559,6 +5560,17 @@ def exception_type(
                     model=model,
                     response=original_exception.response,
                 )
+            elif (
+                "invalid_request_error" in error_str
+                and "content_policy_violation" in error_str
+            ):
+                exception_mapping_worked = True
+                raise ContentPolicyViolationError(
+                    message=f"OpenAIException - {original_exception.message}",
+                    llm_provider="openai",
+                    model=model,
+                    response=original_exception.response,
+                )
             elif hasattr(original_exception, "status_code"):
                 exception_mapping_worked = True
                 if original_exception.status_code == 401:
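
The new branch is pure string matching on the provider's error text. A self-contained sketch of the same predicate and re-raise, with a fabricated error_str and a synthetic httpx.Response standing in for the real OpenAI exception (both are stand-ins, not litellm internals):

import httpx
from litellm import ContentPolicyViolationError

# Fabricated example of the OpenAI error text the new elif matches on.
error_str = (
    "Error code: 400 - {'error': {'code': 'content_policy_violation', "
    "'type': 'invalid_request_error'}}"
)

if "invalid_request_error" in error_str and "content_policy_violation" in error_str:
    raise ContentPolicyViolationError(
        message=f"OpenAIException - {error_str}",
        model="gpt-3.5-turbo",  # placeholder
        llm_provider="openai",
        response=httpx.Response(
            400,
            request=httpx.Request("POST", "https://api.openai.com/v1/chat/completions"),
        ),
    )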