mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-25 18:54:30 +00:00
fix testing exception mapping
This commit is contained in:
parent
d0dbc0742b
commit
bcc89a2c3a
3 changed files with 27 additions and 1 deletions
|
@ -876,6 +876,7 @@ from .exceptions import (
|
||||||
InternalServerError,
|
InternalServerError,
|
||||||
JSONSchemaValidationError,
|
JSONSchemaValidationError,
|
||||||
LITELLM_EXCEPTION_TYPES,
|
LITELLM_EXCEPTION_TYPES,
|
||||||
|
MockException,
|
||||||
)
|
)
|
||||||
from .budget_manager import BudgetManager
|
from .budget_manager import BudgetManager
|
||||||
from .proxy.proxy_cli import run_server
|
from .proxy.proxy_cli import run_server
|
||||||
|
|
|
@ -723,3 +723,28 @@ class InvalidRequestError(openai.BadRequestError): # type: ignore
|
||||||
super().__init__(
|
super().__init__(
|
||||||
self.message, f"{self.model}"
|
self.message, f"{self.model}"
|
||||||
) # Call the base class constructor with the parameters it needs
|
) # Call the base class constructor with the parameters it needs
|
||||||
|
|
||||||
|
|
||||||
|
class MockException(openai.APIError):
    """Exception used only in tests to simulate provider/API failures.

    Mirrors the attribute surface of litellm's real exception types
    (status_code, message, llm_provider, model, retry settings) so that
    exception-mapping code paths can be exercised without a live provider.
    """

    def __init__(
        self,
        status_code,
        message,
        llm_provider,
        model,
        request: Optional[httpx.Request] = None,
        litellm_debug_info: Optional[str] = None,
        max_retries: Optional[int] = None,
        num_retries: Optional[int] = None,
    ):
        # Record the simulated failure details before delegating to the
        # openai.APIError base class.
        self.status_code = status_code
        self.message = "litellm.MockException: {}".format(message)
        self.llm_provider = llm_provider
        self.model = model
        self.litellm_debug_info = litellm_debug_info
        self.max_retries = max_retries
        self.num_retries = num_retries

        # openai.APIError requires an httpx.Request; fabricate a placeholder
        # one when the caller did not supply a request object.
        if request is None:
            request = httpx.Request(method="POST", url="https://api.openai.com/v1")
        super().__init__(self.message, request=request, body=None)  # type: ignore
|
||||||
|
|
|
@ -479,7 +479,7 @@ def mock_completion(
|
||||||
if isinstance(mock_response, Exception):
|
if isinstance(mock_response, Exception):
|
||||||
if isinstance(mock_response, openai.APIError):
|
if isinstance(mock_response, openai.APIError):
|
||||||
raise mock_response
|
raise mock_response
|
||||||
raise litellm.APIError(
|
raise litellm.MockException(
|
||||||
status_code=getattr(mock_response, "status_code", 500), # type: ignore
|
status_code=getattr(mock_response, "status_code", 500), # type: ignore
|
||||||
message=getattr(mock_response, "text", str(mock_response)),
|
message=getattr(mock_response, "text", str(mock_response)),
|
||||||
llm_provider=getattr(mock_response, "llm_provider", custom_llm_provider or "openai"), # type: ignore
|
llm_provider=getattr(mock_response, "llm_provider", custom_llm_provider or "openai"), # type: ignore
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue