add UnsupportedParamsError to litellm exceptions

Ishaan Jaff 2024-07-24 12:20:14 -07:00
parent 11512c057d
commit 8ea4b73c27


@@ -129,6 +129,7 @@ from .exceptions import (
     ServiceUnavailableError,
     Timeout,
     UnprocessableEntityError,
+    UnsupportedParamsError,
 )
 from .proxy._types import KeyManagementSystem
 from .types.llms.openai import (
@@ -225,17 +226,6 @@ last_fetched_at_keys = None
 # }
-class UnsupportedParamsError(Exception):
-    def __init__(self, status_code, message):
-        self.status_code = status_code
-        self.message = message
-        self.request = httpx.Request(method="POST", url=" https://openai.api.com/v1/")
-        self.response = httpx.Response(status_code=status_code, request=self.request)
-        super().__init__(
-            self.message
-        )  # Call the base class constructor with the parameters it needs
 ############################################################
 def print_verbose(
     print_statement,
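
With the class removed from __init__.py and imported from litellm's exceptions module instead, downstream code can catch it via a single import path. The sketch below is illustrative only: the model name, the possibly-unsupported parameter, the drop_params behavior, and the assumption that the centralized class keeps the status_code/message attributes of the removed class are not taken from this commit.

# Hedged usage sketch: catching UnsupportedParamsError from its new home in
# litellm.exceptions. Provider/model and the failing parameter are examples;
# whether a provider actually rejects them depends on litellm's provider
# support tables, not on this commit.
import litellm
from litellm.exceptions import UnsupportedParamsError

# Assumption: with drop_params disabled, litellm surfaces unsupported
# parameters as errors instead of silently dropping them.
litellm.drop_params = False

try:
    response = litellm.completion(
        model="ollama/llama2",                       # illustrative provider/model
        messages=[{"role": "user", "content": "hi"}],
        response_format={"type": "json_object"},     # illustrative, possibly unsupported param
    )
except UnsupportedParamsError as e:
    # The removed class exposed .status_code and .message; assuming the
    # centralized version keeps these attributes.
    print(f"unsupported param ({e.status_code}): {e.message}")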