Litellm merge pr (#7161)

* build: merge branch

* test: fix openai naming

* fix(main.py): fix openai renaming

* style: ignore function length for config factory

* fix(sagemaker/): fix routing logic

* fix: fix imports

* fix: fix override
Krish Dholakia 2024-12-10 22:49:26 -08:00 committed by GitHub
parent d5aae81c6d
commit 350cfc36f7
88 changed files with 3617 additions and 4421 deletions

@@ -13,6 +13,7 @@ from typing import (
     Iterator,
     List,
     Optional,
+    TypedDict,
     Union,
 )
@@ -34,15 +35,25 @@ class BaseLLMException(Exception):
         self,
         status_code: int,
         message: str,
-        headers: Optional[Union[httpx.Headers, Dict]] = None,
+        headers: Optional[Union[dict, httpx.Headers]] = None,
         request: Optional[httpx.Request] = None,
         response: Optional[httpx.Response] = None,
     ):
         self.status_code = status_code
         self.message: str = message
         self.headers = headers
-        self.request = httpx.Request(method="POST", url="https://docs.litellm.ai/docs")
-        self.response = httpx.Response(status_code=status_code, request=self.request)
+        if request:
+            self.request = request
+        else:
+            self.request = httpx.Request(
+                method="POST", url="https://docs.litellm.ai/docs"
+            )
+        if response:
+            self.response = response
+        else:
+            self.response = httpx.Response(
+                status_code=status_code, request=self.request
+            )
         super().__init__(
             self.message
         )  # Call the base class constructor with the parameters it needs
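
The change above makes a caller-supplied request/response stick: previously both attributes were unconditionally overwritten with a placeholder request pointing at https://docs.litellm.ai/docs. A minimal usage sketch of the new behaviour; the import path is an assumption about where BaseLLMException lives in this tree:

import httpx

# Assumed import path for this revision; adjust to wherever BaseLLMException is defined.
from litellm.llms.base_llm.transformation import BaseLLMException

# The real request/response pair we want the exception to carry (URL is illustrative only).
req = httpx.Request(method="POST", url="https://api.example.com/v1/chat/completions")
resp = httpx.Response(status_code=429, request=req)

exc = BaseLLMException(
    status_code=429, message="rate limited", request=req, response=resp
)
assert exc.request is req  # no longer replaced with the docs.litellm.ai placeholder
assert exc.response is resp  # no longer replaced with a synthetic httpx.Response
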
@@ -117,12 +128,6 @@ class BaseConfig(ABC):
     ) -> dict:
         pass
 
-    @abstractmethod
-    def _transform_messages(
-        self, messages: List[AllMessageValues]
-    ) -> List[AllMessageValues]:
-        pass
-
     @abstractmethod
     def transform_response(
         self,
@@ -133,7 +138,8 @@ class BaseConfig(ABC):
         request_data: dict,
         messages: List[AllMessageValues],
         optional_params: dict,
-        encoding: str,
+        litellm_params: dict,
+        encoding: Any,
         api_key: Optional[str] = None,
         json_mode: Optional[bool] = None,
     ) -> ModelResponse:
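
In the last hunk, transform_response gains a litellm_params argument and encoding is re-annotated from str to Any, while the hunk before it drops the abstract _transform_messages requirement from BaseConfig. The looser encoding annotation matches how the value is typically used: what gets threaded through is a tokenizer object (for example a tiktoken Encoding), not a string. A small illustration, assuming tiktoken is available:

from typing import Any

import tiktoken

# The object passed around as `encoding` is a tokenizer, not a str, so `Any`
# reflects real call sites better than the old `encoding: str` annotation.
encoding: Any = tiktoken.get_encoding("cl100k_base")

tokens = encoding.encode("count my tokens")
print(type(encoding).__name__, len(tokens))  # e.g. "Encoding" and the token count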