Litellm merge pr (#7161)

* build: merge branch

* test: fix openai naming

* fix(main.py): fix openai renaming

* style: ignore function length for config factory

* fix(sagemaker/): fix routing logic

* fix: fix imports

* fix: fix override
Krish Dholakia, 2024-12-10 22:49:26 -08:00, committed by GitHub
commit 350cfc36f7 (parent d5aae81c6d)
88 changed files with 3617 additions and 4421 deletions


```diff
@@ -1,7 +1,10 @@
 import types
-from typing import List, Optional, Type, Union
+from typing import TYPE_CHECKING, Any, List, Optional, Type, Union
+from httpx._models import Headers, Response
+import litellm
+from litellm.llms.base_llm.transformation import BaseLLMException
 from ....exceptions import UnsupportedParamsError
 from ....types.llms.openai import (
```
```diff
@@ -11,10 +14,19 @@ from ....types.llms.openai import (
     ChatCompletionToolParam,
     ChatCompletionToolParamFunctionChunk,
 )
+from ...base_llm.transformation import BaseConfig
 from ...prompt_templates.factory import convert_to_azure_openai_messages
 from ..common_utils import AzureOpenAIError
+if TYPE_CHECKING:
+    from litellm.litellm_core_utils.litellm_logging import Logging as LiteLLMLoggingObj
+
+    LoggingClass = LiteLLMLoggingObj
+else:
+    LoggingClass = Any
-class AzureOpenAIConfig:
+class AzureOpenAIConfig(BaseConfig):
     """
     Reference: https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#chat-completions
```
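
The `TYPE_CHECKING` guard introduced above is the standard way to give `logging_obj` a precise annotation without importing the logging module at runtime, where it could create an import cycle. A commented restatement of the pattern (the trailing `transform` stub is illustrative, not from the diff):

```python
from typing import TYPE_CHECKING, Any

if TYPE_CHECKING:
    # Evaluated only by static type checkers (mypy, pyright); the interpreter
    # skips this branch, so the import cannot create a cycle at runtime.
    from litellm.litellm_core_utils.litellm_logging import (
        Logging as LiteLLMLoggingObj,
    )

    LoggingClass = LiteLLMLoggingObj
else:
    # At runtime the alias collapses to Any, so annotations still resolve.
    LoggingClass = Any


def transform(logging_obj: LoggingClass) -> None:
    ...  # checkers see the real Logging type; the interpreter sees Any
```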
```diff
@@ -61,23 +73,9 @@ class AzureOpenAIConfig:
     @classmethod
     def get_config(cls):
-        return {
-            k: v
-            for k, v in cls.__dict__.items()
-            if not k.startswith("__")
-            and not isinstance(
-                v,
-                (
-                    types.FunctionType,
-                    types.BuiltinFunctionType,
-                    classmethod,
-                    staticmethod,
-                ),
-            )
-            and v is not None
-        }
+        return super().get_config()
 
-    def get_supported_openai_params(self):
+    def get_supported_openai_params(self, model: str) -> List[str]:
         return [
             "temperature",
             "n",
```
```diff
@@ -110,10 +108,10 @@ class AzureOpenAIConfig:
         non_default_params: dict,
         optional_params: dict,
         model: str,
-        api_version: str,  # Y-M-D-{optional}
-        drop_params,
+        drop_params: bool,
+        api_version: str = "",
     ) -> dict:
-        supported_openai_params = self.get_supported_openai_params()
+        supported_openai_params = self.get_supported_openai_params(model)
         api_version_times = api_version.split("-")
         api_version_year = api_version_times[0]
```
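
`map_openai_params` keys behavior off the year of Azure's `YYYY-MM-DD[-suffix]` API version, extracted by splitting on `-`. A quick illustration with a placeholder version string, including what the new `api_version=""` default produces:

```python
api_version = "2024-02-15-preview"  # placeholder version string
api_version_times = api_version.split("-")
api_version_year = api_version_times[0]
assert api_version_year == "2024"

# With the new default of "", split("-") returns [""]: the "year" is the
# empty string, so year-gated branches simply never match.
assert "".split("-")[0] == ""
```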
```diff
@@ -204,9 +202,13 @@ class AzureOpenAIConfig:
         return optional_params
 
-    @classmethod
     def transform_request(
-        cls, model: str, messages: List[AllMessageValues], optional_params: dict
+        self,
+        model: str,
+        messages: List[AllMessageValues],
+        optional_params: dict,
+        litellm_params: dict,
+        headers: dict,
     ) -> dict:
         messages = convert_to_azure_openai_messages(messages)
         return {
```
```diff
@@ -215,6 +217,24 @@ class AzureOpenAIConfig:
             **optional_params,
         }
 
+    def transform_response(
+        self,
+        model: str,
+        raw_response: Response,
+        model_response: litellm.ModelResponse,
+        logging_obj: LoggingClass,
+        request_data: dict,
+        messages: List[AllMessageValues],
+        optional_params: dict,
+        litellm_params: dict,
+        encoding: Any,
+        api_key: Optional[str] = None,
+        json_mode: Optional[bool] = None,
+    ) -> litellm.ModelResponse:
+        raise NotImplementedError(
+            "Azure OpenAI handler.py has custom logic for transforming response, as it uses the OpenAI SDK."
+        )
+
     def get_mapped_special_auth_params(self) -> dict:
         return {"token": "azure_ad_token"}
```
```diff
@@ -246,3 +266,22 @@ class AzureOpenAIConfig:
             "westus3",
             "westus4",
         ]
+
+    def get_error_class(
+        self, error_message: str, status_code: int, headers: Union[dict, Headers]
+    ) -> BaseLLMException:
+        return AzureOpenAIError(
+            message=error_message, status_code=status_code, headers=headers
+        )
+
+    def validate_environment(
+        self,
+        headers: dict,
+        model: str,
+        messages: List[AllMessageValues],
+        optional_params: dict,
+        api_key: Optional[str] = None,
+    ) -> dict:
+        raise NotImplementedError(
+            "Azure OpenAI has custom logic for validating environment, as it uses the OpenAI SDK."
+        )
```
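
`get_error_class` lets provider-agnostic error handling raise an Azure-specific exception, and `validate_environment` raises for the same reason as `transform_response`: the SDK-backed handler owns that step. A hedged sketch of consuming `get_error_class` (continues the previous sketch's assumed import; all values are placeholders):

```python
config = AzureOpenAIConfig()
error = config.get_error_class(
    error_message="deployment not found",  # placeholder message
    status_code=404,                       # placeholder status
    headers={},                            # dict or httpx Headers both accepted
)
# `error` is an AzureOpenAIError (a BaseLLMException subclass), so shared
# retry/fallback code can raise it without provider-specific knowledge.
raise error
```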