Litellm merge pr (#7161)

* build: merge branch

* test: fix openai naming

* fix(main.py): fix openai renaming

* style: ignore function length for config factory

* fix(sagemaker/): fix routing logic

* fix: fix imports

* fix: fix override
This commit is contained in:
Krish Dholakia 2024-12-10 22:49:26 -08:00 committed by GitHub
parent d5aae81c6d
commit 350cfc36f7
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
88 changed files with 3617 additions and 4421 deletions

View file

@@ -11,8 +11,10 @@ API calling is done using the OpenAI SDK with an api_base
import types
from typing import Optional, Union
from litellm.llms.openai.chat.gpt_transformation import OpenAIGPTConfig
class NvidiaNimConfig:
class NvidiaNimConfig(OpenAIGPTConfig):
"""
Reference: https://docs.api.nvidia.com/nim/reference/databricks-dbrx-instruct-infer
@@ -42,21 +44,7 @@ class NvidiaNimConfig:
@classmethod
def get_config(cls):
    """Return this config class's tunable attributes as a dict.

    Delegates to the ``OpenAIGPTConfig`` base class (added in this
    commit), whose ``get_config`` performs the same filtering the
    removed inline dict comprehension did: it skips dunder names,
    plain/builtin functions, ``classmethod``/``staticmethod`` objects,
    and ``None``-valued attributes.

    NOTE(review): the scraped diff had merged the removed old body and
    this added one-liner, leaving an unreachable second ``return``;
    only the delegation survives in the post-commit file.
    """
    return super().get_config()
def get_supported_openai_params(self, model: str) -> list:
"""
@@ -132,7 +120,11 @@ class NvidiaNimConfig:
]
def map_openai_params(
self, model: str, non_default_params: dict, optional_params: dict
self,
non_default_params: dict,
optional_params: dict,
model: str,
drop_params: bool,
) -> dict:
supported_openai_params = self.get_supported_openai_params(model=model)
for param, value in non_default_params.items():