Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 03:04:13 +00:00
Litellm merge pr (#7161)

* build: merge branch
* test: fix openai naming
* fix(main.py): fix openai renaming
* style: ignore function length for config factory
* fix(sagemaker/): fix routing logic
* fix: fix imports
* fix: fix override

Parent: d5aae81c6d
Commit: 350cfc36f7
88 changed files with 3617 additions and 4421 deletions
@@ -14,6 +14,7 @@ from pydantic import BaseModel
 import litellm
 from litellm import verbose_logger
 from litellm.llms.custom_httpx.http_handler import get_async_httpx_client
+from litellm.llms.openai.chat.gpt_transformation import OpenAIGPTConfig
 from litellm.types.llms.ollama import OllamaToolCall, OllamaToolCallFunction
 from litellm.types.llms.openai import ChatCompletionAssistantToolCall
 from litellm.types.utils import StreamingChoices
@@ -30,7 +31,7 @@ class OllamaError(Exception):
         )  # Call the base class constructor with the parameters it needs


-class OllamaChatConfig:
+class OllamaChatConfig(OpenAIGPTConfig):
     """
     Reference: https://github.com/ollama/ollama/blob/main/docs/api.md#parameters

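This is the core of the "config factory" change mentioned in the commit message: OllamaChatConfig now inherits from OpenAIGPTConfig instead of standing alone. A minimal sketch of the pattern, assuming the base class carries the generic get_config() behavior; only the two class names come from the diff, while the field names and the base-class body here are illustrative:

    import types
    from typing import Optional

    class OpenAIGPTConfig:
        # Shared OpenAI-style defaults can live on the base class.
        temperature: Optional[float] = None
        top_p: Optional[float] = None

        @classmethod
        def get_config(cls) -> dict:
            # Collect the non-dunder, non-callable class attributes that are set.
            return {
                k: v
                for k, v in cls.__dict__.items()
                if not k.startswith("__")
                and not isinstance(
                    v,
                    (types.FunctionType, types.BuiltinFunctionType, classmethod, staticmethod),
                )
                and v is not None
            }

    class OllamaChatConfig(OpenAIGPTConfig):
        # Only the provider-specific knobs remain declared on the subclass.
        num_predict: Optional[int] = None

With this layout, each provider config shares one get_config() implementation instead of re-declaring the dict comprehension that is removed further down in this diff.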
@@ -81,15 +82,10 @@ class OllamaChatConfig:
     num_thread: Optional[int] = None
     repeat_last_n: Optional[int] = None
     repeat_penalty: Optional[float] = None
-    temperature: Optional[float] = None
-    seed: Optional[int] = None
-    stop: Optional[list] = (
-        None  # stop is a list based on this - https://github.com/ollama/ollama/pull/442
-    )
     tfs_z: Optional[float] = None
     num_predict: Optional[int] = None
     top_k: Optional[int] = None
     top_p: Optional[float] = None
     system: Optional[str] = None
     template: Optional[str] = None

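temperature, seed, and stop disappear here because the subclass no longer needs to re-declare fields the OpenAI base config is expected to provide, while the Ollama-only fields (num_thread, tfs_z, num_predict, and so on) stay. A hypothetical check under that assumption; whether OpenAIGPTConfig actually declares these attributes is not shown in this diff:

    cfg = OllamaChatConfig()
    assert cfg.num_thread is None    # still declared on OllamaChatConfig
    assert cfg.temperature is None   # assumed to be inherited from OpenAIGPTConfig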
@@ -120,26 +116,9 @@ class OllamaChatConfig:

     @classmethod
     def get_config(cls):
-        return {
-            k: v
-            for k, v in cls.__dict__.items()
-            if not k.startswith("__")
-            and k != "function_name"  # special param for function calling
-            and not isinstance(
-                v,
-                (
-                    types.FunctionType,
-                    types.BuiltinFunctionType,
-                    classmethod,
-                    staticmethod,
-                ),
-            )
-            and v is not None
-        }
+        return super().get_config()

-    def get_supported_openai_params(
-        self,
-    ):
+    def get_supported_openai_params(self, model: str):
         return [
             "max_tokens",
             "max_completion_tokens",
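get_config() collapses to a one-line delegation to the base class, and get_supported_openai_params() picks up a model argument so its signature matches the method it now overrides. A hedged usage sketch; the model string is illustrative, and only the two list entries visible in the context lines are asserted:

    config = OllamaChatConfig()
    supported = config.get_supported_openai_params(model="ollama/llama3")
    assert "max_tokens" in supported
    assert "max_completion_tokens" in supported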
@@ -156,8 +135,12 @@ class OllamaChatConfig:
         ]

     def map_openai_params(
-        self, model: str, non_default_params: dict, optional_params: dict
-    ):
+        self,
+        non_default_params: dict,
+        optional_params: dict,
+        model: str,
+        drop_params: bool,
+    ) -> dict:
         for param, value in non_default_params.items():
             if param == "max_tokens" or param == "max_completion_tokens":
                 optional_params["num_predict"] = value
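map_openai_params() is likewise realigned with the base-class signature: the parameters are reordered, a drop_params flag is added, and the method is annotated to return a dict. A usage sketch of the new signature, assuming the method returns the mutated optional_params as the -> dict annotation suggests (the model string is illustrative):

    optional_params = OllamaChatConfig().map_openai_params(
        non_default_params={"max_tokens": 256},
        optional_params={},
        model="ollama/llama3",
        drop_params=False,
    )
    # Per the loop shown above, OpenAI's max_tokens maps to Ollama's num_predict:
    # {"num_predict": 256}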