From df12f87a64cc2078b4cb1f4700d3033ca95807da Mon Sep 17 00:00:00 2001
From: Krish Dholakia
Date: Tue, 10 Dec 2024 17:12:42 -0800
Subject: [PATCH] LiteLLM Common Base LLM Config (pt.3): Move all OAI
 compatible providers to base llm config (#7148)

* refactor(fireworks_ai/): inherit from openai like base config

refactors fireworks ai to use a common config

* test: fix import in test

* refactor(watsonx/): refactor watsonx to use llm base config

refactors chat + completion routes to base config path

* fix: fix linting error

* refactor: inherit base llm config for oai compatible routes

* test: fix test

* test: fix test
---
 litellm/__init__.py                           |  4 ++
 .../llms/databricks/chat/transformation.py    | 16 +-----
 litellm/llms/empower/chat/transformation.py   |  9 ++++
 litellm/llms/galadriel/chat/transformation.py | 24 +++++++++
 litellm/llms/github/chat/transformation.py    | 24 +++++++++
 litellm/utils.py                              | 17 ++++++
 tests/local_testing/test_config.py            | 54 ++++++++++---------
 7 files changed, 107 insertions(+), 41 deletions(-)
 create mode 100644 litellm/llms/empower/chat/transformation.py
 create mode 100644 litellm/llms/galadriel/chat/transformation.py
 create mode 100644 litellm/llms/github/chat/transformation.py

diff --git a/litellm/__init__.py b/litellm/__init__.py
index f119dde2a8..fb3d629ab4 100644
--- a/litellm/__init__.py
+++ b/litellm/__init__.py
@@ -1055,7 +1055,11 @@ ALL_LITELLM_RESPONSE_TYPES = [
 from .types.utils import ImageObject
 from .llms.custom_llm import CustomLLM
+from .llms.openai_like.chat.handler import OpenAILikeChatConfig
+from .llms.galadriel.chat.transformation import GaladrielChatConfig
 from .llms.huggingface_restapi import HuggingfaceConfig
+from .llms.empower.chat.transformation import EmpowerChatConfig
+from .llms.github.chat.transformation import GithubChatConfig
 from .llms.anthropic.chat.handler import AnthropicConfig
 from .llms.anthropic.experimental_pass_through.transformation import (
     AnthropicExperimentalPassThroughConfig,
 )
diff --git a/litellm/llms/databricks/chat/transformation.py b/litellm/llms/databricks/chat/transformation.py
index 6b362c3662..8a1b468a82 100644
--- a/litellm/llms/databricks/chat/transformation.py
+++ b/litellm/llms/databricks/chat/transformation.py
@@ -45,21 +45,7 @@ class DatabricksConfig(OpenAIGPTConfig):
 
     @classmethod
     def get_config(cls):
-        return {
-            k: v
-            for k, v in cls.__dict__.items()
-            if not k.startswith("__")
-            and not isinstance(
-                v,
-                (
-                    types.FunctionType,
-                    types.BuiltinFunctionType,
-                    classmethod,
-                    staticmethod,
-                ),
-            )
-            and v is not None
-        }
+        return super().get_config()
 
     def get_required_params(self) -> List[ProviderField]:
         """For a given provider, return it's required fields with a description"""
diff --git a/litellm/llms/empower/chat/transformation.py b/litellm/llms/empower/chat/transformation.py
new file mode 100644
index 0000000000..045becffa5
--- /dev/null
+++ b/litellm/llms/empower/chat/transformation.py
@@ -0,0 +1,9 @@
+"""
+Translate from OpenAI's `/v1/chat/completions` to Empower's `/v1/chat/completions`
+"""
+
+from ...openai_like.chat.transformation import OpenAILikeChatConfig
+
+
+class EmpowerChatConfig(OpenAILikeChatConfig):
+    pass
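Note: the Databricks hunk above shows the point of this refactor. Each
OpenAI-compatible provider config used to carry its own copy of the same
`cls.__dict__` scrape in `get_config()`; inheriting from the common base
removes that duplication. Below is a minimal sketch of the shared
classmethod, mirroring the body deleted from `DatabricksConfig`; the
class name `SharedConfigSketch` is illustrative, not LiteLLM's, and the
real base presumably adds further filtering (see the note at the end).

import types


class SharedConfigSketch:
    @classmethod
    def get_config(cls):
        # Collect class-level option attributes, skipping dunder names,
        # callables/descriptors, and unset (None) values. This is the
        # filter each provider config previously duplicated inline.
        return {
            k: v
            for k, v in cls.__dict__.items()
            if not k.startswith("__")
            and not isinstance(
                v,
                (
                    types.FunctionType,
                    types.BuiltinFunctionType,
                    classmethod,
                    staticmethod,
                ),
            )
            and v is not None
        }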
diff --git a/litellm/llms/galadriel/chat/transformation.py b/litellm/llms/galadriel/chat/transformation.py
new file mode 100644
index 0000000000..9ce39ed01a
--- /dev/null
+++ b/litellm/llms/galadriel/chat/transformation.py
@@ -0,0 +1,24 @@
+"""
+Translate from OpenAI's `/v1/chat/completions` to Galadriel's `/v1/chat/completions`
+"""
+
+import json
+import types
+from typing import List, Optional, Tuple, Union
+
+from pydantic import BaseModel
+
+import litellm
+from litellm.secret_managers.main import get_secret_str
+from litellm.types.llms.openai import (
+    AllMessageValues,
+    ChatCompletionAssistantMessage,
+    ChatCompletionToolParam,
+    ChatCompletionToolParamFunctionChunk,
+)
+
+from ...openai_like.chat.handler import OpenAILikeChatConfig
+
+
+class GaladrielChatConfig(OpenAILikeChatConfig):
+    pass
diff --git a/litellm/llms/github/chat/transformation.py b/litellm/llms/github/chat/transformation.py
new file mode 100644
index 0000000000..9d7adff3d2
--- /dev/null
+++ b/litellm/llms/github/chat/transformation.py
@@ -0,0 +1,24 @@
+"""
+Translate from OpenAI's `/v1/chat/completions` to Github's `/v1/chat/completions`
+"""
+
+import json
+import types
+from typing import List, Optional, Tuple, Union
+
+from pydantic import BaseModel
+
+import litellm
+from litellm.secret_managers.main import get_secret_str
+from litellm.types.llms.openai import (
+    AllMessageValues,
+    ChatCompletionAssistantMessage,
+    ChatCompletionToolParam,
+    ChatCompletionToolParamFunctionChunk,
+)
+
+from ...openai_like.chat.handler import OpenAILikeChatConfig
+
+
+class GithubChatConfig(OpenAILikeChatConfig):
+    pass
diff --git a/litellm/utils.py b/litellm/utils.py
index 94f9a41276..5d94f64c62 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -6303,6 +6303,23 @@ class ProviderConfigManager:
             return litellm.IBMWatsonXChatConfig()
         elif litellm.LlmProviders.WATSONX_TEXT == provider:
             return litellm.IBMWatsonXAIConfig()
+        elif litellm.LlmProviders.EMPOWER == provider:
+            return litellm.EmpowerChatConfig()
+        elif litellm.LlmProviders.GITHUB == provider:
+            return litellm.GithubChatConfig()
+        elif (
+            litellm.LlmProviders.CUSTOM == provider
+            or litellm.LlmProviders.CUSTOM_OPENAI == provider
+            or litellm.LlmProviders.OPENAI_LIKE == provider
+            or litellm.LlmProviders.LITELLM_PROXY == provider
+        ):
+            return litellm.OpenAILikeChatConfig()
+        elif litellm.LlmProviders.HOSTED_VLLM == provider:
+            return litellm.HostedVLLMChatConfig()
+        elif litellm.LlmProviders.LM_STUDIO == provider:
+            return litellm.LMStudioChatConfig()
+        elif litellm.LlmProviders.GALADRIEL == provider:
+            return litellm.GaladrielChatConfig()
 
         return litellm.OpenAIGPTConfig()
 
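Note: with the dispatch above, callers resolve a provider's chat config
through `ProviderConfigManager` and fall through to `OpenAIGPTConfig`
for any provider without a dedicated branch. A small usage sketch
following the call signature exercised by the test below; the model
string is an arbitrary placeholder.

from litellm import LlmProviders
from litellm.utils import ProviderConfigManager

# A newly mapped provider now resolves to its own config class
# rather than the generic OpenAIGPTConfig fallback.
config = ProviderConfigManager.get_provider_chat_config(
    model="gpt-3.5-turbo", provider=LlmProviders.GALADRIEL
)
print(type(config).__name__)  # expected: GaladrielChatConfig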
diff --git a/tests/local_testing/test_config.py b/tests/local_testing/test_config.py
index c5896793a7..3d73ad000a 100644
--- a/tests/local_testing/test_config.py
+++ b/tests/local_testing/test_config.py
@@ -290,33 +290,35 @@ async def test_add_and_delete_deployments(llm_router, model_list_flag_value):
     assert len(llm_router.model_list) == len(model_list) + prev_llm_router_val
 
 
-# def test_provider_config_manager():
-#     from litellm import LITELLM_CHAT_PROVIDERS, LlmProviders
-#     from litellm.utils import ProviderConfigManager
-#     from litellm.llms.base_llm.transformation import BaseConfig
-#     from litellm.llms.OpenAI.chat.gpt_transformation import OpenAIGPTConfig
+def test_provider_config_manager():
+    from litellm import LITELLM_CHAT_PROVIDERS, LlmProviders
+    from litellm.utils import ProviderConfigManager
+    from litellm.llms.base_llm.transformation import BaseConfig
+    from litellm.llms.OpenAI.chat.gpt_transformation import OpenAIGPTConfig
 
-#     for provider in LITELLM_CHAT_PROVIDERS:
-#         assert isinstance(
-#             ProviderConfigManager.get_provider_chat_config(
-#                 model="gpt-3.5-turbo", provider=LlmProviders(provider)
-#             ),
-#             BaseConfig,
-#         ), f"Provider {provider} is not a subclass of BaseConfig"
+    for provider in LITELLM_CHAT_PROVIDERS:
+        if provider == LlmProviders.TRITON or provider == LlmProviders.PREDIBASE:
+            continue
+        assert isinstance(
+            ProviderConfigManager.get_provider_chat_config(
+                model="gpt-3.5-turbo", provider=LlmProviders(provider)
+            ),
+            BaseConfig,
+        ), f"Provider {provider} is not a subclass of BaseConfig"
 
-#         config = ProviderConfigManager.get_provider_chat_config(
-#             model="gpt-3.5-turbo", provider=LlmProviders(provider)
-#         )
+        config = ProviderConfigManager.get_provider_chat_config(
+            model="gpt-3.5-turbo", provider=LlmProviders(provider)
+        )
 
-#         if (
-#             provider != litellm.LlmProviders.OPENAI
-#             and provider != litellm.LlmProviders.OPENAI_LIKE
-#             and provider != litellm.LlmProviders.CUSTOM_OPENAI
-#         ):
-#             assert (
-#                 config.__class__.__name__ != "OpenAIGPTConfig"
-#             ), f"Provider {provider} is an instance of OpenAIGPTConfig"
+        if (
+            provider != litellm.LlmProviders.OPENAI
+            and provider != litellm.LlmProviders.OPENAI_LIKE
+            and provider != litellm.LlmProviders.CUSTOM_OPENAI
+        ):
+            assert (
+                config.__class__.__name__ != "OpenAIGPTConfig"
+            ), f"Provider {provider} is an instance of OpenAIGPTConfig"
 
-#         assert (
-#             "_abc_impl" not in config.get_config()
-#         ), f"Provider {provider} has _abc_impl"
+        assert (
+            "_abc_impl" not in config.get_config()
+        ), f"Provider {provider} has _abc_impl"
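Note: the final assertion in the re-enabled test guards against ABC
metaclass leakage. On CPython, `ABCMeta` stores an `_abc_impl` entry in
the `__dict__` of every class it creates, so a config whose
`get_config()` scrapes `cls.__dict__` (as in the sketch earlier) would
expose it unless the base class filters it out explicitly, which is
exactly what this assert verifies. A standalone sketch of the failure
mode; class names here are illustrative, not LiteLLM's.

import abc


class AbstractProviderConfig(abc.ABC):
    pass


class ExampleConfig(AbstractProviderConfig):
    temperature = 0.7


# ABCMeta plants its bookkeeping attribute on each class it creates,
# so a naive __dict__ scrape would pick it up alongside `temperature`:
# it does not start with "__", is not callable, and is not None.
print("_abc_impl" in ExampleConfig.__dict__)  # True on CPython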