mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-25 10:44:24 +00:00
LiteLLM Common Base LLM Config (pt.3): Move all OAI compatible providers to base llm config (#7148)
All checks were successful
Read Version from pyproject.toml / read-version (push) Successful in 45s
* refactor(fireworks_ai/): inherit from openai-like base config — refactors Fireworks AI to use a common config
* test: fix import in test
* refactor(watsonx/): refactor watsonx to use the LLM base config — refactors chat + completion routes to the base config path
* fix: fix linting error
* refactor: inherit base LLM config for OpenAI-compatible routes
* test: fix test
* test: fix test
This commit is contained in:
parent
311432ca17
commit
1e87782215
7 changed files with 107 additions and 41 deletions
```diff
@@ -6303,6 +6303,23 @@ class ProviderConfigManager:
             return litellm.IBMWatsonXChatConfig()
+        elif litellm.LlmProviders.WATSONX_TEXT == provider:
+            return litellm.IBMWatsonXAIConfig()
+        elif litellm.LlmProviders.EMPOWER == provider:
+            return litellm.EmpowerChatConfig()
+        elif litellm.LlmProviders.GITHUB == provider:
+            return litellm.GithubChatConfig()
+        elif (
+            litellm.LlmProviders.CUSTOM == provider
+            or litellm.LlmProviders.CUSTOM_OPENAI == provider
+            or litellm.LlmProviders.OPENAI_LIKE == provider
+            or litellm.LlmProviders.LITELLM_PROXY == provider
+        ):
+            return litellm.OpenAILikeChatConfig()
+        elif litellm.LlmProviders.HOSTED_VLLM == provider:
+            return litellm.HostedVLLMChatConfig()
+        elif litellm.LlmProviders.LM_STUDIO == provider:
+            return litellm.LMStudioChatConfig()
+        elif litellm.LlmProviders.GALADRIEL == provider:
+            return litellm.GaladrielChatConfig()
         return litellm.OpenAIGPTConfig()
```
Loading…
Add table
Add a link
Reference in a new issue