refactor(fireworks_ai/): inherit from openai like base config (#7146)

* refactor(fireworks_ai/): inherit from openai like base config

refactors fireworks ai to use a common config

* test: fix import in test

* refactor(watsonx/): refactor watsonx to use llm base config

refactors chat + completion routes to base config path

* fix: fix linting error

* test: fix test

* fix: fix test
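
The pattern the commit message refers to: a provider config subclasses an OpenAI-like base and only overrides the parameters it handles differently, instead of reimplementing the whole mapping. A minimal sketch of that shape (class and method names follow litellm's conventions, but the bodies here are illustrative, not the actual implementation):

```python
class OpenAIGPTConfig:
    """OpenAI-like base: shared param mapping that providers inherit."""

    def get_supported_openai_params(self, model: str) -> list:
        return ["temperature", "max_tokens", "top_p", "stream", "tools"]

    def map_openai_params(
        self,
        non_default_params: dict,
        optional_params: dict,
        model: str,
        drop_params: bool,
    ) -> dict:
        for k, v in non_default_params.items():
            if k in self.get_supported_openai_params(model):
                optional_params[k] = v
            elif not drop_params:
                raise ValueError(f"{k} is not supported for {model}")
        return optional_params


class FireworksAIConfig(OpenAIGPTConfig):
    """Fireworks AI overrides only what differs from the OpenAI base."""

    def get_supported_openai_params(self, model: str) -> list:
        # illustrative extra param, not the real Fireworks AI list
        return super().get_supported_openai_params(model) + ["response_format"]
```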
Krish Dholakia authored 2024-12-10 16:15:19 -08:00, committed by GitHub
commit 311432ca17, parent 2fb2801eb4
15 changed files with 449 additions and 307 deletions


@@ -3515,6 +3515,11 @@ def get_optional_params(  # noqa: PLR0915
             non_default_params=non_default_params,
             optional_params=optional_params,
             model=model,
+            drop_params=(
+                drop_params
+                if drop_params is not None and isinstance(drop_params, bool)
+                else False
+            ),
         )
     elif custom_llm_provider == "volcengine":
         supported_params = get_supported_openai_params(
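
Both `get_optional_params` hunks add the same guard: `drop_params` is forwarded only when it is an actual bool, and anything else falls back to `False`. A standalone restatement of that inline expression (the helper name here is hypothetical; the diff inlines the expression directly):

```python
def _coerce_drop_params(drop_params) -> bool:
    # Only a genuine bool passes through; None (unset) and any non-bool
    # value collapse to False. The `is not None` check is technically
    # redundant (None is not a bool) but mirrors the diff verbatim.
    return (
        drop_params
        if drop_params is not None and isinstance(drop_params, bool)
        else False
    )


assert _coerce_drop_params(True) is True
assert _coerce_drop_params(None) is False
assert _coerce_drop_params("yes") is False  # non-bool is ignored
```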
@@ -3658,6 +3663,12 @@ def get_optional_params(  # noqa: PLR0915
         optional_params = litellm.IBMWatsonXAIConfig().map_openai_params(
             non_default_params=non_default_params,
             optional_params=optional_params,
+            model=model,
+            drop_params=(
+                drop_params
+                if drop_params is not None and isinstance(drop_params, bool)
+                else False
+            ),
         )
     elif custom_llm_provider == "openai":
         supported_params = get_supported_openai_params(
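
With this hunk, the watsonx branch follows the same calling convention as the other base-config providers: `map_openai_params` now receives `model` and a normalized `drop_params`. A hedged usage sketch (the call signature comes straight from the diff; the model id is hypothetical, and the exact handling of unsupported params is provider-specific):

```python
import litellm

optional_params = litellm.IBMWatsonXAIConfig().map_openai_params(
    non_default_params={"temperature": 0.2, "max_tokens": 256},
    optional_params={},
    model="ibm/granite-13b-chat-v2",  # hypothetical model id
    drop_params=False,  # keep unsupported params visible rather than dropping them
)
```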
@@ -6284,7 +6295,14 @@ class ProviderConfigManager:
             return litellm.VertexAIAnthropicConfig()
         elif litellm.LlmProviders.CLOUDFLARE == provider:
             return litellm.CloudflareChatConfig()
+        elif litellm.LlmProviders.FIREWORKS_AI == provider:
+            return litellm.FireworksAIConfig()
+        elif litellm.LlmProviders.FRIENDLIAI == provider:
+            return litellm.FriendliaiChatConfig()
+        elif litellm.LlmProviders.WATSONX == provider:
+            return litellm.IBMWatsonXChatConfig()
+        elif litellm.LlmProviders.WATSONX_TEXT == provider:
+            return litellm.IBMWatsonXAIConfig()
         return litellm.OpenAIGPTConfig()
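
This hunk wires the four new providers into the dispatch chain, with `litellm.OpenAIGPTConfig()` as the catch-all at the end. A hedged lookup sketch (the import path and the `get_provider_chat_config` method name are assumptions based on litellm's layout around this commit; the model id is hypothetical):

```python
import litellm
from litellm.utils import ProviderConfigManager  # import path assumed

config = ProviderConfigManager.get_provider_chat_config(  # method name assumed
    model="accounts/fireworks/models/llama-v3p1-8b-instruct",  # hypothetical id
    provider=litellm.LlmProviders.FIREWORKS_AI,
)
assert isinstance(config, litellm.FireworksAIConfig)

# Any provider without an explicit branch falls through to the
# OpenAI-like default returned at the end of the chain.
```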