Litellm dev 11 08 2024 (#6658)

* fix(deepseek/chat): convert content list to str

Fixes https://github.com/BerriAI/litellm/issues/6642

* test(test_deepseek_completion.py): implement base llm unit tests

increase robustness across providers

* fix(router.py): support content policy violation fallbacks with default fallbacks

* fix(opentelemetry.py): refactor to move otel imports behind flag

Fixes https://github.com/BerriAI/litellm/issues/6636

* fix(opentelemetry.py): close span on success completion

* fix(user_api_key_auth.py): allow user_role to default to none

* fix: mark flaky test

* fix(opentelemetry.py): move otelconfig.from_env to inside the init

prevent otel errors raised just by importing the litellm class

* fix(user_api_key_auth.py): fix auth error
This commit is contained in:
Krish Dholakia 2024-11-08 22:07:17 +05:30 committed by GitHub
parent a9038087cb
commit 7e4dfaa13f
19 changed files with 287 additions and 34 deletions

View file

@@ -8252,3 +8252,22 @@ def validate_chat_completion_user_messages(messages: List[AllMessageValues]):
)
return messages
from litellm.llms.OpenAI.chat.gpt_transformation import OpenAIGPTConfig
class ProviderConfigManager:
    """Resolves which provider-specific chat config applies to a model/provider pair."""

    @staticmethod
    def get_provider_config(
        model: str, provider: litellm.LlmProviders
    ) -> OpenAIGPTConfig:
        """
        Return the chat-transformation config for the given model and provider.

        O1 reasoning models take precedence over the provider check; any
        unmatched combination falls back to the generic OpenAI GPT config.
        """
        # O1 reasoning models get their own config regardless of provider.
        if litellm.openAIO1Config.is_model_o1_reasoning_model(model=model):
            return litellm.OpenAIO1Config()
        # DeepSeek needs a dedicated chat config (e.g. content-list handling).
        if provider == litellm.LlmProviders.DEEPSEEK:
            return litellm.DeepSeekChatConfig()
        return OpenAIGPTConfig()