Litellm dev 11 08 2024 (#6658)

* fix(deepseek/chat): convert content list to str

Fixes https://github.com/BerriAI/litellm/issues/6642
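
Not litellm's exact helper, but a minimal sketch (the function name is illustrative) of flattening an OpenAI-style content list into the plain string that providers like DeepSeek expect:

```python
from typing import List, Union


def convert_content_list_to_str(content: Union[str, List[dict]]) -> str:
    """Flatten OpenAI-style content parts into a plain string."""
    if isinstance(content, str):
        return content
    texts: List[str] = []
    for part in content or []:
        # Only text parts survive the conversion; non-text parts are dropped.
        if isinstance(part, dict) and part.get("type") == "text":
            texts.append(part.get("text", ""))
    return "".join(texts)
```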

* test(test_deepseek_completion.py): implement base llm unit tests

increase robustness across providers
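
A rough sketch of the pattern (class and method names are illustrative, not necessarily litellm's): a shared base test class exercises common behaviors, and each provider's test file only supplies its call args:

```python
import litellm


class BaseLLMChatTest:
    """Shared unit tests that every provider test file inherits."""

    def get_base_completion_call_args(self) -> dict:
        raise NotImplementedError

    def test_content_list(self):
        # A message whose content is a list of parts must still work.
        args = self.get_base_completion_call_args()
        response = litellm.completion(
            **args,
            messages=[
                {"role": "user", "content": [{"type": "text", "text": "Hello"}]}
            ],
        )
        assert response.choices[0].message.content is not None


class TestDeepSeekChatCompletion(BaseLLMChatTest):
    def get_base_completion_call_args(self) -> dict:
        return {"model": "deepseek/deepseek-chat"}
```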

* fix(router.py): support content policy violation fallbacks with default fallbacks
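
For illustration (model names are placeholders), a wildcard entry in `fallbacks` now also counts when deciding whether a content-policy error should be raised and routed to a fallback:

```python
from litellm import Router

router = Router(
    model_list=[
        {"model_name": "gpt-4o", "litellm_params": {"model": "gpt-4o"}},
        {
            "model_name": "claude-3-5-sonnet",
            "litellm_params": {"model": "claude-3-5-sonnet-20240620"},
        },
    ],
    # No explicit content_policy_fallbacks; the wildcard default fallback
    # below is now honored for content policy violations as well.
    fallbacks=[{"*": ["claude-3-5-sonnet"]}],
)
```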

* fix(opentelemetry.py): refactor to move otel imports behind flag

Fixes https://github.com/BerriAI/litellm/issues/6636
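
The general pattern, sketched here with illustrative names rather than litellm's actual module layout: defer the third-party import until the integration is instantiated, so `import litellm` alone never requires (or errors on) the opentelemetry packages:

```python
class OpenTelemetryLogger:
    def __init__(self, enabled: bool = True):
        self.tracer = None
        if not enabled:
            return
        # Imported here, not at module top level, so the dependency is only
        # needed when the otel integration is actually turned on.
        from opentelemetry import trace

        self.tracer = trace.get_tracer(__name__)
```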

* fix(opentelemetry.py): close span on successful completion
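
A minimal sketch of what the success path needs (the callback name is illustrative): whatever span was opened for the request must be explicitly ended, otherwise it is never exported:

```python
from opentelemetry import trace

tracer = trace.get_tracer(__name__)


def log_success_event(kwargs: dict) -> None:
    # Open a span for the completed request and record an attribute or two.
    span = tracer.start_span("litellm_request")
    try:
        span.set_attribute("model", str(kwargs.get("model", "unknown")))
    finally:
        # The fix: end the span on success too, so the span processor exports it.
        span.end()
```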

* fix(user_api_key_auth.py): allow user_role to default to none
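
A hedged sketch of the shape of this change (class and field names are illustrative): the authenticated-user object should tolerate a missing role instead of erroring:

```python
from typing import Optional

from pydantic import BaseModel


class AuthenticatedUser(BaseModel):
    user_id: Optional[str] = None
    # A key/JWT without a role claim no longer fails validation; downstream
    # permission checks treat None as "no special role".
    user_role: Optional[str] = None
```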

* fix: mark flaky test
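
For reference, with the pytest-rerunfailures plugin installed (litellm's suite may use a different retry plugin), marking a test as flaky looks like:

```python
import pytest


@pytest.mark.flaky(reruns=3, reruns_delay=1)
def test_sometimes_rate_limited():
    # Rerun up to 3 times (1s apart) before reporting a failure.
    ...
```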

* fix(opentelemetry.py): move otelconfig.from_env to inside the init

prevent otel errors from being raised just by importing the litellm class
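
A sketch of the idea (class and env-var names are illustrative): resolve the env-driven config inside `__init__` rather than at import time:

```python
import os
from dataclasses import dataclass
from typing import Optional


@dataclass
class OTELConfig:
    exporter: str = "console"
    endpoint: Optional[str] = None

    @classmethod
    def from_env(cls) -> "OTELConfig":
        # Reading env vars is only safe if it happens lazily.
        return cls(
            exporter=os.getenv("OTEL_EXPORTER", "console"),
            endpoint=os.getenv("OTEL_ENDPOINT"),
        )


class OpenTelemetryLogger:
    def __init__(self, config: Optional[OTELConfig] = None):
        # Resolved here (not as a default argument or at module scope), so
        # merely importing the module has no otel side effects.
        self.config = config or OTELConfig.from_env()
```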

* fix(user_api_key_auth.py): fix auth error
Krish Dholakia 2024-11-08 22:07:17 +05:30 committed by GitHub
parent a9038087cb
commit 7e4dfaa13f
19 changed files with 287 additions and 34 deletions

@@ -3558,6 +3558,15 @@ class Router:
            # Catch all - if any exceptions default to cooling down
            return True

    def _has_default_fallbacks(self) -> bool:
        if self.fallbacks is None:
            return False
        for fallback in self.fallbacks:
            if isinstance(fallback, dict):
                if "*" in fallback:
                    return True
        return False

    def _should_raise_content_policy_error(
        self, model: str, response: ModelResponse, kwargs: dict
    ) -> bool:
@@ -3574,6 +3583,7 @@ class Router:
        content_policy_fallbacks = kwargs.get(
            "content_policy_fallbacks", self.content_policy_fallbacks
        )

        ### ONLY RAISE ERROR IF CP FALLBACK AVAILABLE ###
        if content_policy_fallbacks is not None:
            fallback_model_group = None
@@ -3584,6 +3594,8 @@ class Router:
            if fallback_model_group is not None:
                return True
            elif self._has_default_fallbacks():  # default fallbacks set
                return True

        verbose_router_logger.info(
            "Content Policy Error occurred. No available fallbacks. Returning original response. model={}, content_policy_fallbacks={}".format(