Litellm UI qa 04 12 2025 p1 (#9955)

* fix(model_info_view.tsx): cleanup text

* fix(key_management_endpoints.py): fix filtering litellm-dashboard keys for internal users

* fix(proxy_track_cost_callback.py): prevent flooding spend logs with admin endpoint errors

* test: add unit tests for the new logic

* test(test_auth_exception_handler.py): add more unit tests

* fix(router.py): correctly handle retrieving model info on get_model_group_info

fixes an issue where the model hub was showing None prices

* fix: fix linting errors
Krish Dholakia, 2025-04-12 19:30:48 -07:00, committed by GitHub
parent f8d52e2db9
commit 00e49380df
13 changed files with 249 additions and 80 deletions

litellm/router.py

@@ -339,9 +339,9 @@ class Router:
         )  # names of models under litellm_params. ex. azure/chatgpt-v-2
         self.deployment_latency_map = {}
         ### CACHING ###
-        cache_type: Literal["local", "redis", "redis-semantic", "s3", "disk"] = (
-            "local"  # default to an in-memory cache
-        )
+        cache_type: Literal[
+            "local", "redis", "redis-semantic", "s3", "disk"
+        ] = "local"  # default to an in-memory cache
         redis_cache = None
         cache_config: Dict[str, Any] = {}
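
The removed and added cache_type blocks are semantically identical; the hunk only moves the line break to satisfy the formatter. For context, a minimal sketch of what the Literal annotation enforces under a static type checker (the helper name below is illustrative, not part of the Router API):

    from typing import Literal

    CacheType = Literal["local", "redis", "redis-semantic", "s3", "disk"]

    def make_cache(cache_type: CacheType = "local") -> str:
        # mypy/pyright reject any argument outside the five allowed literals
        return f"using {cache_type} cache"

    make_cache("redis")        # accepted
    # make_cache("memcached")  # rejected by the type checker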
@@ -562,9 +562,9 @@ class Router:
             )
         )
-        self.model_group_retry_policy: Optional[Dict[str, RetryPolicy]] = (
-            model_group_retry_policy
-        )
+        self.model_group_retry_policy: Optional[
+            Dict[str, RetryPolicy]
+        ] = model_group_retry_policy
         self.allowed_fails_policy: Optional[AllowedFailsPolicy] = None
         if allowed_fails_policy is not None:
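
Another formatting-only hunk. The attribute itself maps a model group name to its RetryPolicy. A hedged configuration sketch; the import path and field names follow litellm's documented RetryPolicy model, but treat them as assumptions:

    from litellm.types.router import RetryPolicy

    # per-model-group retry budgets, keyed by model group name
    model_group_retry_policy = {
        "gpt-4": RetryPolicy(RateLimitErrorRetries=3, TimeoutErrorRetries=2),
    }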
@@ -1105,9 +1105,9 @@ class Router:
         """
         Adds default litellm params to kwargs, if set.
         """
-        self.default_litellm_params[metadata_variable_name] = (
-            self.default_litellm_params.pop("metadata", {})
-        )
+        self.default_litellm_params[
+            metadata_variable_name
+        ] = self.default_litellm_params.pop("metadata", {})
         for k, v in self.default_litellm_params.items():
             if (
                 k not in kwargs and v is not None
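
The method body is unchanged here: router-level defaults are applied only where the caller did not pass a value, after renaming the generic "metadata" default to the key the call path expects. A simplified standalone sketch of that pattern (hypothetical names, not the actual method):

    def apply_default_params(defaults: dict, kwargs: dict, metadata_variable_name: str) -> dict:
        defaults = dict(defaults)
        # move the generic "metadata" default under the expected key
        defaults[metadata_variable_name] = defaults.pop("metadata", {})
        for k, v in defaults.items():
            if k not in kwargs and v is not None:
                kwargs[k] = v  # caller-supplied values always win
        return kwargs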
@@ -3243,11 +3243,11 @@ class Router:
         if isinstance(e, litellm.ContextWindowExceededError):
             if context_window_fallbacks is not None:
-                fallback_model_group: Optional[List[str]] = (
-                    self._get_fallback_model_group_from_fallbacks(
-                        fallbacks=context_window_fallbacks,
-                        model_group=model_group,
-                    )
-                )
+                fallback_model_group: Optional[
+                    List[str]
+                ] = self._get_fallback_model_group_from_fallbacks(
+                    fallbacks=context_window_fallbacks,
+                    model_group=model_group,
+                )
                 if fallback_model_group is None:
                     raise original_exception
@@ -3279,11 +3279,11 @@ class Router:
                 e.message += "\n{}".format(error_message)
         elif isinstance(e, litellm.ContentPolicyViolationError):
             if content_policy_fallbacks is not None:
-                fallback_model_group: Optional[List[str]] = (
-                    self._get_fallback_model_group_from_fallbacks(
-                        fallbacks=content_policy_fallbacks,
-                        model_group=model_group,
-                    )
-                )
+                fallback_model_group: Optional[
+                    List[str]
+                ] = self._get_fallback_model_group_from_fallbacks(
+                    fallbacks=content_policy_fallbacks,
+                    model_group=model_group,
+                )
                 if fallback_model_group is None:
                     raise original_exception
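
Both fallback hunks are formatting-only. The helper they call resolves litellm's fallback config, a list of single-key dicts mapping a model group to its fallback groups, and the caller re-raises the original exception when no entry matches. A hedged re-implementation sketch, not the Router's actual helper:

    from typing import List, Optional

    def get_fallback_model_group(
        fallbacks: List[dict], model_group: str
    ) -> Optional[List[str]]:
        # e.g. fallbacks=[{"gpt-3.5-turbo": ["gpt-4", "claude-3-sonnet"]}]
        for entry in fallbacks:
            if model_group in entry:
                return entry[model_group]
        return None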
@@ -4853,10 +4853,11 @@ class Router:
         from litellm.utils import _update_dictionary
         model_info: Optional[ModelInfo] = None
+        custom_model_info: Optional[dict] = None
         litellm_model_name_model_info: Optional[ModelInfo] = None
         try:
-            model_info = litellm.get_model_info(model=model_id)
+            custom_model_info = litellm.model_cost.get(model_id)
         except Exception:
             pass
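
This hunk carries the substantive fix. Previously, when litellm.get_model_info(model=model_id) raised (model IDs are deployment-specific and often absent from the cost map), model_info stayed None and the base model info below was never used, which is how the model hub ended up showing None prices. The new code instead reads any explicitly configured entry from litellm.model_cost keyed by the model ID, so only a real override participates in the merge. A small usage sketch of that lookup (litellm.model_cost is the public cost map; the model name and keys are examples):

    import litellm

    # model_cost maps model names to pricing/context metadata
    info = litellm.model_cost.get("gpt-4o")
    if info is not None:
        print(info.get("input_cost_per_token"), info.get("output_cost_per_token"))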
@@ -4865,14 +4866,16 @@ class Router:
         except Exception:
             pass
-        if model_info is not None and litellm_model_name_model_info is not None:
+        if custom_model_info is not None and litellm_model_name_model_info is not None:
             model_info = cast(
                 ModelInfo,
                 _update_dictionary(
                     cast(dict, litellm_model_name_model_info).copy(),
-                    cast(dict, model_info),
+                    custom_model_info,
                 ),
             )
+        elif litellm_model_name_model_info is not None:
+            model_info = litellm_model_name_model_info
         return model_info
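
Net effect of the last two hunks: a custom model_cost entry is overlaid on the base model info, and when there is no custom entry the base info is now returned as-is instead of None. A simplified sketch of that precedence, assuming _update_dictionary performs a shallow overlay (this standalone version is an assumption, not litellm's helper):

    from typing import Optional

    def merge_model_info(base: Optional[dict], custom: Optional[dict]) -> Optional[dict]:
        if custom is not None and base is not None:
            merged = base.copy()
            merged.update(custom)  # custom overrides win over base defaults
            return merged
        if base is not None:
            return base  # no custom entry: fall back to base info (the new elif branch)
        return None

    # merge_model_info({"input_cost_per_token": 1e-06}, {"input_cost_per_token": 2e-06})
    # -> {"input_cost_per_token": 2e-06}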