Litellm dev 12 31 2024 p1 (#7488)

* fix(internal_user_endpoints.py): fix team list sort - handle team_alias being set + None
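
A minimal sketch of the idea (not the exact code in internal_user_endpoints.py): sort on the alias but fall back to an empty string when `team_alias` is None, so mixed lists no longer raise a TypeError.

    # Hypothetical example: team dicts where team_alias may be set or None
    teams = [
        {"team_id": "t1", "team_alias": "beta"},
        {"team_id": "t2", "team_alias": None},
        {"team_id": "t3", "team_alias": "alpha"},
    ]
    # None-safe sort key: unaliased teams sort together instead of raising TypeError
    teams.sort(key=lambda t: t.get("team_alias") or "")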

* fix(key_management_endpoints.py): allow team admin to create key for member via admin ui

Fixes https://github.com/BerriAI/litellm/issues/7482
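
The new test further down exercises `_is_allowed_to_create_key`; a rough sketch of the kind of check the fix implies (simplified illustration, not the actual implementation) would be:

    from litellm.proxy._types import LitellmUserRoles, UserAPIKeyAuth

    def is_allowed_to_create_key_sketch(
        user_api_key_dict: UserAPIKeyAuth, user_id: str, team_id: str
    ) -> bool:
        # Proxy admins may create keys for anyone.
        if user_api_key_dict.user_role == LitellmUserRoles.PROXY_ADMIN:
            return True
        # Requests coming through the admin UI carry the "litellm-dashboard"
        # team; let the calling user create a key for themselves on another team.
        if (
            user_api_key_dict.team_id == "litellm-dashboard"
            and user_api_key_dict.user_id == user_id
        ):
            return True
        return False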

* fix(proxy_server.py): allow querying info on specific model group via `/model_group/info`

allows client-side user to get model info from proxy
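
For example, a client could hit the endpoint with a filter (sketch only; assumes a local proxy on port 4000 and that the filter query parameter is named `model_group`, matching the new test below):

    import requests  # hypothetical client-side call, not part of this commit

    resp = requests.get(
        "http://localhost:4000/model_group/info",
        params={"model_group": "openai/tts-1"},  # filter to a single model group
        headers={"Authorization": "Bearer sk-1234"},  # proxy virtual key
    )
    print(resp.json())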

* fix(proxy_server.py): add docstring on `/model_group/info` showing how to filter by model name

* test(test_proxy_utils.py): add unit test for returning model group info filtered

* fix(proxy_server.py): fix query param

* fix(test_Get_model_info.py): handle no whitelisted bedrock models
Krish Dholakia 2024-12-31 23:21:51 -08:00 committed by GitHub
parent 080de89cfb
commit 39cbd9d878
9 changed files with 124 additions and 18 deletions


@@ -1191,3 +1191,64 @@ def test_litellm_verification_token_view_response_with_budget_table(
        getattr(resp, expected_user_api_key_auth_key)
        == expected_user_api_key_auth_value
    )


def test_is_allowed_to_create_key():
    from litellm.proxy._types import LitellmUserRoles, UserAPIKeyAuth
    from litellm.proxy.management_endpoints.key_management_endpoints import (
        _is_allowed_to_create_key,
    )

    # A proxy admin can create a key for a user on any team.
    assert (
        _is_allowed_to_create_key(
            user_api_key_dict=UserAPIKeyAuth(
                user_id="test_user_id", user_role=LitellmUserRoles.PROXY_ADMIN
            ),
            user_id="test_user_id",
            team_id="test_team_id",
        )
        is True
    )

    # A request coming through the admin UI (team_id="litellm-dashboard")
    # can create a key for the calling user on another team.
    assert (
        _is_allowed_to_create_key(
            user_api_key_dict=UserAPIKeyAuth(
                user_id="test_user_id",
                user_role=LitellmUserRoles.INTERNAL_USER,
                team_id="litellm-dashboard",
            ),
            user_id="test_user_id",
            team_id="test_team_id",
        )
        is True
    )


def test_get_model_group_info():
    from litellm.proxy.proxy_server import _get_model_group_info
    from litellm import Router

    router = Router(
        model_list=[
            {
                "model_name": "openai/tts-1",
                "litellm_params": {
                    "model": "openai/tts-1",
                    "api_key": "sk-1234",
                },
            },
            {
                "model_name": "openai/gpt-3.5-turbo",
                "litellm_params": {
                    "model": "openai/gpt-3.5-turbo",
                    "api_key": "sk-1234",
                },
            },
        ]
    )

    # Filtering by model_group should return info for just the matching group.
    model_list = _get_model_group_info(
        llm_router=router,
        all_models_str=["openai/tts-1", "openai/gpt-3.5-turbo"],
        model_group="openai/tts-1",
    )
    assert len(model_list) == 1