Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 19:24:27 +00:00)
fix(team_endpoints.py): allow team member to view team info (#8644)
* fix(team_endpoints.py): allow team member to view team info
* test: handle model overloaded in tool calling test
* test: handle internal server error
Parent: ed5b29cad7
Commit: da121bd792
2 changed files with 13 additions and 24 deletions
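Per the commit title, the practical effect is that a key belonging to a team member can now read its own team's details through the proxy's /team/info endpoint. A minimal sketch of such a request follows, assuming a LiteLLM proxy running locally on port 4000; the base URL, key value, and team id are placeholders.

```python
# Minimal sketch: after this change, a team member's key should be able to read
# its own team's info. Base URL, key, and team_id below are placeholders.
import requests

PROXY_BASE_URL = "http://localhost:4000"  # assumed local LiteLLM proxy
TEAM_MEMBER_KEY = "sk-placeholder"        # a virtual key belonging to a team member
TEAM_ID = "my-team-id"                    # the team that member belongs to

resp = requests.get(
    f"{PROXY_BASE_URL}/team/info",
    params={"team_id": TEAM_ID},
    headers={"Authorization": f"Bearer {TEAM_MEMBER_KEY}"},
)
print(resp.status_code, resp.json())
```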
@@ -1211,7 +1211,10 @@ async def delete_team(
 def validate_membership(
     user_api_key_dict: UserAPIKeyAuth, team_table: LiteLLM_TeamTable
 ):
-    if user_api_key_dict.user_role == LitellmUserRoles.PROXY_ADMIN.value:
+    if (
+        user_api_key_dict.user_role == LitellmUserRoles.PROXY_ADMIN.value
+        or user_api_key_dict.user_role == LitellmUserRoles.PROXY_ADMIN_VIEW_ONLY.value
+    ):
         return
 
     if (
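Read as a whole, this first hunk only widens the early return in validate_membership so that read-only proxy admins are treated like proxy admins; the membership check that follows is outside the hunk. A small, self-contained illustration of that role check is below; the enum and dataclass are simplified stand-ins for LitellmUserRoles and UserAPIKeyAuth, and the role string values are assumptions.

```python
# Simplified stand-ins for LitellmUserRoles / UserAPIKeyAuth; the role string
# values below are assumptions, not copied from LiteLLM.
from dataclasses import dataclass
from enum import Enum
from typing import Optional


class Role(str, Enum):
    PROXY_ADMIN = "proxy_admin"
    PROXY_ADMIN_VIEW_ONLY = "proxy_admin_viewer"
    INTERNAL_USER = "internal_user"


@dataclass
class KeyAuth:
    user_role: str
    team_id: Optional[str] = None


def admin_short_circuit(key: KeyAuth) -> bool:
    # Before this commit only PROXY_ADMIN returned early; the view-only admin
    # role is now accepted as well.
    return key.user_role in (Role.PROXY_ADMIN.value, Role.PROXY_ADMIN_VIEW_ONLY.value)


print(admin_short_circuit(KeyAuth(user_role=Role.PROXY_ADMIN_VIEW_ONLY.value)))  # True
print(admin_short_circuit(KeyAuth(user_role=Role.INTERNAL_USER.value)))          # False
```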
@@ -1270,32 +1273,19 @@ async def team_info(
             detail={"message": "Malformed request. No team id passed in."},
         )
 
-        if (
-            user_api_key_dict.user_role == LitellmUserRoles.PROXY_ADMIN.value
-            or user_api_key_dict.user_role
-            == LitellmUserRoles.PROXY_ADMIN_VIEW_ONLY.value
-        ):
-            pass
-        elif user_api_key_dict.team_id is None or (
-            team_id != user_api_key_dict.team_id
-        ):
-            raise HTTPException(
-                status_code=status.HTTP_403_FORBIDDEN,
-                detail="key not allowed to access this team's info. Key team_id={}, Requested team_id={}".format(
-                    user_api_key_dict.team_id, team_id
-                ),
+        try:
+            team_info: BaseModel = await prisma_client.db.litellm_teamtable.find_unique(
+                where={"team_id": team_id}
             )
-
-        team_info: Optional[Union[LiteLLM_TeamTable, dict]] = (
-            await prisma_client.get_data(
-                team_id=team_id, table_name="team", query_type="find_unique"
-            )
-        )
-        if team_info is None:
+        except Exception:
             raise HTTPException(
                 status_code=status.HTTP_404_NOT_FOUND,
                 detail={"message": f"Team not found, passed team id: {team_id}."},
             )
+        validate_membership(
+            user_api_key_dict=user_api_key_dict,
+            team_table=LiteLLM_TeamTable(**team_info.model_dump()),
+        )
 
         ## GET ALL KEYS ##
         keys = await prisma_client.get_data(
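The second hunk replaces the inline role/team-id comparison with a direct lookup plus a call to validate_membership: the team row is fetched, a failed lookup becomes a 404, and authorization is then delegated to validate_membership. A self-contained sketch of that lookup-then-404 shape follows; _FakeTeamTable and get_team_or_404 are illustrative stand-ins, not LiteLLM APIs.

```python
# Self-contained sketch of the lookup-then-404 pattern used in team_info();
# _FakeTeamTable and get_team_or_404 are stand-ins, not LiteLLM APIs.
import asyncio

from fastapi import HTTPException, status


class _FakeTeamTable:
    """Stand-in for prisma_client.db.litellm_teamtable."""

    def __init__(self, rows):
        self._rows = rows

    async def find_unique(self, where):
        return self._rows[where["team_id"]]  # KeyError if the team is missing


async def get_team_or_404(table, team_id: str):
    try:
        return await table.find_unique(where={"team_id": team_id})
    except Exception:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail={"message": f"Team not found, passed team id: {team_id}."},
        )


table = _FakeTeamTable({"team-1": {"team_id": "team-1"}})
print(asyncio.run(get_team_or_404(table, "team-1")))  # found
try:
    asyncio.run(get_team_or_404(table, "missing-team"))
except HTTPException as exc:
    print(exc.status_code, exc.detail)  # 404 with the "Team not found" detail
```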
@@ -294,8 +294,7 @@ class TestOpenAIChatCompletion(BaseLLMChatTest):
             )
             assert response is not None
         except litellm.InternalServerError:
-            pytest.skip("OpenAI API is raising internal server errors")
-
+            pytest.skip("Skipping test due to InternalServerError")
 
 def test_completion_bad_org():
     import litellm
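The test-side change only makes transient upstream failures explicit: if OpenAI returns a 500, the test is skipped rather than failed. A self-contained sketch of that pattern is below; _flaky_provider_call is a stand-in for the real completion call, and the litellm.InternalServerError constructor arguments shown are an assumption.

```python
# Sketch of the skip-on-transient-provider-error pattern used in the test;
# _flaky_provider_call is a stand-in, and the exception constructor arguments
# (message / llm_provider / model) are an assumption about litellm's API.
import litellm
import pytest


def _flaky_provider_call():
    # Stand-in for the real litellm.completion(...) call made by the test.
    raise litellm.InternalServerError(
        message="simulated 500 from provider",
        llm_provider="openai",
        model="gpt-4o",
    )


def test_skip_on_internal_server_error():
    try:
        response = _flaky_provider_call()
        assert response is not None
    except litellm.InternalServerError:
        pytest.skip("Skipping test due to InternalServerError")
```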