fix(proxy/_types.py): fixes issue where internal user able to escalate their role with ui key (#8740)

* fix(proxy/_types.py): fixes issue where internal user able to escalate their role with ui key

Fixes https://github.com/BerriAI/litellm/issues/8029

* style: cleanup

* test: handle bedrock instability
Krish Dholakia 2025-02-22 22:59:58 -08:00 committed by GitHub
parent d82fa10f93
commit fd5939f2b0
3 changed files with 24 additions and 14 deletions
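The underlying issue (#8029) is one of route scoping: a key issued for the admin UI could still reach the broad "/user" route prefix, which includes role-changing endpoints. The diff below removes that prefix from the UI-accessible route list, adds a narrow read-only "/user/filter/ui" route in its place, and has that route return a filtered user model. As a minimal sketch of prefix-based route gating under these assumptions -- is_route_allowed and ui_allowed are illustrative names, not litellm's actual helpers:

    # Illustrative sketch (not litellm's real route-check code): how a
    # prefix-based allow-list decides whether a UI-scoped key may call a route.
    from typing import List


    def is_route_allowed(route: str, allowed_routes: List[str]) -> bool:
        # Exact match, or prefix match: "/user" would also admit "/user/update".
        return any(route == r or route.startswith(r + "/") for r in allowed_routes)


    # Hypothetical allow-list mirroring the change in this commit: the broad
    # "/user" prefix is gone, only the read-only filter route remains.
    ui_allowed = ["/model_group/info", "/health", "/key/list", "/user/filter/ui"]

    assert is_route_allowed("/user/filter/ui", ui_allowed)
    assert not is_route_allowed("/user/update", ui_allowed)  # escalation path blocked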


@@ -279,6 +279,7 @@ class LiteLLMRoutes(enum.Enum):
         "/model_group/info",
         "/health",
         "/key/list",
+        "/user/filter/ui",
     ]
     # NOTE: ROUTES ONLY FOR MASTER KEY - only the Master Key should be able to Reset Spend
@@ -355,7 +356,6 @@ class LiteLLMRoutes(enum.Enum):
         "/key/info",
         "/config",
         "/spend",
-        "/user",
         "/model/info",
         "/v2/model/info",
         "/v2/key/info",
@@ -1569,6 +1569,11 @@ class LiteLLM_UserTable(LiteLLMPydanticObjectBase):
     model_config = ConfigDict(protected_namespaces=())


+class LiteLLM_UserTableFiltered(BaseModel):  # done to avoid exposing sensitive data
+    user_id: str
+    user_email: str
+
+
 class LiteLLM_EndUserTable(LiteLLMPydanticObjectBase):
     user_id: str
     blocked: bool
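The new LiteLLM_UserTableFiltered model is what makes the response safe: only user_id and user_email are declared, so everything else on the full user record never leaves the endpoint. A minimal sketch of the pattern, assuming pydantic v2 defaults (extra keys ignored on construction); FullUserRecord is a hypothetical stand-in for LiteLLM_UserTable, whose real field list is longer:

    from pydantic import BaseModel


    class FullUserRecord(BaseModel):  # hypothetical stand-in for the DB-backed model
        user_id: str
        user_email: str
        user_role: str           # sensitive: must not reach the UI list view
        spend: float = 0.0       # sensitive


    class LiteLLM_UserTableFiltered(BaseModel):  # mirrors the model added above
        user_id: str
        user_email: str


    full = FullUserRecord(
        user_id="u-123", user_email="a@example.com", user_role="proxy_admin", spend=42.0
    )

    # Extra keys are ignored by default in pydantic v2, so only the two declared
    # fields survive the conversion -- the same trick the endpoint uses below.
    filtered = LiteLLM_UserTableFiltered(**full.model_dump())
    print(filtered.model_dump())  # {'user_id': 'u-123', 'user_email': 'a@example.com'}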


@@ -950,7 +950,7 @@ async def add_internal_user_to_organization(
     dependencies=[Depends(user_api_key_auth)],
     include_in_schema=False,
     responses={
-        200: {"model": List[LiteLLM_UserTable]},
+        200: {"model": List[LiteLLM_UserTableFiltered]},
     },
 )
 async def ui_view_users(
@ -1006,17 +1006,19 @@ async def ui_view_users(
} }
# Query users with pagination and filters # Query users with pagination and filters
users = await prisma_client.db.litellm_usertable.find_many( users: Optional[List[BaseModel]] = (
where=where_conditions, await prisma_client.db.litellm_usertable.find_many(
skip=skip, where=where_conditions,
take=page_size, skip=skip,
order={"created_at": "desc"}, take=page_size,
order={"created_at": "desc"},
)
) )
if not users: if not users:
return [] return []
return users return [LiteLLM_UserTableFiltered(**user.model_dump()) for user in users]
except Exception as e: except Exception as e:
raise HTTPException(status_code=500, detail=f"Error searching users: {str(e)}") raise HTTPException(status_code=500, detail=f"Error searching users: {str(e)}")
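From the admin UI's point of view, the endpoint behaves the same except that each entry in the response now carries only the two filtered fields. A hypothetical client call; the query parameter name ("user_email"), the proxy URL, and the key value are assumptions, since the endpoint signature is not shown in this hunk:

    import requests

    resp = requests.get(
        "http://localhost:4000/user/filter/ui",
        params={"user_email": "a@"},                            # assumed search filter
        headers={"Authorization": "Bearer sk-ui-session-key"},  # placeholder UI key
        timeout=10,
    )
    resp.raise_for_status()

    # With this commit the payload is a list of LiteLLM_UserTableFiltered
    # objects, i.e. only user_id and user_email per entry.
    for user in resp.json():
        print(user["user_id"], user["user_email"])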


@@ -2559,12 +2559,15 @@ async def test_bedrock_document_understanding(image_url):
         },
     ]

-    response = await acompletion(
-        model=model,
-        messages=[{"role": "user", "content": image_content}],
-    )
-    assert response is not None
-    assert response.choices[0].message.content != ""
+    try:
+        response = await acompletion(
+            model=model,
+            messages=[{"role": "user", "content": image_content}],
+        )
+        assert response is not None
+        assert response.choices[0].message.content != ""
+    except litellm.ServiceUnavailableError as e:
+        pytest.skip("Skipping test due to ServiceUnavailableError")


 def test_bedrock_custom_proxy():
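The test change above handles Bedrock instability by converting a ServiceUnavailableError into a pytest skip instead of a failure. As an illustrative generalization of that pattern -- not part of this commit -- the same behavior could be wrapped in a reusable decorator (assumes pytest-asyncio for the asyncio marker):

    import functools

    import pytest

    import litellm


    def skip_on_provider_outage(test_fn):
        # Turn transient provider outages into skips rather than CI failures.
        @functools.wraps(test_fn)
        async def wrapper(*args, **kwargs):
            try:
                return await test_fn(*args, **kwargs)
            except litellm.ServiceUnavailableError:
                pytest.skip("provider temporarily unavailable")

        return wrapper


    @pytest.mark.asyncio
    @skip_on_provider_outage
    async def test_bedrock_document_understanding_sketch():
        ...  # would call litellm.acompletion(...) as in the real test above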