Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 03:04:13 +00:00)
style(internal_user_endpoints.py): add response model to /user/list endpoint

make sure we maintain a consistent response spec
This commit is contained in:
parent 4a50cf10fb
commit 57b887478a

3 changed files with 23 additions and 1 deletion
internal_user_endpoints.py

@@ -43,6 +43,9 @@ from litellm.types.proxy.management_endpoints.common_daily_activity import (
     SpendAnalyticsPaginatedResponse,
     SpendMetrics,
 )
+from litellm.types.proxy.management_endpoints.internal_user_endpoints import (
+    UserListResponse,
+)

 router = APIRouter()

@@ -903,11 +906,13 @@ async def get_user_key_counts(
     "/user/get_users",
     tags=["Internal User management"],
     dependencies=[Depends(user_api_key_auth)],
+    response_model=UserListResponse,
 )
 @router.get(
     "/user/list",
     tags=["Internal User management"],
     dependencies=[Depends(user_api_key_auth)],
+    response_model=UserListResponse,
 )
 async def get_users(
     role: Optional[str] = fastapi.Query(
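For context, below is a minimal, self-contained sketch (not litellm's actual code) of what declaring response_model on a FastAPI route provides: FastAPI validates and serializes the handler's return value against the Pydantic model and publishes the same model in the generated OpenAPI schema, so /user/list and /user/get_users document and enforce one response spec. The UserInfo and UserListResponse fields here are illustrative placeholders, not litellm's real type definitions.

from typing import List, Optional

from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()


class UserInfo(BaseModel):
    # Illustrative fields only; litellm's actual user schema differs.
    user_id: str
    user_email: Optional[str] = None
    user_role: Optional[str] = None


class UserListResponse(BaseModel):
    # Paginated wrapper so every user-listing endpoint returns the same shape.
    users: List[UserInfo]
    total: int
    page: int
    page_size: int


# Declaring response_model makes FastAPI validate/serialize the return value
# against UserListResponse and advertise it in the OpenAPI schema. Stacking
# two route decorators registers the same handler under both paths.
@app.get("/user/list", response_model=UserListResponse)
@app.get("/user/get_users", response_model=UserListResponse)
async def get_users(page: int = 1, page_size: int = 25) -> UserListResponse:
    # Extra or mistyped keys would be rejected or filtered here, keeping the
    # documented spec and the actual responses in sync.
    return UserListResponse(users=[], total=0, page=page, page_size=page_size)

Without response_model, each handler could return an arbitrary dict and the OpenAPI documentation for the two endpoints could silently drift apart, which is the consistency issue this commit addresses.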