add check for admin only routes

Mirror of https://github.com/BerriAI/litellm.git
Parent: bb9ee4a068
Commit: bfb0aceeae
6 changed files with 217 additions and 156 deletions
@@ -26,7 +26,7 @@ from litellm.proxy._types import (
     LitellmUserRoles,
     UserAPIKeyAuth,
 )
-from litellm.proxy.auth.auth_utils import is_llm_api_route
+from litellm.proxy.auth.route_checks import is_llm_api_route
 from litellm.proxy.utils import PrismaClient, ProxyLogging, log_to_opentelemetry
 from litellm.types.services import ServiceLoggerPayload, ServiceTypes
@@ -160,48 +160,6 @@ def route_in_additonal_public_routes(current_route: str):
     return False
 
 
-def is_llm_api_route(route: str) -> bool:
-    """
-    Helper to checks if provided route is an OpenAI route
-
-
-    Returns:
-    - True: if route is an OpenAI route
-    - False: if route is not an OpenAI route
-    """
-
-    if route in LiteLLMRoutes.openai_routes.value:
-        return True
-
-    if route in LiteLLMRoutes.anthropic_routes.value:
-        return True
-
-    # fuzzy match routes like "/v1/threads/thread_49EIN5QF32s4mH20M7GFKdlZ"
-    # Check for routes with placeholders
-    for openai_route in LiteLLMRoutes.openai_routes.value:
-        # Replace placeholders with regex pattern
-        # placeholders are written as "/threads/{thread_id}"
-        if "{" in openai_route:
-            pattern = re.sub(r"\{[^}]+\}", r"[^/]+", openai_route)
-            # Anchor the pattern to match the entire string
-            pattern = f"^{pattern}$"
-            if re.match(pattern, route):
-                return True
-
-    # Pass through Bedrock, VertexAI, and Cohere Routes
-    if "/bedrock/" in route:
-        return True
-    if "/vertex-ai/" in route:
-        return True
-    if "/gemini/" in route:
-        return True
-    if "/cohere/" in route:
-        return True
-    if "/langfuse/" in route:
-        return True
-    return False
-
-
 def get_request_route(request: Request) -> str:
     """
     Helper to get the route from the request
@@ -369,3 +327,21 @@ def should_run_auth_on_pass_through_provider_route(route: str) -> bool:
     """
     # by default we do not run virtual key auth checks on /vertex-ai/{endpoint} routes
     return False
+
+
+def _has_user_setup_sso():
+    """
+    Check if the user has set up single sign-on (SSO) by verifying the presence of Microsoft client ID, Google client ID, and UI username environment variables.
+
+    Returns a boolean indicating whether SSO has been set up.
+    """
+    microsoft_client_id = os.getenv("MICROSOFT_CLIENT_ID", None)
+    google_client_id = os.getenv("GOOGLE_CLIENT_ID", None)
+    ui_username = os.getenv("UI_USERNAME", None)
+
+    sso_setup = (
+        (microsoft_client_id is not None)
+        or (google_client_id is not None)
+        or (ui_username is not None)
+    )
+
+    return sso_setup
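
For quick reference, the helper added above reduces to an existence check on three environment variables. A minimal equivalent sketch (the name sso_is_configured is hypothetical and not part of this change):

import os

def sso_is_configured() -> bool:
    # Mirrors _has_user_setup_sso: any one of these env vars counts as SSO being set up.
    return any(
        os.getenv(var) is not None
        for var in ("MICROSOFT_CLIENT_ID", "GOOGLE_CLIENT_ID", "UI_USERNAME")
    )

print(sso_is_configured())  # False unless one of the three variables is exported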
litellm/proxy/auth/route_checks.py (new file, 180 lines)

@@ -0,0 +1,180 @@
+import re
+from typing import Optional
+
+from fastapi import HTTPException, Request, status
+
+from litellm._logging import verbose_proxy_logger
+from litellm.proxy._types import (
+    CommonProxyErrors,
+    LiteLLM_UserTable,
+    LiteLLMRoutes,
+    LitellmUserRoles,
+    UserAPIKeyAuth,
+)
+from litellm.proxy.utils import hash_token
+
+from .auth_utils import _has_user_setup_sso
+
+
+def non_admin_allowed_routes_check(
+    user_obj: Optional[LiteLLM_UserTable],
+    _user_role: Optional[LitellmUserRoles],
+    route: str,
+    request: Request,
+    valid_token: UserAPIKeyAuth,
+    api_key: str,
+    request_data: dict,
+):
+    """
+    Checks if Non-Admin User is allowed to access the route
+    """
+
+    # Check user has defined custom admin routes
+    custom_admin_only_route_check(
+        route=route,
+    )
+
+    if is_llm_api_route(route=route):
+        pass
+    elif (
+        route in LiteLLMRoutes.info_routes.value
+    ):  # check if user allowed to call an info route
+        if route == "/key/info":
+            # check if user can access this route
+            query_params = request.query_params
+            key = query_params.get("key")
+            if key is not None and hash_token(token=key) != api_key:
+                raise HTTPException(
+                    status_code=status.HTTP_403_FORBIDDEN,
+                    detail="user not allowed to access this key's info",
+                )
+        elif route == "/user/info":
+            # check if user can access this route
+            query_params = request.query_params
+            user_id = query_params.get("user_id")
+            verbose_proxy_logger.debug(
+                f"user_id: {user_id} & valid_token.user_id: {valid_token.user_id}"
+            )
+            if user_id != valid_token.user_id:
+                raise HTTPException(
+                    status_code=status.HTTP_403_FORBIDDEN,
+                    detail="key not allowed to access this user's info. user_id={}, key's user_id={}".format(
+                        user_id, valid_token.user_id
+                    ),
+                )
+        elif route == "/model/info":
+            # /model/info just shows models user has access to
+            pass
+        elif route == "/team/info":
+            pass  # handled by function itself
+    elif _has_user_setup_sso() and route in LiteLLMRoutes.sso_only_routes.value:
+        pass
+    elif (
+        route in LiteLLMRoutes.global_spend_tracking_routes.value
+        and getattr(valid_token, "permissions", None) is not None
+        and "get_spend_routes" in getattr(valid_token, "permissions", [])
+    ):
+
+        pass
+    elif _user_role == LitellmUserRoles.PROXY_ADMIN_VIEW_ONLY.value:
+        if is_llm_api_route(route=route):
+            raise HTTPException(
+                status_code=status.HTTP_403_FORBIDDEN,
+                detail=f"user not allowed to access this OpenAI routes, role= {_user_role}",
+            )
+        if route in LiteLLMRoutes.management_routes.value:
+            # the Admin Viewer is only allowed to call /user/update for their own user_id and can only update
+            if route == "/user/update":
+
+                # Check the Request params are valid for PROXY_ADMIN_VIEW_ONLY
+                if request_data is not None and isinstance(request_data, dict):
+                    _params_updated = request_data.keys()
+                    for param in _params_updated:
+                        if param not in ["user_email", "password"]:
+                            raise HTTPException(
+                                status_code=status.HTTP_403_FORBIDDEN,
+                                detail=f"user not allowed to access this route, role= {_user_role}. Trying to access: {route} and updating invalid param: {param}. only user_email and password can be updated",
+                            )
+            else:
+                raise HTTPException(
+                    status_code=status.HTTP_403_FORBIDDEN,
+                    detail=f"user not allowed to access this route, role= {_user_role}. Trying to access: {route}",
+                )
+
+    elif (
+        _user_role == LitellmUserRoles.INTERNAL_USER.value
+        and route in LiteLLMRoutes.internal_user_routes.value
+    ):
+        pass
+    elif (
+        route in LiteLLMRoutes.self_managed_routes.value
+    ):  # routes that manage their own allowed/disallowed logic
+        pass
+    else:
+        user_role = "unknown"
+        user_id = "unknown"
+        if user_obj is not None:
+            user_role = user_obj.user_role or "unknown"
+            user_id = user_obj.user_id or "unknown"
+        raise Exception(
+            f"Only proxy admin can be used to generate, delete, update info for new keys/users/teams. Route={route}. Your role={user_role}. Your user_id={user_id}"
+        )
+
+
+def custom_admin_only_route_check(route: str):
+    from litellm.proxy.proxy_server import general_settings, premium_user
+
+    if "admin_only_routes" in general_settings:
+        if premium_user is not True:
+            verbose_proxy_logger.error(
+                f"Trying to use 'admin_only_routes' this is an Enterprise only feature. {CommonProxyErrors.not_premium_user.value}"
+            )
+            return
+        if route in general_settings["admin_only_routes"]:
+            raise HTTPException(
+                status_code=status.HTTP_403_FORBIDDEN,
+                detail="user not allowed to access this route. This is an admin only route",
+            )
+    pass
+
+
+def is_llm_api_route(route: str) -> bool:
+    """
+    Helper to checks if provided route is an OpenAI route
+
+
+    Returns:
+    - True: if route is an OpenAI route
+    - False: if route is not an OpenAI route
+    """
+
+    if route in LiteLLMRoutes.openai_routes.value:
+        return True
+
+    if route in LiteLLMRoutes.anthropic_routes.value:
+        return True
+
+    # fuzzy match routes like "/v1/threads/thread_49EIN5QF32s4mH20M7GFKdlZ"
+    # Check for routes with placeholders
+    for openai_route in LiteLLMRoutes.openai_routes.value:
+        # Replace placeholders with regex pattern
+        # placeholders are written as "/threads/{thread_id}"
+        if "{" in openai_route:
+            pattern = re.sub(r"\{[^}]+\}", r"[^/]+", openai_route)
+            # Anchor the pattern to match the entire string
+            pattern = f"^{pattern}$"
+            if re.match(pattern, route):
+                return True
+
+    # Pass through Bedrock, VertexAI, and Cohere Routes
+    if "/bedrock/" in route:
+        return True
+    if "/vertex-ai/" in route:
+        return True
+    if "/gemini/" in route:
+        return True
+    if "/cohere/" in route:
+        return True
+    if "/langfuse/" in route:
+        return True
+    return False
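
As a quick illustration of the placeholder fuzzy match inside is_llm_api_route, here is a minimal self-contained sketch; the sample route list is hypothetical and stands in for LiteLLMRoutes.openai_routes:

import re

# Hypothetical stand-in for LiteLLMRoutes.openai_routes.value (illustration only).
sample_routes = ["/v1/threads/{thread_id}", "/v1/chat/completions"]

def matches_placeholder_route(route: str) -> bool:
    for candidate in sample_routes:
        if "{" in candidate:
            # "/v1/threads/{thread_id}" becomes the anchored pattern "^/v1/threads/[^/]+$"
            pattern = "^" + re.sub(r"\{[^}]+\}", r"[^/]+", candidate) + "$"
            if re.match(pattern, route):
                return True
    return False

print(matches_placeholder_route("/v1/threads/thread_49EIN5QF32s4mH20M7GFKdlZ"))  # True
print(matches_placeholder_route("/v1/threads/abc/messages"))  # False: "/" is not allowed inside the placeholder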
@@ -58,9 +58,8 @@ from litellm.proxy.auth.auth_checks import (
 )
 from litellm.proxy.auth.auth_utils import (
     _get_request_ip_address,
-    check_if_request_size_is_safe,
+    _has_user_setup_sso,
     get_request_route,
-    is_llm_api_route,
     is_pass_through_provider_route,
     pre_db_read_auth_checks,
     route_in_additonal_public_routes,
@@ -68,6 +67,7 @@ from litellm.proxy.auth.auth_utils import (
 )
 from litellm.proxy.auth.oauth2_check import check_oauth2_token
 from litellm.proxy.auth.oauth2_proxy_hook import handle_oauth2_proxy_request
+from litellm.proxy.auth.route_checks import non_admin_allowed_routes_check
 from litellm.proxy.common_utils.http_parsing_utils import _read_request_body
 from litellm.proxy.utils import _to_ns
 
@@ -976,96 +976,15 @@ async def user_api_key_auth(
             _user_role = _get_user_role(user_obj=user_obj)
 
             if not _is_user_proxy_admin(user_obj=user_obj):  # if non-admin
-                if is_llm_api_route(route=route):
-                    pass
-                elif (
-                    route in LiteLLMRoutes.info_routes.value
-                ):  # check if user allowed to call an info route
-                    if route == "/key/info":
-                        # check if user can access this route
-                        query_params = request.query_params
-                        key = query_params.get("key")
-                        if key is not None and hash_token(token=key) != api_key:
-                            raise HTTPException(
-                                status_code=status.HTTP_403_FORBIDDEN,
-                                detail="user not allowed to access this key's info",
-                            )
-                    elif route == "/user/info":
-                        # check if user can access this route
-                        query_params = request.query_params
-                        user_id = query_params.get("user_id")
-                        verbose_proxy_logger.debug(
-                            f"user_id: {user_id} & valid_token.user_id: {valid_token.user_id}"
-                        )
-                        if user_id != valid_token.user_id:
-                            raise HTTPException(
-                                status_code=status.HTTP_403_FORBIDDEN,
-                                detail="key not allowed to access this user's info. user_id={}, key's user_id={}".format(
-                                    user_id, valid_token.user_id
-                                ),
-                            )
-                    elif route == "/model/info":
-                        # /model/info just shows models user has access to
-                        pass
-                    elif route == "/team/info":
-                        pass  # handled by function itself
-                elif (
-                    _has_user_setup_sso()
-                    and route in LiteLLMRoutes.sso_only_routes.value
-                ):
-                    pass
-                elif (
-                    route in LiteLLMRoutes.global_spend_tracking_routes.value
-                    and getattr(valid_token, "permissions", None) is not None
-                    and "get_spend_routes" in getattr(valid_token, "permissions", [])
-                ):
-
-                    pass
-                elif _user_role == LitellmUserRoles.PROXY_ADMIN_VIEW_ONLY.value:
-                    if is_llm_api_route(route=route):
-                        raise HTTPException(
-                            status_code=status.HTTP_403_FORBIDDEN,
-                            detail=f"user not allowed to access this OpenAI routes, role= {_user_role}",
-                        )
-                    if route in LiteLLMRoutes.management_routes.value:
-                        # the Admin Viewer is only allowed to call /user/update for their own user_id and can only update
-                        if route == "/user/update":
-
-                            # Check the Request params are valid for PROXY_ADMIN_VIEW_ONLY
-                            if request_data is not None and isinstance(
-                                request_data, dict
-                            ):
-                                _params_updated = request_data.keys()
-                                for param in _params_updated:
-                                    if param not in ["user_email", "password"]:
-                                        raise HTTPException(
-                                            status_code=status.HTTP_403_FORBIDDEN,
-                                            detail=f"user not allowed to access this route, role= {_user_role}. Trying to access: {route} and updating invalid param: {param}. only user_email and password can be updated",
-                                        )
-                        else:
-                            raise HTTPException(
-                                status_code=status.HTTP_403_FORBIDDEN,
-                                detail=f"user not allowed to access this route, role= {_user_role}. Trying to access: {route}",
-                            )
-
-                elif (
-                    _user_role == LitellmUserRoles.INTERNAL_USER.value
-                    and route in LiteLLMRoutes.internal_user_routes.value
-                ):
-                    pass
-                elif (
-                    route in LiteLLMRoutes.self_managed_routes.value
-                ):  # routes that manage their own allowed/disallowed logic
-                    pass
-                else:
-                    user_role = "unknown"
-                    user_id = "unknown"
-                    if user_obj is not None:
-                        user_role = user_obj.user_role or "unknown"
-                        user_id = user_obj.user_id or "unknown"
-                    raise Exception(
-                        f"Only proxy admin can be used to generate, delete, update info for new keys/users/teams. Route={route}. Your role={user_role}. Your user_id={user_id}"
-                    )
+                non_admin_allowed_routes_check(
+                    user_obj=user_obj,
+                    _user_role=_user_role,
+                    route=route,
+                    request=request,
+                    request_data=request_data,
+                    api_key=api_key,
+                    valid_token=valid_token,
+                )
 
             # check if token is from litellm-ui, litellm ui makes keys to allow users to login with sso. These keys can only be used for LiteLLM UI functions
             # sso/login, ui/login, /key functions and /user functions
@@ -1219,24 +1138,6 @@ def _return_user_api_key_auth_obj(
     )
 
 
-def _has_user_setup_sso():
-    """
-    Check if the user has set up single sign-on (SSO) by verifying the presence of Microsoft client ID, Google client ID, and UI username environment variables.
-
-    Returns a boolean indicating whether SSO has been set up.
-    """
-    microsoft_client_id = os.getenv("MICROSOFT_CLIENT_ID", None)
-    google_client_id = os.getenv("GOOGLE_CLIENT_ID", None)
-    ui_username = os.getenv("UI_USERNAME", None)
-
-    sso_setup = (
-        (microsoft_client_id is not None)
-        or (google_client_id is not None)
-        or (ui_username is not None)
-    )
-
-    return sso_setup
-
-
 def _is_user_proxy_admin(user_obj: Optional[LiteLLM_UserTable]):
     if user_obj is None:
         return False
@@ -12,5 +12,8 @@ litellm_settings:
 
 general_settings:
   master_key: sk-1234
-  custom_auth: example_config_yaml.custom_auth_basic.user_api_key_auth
-  allowed_routes: []
+  allowed_routes: ["/chat/completions"]
+  public_routes: ["LiteLLMRoutes.public_routes"]
+  admin_only_routes: ["/key/generate"]
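
To show the intent of the new admin_only_routes setting, below is a minimal sketch of the gate that custom_admin_only_route_check applies; general_settings and premium_user are stubbed locally here rather than read from a running proxy, and premium_user=True assumes an enterprise license:

from fastapi import HTTPException, status

# Stubs mirroring the config above (assumptions for illustration).
general_settings = {"admin_only_routes": ["/key/generate"]}
premium_user = True

def admin_only_gate(route: str) -> None:
    if "admin_only_routes" in general_settings and premium_user:
        if route in general_settings["admin_only_routes"]:
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="user not allowed to access this route. This is an admin only route",
            )

admin_only_gate("/chat/completions")  # passes: not an admin-only route
try:
    admin_only_gate("/key/generate")  # rejected for non-admin callers
except HTTPException as exc:
    print(exc.status_code)  # 403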
@@ -21,7 +21,8 @@ from starlette.datastructures import URL, Headers, QueryParams
 
 import litellm
 from litellm.proxy._types import LiteLLMRoutes
-from litellm.proxy.auth.auth_utils import get_request_route, is_llm_api_route
+from litellm.proxy.auth.auth_utils import get_request_route
+from litellm.proxy.auth.route_checks import is_llm_api_route
 from litellm.proxy.proxy_server import app
 
 # Configure logging
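
With is_llm_api_route now imported from route_checks, a test along these lines would exercise both the exact and fuzzy matches. This is only a sketch: the specific routes asserted below are assumptions about LiteLLMRoutes, not cases taken from the repository's tests.

import pytest

from litellm.proxy.auth.route_checks import is_llm_api_route


@pytest.mark.parametrize(
    "route, expected",
    [
        ("/chat/completions", True),  # assumed to be listed in LiteLLMRoutes.openai_routes
        ("/v1/threads/thread_49EIN5QF32s4mH20M7GFKdlZ", True),  # fuzzy placeholder match
        ("/key/generate", False),  # management route, not an LLM API route
    ],
)
def test_is_llm_api_route(route, expected):
    assert is_llm_api_route(route=route) is expected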