diff --git a/docs/my-website/docs/enterprise.md b/docs/my-website/docs/enterprise.md index f5cd77aea..acc1331f9 100644 --- a/docs/my-website/docs/enterprise.md +++ b/docs/my-website/docs/enterprise.md @@ -22,7 +22,7 @@ This covers: - ✅ [SSO for Admin UI](./proxy/ui#✨-enterprise-features) - ✅ [Audit Logs with retention policy](./proxy/enterprise#audit-logs) - ✅ [JWT-Auth](../docs/proxy/token_auth.md) - - ✅ [Control available public, private routes](./proxy/enterprise#control-available-public-private-routes) + - ✅ [Control available public, private routes (Restrict certain endpoints on proxy)](./proxy/enterprise#control-available-public-private-routes) - ✅ [**Secret Managers** AWS Key Manager, Google Secret Manager, Azure Key](./secret) - ✅ IP address‑based access control lists - ✅ Track Request IP Address diff --git a/docs/my-website/docs/proxy/enterprise.md b/docs/my-website/docs/proxy/enterprise.md index 3ff160225..9b117c7e2 100644 --- a/docs/my-website/docs/proxy/enterprise.md +++ b/docs/my-website/docs/proxy/enterprise.md @@ -16,8 +16,9 @@ Features: - ✅ [SSO for Admin UI](./ui.md#✨-enterprise-features) - ✅ [Audit Logs with retention policy](#audit-logs) - ✅ [JWT-Auth](../docs/proxy/token_auth.md) + - ✅ [Control available public, private routes (Restrict certain endpoints on proxy)](#control-available-public-private-routes) - ✅ [Control available public, private routes](#control-available-public-private-routes) - - ✅ [**Secret Managers** AWS Key Manager, Google Secret Manager, Azure Key](../secret) + - ✅ [[BETA] AWS Key Manager v2 - Key Decryption](#beta-aws-key-manager---key-decryption) - ✅ IP address‑based access control lists - ✅ Track Request IP Address - ✅ [Use LiteLLM keys/authentication on Pass Through Endpoints](pass_through#✨-enterprise---use-litellm-keysauthentication-on-pass-through-endpoints) @@ -609,24 +610,35 @@ Expected Response ## Control available public, private routes +**Restrict certain endpoints of proxy** + :::info -❓ Use this when you want to make an existing private route -> public - -Example - Make `/spend/calculate` a publicly available route (by default `/spend/calculate` on LiteLLM Proxy requires authentication) +❓ Use this when you want to: +- make an existing private route -> public +- set certain routes as admin_only routes ::: -#### Usage - Define public routes +#### Usage - Define public, admin only routes -**Step 1** - set allowed public routes on config.yaml +**Step 1** - Set on config.yaml + + +| Route Type | Optional | Requires Virtual Key Auth | Admin Can Access | All Roles Can Access | Description | +|------------|----------|---------------------------|-------------------|----------------------|-------------| +| `public_routes` | ✅ | ❌ | ✅ | ✅ | Routes that can be accessed without any authentication | +| `admin_only_routes` | ✅ | ✅ | ✅ | ❌ | Routes that can only be accessed by [Proxy Admin](./self_serve#available-roles) | +| `allowed_routes` | ✅ | ✅ | ✅ | ✅ | Routes are exposed on the proxy. If not set then all routes exposed. | `LiteLLMRoutes.public_routes` is an ENUM corresponding to the default public routes on LiteLLM. 
[You can see this here](https://github.com/BerriAI/litellm/blob/main/litellm/proxy/_types.py) ```yaml general_settings: master_key: sk-1234 - public_routes: ["LiteLLMRoutes.public_routes", "/spend/calculate"] + public_routes: ["LiteLLMRoutes.public_routes", "/spend/calculate"] # routes that can be accessed without any auth + admin_only_routes: ["/key/generate"] # Optional - routes that can only be accessed by Proxy Admin + allowed_routes: ["/chat/completions", "/spend/calculate", "LiteLLMRoutes.public_routes"] # Optional - routes that can be accessed by anyone after Authentication ``` **Step 2** - start proxy @@ -637,6 +649,10 @@ litellm --config config.yaml **Step 3** - Test it + + + + ```shell curl --request POST \ --url 'http://localhost:4000/spend/calculate' \ @@ -649,6 +665,97 @@ curl --request POST \ 🎉 Expect this endpoint to work without an `Authorization / Bearer Token` + + + + + +**Successfull Request** + +```shell +curl --location 'http://0.0.0.0:4000/key/generate' \ +--header 'Authorization: Bearer ' \ +--header 'Content-Type: application/json' \ +--data '{}' +``` + + +**Un-successfull Request** + +```shell + curl --location 'http://0.0.0.0:4000/key/generate' \ +--header 'Authorization: Bearer ' \ +--header 'Content-Type: application/json' \ +--data '{"user_role": "internal_user"}' +``` + +**Expected Response** + +```json +{ + "error": { + "message": "user not allowed to access this route. Route=/key/generate is an admin only route", + "type": "auth_error", + "param": "None", + "code": "403" + } +} +``` + + + + + + + +**Successfull Request** + +```shell +curl http://localhost:4000/chat/completions \ +-H "Content-Type: application/json" \ +-H "Authorization: Bearer sk-1234" \ +-d '{ +"model": "fake-openai-endpoint", +"messages": [ + {"role": "user", "content": "Hello, Claude"} +] +}' +``` + + +**Un-successfull Request** + +```shell +curl --location 'http://0.0.0.0:4000/embeddings' \ +--header 'Content-Type: application/json' \ +-H "Authorization: Bearer sk-1234" \ +--data ' { +"model": "text-embedding-ada-002", +"input": ["write a litellm poem"] +}' +``` + +**Expected Response** + +```json +{ + "error": { + "message": "Route /embeddings not allowed", + "type": "auth_error", + "param": "None", + "code": "403" + } +} +``` + + + + + + + + + ## Guardrails - Secret Detection/Redaction ❓ Use this to REDACT API Keys, Secrets sent in requests to an LLM. diff --git a/litellm/proxy/auth/auth_checks.py b/litellm/proxy/auth/auth_checks.py index 6b4be7261..180fa309b 100644 --- a/litellm/proxy/auth/auth_checks.py +++ b/litellm/proxy/auth/auth_checks.py @@ -26,7 +26,7 @@ from litellm.proxy._types import ( LitellmUserRoles, UserAPIKeyAuth, ) -from litellm.proxy.auth.auth_utils import is_llm_api_route +from litellm.proxy.auth.route_checks import is_llm_api_route from litellm.proxy.utils import PrismaClient, ProxyLogging, log_to_opentelemetry from litellm.types.services import ServiceLoggerPayload, ServiceTypes @@ -40,22 +40,6 @@ else: all_routes = LiteLLMRoutes.openai_routes.value + LiteLLMRoutes.management_routes.value -def is_request_body_safe(request_body: dict) -> bool: - """ - Check if the request body is safe. - - A malicious user can set the api_base to their own domain and invoke POST /chat/completions to intercept and steal the OpenAI API key. 
- Relevant issue: https://huntr.com/bounties/4001e1a2-7b7a-4776-a3ae-e6692ec3d997 - """ - banned_params = ["api_base", "base_url"] - - for param in banned_params: - if param in request_body: - raise ValueError(f"BadRequest: {param} is not allowed in request body") - - return True - - def common_checks( request_body: dict, team_object: Optional[LiteLLM_TeamTable], diff --git a/litellm/proxy/auth/auth_utils.py b/litellm/proxy/auth/auth_utils.py index 6b9c9cefd..cdc397b1a 100644 --- a/litellm/proxy/auth/auth_utils.py +++ b/litellm/proxy/auth/auth_utils.py @@ -1,13 +1,123 @@ import re import sys import traceback +from typing import List, Optional, Tuple -from fastapi import Request +from fastapi import HTTPException, Request, status from litellm._logging import verbose_proxy_logger from litellm.proxy._types import * +def _get_request_ip_address( + request: Request, use_x_forwarded_for: Optional[bool] = False +) -> Optional[str]: + + client_ip = None + if use_x_forwarded_for is True and "x-forwarded-for" in request.headers: + client_ip = request.headers["x-forwarded-for"] + elif request.client is not None: + client_ip = request.client.host + else: + client_ip = "" + + return client_ip + + +def _check_valid_ip( + allowed_ips: Optional[List[str]], + request: Request, + use_x_forwarded_for: Optional[bool] = False, +) -> Tuple[bool, Optional[str]]: + """ + Returns if ip is allowed or not + """ + if allowed_ips is None: # if not set, assume true + return True, None + + # if general_settings.get("use_x_forwarded_for") is True then use x-forwarded-for + client_ip = _get_request_ip_address( + request=request, use_x_forwarded_for=use_x_forwarded_for + ) + + # Check if IP address is allowed + if client_ip not in allowed_ips: + return False, client_ip + + return True, client_ip + + +def is_request_body_safe(request_body: dict) -> bool: + """ + Check if the request body is safe. + + A malicious user can set the api_base to their own domain and invoke POST /chat/completions to intercept and steal the OpenAI API key. + Relevant issue: https://huntr.com/bounties/4001e1a2-7b7a-4776-a3ae-e6692ec3d997 + """ + banned_params = ["api_base", "base_url"] + + for param in banned_params: + if param in request_body: + raise ValueError(f"BadRequest: {param} is not allowed in request body") + + return True + + +async def pre_db_read_auth_checks( + request: Request, + request_data: dict, + route: str, +): + """ + 1. Checks if request size is under max_request_size_mb (if set) + 2. Check if request body is safe (example user has not set api_base in request body) + 3. Check if IP address is allowed (if set) + 4. Check if request route is an allowed route on the proxy (if set) + + Returns: + - True + + Raises: + - HTTPException if request fails initial auth checks + """ + from litellm.proxy.proxy_server import general_settings, premium_user + + # Check 1. request size + await check_if_request_size_is_safe(request=request) + + # Check 2. Request body is safe + is_request_body_safe(request_body=request_data) + + # Check 3. Check if IP address is allowed + is_valid_ip, passed_in_ip = _check_valid_ip( + allowed_ips=general_settings.get("allowed_ips", None), + use_x_forwarded_for=general_settings.get("use_x_forwarded_for", False), + request=request, + ) + + if not is_valid_ip: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail=f"Access forbidden: IP address {passed_in_ip} not allowed.", + ) + + # Check 4. 
Check if request route is an allowed route on the proxy + if "allowed_routes" in general_settings: + _allowed_routes = general_settings["allowed_routes"] + if premium_user is not True: + verbose_proxy_logger.error( + f"Trying to set allowed_routes. This is an Enterprise feature. {CommonProxyErrors.not_premium_user.value}" + ) + if route not in _allowed_routes: + verbose_proxy_logger.error( + f"Route {route} not in allowed_routes={_allowed_routes}" + ) + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail=f"Access forbidden: Route {route} not allowed", + ) + + def route_in_additonal_public_routes(current_route: str): """ Helper to check if the user defined public_routes on config.yaml @@ -50,48 +160,6 @@ def route_in_additonal_public_routes(current_route: str): return False -def is_llm_api_route(route: str) -> bool: - """ - Helper to checks if provided route is an OpenAI route - - - Returns: - - True: if route is an OpenAI route - - False: if route is not an OpenAI route - """ - - if route in LiteLLMRoutes.openai_routes.value: - return True - - if route in LiteLLMRoutes.anthropic_routes.value: - return True - - # fuzzy match routes like "/v1/threads/thread_49EIN5QF32s4mH20M7GFKdlZ" - # Check for routes with placeholders - for openai_route in LiteLLMRoutes.openai_routes.value: - # Replace placeholders with regex pattern - # placeholders are written as "/threads/{thread_id}" - if "{" in openai_route: - pattern = re.sub(r"\{[^}]+\}", r"[^/]+", openai_route) - # Anchor the pattern to match the entire string - pattern = f"^{pattern}$" - if re.match(pattern, route): - return True - - # Pass through Bedrock, VertexAI, and Cohere Routes - if "/bedrock/" in route: - return True - if "/vertex-ai/" in route: - return True - if "/gemini/" in route: - return True - if "/cohere/" in route: - return True - if "/langfuse/" in route: - return True - return False - - def get_request_route(request: Request) -> str: """ Helper to get the route from the request @@ -259,3 +327,21 @@ def should_run_auth_on_pass_through_provider_route(route: str) -> bool: """ # by default we do not run virtual key auth checks on /vertex-ai/{endpoint} routes return False + + +def _has_user_setup_sso(): + """ + Check if the user has set up single sign-on (SSO) by verifying the presence of Microsoft client ID, Google client ID, and UI username environment variables. + Returns a boolean indicating whether SSO has been set up. 
+ """ + microsoft_client_id = os.getenv("MICROSOFT_CLIENT_ID", None) + google_client_id = os.getenv("GOOGLE_CLIENT_ID", None) + ui_username = os.getenv("UI_USERNAME", None) + + sso_setup = ( + (microsoft_client_id is not None) + or (google_client_id is not None) + or (ui_username is not None) + ) + + return sso_setup diff --git a/litellm/proxy/auth/route_checks.py b/litellm/proxy/auth/route_checks.py new file mode 100644 index 000000000..6c48a7f35 --- /dev/null +++ b/litellm/proxy/auth/route_checks.py @@ -0,0 +1,180 @@ +import re +from typing import Optional + +from fastapi import HTTPException, Request, status + +from litellm._logging import verbose_proxy_logger +from litellm.proxy._types import ( + CommonProxyErrors, + LiteLLM_UserTable, + LiteLLMRoutes, + LitellmUserRoles, + UserAPIKeyAuth, +) +from litellm.proxy.utils import hash_token + +from .auth_utils import _has_user_setup_sso + + +def non_admin_allowed_routes_check( + user_obj: Optional[LiteLLM_UserTable], + _user_role: Optional[LitellmUserRoles], + route: str, + request: Request, + valid_token: UserAPIKeyAuth, + api_key: str, + request_data: dict, +): + """ + Checks if Non-Admin User is allowed to access the route + """ + + # Check user has defined custom admin routes + custom_admin_only_route_check( + route=route, + ) + + if is_llm_api_route(route=route): + pass + elif ( + route in LiteLLMRoutes.info_routes.value + ): # check if user allowed to call an info route + if route == "/key/info": + # check if user can access this route + query_params = request.query_params + key = query_params.get("key") + if key is not None and hash_token(token=key) != api_key: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="user not allowed to access this key's info", + ) + elif route == "/user/info": + # check if user can access this route + query_params = request.query_params + user_id = query_params.get("user_id") + verbose_proxy_logger.debug( + f"user_id: {user_id} & valid_token.user_id: {valid_token.user_id}" + ) + if user_id != valid_token.user_id: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="key not allowed to access this user's info. user_id={}, key's user_id={}".format( + user_id, valid_token.user_id + ), + ) + elif route == "/model/info": + # /model/info just shows models user has access to + pass + elif route == "/team/info": + pass # handled by function itself + elif _has_user_setup_sso() and route in LiteLLMRoutes.sso_only_routes.value: + pass + elif ( + route in LiteLLMRoutes.global_spend_tracking_routes.value + and getattr(valid_token, "permissions", None) is not None + and "get_spend_routes" in getattr(valid_token, "permissions", []) + ): + + pass + elif _user_role == LitellmUserRoles.PROXY_ADMIN_VIEW_ONLY.value: + if is_llm_api_route(route=route): + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail=f"user not allowed to access this OpenAI routes, role= {_user_role}", + ) + if route in LiteLLMRoutes.management_routes.value: + # the Admin Viewer is only allowed to call /user/update for their own user_id and can only update + if route == "/user/update": + + # Check the Request params are valid for PROXY_ADMIN_VIEW_ONLY + if request_data is not None and isinstance(request_data, dict): + _params_updated = request_data.keys() + for param in _params_updated: + if param not in ["user_email", "password"]: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail=f"user not allowed to access this route, role= {_user_role}. 
Trying to access: {route} and updating invalid param: {param}. only user_email and password can be updated", + ) + else: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail=f"user not allowed to access this route, role= {_user_role}. Trying to access: {route}", + ) + + elif ( + _user_role == LitellmUserRoles.INTERNAL_USER.value + and route in LiteLLMRoutes.internal_user_routes.value + ): + pass + elif ( + route in LiteLLMRoutes.self_managed_routes.value + ): # routes that manage their own allowed/disallowed logic + pass + else: + user_role = "unknown" + user_id = "unknown" + if user_obj is not None: + user_role = user_obj.user_role or "unknown" + user_id = user_obj.user_id or "unknown" + raise Exception( + f"Only proxy admin can be used to generate, delete, update info for new keys/users/teams. Route={route}. Your role={user_role}. Your user_id={user_id}" + ) + + +def custom_admin_only_route_check(route: str): + from litellm.proxy.proxy_server import general_settings, premium_user + + if "admin_only_routes" in general_settings: + if premium_user is not True: + verbose_proxy_logger.error( + f"Trying to use 'admin_only_routes' this is an Enterprise only feature. {CommonProxyErrors.not_premium_user.value}" + ) + return + if route in general_settings["admin_only_routes"]: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail=f"user not allowed to access this route. Route={route} is an admin only route", + ) + pass + + +def is_llm_api_route(route: str) -> bool: + """ + Helper to checks if provided route is an OpenAI route + + + Returns: + - True: if route is an OpenAI route + - False: if route is not an OpenAI route + """ + + if route in LiteLLMRoutes.openai_routes.value: + return True + + if route in LiteLLMRoutes.anthropic_routes.value: + return True + + # fuzzy match routes like "/v1/threads/thread_49EIN5QF32s4mH20M7GFKdlZ" + # Check for routes with placeholders + for openai_route in LiteLLMRoutes.openai_routes.value: + # Replace placeholders with regex pattern + # placeholders are written as "/threads/{thread_id}" + if "{" in openai_route: + pattern = re.sub(r"\{[^}]+\}", r"[^/]+", openai_route) + # Anchor the pattern to match the entire string + pattern = f"^{pattern}$" + if re.match(pattern, route): + return True + + # Pass through Bedrock, VertexAI, and Cohere Routes + if "/bedrock/" in route: + return True + if "/vertex-ai/" in route: + return True + if "/gemini/" in route: + return True + if "/cohere/" in route: + return True + if "/langfuse/" in route: + return True + return False diff --git a/litellm/proxy/auth/user_api_key_auth.py b/litellm/proxy/auth/user_api_key_auth.py index 00b89edb9..4c16c0345 100644 --- a/litellm/proxy/auth/user_api_key_auth.py +++ b/litellm/proxy/auth/user_api_key_auth.py @@ -54,19 +54,20 @@ from litellm.proxy.auth.auth_checks import ( get_org_object, get_team_object, get_user_object, - is_request_body_safe, log_to_opentelemetry, ) from litellm.proxy.auth.auth_utils import ( - check_if_request_size_is_safe, + _get_request_ip_address, + _has_user_setup_sso, get_request_route, - is_llm_api_route, is_pass_through_provider_route, + pre_db_read_auth_checks, route_in_additonal_public_routes, should_run_auth_on_pass_through_provider_route, ) from litellm.proxy.auth.oauth2_check import check_oauth2_token from litellm.proxy.auth.oauth2_proxy_hook import handle_oauth2_proxy_request +from litellm.proxy.auth.route_checks import non_admin_allowed_routes_check from litellm.proxy.common_utils.http_parsing_utils import 
_read_request_body from litellm.proxy.utils import _to_ns @@ -128,25 +129,11 @@ async def user_api_key_auth( route: str = get_request_route(request=request) # get the request body request_data = await _read_request_body(request=request) - is_request_body_safe(request_body=request_data) - - ### LiteLLM Enterprise Security Checks - # Check 1. Check if request size is under max_request_size_mb - # Check 2. FILTER IP ADDRESS - await check_if_request_size_is_safe(request=request) - - is_valid_ip, passed_in_ip = _check_valid_ip( - allowed_ips=general_settings.get("allowed_ips", None), - use_x_forwarded_for=general_settings.get("use_x_forwarded_for", False), + await pre_db_read_auth_checks( + request_data=request_data, request=request, + route=route, ) - - if not is_valid_ip: - raise HTTPException( - status_code=status.HTTP_403_FORBIDDEN, - detail=f"Access forbidden: IP address {passed_in_ip} not allowed.", - ) - pass_through_endpoints: Optional[List[dict]] = general_settings.get( "pass_through_endpoints", None ) @@ -200,6 +187,7 @@ async def user_api_key_auth( ``` """ + ######## Route Checks Before Reading DB / Cache for "token" ################ if ( route in LiteLLMRoutes.public_routes.value or route_in_additonal_public_routes(current_route=route) @@ -211,6 +199,9 @@ async def user_api_key_auth( return UserAPIKeyAuth( user_role=LitellmUserRoles.INTERNAL_USER_VIEW_ONLY ) + + ########## End of Route Checks Before Reading DB / Cache for "token" ######## + if general_settings.get("enable_oauth2_auth", False) is True: # return UserAPIKeyAuth object # helper to check if the api_key is a valid oauth2 token @@ -985,96 +976,15 @@ async def user_api_key_auth( _user_role = _get_user_role(user_obj=user_obj) if not _is_user_proxy_admin(user_obj=user_obj): # if non-admin - if is_llm_api_route(route=route): - pass - elif ( - route in LiteLLMRoutes.info_routes.value - ): # check if user allowed to call an info route - if route == "/key/info": - # check if user can access this route - query_params = request.query_params - key = query_params.get("key") - if key is not None and hash_token(token=key) != api_key: - raise HTTPException( - status_code=status.HTTP_403_FORBIDDEN, - detail="user not allowed to access this key's info", - ) - elif route == "/user/info": - # check if user can access this route - query_params = request.query_params - user_id = query_params.get("user_id") - verbose_proxy_logger.debug( - f"user_id: {user_id} & valid_token.user_id: {valid_token.user_id}" - ) - if user_id != valid_token.user_id: - raise HTTPException( - status_code=status.HTTP_403_FORBIDDEN, - detail="key not allowed to access this user's info. 
user_id={}, key's user_id={}".format( - user_id, valid_token.user_id - ), - ) - elif route == "/model/info": - # /model/info just shows models user has access to - pass - elif route == "/team/info": - pass # handled by function itself - elif ( - _has_user_setup_sso() - and route in LiteLLMRoutes.sso_only_routes.value - ): - pass - elif ( - route in LiteLLMRoutes.global_spend_tracking_routes.value - and getattr(valid_token, "permissions", None) is not None - and "get_spend_routes" in getattr(valid_token, "permissions", []) - ): - - pass - elif _user_role == LitellmUserRoles.PROXY_ADMIN_VIEW_ONLY.value: - if is_llm_api_route(route=route): - raise HTTPException( - status_code=status.HTTP_403_FORBIDDEN, - detail=f"user not allowed to access this OpenAI routes, role= {_user_role}", - ) - if route in LiteLLMRoutes.management_routes.value: - # the Admin Viewer is only allowed to call /user/update for their own user_id and can only update - if route == "/user/update": - - # Check the Request params are valid for PROXY_ADMIN_VIEW_ONLY - if request_data is not None and isinstance( - request_data, dict - ): - _params_updated = request_data.keys() - for param in _params_updated: - if param not in ["user_email", "password"]: - raise HTTPException( - status_code=status.HTTP_403_FORBIDDEN, - detail=f"user not allowed to access this route, role= {_user_role}. Trying to access: {route} and updating invalid param: {param}. only user_email and password can be updated", - ) - else: - raise HTTPException( - status_code=status.HTTP_403_FORBIDDEN, - detail=f"user not allowed to access this route, role= {_user_role}. Trying to access: {route}", - ) - - elif ( - _user_role == LitellmUserRoles.INTERNAL_USER.value - and route in LiteLLMRoutes.internal_user_routes.value - ): - pass - elif ( - route in LiteLLMRoutes.self_managed_routes.value - ): # routes that manage their own allowed/disallowed logic - pass - else: - user_role = "unknown" - user_id = "unknown" - if user_obj is not None: - user_role = user_obj.user_role or "unknown" - user_id = user_obj.user_id or "unknown" - raise Exception( - f"Only proxy admin can be used to generate, delete, update info for new keys/users/teams. Route={route}. Your role={user_role}. Your user_id={user_id}" - ) + non_admin_allowed_routes_check( + user_obj=user_obj, + _user_role=_user_role, + route=route, + request=request, + request_data=request_data, + api_key=api_key, + valid_token=valid_token, + ) # check if token is from litellm-ui, litellm ui makes keys to allow users to login with sso. These keys can only be used for LiteLLM UI functions # sso/login, ui/login, /key functions and /user functions @@ -1228,24 +1138,6 @@ def _return_user_api_key_auth_obj( ) -def _has_user_setup_sso(): - """ - Check if the user has set up single sign-on (SSO) by verifying the presence of Microsoft client ID, Google client ID, and UI username environment variables. - Returns a boolean indicating whether SSO has been set up. 
- """ - microsoft_client_id = os.getenv("MICROSOFT_CLIENT_ID", None) - google_client_id = os.getenv("GOOGLE_CLIENT_ID", None) - ui_username = os.getenv("UI_USERNAME", None) - - sso_setup = ( - (microsoft_client_id is not None) - or (google_client_id is not None) - or (ui_username is not None) - ) - - return sso_setup - - def _is_user_proxy_admin(user_obj: Optional[LiteLLM_UserTable]): if user_obj is None: return False @@ -1282,44 +1174,6 @@ def _get_user_role( return role -def _get_request_ip_address( - request: Request, use_x_forwarded_for: Optional[bool] = False -) -> Optional[str]: - - client_ip = None - if use_x_forwarded_for is True and "x-forwarded-for" in request.headers: - client_ip = request.headers["x-forwarded-for"] - elif request.client is not None: - client_ip = request.client.host - else: - client_ip = "" - - return client_ip - - -def _check_valid_ip( - allowed_ips: Optional[List[str]], - request: Request, - use_x_forwarded_for: Optional[bool] = False, -) -> Tuple[bool, Optional[str]]: - """ - Returns if ip is allowed or not - """ - if allowed_ips is None: # if not set, assume true - return True, None - - # if general_settings.get("use_x_forwarded_for") is True then use x-forwarded-for - client_ip = _get_request_ip_address( - request=request, use_x_forwarded_for=use_x_forwarded_for - ) - - # Check if IP address is allowed - if client_ip not in allowed_ips: - return False, client_ip - - return True, client_ip - - def get_api_key_from_custom_header( request: Request, custom_litellm_key_header_name: str ): diff --git a/litellm/proxy/proxy_config.yaml b/litellm/proxy/proxy_config.yaml index 5a8a4cad4..7566f348a 100644 --- a/litellm/proxy/proxy_config.yaml +++ b/litellm/proxy/proxy_config.yaml @@ -15,6 +15,6 @@ litellm_settings: success_callback: ["prometheus"] failure_callback: ["prometheus"] -general_settings: - master_key: sk-1234 - key_management_system: "google_secret_manager" \ No newline at end of file +general_settings: + master_key: sk-1234 + diff --git a/litellm/tests/test_key_generate_prisma.py b/litellm/tests/test_key_generate_prisma.py index afde12393..708025d1d 100644 --- a/litellm/tests/test_key_generate_prisma.py +++ b/litellm/tests/test_key_generate_prisma.py @@ -3092,3 +3092,71 @@ async def test_team_tags(prisma_client): print("team_info_response", team_info_response) assert team_info_response["team_info"].metadata["tags"] == ["teamA", "teamB"] + + +@pytest.mark.asyncio +async def test_admin_only_routes(prisma_client): + """ + Tests if setting admin_only_routes works + + only an admin should be able to access admin only routes + """ + litellm.set_verbose = True + setattr(litellm.proxy.proxy_server, "prisma_client", prisma_client) + setattr(litellm.proxy.proxy_server, "master_key", "sk-1234") + await litellm.proxy.proxy_server.prisma_client.connect() + general_settings = { + "allowed_routes": ["/embeddings", "/key/generate"], + "admin_only_routes": ["/key/generate"], + } + from litellm.proxy import proxy_server + + initial_general_settings = getattr(proxy_server, "general_settings") + + setattr(proxy_server, "general_settings", general_settings) + + admin_user = await new_user( + data=NewUserRequest( + user_name="admin", + user_role=LitellmUserRoles.PROXY_ADMIN, + ), + user_api_key_dict=UserAPIKeyAuth(user_role=LitellmUserRoles.PROXY_ADMIN), + ) + + non_admin_user = await new_user( + data=NewUserRequest( + user_name="non-admin", + user_role=LitellmUserRoles.INTERNAL_USER, + ), + user_api_key_dict=UserAPIKeyAuth(user_role=LitellmUserRoles.PROXY_ADMIN), + ) + + 
admin_user_key = admin_user.key
+    non_admin_user_key = non_admin_user.key
+
+    assert admin_user_key is not None
+    assert non_admin_user_key is not None
+
+    # the admin key should be able to access the admin-only route
+    request = Request(scope={"type": "http"})
+    request._url = URL(url="/key/generate")
+    await user_api_key_auth(
+        request=request,
+        api_key="Bearer " + admin_user_key,
+    )
+
+    # the non-admin key should not be able to access an admin-only route
+
+    try:
+        await user_api_key_auth(
+            request=request,
+            api_key="Bearer " + non_admin_user_key,
+        )
+        pytest.fail("Expected this call to fail. A non-admin user should not be able to access an admin-only route.")
+    except Exception as e:
+        print("error str=", str(e.message))
+        error_str = str(e.message)
+        assert "Route" in error_str and "admin only route" in error_str
+        pass
+
+    setattr(proxy_server, "general_settings", initial_general_settings)
diff --git a/litellm/tests/test_proxy_routes.py b/litellm/tests/test_proxy_routes.py
index 4064e5e0f..41ea0e1b4 100644
--- a/litellm/tests/test_proxy_routes.py
+++ b/litellm/tests/test_proxy_routes.py
@@ -21,7 +21,8 @@ from starlette.datastructures import URL, Headers, QueryParams
 
 import litellm
 from litellm.proxy._types import LiteLLMRoutes
-from litellm.proxy.auth.auth_utils import get_request_route, is_llm_api_route
+from litellm.proxy.auth.auth_utils import get_request_route
+from litellm.proxy.auth.route_checks import is_llm_api_route
 from litellm.proxy.proxy_server import app
 
 # Configure logging
diff --git a/litellm/tests/test_user_api_key_auth.py b/litellm/tests/test_user_api_key_auth.py
index 5a292bb4a..e7b01aa3f 100644
--- a/litellm/tests/test_user_api_key_auth.py
+++ b/litellm/tests/test_user_api_key_auth.py
@@ -42,7 +42,7 @@ class Request:
 def test_check_valid_ip(
     allowed_ips: Optional[List[str]], client_ip: Optional[str], expected_result: bool
 ):
-    from litellm.proxy.auth.user_api_key_auth import _check_valid_ip
+    from litellm.proxy.auth.auth_utils import _check_valid_ip
 
     request = Request(client_ip)
 
@@ -70,7 +70,7 @@ def test_check_valid_ip
 def test_check_valid_ip_sent_with_x_forwarded_for(
     allowed_ips: Optional[List[str]], client_ip: Optional[str], expected_result: bool
 ):
-    from litellm.proxy.auth.user_api_key_auth import _check_valid_ip
+    from litellm.proxy.auth.auth_utils import _check_valid_ip
 
     request = Request(client_ip, headers={"X-Forwarded-For": client_ip})
 
@@ -246,3 +246,46 @@ async def test_user_api_key_auth_fails_with_prohibited_params(prohibited_param):
         error_message = str(e.message)
         print("error message=", error_message)
         assert "is not allowed in request body" in error_message
+
+
+@pytest.mark.asyncio()
+@pytest.mark.parametrize(
+    "route, should_raise_error",
+    [
+        ("/embeddings", False),
+        ("/chat/completions", True),
+        ("/completions", True),
+        ("/models", True),
+        ("/v1/embeddings", True),
+    ],
+)
+async def test_auth_with_allowed_routes(route, should_raise_error):
+    # Setup
+    user_key = "sk-1234"
+
+    general_settings = {"allowed_routes": ["/embeddings"]}
+    from fastapi import Request
+
+    from litellm.proxy import proxy_server
+
+    initial_general_settings = getattr(proxy_server, "general_settings")
+
+    setattr(proxy_server, "master_key", "sk-1234")
+    setattr(proxy_server, "general_settings", general_settings)
+
+    request = Request(scope={"type": "http"})
+    request._url = URL(url=route)
+
+    if should_raise_error:
+        try:
+            await user_api_key_auth(request=request, api_key="Bearer " + user_key)
+            pytest.fail("Expected this call to fail. Route is not in allowed_routes.")
+        except Exception as e:
+            print("error str=", str(e.message))
+            error_str = str(e.message)
+            assert "Route" in error_str and "not allowed" in error_str
+            pass
+    else:
+        await user_api_key_auth(request=request, api_key="Bearer " + user_key)
+
+    setattr(proxy_server, "general_settings", initial_general_settings)