diff --git a/litellm/proxy/auth/auth_checks.py b/litellm/proxy/auth/auth_checks.py
index e404a1d406..655de7964e 100644
--- a/litellm/proxy/auth/auth_checks.py
+++ b/litellm/proxy/auth/auth_checks.py
@@ -24,6 +24,7 @@ from litellm.proxy._types import (
     LitellmUserRoles,
     UserAPIKeyAuth,
 )
+from litellm.proxy.auth.auth_utils import is_openai_route
 from litellm.proxy.utils import PrismaClient, ProxyLogging, log_to_opentelemetry
 from litellm.types.services import ServiceLoggerPayload, ServiceTypes
 
@@ -105,7 +106,7 @@ def common_checks(
         general_settings.get("enforce_user_param", None) is not None
         and general_settings["enforce_user_param"] == True
     ):
-        if route in LiteLLMRoutes.openai_routes.value and "user" not in request_body:
+        if is_openai_route(route=route) and "user" not in request_body:
             raise Exception(
                 f"'user' param not passed in. 'enforce_user_param'={general_settings['enforce_user_param']}"
             )
@@ -121,7 +122,7 @@ def common_checks(
                 + CommonProxyErrors.not_premium_user.value
             )
 
-        if route in LiteLLMRoutes.openai_routes.value:
+        if is_openai_route(route=route):
             # loop through each enforced param
             # example enforced_params ['user', 'metadata', 'metadata.generation_name']
             for enforced_param in general_settings["enforced_params"]:
@@ -149,7 +150,7 @@ def common_checks(
         and global_proxy_spend is not None
         # only run global budget checks for OpenAI routes
         # Reason - the Admin UI should continue working if the proxy crosses it's global budget
-        and route in LiteLLMRoutes.openai_routes.value
+        and is_openai_route(route=route)
         and route != "/v1/models"
         and route != "/models"
     ):
diff --git a/litellm/proxy/auth/user_api_key_auth.py b/litellm/proxy/auth/user_api_key_auth.py
index bc774816f6..7889cf86fe 100644
--- a/litellm/proxy/auth/user_api_key_auth.py
+++ b/litellm/proxy/auth/user_api_key_auth.py
@@ -56,7 +56,10 @@ from litellm.proxy.auth.auth_checks import (
     get_user_object,
     log_to_opentelemetry,
 )
-from litellm.proxy.auth.auth_utils import route_in_additonal_public_routes
+from litellm.proxy.auth.auth_utils import (
+    is_openai_route,
+    route_in_additonal_public_routes,
+)
 from litellm.proxy.common_utils.http_parsing_utils import _read_request_body
 from litellm.proxy.utils import _to_ns
 
@@ -920,9 +923,9 @@ async def user_api_key_auth(
                 _user_role = _get_user_role(user_id_information=user_id_information)
 
                 if not _is_user_proxy_admin(user_id_information):  # if non-admin
-                    if route in LiteLLMRoutes.openai_routes.value:
+                    if is_openai_route(route=route):
                         pass
-                    elif request["route"].name in LiteLLMRoutes.openai_route_names.value:
+                    elif is_openai_route(route=request["route"].name):
                         pass
                     elif (
                         route in LiteLLMRoutes.info_routes.value
@@ -975,7 +978,7 @@ async def user_api_key_auth(
                         pass
 
                     elif _user_role == LitellmUserRoles.PROXY_ADMIN_VIEW_ONLY.value:
-                        if route in LiteLLMRoutes.openai_routes.value:
+                        if is_openai_route(route=route):
                             raise HTTPException(
                                 status_code=status.HTTP_403_FORBIDDEN,
                                 detail=f"user not allowed to access this OpenAI routes, role= {_user_role}",