check is_llm_api_route

Ishaan Jaff 2024-07-22 14:43:30 -07:00
parent b54b1d958b
commit b64755d2a1
3 changed files with 12 additions and 9 deletions


@@ -24,7 +24,7 @@ from litellm.proxy._types import (
     LitellmUserRoles,
     UserAPIKeyAuth,
 )
-from litellm.proxy.auth.auth_utils import is_openai_route
+from litellm.proxy.auth.auth_utils import is_llm_api_route
 from litellm.proxy.utils import PrismaClient, ProxyLogging, log_to_opentelemetry
 from litellm.types.services import ServiceLoggerPayload, ServiceTypes
@@ -106,7 +106,7 @@ def common_checks(
         general_settings.get("enforce_user_param", None) is not None
         and general_settings["enforce_user_param"] == True
     ):
-        if is_openai_route(route=route) and "user" not in request_body:
+        if is_llm_api_route(route=route) and "user" not in request_body:
             raise Exception(
                 f"'user' param not passed in. 'enforce_user_param'={general_settings['enforce_user_param']}"
             )
@@ -122,7 +122,7 @@ def common_checks(
                 + CommonProxyErrors.not_premium_user.value
             )
 
-        if is_openai_route(route=route):
+        if is_llm_api_route(route=route):
             # loop through each enforced param
             # example enforced_params ['user', 'metadata', 'metadata.generation_name']
             for enforced_param in general_settings["enforced_params"]:
@@ -150,7 +150,7 @@ def common_checks(
         and global_proxy_spend is not None
         # only run global budget checks for OpenAI routes
         # Reason - the Admin UI should continue working if the proxy crosses it's global budget
-        and is_openai_route(route=route)
+        and is_llm_api_route(route=route)
         and route != "/v1/models"
         and route != "/models"
     ):
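
Note: the enforce_user_param and enforced_params branches touched above are driven by the proxy's general_settings. A minimal illustration of settings that exercise them — the keys are the ones referenced in the diff, the values are examples only, and the enforced_params list is taken from the inline comment:

# Illustration only: example general_settings for the branches in common_checks above.
general_settings = {
    "enforce_user_param": True,   # require a "user" field on every LLM API request body
    "enforced_params": ["user", "metadata", "metadata.generation_name"],  # example from the comment above
}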


@@ -46,7 +46,7 @@ def route_in_additonal_public_routes(current_route: str):
     return False
 
 
-def is_openai_route(route: str) -> bool:
+def is_llm_api_route(route: str) -> bool:
     """
     Helper to checks if provided route is an OpenAI route
 
@@ -59,6 +59,9 @@ def is_openai_route(route: str) -> bool:
     if route in LiteLLMRoutes.openai_routes.value:
         return True
 
+    if route in LiteLLMRoutes.anthropic_routes.value:
+        return True
+
     # fuzzy match routes like "/v1/threads/thread_49EIN5QF32s4mH20M7GFKdlZ"
     # Check for routes with placeholders
     for openai_route in LiteLLMRoutes.openai_routes.value:
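
With this hunk, the renamed helper also treats Anthropic routes as LLM API routes. A quick usage sketch — it assumes a litellm build that includes this commit, and the expected results in the comments assume the route lists checked above contain the usual entries (e.g. that anthropic_routes includes "/v1/messages"):

# Usage sketch only; expected results depend on the contents of LiteLLMRoutes.
from litellm.proxy.auth.auth_utils import is_llm_api_route

assert is_llm_api_route(route="/chat/completions") is True   # OpenAI-compatible route
assert is_llm_api_route(route="/v1/messages") is True        # Anthropic route, assuming it is listed in anthropic_routes
assert is_llm_api_route(route="/key/generate") is False      # management route, not an LLM API call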


@@ -57,7 +57,7 @@ from litellm.proxy.auth.auth_checks import (
     log_to_opentelemetry,
 )
 from litellm.proxy.auth.auth_utils import (
-    is_openai_route,
+    is_llm_api_route,
     route_in_additonal_public_routes,
 )
 from litellm.proxy.common_utils.http_parsing_utils import _read_request_body
@@ -994,9 +994,9 @@ async def user_api_key_auth(
                 _user_role = _get_user_role(user_id_information=user_id_information)
 
                 if not _is_user_proxy_admin(user_id_information):  # if non-admin
-                    if is_openai_route(route=route):
+                    if is_llm_api_route(route=route):
                         pass
-                    elif is_openai_route(route=request["route"].name):
+                    elif is_llm_api_route(route=request["route"].name):
                         pass
                     elif (
                         route in LiteLLMRoutes.info_routes.value
@@ -1049,7 +1049,7 @@ async def user_api_key_auth(
                         pass
 
                 elif _user_role == LitellmUserRoles.PROXY_ADMIN_VIEW_ONLY.value:
-                    if is_openai_route(route=route):
+                    if is_llm_api_route(route=route):
                         raise HTTPException(
                             status_code=status.HTTP_403_FORBIDDEN,
                             detail=f"user not allowed to access this OpenAI routes, role= {_user_role}",