check is_llm_api_route

parent b54b1d958b
commit b64755d2a1

3 changed files with 12 additions and 9 deletions
litellm/proxy/auth/auth_checks.py

@@ -24,7 +24,7 @@ from litellm.proxy._types import (
     LitellmUserRoles,
     UserAPIKeyAuth,
 )
-from litellm.proxy.auth.auth_utils import is_openai_route
+from litellm.proxy.auth.auth_utils import is_llm_api_route
 from litellm.proxy.utils import PrismaClient, ProxyLogging, log_to_opentelemetry
 from litellm.types.services import ServiceLoggerPayload, ServiceTypes

@@ -106,7 +106,7 @@ def common_checks(
         general_settings.get("enforce_user_param", None) is not None
         and general_settings["enforce_user_param"] == True
     ):
-        if is_openai_route(route=route) and "user" not in request_body:
+        if is_llm_api_route(route=route) and "user" not in request_body:
             raise Exception(
                 f"'user' param not passed in. 'enforce_user_param'={general_settings['enforce_user_param']}"
             )
@@ -122,7 +122,7 @@ def common_checks(
                 + CommonProxyErrors.not_premium_user.value
             )

-        if is_openai_route(route=route):
+        if is_llm_api_route(route=route):
             # loop through each enforced param
             # example enforced_params ['user', 'metadata', 'metadata.generation_name']
             for enforced_param in general_settings["enforced_params"]:
@@ -150,7 +150,7 @@ def common_checks(
         and global_proxy_spend is not None
         # only run global budget checks for OpenAI routes
         # Reason - the Admin UI should continue working if the proxy crosses it's global budget
-        and is_openai_route(route=route)
+        and is_llm_api_route(route=route)
         and route != "/v1/models"
         and route != "/models"
     ):
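For reference, a minimal runnable sketch of the enforce_user_param path changed above. is_llm_api_route is stubbed here rather than imported, and enforce_user_param_check is a hypothetical wrapper around the common_checks fragment, not a function in the codebase:

def is_llm_api_route(route: str) -> bool:
    # Stub: the real helper also matches Anthropic routes and templated
    # routes like "/v1/threads/{thread_id}" (see the auth_utils.py diff below).
    return route.startswith("/v1/") or route in ("/chat/completions", "/completions")


def enforce_user_param_check(route: str, request_body: dict, general_settings: dict) -> None:
    # Mirrors the common_checks logic above: only LLM API routes are
    # required to carry a 'user' param when enforce_user_param is set.
    if (
        general_settings.get("enforce_user_param", None) is not None
        and general_settings["enforce_user_param"] == True
    ):
        if is_llm_api_route(route=route) and "user" not in request_body:
            raise Exception(
                f"'user' param not passed in. 'enforce_user_param'={general_settings['enforce_user_param']}"
            )


enforce_user_param_check("/user/info", {}, {"enforce_user_param": True})  # not an LLM route, passes
try:
    enforce_user_param_check("/chat/completions", {}, {"enforce_user_param": True})
except Exception as e:
    print(e)  # 'user' param not passed in. 'enforce_user_param'=True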
litellm/proxy/auth/auth_utils.py

@@ -46,7 +46,7 @@ def route_in_additonal_public_routes(current_route: str):
         return False


-def is_openai_route(route: str) -> bool:
+def is_llm_api_route(route: str) -> bool:
     """
     Helper to checks if provided route is an OpenAI route

@@ -59,6 +59,9 @@ def is_openai_route(route: str) -> bool:
     if route in LiteLLMRoutes.openai_routes.value:
         return True

+    if route in LiteLLMRoutes.anthropic_routes.value:
+        return True
+
     # fuzzy match routes like "/v1/threads/thread_49EIN5QF32s4mH20M7GFKdlZ"
     # Check for routes with placeholders
     for openai_route in LiteLLMRoutes.openai_routes.value:
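Putting the two auth_utils.py hunks together, the helper after this commit behaves roughly like the sketch below. LiteLLMRoutes here is a pared-down stand-in for the real enum in litellm.proxy._types, and the regex translation of placeholder routes is an assumption about how the fuzzy-match loop resolves them:

import re
from enum import Enum


class LiteLLMRoutes(Enum):
    # Pared-down stand-in for litellm.proxy._types.LiteLLMRoutes.
    openai_routes = ["/chat/completions", "/v1/threads/{thread_id}"]
    anthropic_routes = ["/v1/messages"]


def is_llm_api_route(route: str) -> bool:
    """
    Helper to check if a provided route is an LLM API route.
    """
    if route in LiteLLMRoutes.openai_routes.value:
        return True

    if route in LiteLLMRoutes.anthropic_routes.value:
        return True

    # fuzzy match routes like "/v1/threads/thread_49EIN5QF32s4mH20M7GFKdlZ"
    # Check for routes with placeholders
    for openai_route in LiteLLMRoutes.openai_routes.value:
        # Turn "/v1/threads/{thread_id}" into the pattern "/v1/threads/[^/]+".
        pattern = "^" + re.sub(r"\{[^}]+\}", "[^/]+", openai_route) + "$"
        if re.match(pattern, route):
            return True

    return False


print(is_llm_api_route("/v1/messages"))                                 # True, new in this commit
print(is_llm_api_route("/v1/threads/thread_49EIN5QF32s4mH20M7GFKdlZ"))  # True via fuzzy match
print(is_llm_api_route("/user/info"))                                   # False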
litellm/proxy/auth/user_api_key_auth.py

@@ -57,7 +57,7 @@ from litellm.proxy.auth.auth_checks import (
     log_to_opentelemetry,
 )
 from litellm.proxy.auth.auth_utils import (
-    is_openai_route,
+    is_llm_api_route,
     route_in_additonal_public_routes,
 )
 from litellm.proxy.common_utils.http_parsing_utils import _read_request_body
@@ -994,9 +994,9 @@ async def user_api_key_auth(
                 _user_role = _get_user_role(user_id_information=user_id_information)

                 if not _is_user_proxy_admin(user_id_information):  # if non-admin
-                    if is_openai_route(route=route):
+                    if is_llm_api_route(route=route):
                         pass
-                    elif is_openai_route(route=request["route"].name):
+                    elif is_llm_api_route(route=request["route"].name):
                         pass
                     elif (
                         route in LiteLLMRoutes.info_routes.value
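The two pass branches above admit a non-admin key when either the raw request path or the matched route's name (request["route"].name, presumably taken from the Starlette route object the request resolved to) is an LLM API route. A condensed, hypothetical sketch of just that gating decision:

def is_llm_api_route(route: str) -> bool:
    return route in ("/chat/completions", "/v1/messages")  # stub for the real helper


def non_admin_route_allowed(route: str, matched_route_name: str) -> bool:
    # Hypothetical condensation of the pass/pass/elif chain above: the request
    # proceeds if either form of the route is an LLM API route; the real code
    # continues on to info routes and other role-specific checks.
    if is_llm_api_route(route=route):
        return True
    elif is_llm_api_route(route=matched_route_name):
        return True
    return False


print(non_admin_route_allowed("/chat/completions", "chat_completion"))  # True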
@@ -1049,7 +1049,7 @@ async def user_api_key_auth(

                         pass
                 elif _user_role == LitellmUserRoles.PROXY_ADMIN_VIEW_ONLY.value:
-                    if is_openai_route(route=route):
+                    if is_llm_api_route(route=route):
                         raise HTTPException(
                             status_code=status.HTTP_403_FORBIDDEN,
                             detail=f"user not allowed to access this OpenAI routes, role= {_user_role}",
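Finally, a self-contained sketch of the view-only gate in the last hunk. The role string and the is_llm_api_route stub are assumed stand-ins for LitellmUserRoles.PROXY_ADMIN_VIEW_ONLY.value and the real helper:

from fastapi import HTTPException, status

PROXY_ADMIN_VIEW_ONLY = "proxy_admin_viewer"  # assumed value of LitellmUserRoles.PROXY_ADMIN_VIEW_ONLY


def is_llm_api_route(route: str) -> bool:
    return route in ("/chat/completions", "/v1/messages")  # stub for the real helper


def gate_view_only_admin(_user_role: str, route: str) -> None:
    # Mirrors the hunk above: a view-only admin can browse info/UI routes,
    # but calling an LLM API route raises a 403.
    if _user_role == PROXY_ADMIN_VIEW_ONLY:
        if is_llm_api_route(route=route):
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail=f"user not allowed to access this OpenAI routes, role= {_user_role}",
            )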