mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-27 11:43:54 +00:00)
fix - use helper to check if a route is openai route
parent e380954de1
commit b30fa64e2a
2 changed files with 11 additions and 7 deletions
@@ -24,6 +24,7 @@ from litellm.proxy._types import (
     LitellmUserRoles,
     UserAPIKeyAuth,
 )
+from litellm.proxy.auth.auth_utils import is_openai_route
 from litellm.proxy.utils import PrismaClient, ProxyLogging, log_to_opentelemetry
 from litellm.types.services import ServiceLoggerPayload, ServiceTypes
 
@@ -105,7 +106,7 @@ def common_checks(
         general_settings.get("enforce_user_param", None) is not None
         and general_settings["enforce_user_param"] == True
     ):
-        if route in LiteLLMRoutes.openai_routes.value and "user" not in request_body:
+        if is_openai_route(route=route) and "user" not in request_body:
             raise Exception(
                 f"'user' param not passed in. 'enforce_user_param'={general_settings['enforce_user_param']}"
             )
@@ -121,7 +122,7 @@ def common_checks(
                 + CommonProxyErrors.not_premium_user.value
             )
 
-        if route in LiteLLMRoutes.openai_routes.value:
+        if is_openai_route(route=route):
             # loop through each enforced param
             # example enforced_params ['user', 'metadata', 'metadata.generation_name']
             for enforced_param in general_settings["enforced_params"]:
@@ -149,7 +150,7 @@ def common_checks(
         and global_proxy_spend is not None
         # only run global budget checks for OpenAI routes
         # Reason - the Admin UI should continue working if the proxy crosses it's global budget
-        and route in LiteLLMRoutes.openai_routes.value
+        and is_openai_route(route=route)
         and route != "/v1/models"
         and route != "/models"
     ):
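The is_openai_route helper is defined in litellm/proxy/auth/auth_utils.py (per the new import above), but its body is not part of this diff. Below is a minimal sketch of what it plausibly looks like, inferred only from the call sites in this commit (both route paths and FastAPI route names are passed to it); treat every detail as an assumption rather than the actual implementation.

# Hypothetical reconstruction -- not part of this commit; the real helper in
# litellm/proxy/auth/auth_utils.py may differ.
from litellm.proxy._types import LiteLLMRoutes


def is_openai_route(route: str) -> bool:
    """Return True if the given route (path or FastAPI route name) maps to an
    OpenAI-compatible proxy endpoint."""
    # direct path match, e.g. a route listed in LiteLLMRoutes.openai_routes
    if route in LiteLLMRoutes.openai_routes.value:
        return True
    # user_api_key_auth also passes request["route"].name, so named routes
    # are presumably accepted as well
    if route in LiteLLMRoutes.openai_route_names.value:
        return True
    return False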
@@ -56,7 +56,10 @@ from litellm.proxy.auth.auth_checks import (
     get_user_object,
     log_to_opentelemetry,
 )
-from litellm.proxy.auth.auth_utils import route_in_additonal_public_routes
+from litellm.proxy.auth.auth_utils import (
+    is_openai_route,
+    route_in_additonal_public_routes,
+)
 from litellm.proxy.common_utils.http_parsing_utils import _read_request_body
 from litellm.proxy.utils import _to_ns
 
@@ -920,9 +923,9 @@ async def user_api_key_auth(
             _user_role = _get_user_role(user_id_information=user_id_information)
 
             if not _is_user_proxy_admin(user_id_information):  # if non-admin
-                if route in LiteLLMRoutes.openai_routes.value:
+                if is_openai_route(route=route):
                     pass
-                elif request["route"].name in LiteLLMRoutes.openai_route_names.value:
+                elif is_openai_route(route=request["route"].name):
                     pass
                 elif (
                     route in LiteLLMRoutes.info_routes.value
@@ -975,7 +978,7 @@ async def user_api_key_auth(
 
                     pass
                 elif _user_role == LitellmUserRoles.PROXY_ADMIN_VIEW_ONLY.value:
-                    if route in LiteLLMRoutes.openai_routes.value:
+                    if is_openai_route(route=route):
                         raise HTTPException(
                             status_code=status.HTTP_403_FORBIDDEN,
                             detail=f"user not allowed to access this OpenAI routes, role= {_user_role}",
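Design note: routing the membership test through a single helper means any future change to how OpenAI routes are matched (for example, prefix or wildcard handling) only has to be made in auth_utils rather than at every call site in common_checks and user_api_key_auth. A small usage illustration follows; the concrete route strings are assumptions about LiteLLMRoutes and are not taken from this commit.

from litellm.proxy.auth.auth_utils import is_openai_route

# assuming "/chat/completions" is listed in LiteLLMRoutes.openai_routes
assert is_openai_route(route="/chat/completions") is True
# management endpoints such as key generation are expected to fall outside it
assert is_openai_route(route="/key/generate") is False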