diff --git a/litellm/proxy/_types.py b/litellm/proxy/_types.py
index 2713977878..2117ee9a75 100644
--- a/litellm/proxy/_types.py
+++ b/litellm/proxy/_types.py
@@ -240,8 +240,13 @@ class LiteLLMRoutes(enum.Enum):
     mapped_pass_through_routes = [
         "/bedrock",
         "/vertex-ai",
+        "/vertex_ai",
+        "/cohere",
         "/gemini",
+        "/anthropic",
         "/langfuse",
+        "/azure",
+        "/openai",
     ]
 
     anthropic_routes = [
diff --git a/litellm/proxy/auth/route_checks.py b/litellm/proxy/auth/route_checks.py
index 26b9eb73a9..12ced50a53 100644
--- a/litellm/proxy/auth/route_checks.py
+++ b/litellm/proxy/auth/route_checks.py
@@ -180,23 +180,10 @@ class RouteChecks:
         if RouteChecks._is_azure_openai_route(route=route):
             return True
 
-        # Pass through Bedrock, VertexAI, and Cohere Routes
-        if "/bedrock/" in route:
-            return True
-        if "/vertex-ai/" in route:
-            return True
-        if "/gemini/" in route:
-            return True
-        if "/cohere/" in route:
-            return True
-        if "/langfuse/" in route:
-            return True
-        if "/anthropic/" in route:
-            return True
-        if "/azure/" in route:
-            return True
-        if "/openai/" in route:
-            return True
+        for _llm_passthrough_route in LiteLLMRoutes.mapped_pass_through_routes.value:
+            if _llm_passthrough_route in route:
+                return True
+
         return False
 
     @staticmethod
diff --git a/tests/proxy_admin_ui_tests/test_route_check_unit_tests.py b/tests/proxy_admin_ui_tests/test_route_check_unit_tests.py
index 79c9d194e5..718f707755 100644
--- a/tests/proxy_admin_ui_tests/test_route_check_unit_tests.py
+++ b/tests/proxy_admin_ui_tests/test_route_check_unit_tests.py
@@ -30,6 +30,9 @@ from litellm.proxy._types import LiteLLM_UserTable, LitellmUserRoles, UserAPIKey
 from litellm.proxy.pass_through_endpoints.llm_passthrough_endpoints import (
     router as llm_passthrough_router,
 )
+from litellm.proxy.vertex_ai_endpoints.vertex_endpoints import (
+    router as vertex_router,
+)
 
 # Replace the actual hash_token function with our mock
 import litellm.proxy.auth.route_checks
@@ -93,8 +96,11 @@ def test_is_llm_api_route():
     assert RouteChecks.is_llm_api_route("/key/regenerate/82akk800000000jjsk") is False
     assert RouteChecks.is_llm_api_route("/key/82akk800000000jjsk/delete") is False
 
+    all_llm_api_routes = vertex_router.routes + llm_passthrough_router.routes
+
     # check all routes in llm_passthrough_router, ensure they are considered llm api routes
-    for route in llm_passthrough_router.routes:
+    for route in all_llm_api_routes:
+        print("route", route)
         route_path = str(route.path)
         print("route_path", route_path)
         assert RouteChecks.is_llm_api_route(route_path) is True
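
For reference, below is a minimal standalone sketch of the membership check that replaces the hard-coded `if`/`return` chain in `RouteChecks.is_llm_api_route`. The local constant and the helper name `is_mapped_pass_through_route` are illustrative only and not part of this PR; the sketch mirrors `LiteLLMRoutes.mapped_pass_through_routes` instead of importing it. Note that the new loop matches prefixes without a trailing slash (e.g. `"/bedrock"` rather than `"/bedrock/"`), so it is marginally broader than the removed checks.

```python
# Illustrative sketch only: mirrors the prefix list from
# LiteLLMRoutes.mapped_pass_through_routes in this diff.
MAPPED_PASS_THROUGH_ROUTES = [
    "/bedrock",
    "/vertex-ai",
    "/vertex_ai",
    "/cohere",
    "/gemini",
    "/anthropic",
    "/langfuse",
    "/azure",
    "/openai",
]


def is_mapped_pass_through_route(route: str) -> bool:
    # Same semantics as the refactored loop in RouteChecks.is_llm_api_route:
    # the route is treated as an LLM API route if any configured prefix
    # appears anywhere in the path.
    for _llm_passthrough_route in MAPPED_PASS_THROUGH_ROUTES:
        if _llm_passthrough_route in route:
            return True
    return False


if __name__ == "__main__":
    # Hypothetical paths, used only to exercise the sketch.
    assert is_mapped_pass_through_route("/gemini/v1beta/models/gemini-pro:generateContent")
    assert is_mapped_pass_through_route("/vertex_ai/publishers/google/models")
    assert not is_mapped_pass_through_route("/key/generate")
```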