[Bug Fix] - /vertex_ai/ was not detected as llm_api_route on pass through but vertex-ai was (#8186)

* fix mapped_pass_through_routes

* fix route checks

* update test_is_llm_api_route
This commit is contained in:
Ishaan Jaff 2025-02-01 17:26:08 -08:00 committed by GitHub
parent 4e9c2d5b21
commit c0f3100934
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
3 changed files with 16 additions and 18 deletions

View file

@@ -240,8 +240,13 @@ class LiteLLMRoutes(enum.Enum):
# Provider prefixes for LLM pass-through endpoints. Route checking
# matches these as substrings of the request path, so a request like
# "/vertex_ai/..." is recognized as an LLM API route.
# NOTE: both "/vertex-ai" and "/vertex_ai" are listed so either
# spelling of the Vertex AI prefix is detected (see #8186).
mapped_pass_through_routes = [
    "/bedrock",
    "/vertex-ai",
    "/vertex_ai",
    "/cohere",
    "/gemini",
    "/anthropic",
    "/langfuse",
    "/azure",
    "/openai",
]
anthropic_routes = [

View file

@@ -180,23 +180,10 @@ class RouteChecks:
if RouteChecks._is_azure_openai_route(route=route):
return True
# Check against all mapped pass-through provider routes (Bedrock, Vertex AI, Cohere, Gemini, Anthropic, Langfuse, Azure, OpenAI)
if "/bedrock/" in route:
return True
if "/vertex-ai/" in route:
return True
if "/gemini/" in route:
return True
if "/cohere/" in route:
return True
if "/langfuse/" in route:
return True
if "/anthropic/" in route:
return True
if "/azure/" in route:
return True
if "/openai/" in route:
for _llm_passthrough_route in LiteLLMRoutes.mapped_pass_through_routes.value:
if _llm_passthrough_route in route:
return True
return False
@staticmethod

View file

@@ -30,6 +30,9 @@ from litellm.proxy._types import LiteLLM_UserTable, LitellmUserRoles, UserAPIKey
from litellm.proxy.pass_through_endpoints.llm_passthrough_endpoints import (
router as llm_passthrough_router,
)
from litellm.proxy.vertex_ai_endpoints.vertex_endpoints import (
router as vertex_router,
)
# Replace the actual hash_token function with our mock
import litellm.proxy.auth.route_checks
@@ -93,8 +96,11 @@ def test_is_llm_api_route():
assert RouteChecks.is_llm_api_route("/key/regenerate/82akk800000000jjsk") is False
assert RouteChecks.is_llm_api_route("/key/82akk800000000jjsk/delete") is False
all_llm_api_routes = vertex_router.routes + llm_passthrough_router.routes
# check all routes in llm_passthrough_router, ensure they are considered llm api routes
for route in llm_passthrough_router.routes:
for route in all_llm_api_routes:
print("route", route)
route_path = str(route.path)
print("route_path", route_path)
assert RouteChecks.is_llm_api_route(route_path) is True