test: update tests

Krrish Dholakia 2025-03-22 12:56:42 -07:00
parent fe004bc45c
commit 6b2f385ddf
3 changed files with 4 additions and 11 deletions


@@ -339,9 +339,6 @@ def test_pass_through_routes_support_all_methods():
     from litellm.proxy.pass_through_endpoints.llm_passthrough_endpoints import (
         router as llm_router,
     )
-    from litellm.proxy.vertex_ai_endpoints.vertex_endpoints import (
-        router as vertex_router,
-    )

     # Expected HTTP methods
     expected_methods = {"GET", "POST", "PUT", "DELETE", "PATCH"}
@@ -361,7 +358,6 @@ def test_pass_through_routes_support_all_methods():
     # Check both routers
     check_router_methods(llm_router)
-    check_router_methods(vertex_router)


 def test_is_bedrock_agent_runtime_route():


@@ -30,9 +30,6 @@ from litellm.proxy._types import LiteLLM_UserTable, LitellmUserRoles, UserAPIKey
 from litellm.proxy.pass_through_endpoints.llm_passthrough_endpoints import (
     router as llm_passthrough_router,
 )
-from litellm.proxy.vertex_ai_endpoints.vertex_endpoints import (
-    router as vertex_router,
-)

 # Replace the actual hash_token function with our mock
 import litellm.proxy.auth.route_checks
@@ -96,7 +93,7 @@ def test_is_llm_api_route():
     assert RouteChecks.is_llm_api_route("/key/regenerate/82akk800000000jjsk") is False
     assert RouteChecks.is_llm_api_route("/key/82akk800000000jjsk/delete") is False

-    all_llm_api_routes = vertex_router.routes + llm_passthrough_router.routes
+    all_llm_api_routes = llm_passthrough_router.routes

     # check all routes in llm_passthrough_router, ensure they are considered llm api routes
     for route in all_llm_api_routes:


@@ -36,11 +36,11 @@ def test_initialize_deployment_for_pass_through_success():
     )

     # Verify the credentials were properly set
-    from litellm.proxy.vertex_ai_endpoints.vertex_endpoints import (
-        vertex_pass_through_router,
+    from litellm.proxy.pass_through_endpoints.llm_passthrough_endpoints import (
+        passthrough_endpoint_router,
     )
-    vertex_creds = vertex_pass_through_router.get_vertex_credentials(
+    vertex_creds = passthrough_endpoint_router.get_vertex_credentials(
         project_id="test-project", location="us-central1"
     )
     assert vertex_creds.vertex_project == "test-project"
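
Taken together, the hunks drop the separate vertex_ai_endpoints router and point the tests at the consolidated pass-through module instead. A minimal sketch, outside the commit itself and assuming a litellm checkout containing this change, of the two usages the updated tests rely on:

# Sketch only: imports and calls mirror the hunks above; it assumes Vertex
# credentials for this project/location pair were registered earlier, as the
# test does before this lookup.
from litellm.proxy.pass_through_endpoints.llm_passthrough_endpoints import (
    passthrough_endpoint_router,
    router as llm_passthrough_router,
)

# All pass-through routes are now collected from the single router.
all_llm_api_routes = llm_passthrough_router.routes

# Vertex credentials are resolved through the consolidated endpoint router.
vertex_creds = passthrough_endpoint_router.get_vertex_credentials(
    project_id="test-project", location="us-central1"
)
assert vertex_creds.vertex_project == "test-project"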