Compare commits

3 commits

Author        SHA1         Message                                 Date
Ishaan Jaff   337f168dc0   fix test_gemini_pass_through_endpoint   2024-11-20 22:09:35 -08:00
Ishaan Jaff   19ae080d3f   test llm_passthrough_router             2024-11-20 21:24:37 -08:00
Ishaan Jaff   6e0c000ea9   fix /anthropic/                         2024-11-20 21:10:05 -08:00
5 changed files with 22 additions and 8 deletions

View file

@@ -192,6 +192,10 @@ class RouteChecks:
             return True
         if "/langfuse/" in route:
             return True
+        if "/anthropic/" in route:
+            return True
+        if "/azure/" in route:
+            return True
         return False
 
     @staticmethod
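
The added checks reuse the substring pattern already applied to /langfuse/: any route containing a known pass-through prefix is treated as an LLM API route. A minimal standalone sketch of that idea, not the actual RouteChecks implementation:

import typing

# Illustrative prefix list; the real checks are inlined in RouteChecks.is_llm_api_route
PASS_THROUGH_PREFIXES: typing.List[str] = ["/langfuse/", "/anthropic/", "/azure/", "/gemini/"]

def is_pass_through_route(route: str) -> bool:
    # A route counts as a pass-through LLM API route if it contains any known prefix
    return any(prefix in route for prefix in PASS_THROUGH_PREFIXES)

assert is_pass_through_route("/anthropic/v1/messages") is True
assert is_pass_through_route("/key/regenerate/abc") is False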

View file

@@ -2,10 +2,8 @@
 What is this?
 
 Provider-specific Pass-Through Endpoints
 """
 """
 1. Create pass-through endpoints for any LITELLM_BASE_URL/gemini/<endpoint> map to https://generativelanguage.googleapis.com/<endpoint>
 
 Use litellm with Anthropic SDK, Vertex AI SDK, Cohere SDK, etc.
 """
 import ast
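
The docstring describes the forwarding rule: a request to LITELLM_BASE_URL/gemini/<endpoint> is passed through to https://generativelanguage.googleapis.com/<endpoint>. A hedged sketch of what such a call might look like against a locally running proxy; the base URL, key, and model path are placeholder assumptions, not values from this diff:

import requests

# Assumes a proxy at http://localhost:4000 and a virtual key "sk-1234";
# the /gemini/* suffix is forwarded verbatim to generativelanguage.googleapis.com
resp = requests.post(
    "http://localhost:4000/gemini/v1beta/models/gemini-1.5-flash:generateContent",
    params={"key": "sk-1234"},
    json={"contents": [{"parts": [{"text": "Hello"}]}]},
)
print(resp.status_code, resp.json())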

View file

@@ -203,6 +203,9 @@ from litellm.proxy.openai_files_endpoints.files_endpoints import (
     router as openai_files_router,
 )
 from litellm.proxy.openai_files_endpoints.files_endpoints import set_files_config
+from litellm.proxy.pass_through_endpoints.llm_passthrough_endpoints import (
+    router as llm_passthrough_router,
+)
 from litellm.proxy.pass_through_endpoints.pass_through_endpoints import (
     initialize_pass_through_endpoints,
 )
@@ -233,9 +236,6 @@ from litellm.proxy.utils import (
     reset_budget,
     update_spend,
 )
-from litellm.proxy.vertex_ai_endpoints.google_ai_studio_endpoints import (
-    router as gemini_router,
-)
 from litellm.proxy.vertex_ai_endpoints.langfuse_endpoints import (
     router as langfuse_router,
 )
@@ -9128,7 +9128,7 @@ app.include_router(router)
 app.include_router(rerank_router)
 app.include_router(fine_tuning_router)
 app.include_router(vertex_router)
-app.include_router(gemini_router)
+app.include_router(llm_passthrough_router)
 app.include_router(langfuse_router)
 app.include_router(pass_through_router)
 app.include_router(health_router)
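
The wiring follows FastAPI's standard pattern: each endpoint module exposes an APIRouter, and the proxy server mounts them all on the app. A generic, self-contained sketch of that mechanism; the route path and handler here are illustrative, not litellm's actual passthrough code:

from fastapi import APIRouter, FastAPI

# Module-level router, mirroring how llm_passthrough_endpoints exposes `router`
router = APIRouter()

@router.post("/anthropic/{endpoint:path}")
async def anthropic_proxy_route(endpoint: str):
    # Real forwarding logic would proxy the request upstream; this is a stub
    return {"forwarded_to": f"https://api.anthropic.com/{endpoint}"}

app = FastAPI()
app.include_router(router)  # same mechanism as app.include_router(llm_passthrough_router)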

View file

@@ -27,6 +27,9 @@ from fastapi import HTTPException, Request
 import pytest
 from litellm.proxy.auth.route_checks import RouteChecks
 from litellm.proxy._types import LiteLLM_UserTable, LitellmUserRoles, UserAPIKeyAuth
+from litellm.proxy.pass_through_endpoints.llm_passthrough_endpoints import (
+    router as llm_passthrough_router,
+)
 
 # Replace the actual hash_token function with our mock
 import litellm.proxy.auth.route_checks
@@ -56,12 +59,21 @@ def test_is_llm_api_route():
     assert RouteChecks.is_llm_api_route("/vertex-ai/text") is True
     assert RouteChecks.is_llm_api_route("/gemini/generate") is True
     assert RouteChecks.is_llm_api_route("/cohere/generate") is True
+    assert RouteChecks.is_llm_api_route("/anthropic/messages") is True
+    assert RouteChecks.is_llm_api_route("/anthropic/v1/messages") is True
+    assert RouteChecks.is_llm_api_route("/azure/endpoint") is True
 
     # check non-matching routes
     assert RouteChecks.is_llm_api_route("/some/random/route") is False
     assert RouteChecks.is_llm_api_route("/key/regenerate/82akk800000000jjsk") is False
     assert RouteChecks.is_llm_api_route("/key/82akk800000000jjsk/delete") is False
 
+    # check all routes in llm_passthrough_router, ensure they are considered llm api routes
+    for route in llm_passthrough_router.routes:
+        route_path = str(route.path)
+        print("route_path", route_path)
+        assert RouteChecks.is_llm_api_route(route_path) is True
+
 
 # Test _route_matches_pattern
 def test_route_matches_pattern():
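
The added loop keeps the auth check in sync with the router automatically: FastAPI stores every registered route on router.routes, each carrying its path template, so any endpoint added to llm_passthrough_router is tested without editing the test. A self-contained sketch of that introspection; the router and path here are illustrative:

from fastapi import APIRouter

router = APIRouter()

@router.post("/anthropic/{endpoint:path}")
async def anthropic_route(endpoint: str):
    return {}

# Each APIRoute object exposes the registered path template via .path
for route in router.routes:
    print(str(route.path))  # -> /anthropic/{endpoint:path}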

View file

@@ -1794,7 +1794,7 @@ async def test_add_callback_via_key_litellm_pre_call_utils_langsmith(
 async def test_gemini_pass_through_endpoint():
     from starlette.datastructures import URL
 
-    from litellm.proxy.vertex_ai_endpoints.google_ai_studio_endpoints import (
+    from litellm.proxy.pass_through_endpoints.llm_passthrough_endpoints import (
         Request,
         Response,
         gemini_proxy_route,
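
This test calls gemini_proxy_route directly rather than going through the ASGI stack, which requires building a starlette Request by hand. A hedged sketch of how such a request can be constructed; the scope fields and URL are assumed minimal values, not the actual test body:

from starlette.requests import Request

# Minimal ASGI scope for a hand-built request; values are placeholders
scope = {
    "type": "http",
    "method": "POST",
    "path": "/gemini/v1beta/models/gemini-1.5-flash:generateContent",
    "headers": [],
    "query_string": b"key=sk-1234",
    "server": ("localhost", 4000),
}
request = Request(scope)
print(request.url)  # starlette derives the full URL from the scope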