[Bug fix]: Proxy Auth Layer - Allow Azure Realtime routes as llm_api_routes (#7684)

* fix route check for azure realtime endpoints

* test_is_llm_api_route

* fix /realtime

* test_routes_on_litellm_proxy
Ishaan Jaff 2025-01-10 20:38:06 -08:00 committed by GitHub
parent 2d1c90b688
commit 02f5c44a35
4 changed files with 18 additions and 2 deletions


@@ -229,6 +229,11 @@ class LiteLLMRoutes(enum.Enum):
         # rerank
         "/rerank",
         "/v1/rerank",
+        # realtime
+        "/realtime",
+        "/v1/realtime",
+        "/realtime?{model}",
+        "/v1/realtime?{model}",
     ]
     mapped_pass_through_routes = [
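
The two entries ending in ?{model} are templates for realtime URLs that carry the model in the query string, e.g. /v1/realtime?model=gpt-4o-realtime-preview. Only the path portion needs to match the route list, so the check can normalize the URL before comparing. A minimal sketch of that normalization, using a hypothetical helper named strip_query (an assumption for illustration, not the actual litellm implementation):

from urllib.parse import urlsplit

def strip_query(route: str) -> str:
    # Drop the query string so "/v1/realtime?model=..." compares
    # against the base "/v1/realtime" entry in openai_routes.
    return urlsplit(route).path

assert strip_query("/v1/realtime?model=gpt-4o-realtime-preview") == "/v1/realtime"
assert strip_query("/realtime?model=gpt-4o-realtime-preview") == "/realtime"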


@@ -4389,6 +4389,7 @@ from litellm import _arealtime
 @app.websocket("/v1/realtime")
+@app.websocket("/realtime")
 async def websocket_endpoint(
     websocket: WebSocket,
     model: str,
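
With both decorators registered, a client can open a websocket against either path. A minimal connection sketch using the third-party websockets package; the proxy address and API key below are placeholders, not part of this commit:

import asyncio
import websockets  # third-party: pip install websockets

async def main():
    # Placeholder proxy URL and key; adjust for your deployment.
    url = "ws://localhost:4000/v1/realtime?model=gpt-4o-realtime-preview"
    # Kwarg is additional_headers in websockets>=14 (extra_headers in older releases).
    async with websockets.connect(
        url, additional_headers={"Authorization": "Bearer sk-1234"}
    ) as ws:
        await ws.send('{"type": "session.update", "session": {}}')
        print(await ws.recv())

asyncio.run(main())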


@@ -62,7 +62,13 @@ def test_is_llm_api_route():
     assert RouteChecks.is_llm_api_route("/anthropic/messages") is True
     assert RouteChecks.is_llm_api_route("/anthropic/v1/messages") is True
     assert RouteChecks.is_llm_api_route("/azure/endpoint") is True
+    assert (
+        RouteChecks.is_llm_api_route("/v1/realtime?model=gpt-4o-realtime-preview")
+        is True
+    )
+    assert (
+        RouteChecks.is_llm_api_route("/realtime?model=gpt-4o-realtime-preview") is True
+    )
     assert (
         RouteChecks.is_llm_api_route(
             "/openai/deployments/vertex_ai/gemini-1.5-flash/chat/completions"


@@ -53,7 +53,11 @@ def test_routes_on_litellm_proxy():
     print("ALL OPENAI ROUTES:", LiteLLMRoutes.openai_routes.value)
     for route in LiteLLMRoutes.openai_routes.value:
-        assert route in _all_routes
+        # realtime routes - /realtime?model=gpt-4o
+        if "realtime" in route:
+            assert "/realtime" in _all_routes
+        else:
+            assert route in _all_routes

 @pytest.mark.parametrize(
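
The special case is needed because openai_routes now contains templated entries such as /realtime?{model}, which are never registered verbatim on the FastAPI app; the test instead checks that the base /realtime path is registered whenever a route mentions realtime.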