From cbf0fa44b4a8093688a0c3972e82f29f48cdbe01 Mon Sep 17 00:00:00 2001
From: Ishaan Jaff
Date: Fri, 14 Mar 2025 21:05:51 -0700
Subject: [PATCH] undo changes to route llm request

---
 litellm/proxy/route_llm_request.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/litellm/proxy/route_llm_request.py b/litellm/proxy/route_llm_request.py
index d5c2e2c087..ac9332b219 100644
--- a/litellm/proxy/route_llm_request.py
+++ b/litellm/proxy/route_llm_request.py
@@ -94,7 +94,9 @@ async def route_request(
             )
 
         elif data["model"] not in router_model_names:
-            if (
+            if llm_router.router_general_settings.pass_through_all_models:
+                return getattr(litellm, f"{route_type}")(**data)
+            elif (
                 llm_router.default_deployment is not None
                 or len(llm_router.pattern_router.patterns) > 0
             ):
@@ -102,8 +104,6 @@ async def route_request(
             elif route_type == "amoderation":
                 # moderation endpoint does not require `model` parameter
                 return getattr(llm_router, f"{route_type}")(**data)
-            else:
-                return getattr(litellm, f"{route_type}")(**data)
 
     elif user_model is not None:
         return getattr(litellm, f"{route_type}")(**data)
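
Note: below is a minimal, self-contained sketch of the branch order this patch restores for models that are not in router_model_names. The stub classes (StubRouter, GeneralSettings, PatternRouter) are hypothetical stand-ins so the example runs without litellm; the real route_request dispatches via getattr on llm_router or the litellm module rather than returning strings.

# Hedged sketch of the restored fallthrough, not litellm's actual classes.
from typing import List, Optional


class GeneralSettings:
    def __init__(self, pass_through_all_models: bool = False):
        self.pass_through_all_models = pass_through_all_models


class PatternRouter:
    def __init__(self, patterns: Optional[List[str]] = None):
        # wildcard model patterns, e.g. "openai/*" (assumed shape, for illustration)
        self.patterns = patterns or []


class StubRouter:
    def __init__(
        self,
        pass_through_all_models: bool = False,
        default_deployment: Optional[dict] = None,
        patterns: Optional[List[str]] = None,
    ):
        self.router_general_settings = GeneralSettings(pass_through_all_models)
        self.default_deployment = default_deployment
        self.pattern_router = PatternRouter(patterns)


def route_unknown_model(llm_router: StubRouter, route_type: str) -> Optional[str]:
    """Mirror the branch order for data["model"] not in router_model_names."""
    if llm_router.router_general_settings.pass_through_all_models:
        # 1) pass-through enabled: call litellm directly, bypassing the router
        return "litellm"
    elif (
        llm_router.default_deployment is not None
        or len(llm_router.pattern_router.patterns) > 0
    ):
        # 2) a default deployment or a wildcard pattern can serve it: use the router
        return "llm_router"
    elif route_type == "amoderation":
        # 3) moderation endpoint does not require a `model` parameter: use the router
        return "llm_router"
    # 4) no trailing else anymore: the removed branch used to send everything
    #    else straight to litellm; after this patch the request falls through
    return None


if __name__ == "__main__":
    print(route_unknown_model(StubRouter(pass_through_all_models=True), "acompletion"))  # litellm
    print(route_unknown_model(StubRouter(patterns=["openai/*"]), "acompletion"))         # llm_router
    print(route_unknown_model(StubRouter(), "amoderation"))                              # llm_router
    print(route_unknown_model(StubRouter(), "acompletion"))                              # None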