From ae7f0ae0b6344dfb842feb938bf19a2a8b01c26a Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Tue, 28 Nov 2023 15:43:42 -0800
Subject: [PATCH] (feat) proxy: add logs on router performance

---
 litellm/proxy/proxy_server.py | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/litellm/proxy/proxy_server.py b/litellm/proxy/proxy_server.py
index a9104ae0e3..ed036070a6 100644
--- a/litellm/proxy/proxy_server.py
+++ b/litellm/proxy/proxy_server.py
@@ -739,6 +739,18 @@ async def chat_completion(request: Request, model: Optional[str] = None, user_ap
         return response
     except Exception as e:
         print(f"\033[1;31mAn error occurred: {e}\n\n Debug this by setting `--debug`, e.g. `litellm --model gpt-3.5-turbo --debug`")
+        if llm_router is not None and data["model"] in router_model_names:
+            print("Results from router")
+            print("\nRouter stats")
+            print("\nTotal Calls made")
+            for key, value in llm_router.total_calls.items():
+                print(f"{key}: {value}")
+            print("\nSuccess Calls made")
+            for key, value in llm_router.success_calls.items():
+                print(f"{key}: {value}")
+            print("\nFail Calls made")
+            for key, value in llm_router.fail_calls.items():
+                print(f"{key}: {value}")
         if user_debug:
             traceback.print_exc()
         error_traceback = traceback.format_exc()
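
For reference, the counters read above (total_calls, success_calls, fail_calls)
appear to be plain per-model dicts kept on the router. A minimal sketch of how
such counters could be maintained and printed; the RouterStats class and its
record() helper are hypothetical illustrations, not litellm's actual Router API:

    from collections import defaultdict

    class RouterStats:
        # Hypothetical stand-in for the counters the patch reads off llm_router.
        def __init__(self):
            self.total_calls = defaultdict(int)    # model name -> attempts
            self.success_calls = defaultdict(int)  # model name -> successes
            self.fail_calls = defaultdict(int)     # model name -> failures

        def record(self, model: str, succeeded: bool) -> None:
            # Count every attempt, then bucket it by outcome.
            self.total_calls[model] += 1
            if succeeded:
                self.success_calls[model] += 1
            else:
                self.fail_calls[model] += 1

    stats = RouterStats()
    stats.record("gpt-3.5-turbo", succeeded=True)
    stats.record("gpt-3.5-turbo", succeeded=False)
    for key, value in stats.total_calls.items():
        print(f"{key}: {value}")  # prints "gpt-3.5-turbo: 2"

With counters shaped like this, the loops added in the patch above simply dump
each dict key/value pair to the proxy's console when a request fails.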