From b2c35bbd5048e091c5b1fccd12d2414c4e3e765e Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Tue, 21 Nov 2023 12:38:09 -0800
Subject: [PATCH] (test) load test proxy

---
 cookbook/litellm_router/error_log.txt        | 1004 ++++++++++++++++++
 cookbook/litellm_router/load_test_queuing.py |   18 +-
 cookbook/litellm_router/request_log.txt      |    0
 cookbook/litellm_router/response_log.txt     |    0
 4 files changed, 1019 insertions(+), 3 deletions(-)
 create mode 100644 cookbook/litellm_router/error_log.txt
 create mode 100644 cookbook/litellm_router/request_log.txt
 create mode 100644 cookbook/litellm_router/response_log.txt

diff --git a/cookbook/litellm_router/error_log.txt b/cookbook/litellm_router/error_log.txt
new file mode 100644
index 0000000000..6853ef4659
--- /dev/null
+++ b/cookbook/litellm_router/error_log.txt
@@ -0,0 +1,1004 @@
+Question: What endpoints does the litellm proxy have 💥 OpenAI Proxy Server
+LiteLLM Server manages:
+
+Calling 10
+Exception: Expecting value: line 1 column 1 (char 0)
+
+Question: Given this context, what is litellm? LiteLLM about: About
+Call all LLM APIs using the OpenAI format.
+Exception: Expecting value: line 1 column 1 (char 0)
+
+Question: Does litellm support ooobagooba llms? how can i call oobagooba llms. Call all LLM APIs using the Ope
+Exception: Expecting value: line 1 column 1 (char 0)
+
+Question: What endpoints does the litellm proxy have 💥 OpenAI Proxy Server
+LiteLLM Server manages:
+
+Calling 10
+Exception: 'Response' object has no attribute 'get'
+
+Question: Given this context, what is litellm? LiteLLM about: About
+Call all LLM APIs using the OpenAI format.
+Exception: 'Response' object has no attribute 'get'
+
+Question: Does litellm support ooobagooba llms? how can i call oobagooba llms. Call all LLM APIs using the Ope
+Exception: 'Response' object has no attribute 'get'
+
diff --git a/cookbook/litellm_router/load_test_queuing.py b/cookbook/litellm_router/load_test_queuing.py
index b00a95f5fe..9da78cfdbb 100644
--- a/cookbook/litellm_router/load_test_queuing.py
+++ b/cookbook/litellm_router/load_test_queuing.py
@@ -82,13 +82,23 @@ def make_openai_completion(question):
             ],
         }
         response = requests.post("http://0.0.0.0:8000/queue/request", json=data)
-        print(response)
+        response = response.json()  # parse once; downstream code expects a dict
         end_time = time.time()
-
         # Log the request details
         with open("request_log.txt", "a") as log_file:
             log_file.write(
-                f"Question: {question[:100]}\nResponse ID:{response.id} Content:{response.choices[0].message.content[:10]}\nTime: {end_time - start_time:.2f} seconds\n\n"
+                f"Question: {question[:100]}\nResponse ID: {response.get('id', 'N/A')} Url: {response.get('url', 'N/A')}\nTime: {end_time - start_time:.2f} seconds\n\n"
             )
+
+        # Poll the status URL returned by the queue endpoint
+        url = response["url"]
+        polling_url = f"http://0.0.0.0:8000{url}"
+        print(f"POLLING JOB: {polling_url}")
+        response = requests.get(polling_url)
+        response = response.json()
+        status = response["status"]
+        print(f"POLLING JOB: {polling_url}\nSTATUS: {status}\nResponse: {response}")
+
+        return response
 
     except Exception as e:
diff --git a/cookbook/litellm_router/request_log.txt b/cookbook/litellm_router/request_log.txt
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/cookbook/litellm_router/response_log.txt b/cookbook/litellm_router/response_log.txt
new file mode 100644
index 0000000000..e69de29bb2
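A note on the two failure modes captured in error_log.txt: "Expecting value: line 1 column 1 (char 0)" is the json.JSONDecodeError raised when .json() is called on a body that is not JSON, and "'Response' object has no attribute 'get'" is the AttributeError from calling dict methods on a raw requests.Response before it has been parsed. Below is a minimal polling sketch that guards against both. It assumes only the contract visible in the diff above (POST /queue/request returns a "url" field, and the job payload carries a "status" field that reaches "finished"); the poll_queued_job helper name and the timeout/interval defaults are illustrative, not part of the proxy API.

    import time
    import requests

    BASE_URL = "http://0.0.0.0:8000"  # same proxy address used in load_test_queuing.py

    def poll_queued_job(job_url: str, timeout: float = 60.0, interval: float = 0.5) -> dict:
        """Poll a queued-job status URL until it reports 'finished'.

        Each response is parsed exactly once, so dict methods are never
        called on a raw requests.Response, and non-JSON bodies are logged
        and retried instead of raising.
        """
        deadline = time.time() + timeout
        while time.time() < deadline:
            raw = requests.get(f"{BASE_URL}{job_url}")
            try:
                job = raw.json()
            except ValueError:  # json.JSONDecodeError subclasses ValueError
                print(f"Non-JSON body (HTTP {raw.status_code}): {raw.text[:100]}")
                time.sleep(interval)
                continue
            if job.get("status") == "finished":
                return job
            time.sleep(interval)
        raise TimeoutError(f"Job at {job_url} did not finish within {timeout}s")

Usage would be result = poll_queued_job(response["url"]), passing the "url" field returned by POST /queue/request.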