From 5aae0e4bb0a34e69840d947332372bb96fbbb2bf Mon Sep 17 00:00:00 2001
From: Ishaan Jaff
Date: Tue, 19 Nov 2024 12:39:33 -0800
Subject: [PATCH] test_provider_budgets_e2e_test_expect_to_fail

---
 tests/local_testing/test_provider_budgets.py | 45 ++++++++++++++++++++
 1 file changed, 45 insertions(+)

diff --git a/tests/local_testing/test_provider_budgets.py b/tests/local_testing/test_provider_budgets.py
index 40630c130..a26302e6d 100644
--- a/tests/local_testing/test_provider_budgets.py
+++ b/tests/local_testing/test_provider_budgets.py
@@ -82,3 +82,48 @@ async def test_provider_budgets_e2e_test():
         await asyncio.sleep(0.5)
 
     assert response._hidden_params.get("custom_llm_provider") == "azure"
+
+
+@pytest.mark.asyncio
+async def test_provider_budgets_e2e_test_expect_to_fail():
+    """
+    Expected behavior:
+    - first request passes, all subsequent requests fail
+
+    """
+    provider_budget_config: ProviderBudgetConfigType = {
+        "anthropic": ProviderBudgetInfo(time_period="1d", budget_limit=0.000000000001),
+    }
+
+    router = Router(
+        routing_strategy="provider-budget-routing",
+        routing_strategy_args=provider_budget_config,
+        model_list=[
+            {
+                "model_name": "anthropic/*",  # wildcard route for all Anthropic models
+                "litellm_params": {
+                    "model": "anthropic/*",
+                },
+            },
+        ],
+    )
+
+    response = await router.acompletion(
+        messages=[{"role": "user", "content": "Hello, how are you?"}],
+        model="anthropic/claude-3-5-sonnet-20240620",
+    )
+    print(response)
+
+    await asyncio.sleep(0.5)
+
+    for _ in range(3):
+        with pytest.raises(Exception) as exc_info:
+            response = await router.acompletion(
+                messages=[{"role": "user", "content": "Hello, how are you?"}],
+                model="anthropic/claude-3-5-sonnet-20240620",
+            )
+            print(response)
+            print("response.hidden_params", response._hidden_params)
+
+        await asyncio.sleep(0.5)
+        # Verify the error is related to budget exceeded
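
The final added line is only a comment, so the test asserts that *some* exception is
raised but not that the failure is budget-related. A minimal sketch of such a check,
placed after the `with pytest.raises(Exception) as exc_info:` block, assuming the
budget-routing error text mentions the word "budget" (the exact message is an
assumption, not taken from this patch):

    # exc_info is the pytest.ExceptionInfo captured by pytest.raises above.
    error_text = str(exc_info.value)
    # Hedged check: match on a generic "budget" keyword rather than an exact,
    # unverified error string.
    assert "budget" in error_text.lower()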