diff --git a/litellm/tests/test_router.py b/litellm/tests/test_router.py
index f9d16a55b..6c5e8ee7d 100644
--- a/litellm/tests/test_router.py
+++ b/litellm/tests/test_router.py
@@ -942,27 +942,21 @@ def test_reading_openai_keys_os_environ():
 
 
 # test_reading_openai_keys_os_environ()
-# def test_router_timeout():
-#     model_list = [
-#         {
-#             "model_name": "gpt-3.5-turbo",
-#             "litellm_params": {
-#                 "model": "gpt-3.5-turbo",
-#                 "api_key": "os.environ/OPENAI_API_KEY",
-#                 "timeout": "os.environ/AZURE_TIMEOUT",
-#                 "stream_timeout": "os.environ/AZURE_STREAM_TIMEOUT",
-#                 "max_retries": "os.environ/AZURE_MAX_RETRIES",
-#             },
-#         }
-#     ]
-#     router = Router(model_list=model_list)
-#     messages = [{"role": "user", "content": "Hey, how's it going?"}]
-#     start_time = time.time()
-#     try:
-#         router.completion(
-#             model="gpt-3.5-turbo", messages=messages, max_tokens=500, timeout=1
-#         )
-#     except litellm.exceptions.Timeout as e:
-#         pass
-#     end_time = time.time()
-#     assert end_time - start_time < 1.1
+
+
+def test_router_anthropic_key_dynamic():
+    anthropic_api_key = os.environ.pop("ANTHROPIC_API_KEY")
+    model_list = [
+        {
+            "model_name": "anthropic-claude",
+            "litellm_params": {
+                "model": "claude-instant-1",
+                "api_key": anthropic_api_key,
+            },
+        }
+    ]
+
+    router = Router(model_list=model_list)
+    messages = [{"role": "user", "content": "Hey, how's it going?"}]
+    router.completion(model="anthropic-claude", messages=messages)
+    os.environ["ANTHROPIC_API_KEY"] = anthropic_api_key