From a7c31d54fa5192912b440d41f5f4ef69a6600616 Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Wed, 15 Nov 2023 17:40:15 -0800
Subject: [PATCH] (fix) testing model alias map

---
 litellm/tests/test_model_alias_map.py | 30 ++++++++++++++++++------------
 1 file changed, 18 insertions(+), 12 deletions(-)

diff --git a/litellm/tests/test_model_alias_map.py b/litellm/tests/test_model_alias_map.py
index 80f70ba41..1b743b68c 100644
--- a/litellm/tests/test_model_alias_map.py
+++ b/litellm/tests/test_model_alias_map.py
@@ -9,23 +9,29 @@ sys.path.insert(
 )  # Adds the parent directory to the system path
 import litellm
 from litellm import embedding, completion
+import pytest
 
 litellm.set_verbose = True
 
 model_alias_map = {
-    "llama2": "replicate/llama-2-70b-chat:2796ee9483c3fd7aa2e171d38f4ca12251a30609463dcfd4cd76703f22e96cdf"
+    "good-model": "anyscale/meta-llama/Llama-2-7b-chat-hf"
 }
 
 litellm.model_alias_map = model_alias_map
 
-try:
-    completion(
-        "llama2",
-        messages=[{"role": "user", "content": "Hey, how's it going?"}],
-        top_p=0.1,
-        temperature=0.01,
-        num_beams=4,
-        max_tokens=60,
-    )
-except Exception as e:
-    print(e)
+def test_model_alias_map():
+    try:
+        response = completion(
+            "good-model",
+            messages=[{"role": "user", "content": "Hey, how's it going?"}],
+            top_p=0.1,
+            temperature=0.01,
+            max_tokens=60,
+        )
+        print(response.model)
+        assert "Llama-2-7b-chat-hf" in response.model
+    except Exception as e:
+        pytest.fail(f"Error occurred: {e}")
+
+
+test_model_alias_map()
\ No newline at end of file
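
A minimal standalone sketch of the aliasing behavior this test exercises,
assuming an ANYSCALE_API_KEY is set in the environment; the alias name
"my-alias" is illustrative and not taken from the patch:

    import litellm
    from litellm import completion

    # Map a friendly alias to a fully qualified provider/model string.
    litellm.model_alias_map = {
        "my-alias": "anyscale/meta-llama/Llama-2-7b-chat-hf"
    }

    # Callers pass the alias; litellm resolves it to the mapped model before
    # dispatching the request, so response.model reports the real model name.
    # That is what the assert added in this patch checks.
    response = completion(
        "my-alias",
        messages=[{"role": "user", "content": "Hello!"}],
        max_tokens=16,
    )
    print(response.model)  # expected to contain "Llama-2-7b-chat-hf"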