From 02e97acefa33aff85560e738921dd9d92ec1e16e Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Sat, 14 Oct 2023 16:47:23 -0700
Subject: [PATCH] (test) deep infra testing for mistral

---
 litellm/tests/test_completion.py | 26 ++++++++++++++++++++++++--
 1 file changed, 24 insertions(+), 2 deletions(-)

diff --git a/litellm/tests/test_completion.py b/litellm/tests/test_completion.py
index adb6a4be0..1694f2ae3 100644
--- a/litellm/tests/test_completion.py
+++ b/litellm/tests/test_completion.py
@@ -129,7 +129,7 @@ def test_completion_perplexity_api():
     except Exception as e:
         pytest.fail(f"Error occurred: {e}")
 
-test_completion_perplexity_api()
+# test_completion_perplexity_api() # commenting out as this is a flaky test on circle ci
 
 # def test_completion_nlp_cloud():
 #     try:
@@ -1145,13 +1145,35 @@ def test_completion_deep_infra():
     # litellm.set_verbose = True
     model_name = "deepinfra/meta-llama/Llama-2-70b-chat-hf"
     try:
-        response = completion(model=model_name, messages=messages)
+        response = completion(
+            model=model_name,
+            messages=messages,
+            temperature=0,
+            max_tokens=10
+        )
         # Add any assertions here to check the response
         print(response)
         print(response.response_ms)
     except Exception as e:
         pytest.fail(f"Error occurred: {e}")
 # test_completion_deep_infra()
+def test_completion_deep_infra_mistral():
+    print("deep infra test with temp=0")
+    model_name = "deepinfra/mistralai/Mistral-7B-Instruct-v0.1"
+    try:
+        response = completion(
+            model=model_name,
+            messages=messages,
+            temperature=0,  # mistral fails with temperature 0.001
+            max_tokens=10
+        )
+        # Add any assertions here to check the response
+        print(response)
+        print(response.response_ms)
+    except Exception as e:
+        pytest.fail(f"Error occurred: {e}")
+# test_completion_deep_infra_mistral()
+
 # Palm tests
 def test_completion_palm():
     # litellm.set_verbose = True
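
A minimal usage sketch, not part of the patch: the new test exercises the DeepInfra-hosted Mistral model through litellm's completion() call. Assuming litellm is installed, the DeepInfra key is exported in the environment (typically as DEEPINFRA_API_KEY), and a messages list like the one defined at the top of test_completion.py, the same call can be reproduced standalone:

    # Standalone sketch of the call the new test makes.
    # The messages content here is illustrative; the key is assumed to be
    # set in the environment before running.
    from litellm import completion

    messages = [{"role": "user", "content": "Hello, how are you?"}]

    response = completion(
        model="deepinfra/mistralai/Mistral-7B-Instruct-v0.1",
        messages=messages,
        temperature=0,   # per the patch comment, mistral fails with temperature 0.001
        max_tokens=10,
    )
    print(response)

The commented-out module-level calls (e.g. # test_completion_deep_infra_mistral()) follow the file's existing convention of keeping flaky or paid-API invocations opt-in; pytest still collects the test functions by name when run explicitly.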