diff --git a/litellm/tests/test_completion.py b/litellm/tests/test_completion.py
index adb6a4be0..1694f2ae3 100644
--- a/litellm/tests/test_completion.py
+++ b/litellm/tests/test_completion.py
@@ -129,7 +129,7 @@ def test_completion_perplexity_api():
     except Exception as e:
         pytest.fail(f"Error occurred: {e}")
 
-test_completion_perplexity_api()
+# test_completion_perplexity_api()  # commenting out as this is a flaky test on circle ci
 
 # def test_completion_nlp_cloud():
 #     try:
@@ -1145,13 +1145,35 @@ def test_completion_deep_infra():
     # litellm.set_verbose = True
     model_name = "deepinfra/meta-llama/Llama-2-70b-chat-hf"
     try:
-        response = completion(model=model_name, messages=messages)
+        response = completion(
+            model=model_name,
+            messages=messages,
+            temperature=0,
+            max_tokens=10
+        )
         # Add any assertions here to check the response
         print(response)
         print(response.response_ms)
     except Exception as e:
         pytest.fail(f"Error occurred: {e}")
 # test_completion_deep_infra()
+def test_completion_deep_infra_mistral():
+    print("deep infra test with temp=0")
+    model_name = "deepinfra/mistralai/Mistral-7B-Instruct-v0.1"
+    try:
+        response = completion(
+            model=model_name,
+            messages=messages,
+            temperature=0,  # mistral fails with temperature=0.001
+            max_tokens=10
+        )
+        # Add any assertions here to check the response
+        print(response)
+        print(response.response_ms)
+    except Exception as e:
+        pytest.fail(f"Error occurred: {e}")
+# test_completion_deep_infra_mistral()
+
 # Palm tests
 def test_completion_palm():
     # litellm.set_verbose = True