diff --git a/litellm/tests/test_text_completion.py b/litellm/tests/test_text_completion.py
index 2c69b6e87c..6ab2338cca 100644
--- a/litellm/tests/test_text_completion.py
+++ b/litellm/tests/test_text_completion.py
@@ -103,6 +103,27 @@ def test_completion_text_003_prompt_array():
 # test_text_completion_with_proxy()
 
 ##### hugging face tests
+def test_completion_hf_prompt_array_echo():
+    try:
+        litellm.set_verbose=False
+        print("\n testing hf mistral\n")
+        response = text_completion(
+            model="huggingface/mistralai/Mistral-7B-v0.1",
+            prompt=token_prompt, # token prompt is a 2d list,
+            max_tokens=0,
+            temperature=0.0,
+            echo=True,
+        )
+        print("\n\n response")
+
+        print(response)
+        print(response.choices)
+        assert(len(response.choices)==2)
+        # response_str = response["choices"][0]["text"]
+    except Exception as e:
+        pytest.fail(f"Error occurred: {e}")
+test_completion_hf_prompt_array_echo()
+
 def test_completion_hf_prompt_array():
     try:
         litellm.set_verbose=False
diff --git a/pyproject.toml b/pyproject.toml
index ff7d9f2faf..3ee91b8dcb 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.13.7"
+version = "0.13.8"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"
@@ -26,7 +26,7 @@ requires = ["poetry-core"]
 build-backend = "poetry.core.masonry.api"
 
 [tool.commitizen]
-version = "0.13.7"
+version = "0.13.8"
 version_files = [
     "pyproject.toml:^version"
 ]