diff --git a/dist/litellm-0.1.781-py3-none-any.whl b/dist/litellm-0.1.781-py3-none-any.whl
new file mode 100644
index 0000000000..fca74e797d
Binary files /dev/null and b/dist/litellm-0.1.781-py3-none-any.whl differ
diff --git a/dist/litellm-0.1.781.tar.gz b/dist/litellm-0.1.781.tar.gz
new file mode 100644
index 0000000000..08354c9ba9
Binary files /dev/null and b/dist/litellm-0.1.781.tar.gz differ
diff --git a/litellm/__pycache__/main.cpython-311.pyc b/litellm/__pycache__/main.cpython-311.pyc
index c3ec8b87e6..9773545ee7 100644
Binary files a/litellm/__pycache__/main.cpython-311.pyc and b/litellm/__pycache__/main.cpython-311.pyc differ
diff --git a/litellm/__pycache__/utils.cpython-311.pyc b/litellm/__pycache__/utils.cpython-311.pyc
index ec29fd6b83..24d6ebe36c 100644
Binary files a/litellm/__pycache__/utils.cpython-311.pyc and b/litellm/__pycache__/utils.cpython-311.pyc differ
diff --git a/litellm/main.py b/litellm/main.py
index 2ce471902a..20e554c768 100644
--- a/litellm/main.py
+++ b/litellm/main.py
@@ -274,7 +274,7 @@ def completion(
         top_k=top_k,
         task=task,
         remove_input=remove_input,
-        return_full_text=return_full_text
+        return_full_text=return_full_text,
     )
     # For logging - save the values of the litellm-specific params passed in
     litellm_params = get_litellm_params(
diff --git a/litellm/tests/test_completion.py b/litellm/tests/test_completion.py
index 424d8d7e43..79273a7a30 100644
--- a/litellm/tests/test_completion.py
+++ b/litellm/tests/test_completion.py
@@ -200,6 +200,22 @@ def hf_test_completion():
 
 # hf_test_completion()
 
+def hf_test_completion_task_none():
+    try:
+        litellm.set_verbose=True
+        user_message = "My name is Merve and my favorite"
+        messages = [{ "content": user_message,"role": "user"}]
+        model = "huggingface/cerebras/btlm-3b-8k-base"
+        response = completion(
+            model=model,
+            messages=messages,
+            task=None,
+        )
+        # Add any assertions here to check the response
+        print(response)
+    except Exception as e:
+        pytest.fail(f"Error occurred: {e}")
+hf_test_completion_task_none()
 
 # this should throw an exception, to trigger https://logs.litellm.ai/
 # def hf_test_error_logs():
diff --git a/litellm/utils.py b/litellm/utils.py
index 045b8931d1..2a1cbe0c2c 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -907,7 +907,7 @@ def get_optional_params(  # use the openai defaults
     custom_llm_provider="",
     top_k=40,
     return_full_text=False,
-    task=None
+    task=None,
 ):
     optional_params = {}
     if model in litellm.anthropic_models:
diff --git a/pyproject.toml b/pyproject.toml
index c01da3a957..804521f051 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.780"
+version = "0.1.782"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"
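For context, a minimal sketch of the usage the new test exercises: calling `litellm.completion()` for a Hugging Face model with `task=None` should succeed rather than raise. The model name and messages are taken directly from the test above; the environment setup (e.g. a Hugging Face API token) is an assumption and not part of this diff.

```python
# Sketch of the call path covered by hf_test_completion_task_none():
# completion() forwards task=None through get_optional_params(), which
# this diff updates (trailing-comma change to the task parameter).
import litellm
from litellm import completion

litellm.set_verbose = True  # surface request/response logs while debugging

messages = [{"content": "My name is Merve and my favorite", "role": "user"}]
response = completion(
    model="huggingface/cerebras/btlm-3b-8k-base",  # model from the new test
    messages=messages,
    task=None,  # explicitly unset; must not raise with this change
)
print(response)
```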