diff --git a/litellm/__pycache__/main.cpython-311.pyc b/litellm/__pycache__/main.cpython-311.pyc
index 6281df1f8..9a5063aa0
Binary files a/litellm/__pycache__/main.cpython-311.pyc and b/litellm/__pycache__/main.cpython-311.pyc differ
diff --git a/litellm/__pycache__/utils.cpython-311.pyc b/litellm/__pycache__/utils.cpython-311.pyc
index 9cef80ae0..5a3174ddd
Binary files a/litellm/__pycache__/utils.cpython-311.pyc and b/litellm/__pycache__/utils.cpython-311.pyc differ
diff --git a/litellm/tests/test_completion.py b/litellm/tests/test_completion.py
index c01b605d9..b091dc94b 100644
--- a/litellm/tests/test_completion.py
+++ b/litellm/tests/test_completion.py
@@ -726,7 +726,7 @@ def test_completion_replicate_vicuna():
 def test_completion_together_ai():
     model_name = "together_ai/togethercomputer/llama-2-70b-chat"
     try:
-        response = completion(model=model_name, messages=messages, max_tokens=256, logger_fn=logger_fn)
+        response = completion(model=model_name, messages=messages, max_tokens=256, n=1, logger_fn=logger_fn)
         # Add any assertions here to check the response
         print(response)
         cost = completion_cost(completion_response=response)
diff --git a/litellm/utils.py b/litellm/utils.py
index 122659829..97bf3d2a5 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -980,7 +980,13 @@ def get_optional_params(  # use the openai defaults
     print_verbose(f"checking params for {model}")
     print_verbose(f"params passed in {passed_params}")
     print_verbose(f"non-default params passed in {non_default_params}")
-    unsupported_params = [k for k in non_default_params.keys() if k not in supported_params]
+    unsupported_params = []
+    for k in non_default_params.keys():
+        if k not in supported_params:
+            if k == "n" and n == 1:  # langchain sends n=1 as a default value
+                pass
+            else:
+                unsupported_params.append(k)
     if unsupported_params:
         raise ValueError("LiteLLM.Exception: Unsupported parameters passed: {}".format(', '.join(unsupported_params)))
diff --git a/pyproject.toml b/pyproject.toml
index 1bc0aa326..ee9f5cdec 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.816"
+version = "0.1.817"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"
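
For context, the utils.py change special-cases n=1 in get_optional_params() so that langchain's default of n=1 no longer trips the unsupported-parameters check for providers (such as together_ai) whose supported_params list does not include "n". Below is a minimal standalone sketch of that filtering behavior, not the full function: the supported_params list here is illustrative only, since each provider defines its own list inside get_optional_params().

    # Sketch of the new unsupported-params filtering (assumed, simplified inputs).
    supported_params = ["messages", "max_tokens", "temperature", "top_p", "stream"]
    non_default_params = {"max_tokens": 256, "n": 1}  # what langchain sends by default
    n = non_default_params.get("n")

    unsupported_params = []
    for k in non_default_params.keys():
        if k not in supported_params:
            if k == "n" and n == 1:  # langchain sends n=1 as a default value
                pass  # silently ignore instead of raising
            else:
                unsupported_params.append(k)

    if unsupported_params:
        raise ValueError("LiteLLM.Exception: Unsupported parameters passed: {}".format(", ".join(unsupported_params)))

    print(unsupported_params)  # [] -- n=1 is tolerated for providers without "n"

Note the skip is keyed on both the parameter name and its value: an explicit n=2 against a provider that does not support "n" still raises, so the safety check is preserved for genuinely unsupported requests. The test_completion.py change exercises exactly this path by passing n=1 to a together_ai model.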