diff --git a/litellm/__pycache__/utils.cpython-311.pyc b/litellm/__pycache__/utils.cpython-311.pyc
index d4e524aeb0..6459cd2d8a 100644
Binary files a/litellm/__pycache__/utils.cpython-311.pyc and b/litellm/__pycache__/utils.cpython-311.pyc differ
diff --git a/litellm/tests/test_bad_params.py b/litellm/tests/test_bad_params.py
index 9d3d65d411..b563b02af0 100644
--- a/litellm/tests/test_bad_params.py
+++ b/litellm/tests/test_bad_params.py
@@ -47,13 +47,14 @@ def test_completion_return_full_text_hf():
 def test_completion_invalid_param_cohere():
     try:
         response = completion(model="command-nightly", messages=messages, top_p=1)
+        print(f"response: {response}")
     except Exception as e:
         if "Unsupported parameters passed: top_p" in str(e):
             pass
         else:
             pytest.fail(f'An error occurred {e}')
 
-# test_completion_invalid_param_cohere()
+test_completion_invalid_param_cohere()
 
 def test_completion_function_call_cohere():
     try:
diff --git a/litellm/utils.py b/litellm/utils.py
index cd48ab7180..b7995863dd 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -1003,7 +1003,7 @@ def get_optional_params(  # use the openai defaults
         optional_params["max_tokens_to_sample"] = max_tokens
     elif custom_llm_provider == "cohere":
         ## check if unsupported param passed in
-        supported_params = ["stream", "temperature", "max_tokens", "logit_bias"]
+        supported_params = ["stream", "temperature", "max_tokens", "logit_bias", "top_p"]
         _check_valid_arg(supported_params=supported_params)
         # handle cohere params
         if stream:
@@ -1014,6 +1014,8 @@ def get_optional_params(  # use the openai defaults
             optional_params["max_tokens"] = max_tokens
         if logit_bias != {}:
             optional_params["logit_bias"] = logit_bias
+        if top_p:
+            optional_params["p"] = top_p
     elif custom_llm_provider == "replicate":
         ## check if unsupported param passed in
         supported_params = ["stream", "temperature", "max_tokens", "top_p", "stop", "seed"]
diff --git a/pyproject.toml b/pyproject.toml
index 8fc37b24a8..eedbb81a75 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.814"
+version = "0.1.815"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"
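
For reviewers, a minimal sketch of the behavior this patch enables: `top_p` is now in Cohere's `supported_params`, so `get_optional_params` forwards it to the Cohere API as its native `p` nucleus-sampling parameter instead of raising "Unsupported parameters passed: top_p". The message contents and API-key handling below are illustrative assumptions, not part of the diff.

# Sketch of the new Cohere top_p support (not part of the diff).
# The API key value and message contents are placeholder assumptions.
import os
from litellm import completion

os.environ["COHERE_API_KEY"] = "<your-cohere-api-key>"  # placeholder

messages = [{"role": "user", "content": "Hey, how's it going?"}]

# Before this patch, passing top_p to a Cohere model raised
# "Unsupported parameters passed: top_p"; now it is mapped to
# Cohere's `p` parameter inside get_optional_params.
response = completion(model="command-nightly", messages=messages, top_p=0.9)
print(response)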