From 9024a47dc2f2a3d5f46a85994ee2b9bef44c8c29 Mon Sep 17 00:00:00 2001
From: Krrish Dholakia
Date: Wed, 29 Nov 2023 08:08:38 -0800
Subject: [PATCH] fix(utils.py): bedrock/cohere optional params

---
 litellm/utils.py | 12 ------------
 1 file changed, 12 deletions(-)

diff --git a/litellm/utils.py b/litellm/utils.py
index 7d60480eac..95fa27439e 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -2222,18 +2222,6 @@ def get_optional_params(  # use the openai defaults
             optional_params["temperature"] = temperature
         if max_tokens is not None:
             optional_params["max_tokens"] = max_tokens
-        if n is not None:
-            optional_params["num_generations"] = n
-        if logit_bias is not None:
-            optional_params["logit_bias"] = logit_bias
-        if top_p is not None:
-            optional_params["p"] = top_p
-        if frequency_penalty is not None:
-            optional_params["frequency_penalty"] = frequency_penalty
-        if presence_penalty is not None:
-            optional_params["presence_penalty"] = presence_penalty
-        if stop is not None:
-            optional_params["stop_sequences"] = stop
     elif custom_llm_provider == "aleph_alpha":
         supported_params = ["max_tokens", "stream", "top_p", "temperature", "presence_penalty", "frequency_penalty", "n", "stop"]
         _check_valid_arg(supported_params=supported_params)