From 535a547b669703c88dd6bb8d9957f20daa690b0e Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Sat, 30 Dec 2023 12:00:14 +0530
Subject: [PATCH] (fix) use cloudflare optional params

---
 litellm/utils.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/litellm/utils.py b/litellm/utils.py
index afd544c6b..1dc2b8470 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -3419,14 +3419,13 @@ def get_optional_params(
             optional_params["n"] = n
         if stop is not None:
             optional_params["stop_sequences"] = stop
-    elif (
-        custom_llm_provider == "cloudlfare"
-    ):  # https://developers.cloudflare.com/workers-ai/models/text-generation/#input
+    elif custom_llm_provider == "cloudflare":
+        # https://developers.cloudflare.com/workers-ai/models/text-generation/#input
         supported_params = ["max_tokens", "stream"]
         _check_valid_arg(supported_params=supported_params)
         if max_tokens is not None:
-            optional_params["max_tokens"] = temperature
+            optional_params["max_tokens"] = max_tokens
         if stream is not None:
             optional_params["stream"] = stream
     elif custom_llm_provider == "ollama":