diff --git a/litellm/utils.py b/litellm/utils.py
index afd544c6b..1dc2b8470 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -3419,14 +3419,13 @@ def get_optional_params(
             optional_params["n"] = n
         if stop is not None:
            optional_params["stop_sequences"] = stop
-    elif (
-        custom_llm_provider == "cloudlfare"
-    ):  # https://developers.cloudflare.com/workers-ai/models/text-generation/#input
+    elif custom_llm_provider == "cloudflare":
+        # https://developers.cloudflare.com/workers-ai/models/text-generation/#input
         supported_params = ["max_tokens", "stream"]
         _check_valid_arg(supported_params=supported_params)
 
         if max_tokens is not None:
-            optional_params["max_tokens"] = temperature
+            optional_params["max_tokens"] = max_tokens
         if stream is not None:
             optional_params["stream"] = stream
     elif custom_llm_provider == "ollama":
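
For context, a minimal sketch of a call that exercises the corrected branch. It assumes the "cloudflare/" model prefix and the CLOUDFLARE_API_KEY / CLOUDFLARE_ACCOUNT_ID environment variables used by litellm's Cloudflare Workers AI provider; the model name is illustrative, not taken from this diff.

    import litellm

    # Illustrative Workers AI model; any "cloudflare/" model routes through the
    # cloudflare branch of get_optional_params() patched above.
    response = litellm.completion(
        model="cloudflare/@cf/meta/llama-2-7b-chat-int8",
        messages=[{"role": "user", "content": "Say hello"}],
        max_tokens=64,  # before the fix, the value of `temperature` was sent as max_tokens
        stream=False,
    )
    print(response)

With the typo fixed, requests using the "cloudflare" provider no longer fall through to the generic else branch, and max_tokens is forwarded with its own value instead of the temperature value.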