From dde6bc4fb6d2a7b8711a5de277a5ef5c99a5c2de Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Fri, 29 Dec 2023 11:47:58 +0530
Subject: [PATCH] (feat) cloudflare - add optional params

---
 litellm/llms/cloudflare.py |  2 ++
 litellm/utils.py           | 10 ++++++++++
 2 files changed, 12 insertions(+)

diff --git a/litellm/llms/cloudflare.py b/litellm/llms/cloudflare.py
index 9f1c390e6..a9e60bb7e 100644
--- a/litellm/llms/cloudflare.py
+++ b/litellm/llms/cloudflare.py
@@ -23,10 +23,12 @@ class CloudflareError(Exception):
 
 class CloudflareConfig:
     max_tokens: Optional[int] = None
+    stream: Optional[bool] = None
 
     def __init__(
         self,
         max_tokens: Optional[int] = None,
+        stream: Optional[bool] = None,
     ) -> None:
         locals_ = locals()
         for key, value in locals_.items():
diff --git a/litellm/utils.py b/litellm/utils.py
index 4a983f9be..a1f074931 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -3398,6 +3398,16 @@ def get_optional_params(
             optional_params["n"] = n
         if stop is not None:
             optional_params["stop_sequences"] = stop
+    elif (
+        custom_llm_provider == "cloudflare"
+    ):  # https://developers.cloudflare.com/workers-ai/models/text-generation/#input
+        supported_params = ["max_tokens", "stream"]
+        _check_valid_arg(supported_params=supported_params)
+
+        if max_tokens is not None:
+            optional_params["max_tokens"] = max_tokens
+        if stream is not None:
+            optional_params["stream"] = stream
     elif custom_llm_provider == "ollama":
         supported_params = [
             "max_tokens",
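
For reviewers, a minimal usage sketch of what this patch enables, not part of the diff itself: with these changes, callers can pass max_tokens and stream for the cloudflare provider and get_optional_params() will validate and forward them. The model identifier and CLOUDFLARE_* environment variables below follow litellm's general provider conventions and are assumptions here, not taken from this patch.

# Usage sketch, assuming this patch is applied.
# The model id format and CLOUDFLARE_* environment variables are
# assumptions based on litellm's provider conventions, not part of this diff.
import os
import litellm

os.environ["CLOUDFLARE_API_KEY"] = "..."    # assumed auth variable
os.environ["CLOUDFLARE_ACCOUNT_ID"] = "..." # assumed account variable

# max_tokens and stream are the two optional params this patch wires
# through get_optional_params() for the cloudflare provider.
response = litellm.completion(
    model="cloudflare/@cf/meta/llama-2-7b-chat-int8",  # assumed model id
    messages=[{"role": "user", "content": "Hello from Workers AI"}],
    max_tokens=64,
    stream=True,
)
for chunk in response:
    print(chunk)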