(feat) cloudflare - add optional params

This commit is contained in:
ishaan-jaff 2023-12-29 11:47:58 +05:30
parent 3d7b1b9468
commit dde6bc4fb6
2 changed files with 12 additions and 0 deletions

View file

@@ -23,10 +23,12 @@ class CloudflareError(Exception):
 class CloudflareConfig:
     max_tokens: Optional[int] = None
+    stream: Optional[bool] = None
     def __init__(
         self,
         max_tokens: Optional[int] = None,
+        stream: Optional[bool] = None,
     ) -> None:
         locals_ = locals()
         for key, value in locals_.items():

View file

@@ -3398,6 +3398,16 @@ def get_optional_params(
             optional_params["n"] = n
         if stop is not None:
             optional_params["stop_sequences"] = stop
    elif (
        custom_llm_provider == "cloudflare"
    ):  # https://developers.cloudflare.com/workers-ai/models/text-generation/#input
supported_params = ["max_tokens", "stream"]
_check_valid_arg(supported_params=supported_params)
if max_tokens is not None:
optional_params["max_tokens"] = temperature
if stream is not None:
optional_params["stream"] = stream
    elif custom_llm_provider == "ollama":
        supported_params = [
            "max_tokens",