forked from phoenix/litellm-mirror
(feat) cloudflare - add optional params
This commit is contained in:
parent
3d7b1b9468
commit
dde6bc4fb6
2 changed files with 12 additions and 0 deletions
|
@ -23,10 +23,12 @@ class CloudflareError(Exception):
|
||||||
|
|
||||||
class CloudflareConfig:
|
class CloudflareConfig:
|
||||||
max_tokens: Optional[int] = None
|
max_tokens: Optional[int] = None
|
||||||
|
stream: Optional[bool] = None
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
max_tokens: Optional[int] = None,
|
max_tokens: Optional[int] = None,
|
||||||
|
stream: Optional[bool] = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
locals_ = locals()
|
locals_ = locals()
|
||||||
for key, value in locals_.items():
|
for key, value in locals_.items():
|
||||||
|
|
|
@ -3398,6 +3398,16 @@ def get_optional_params(
|
||||||
optional_params["n"] = n
|
optional_params["n"] = n
|
||||||
if stop is not None:
|
if stop is not None:
|
||||||
optional_params["stop_sequences"] = stop
|
optional_params["stop_sequences"] = stop
|
||||||
|
elif (
|
||||||
|
custom_llm_provider == "cloudflare"
|
||||||
|
): # https://developers.cloudflare.com/workers-ai/models/text-generation/#input
|
||||||
|
supported_params = ["max_tokens", "stream"]
|
||||||
|
_check_valid_arg(supported_params=supported_params)
|
||||||
|
|
||||||
|
if max_tokens is not None:
|
||||||
|
optional_params["max_tokens"] = max_tokens
|
||||||
|
if stream is not None:
|
||||||
|
optional_params["stream"] = stream
|
||||||
elif custom_llm_provider == "ollama":
|
elif custom_llm_provider == "ollama":
|
||||||
supported_params = [
|
supported_params = [
|
||||||
"max_tokens",
|
"max_tokens",
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue