forked from phoenix/litellm-mirror
(fix) bedrock meta llama optional params
parent 23d560071b
commit 9e072f87bd
1 changed file with 3 additions and 3 deletions
@@ -2048,11 +2048,11 @@ def get_optional_params(  # use the openai defaults
         supported_params = ["max_tokens", "temperature", "top_p", "stream"]
         _check_valid_arg(supported_params=supported_params)
         # see https://us-west-2.console.aws.amazon.com/bedrock/home?region=us-west-2#/providers?model=titan-large
-        if max_tokens:
+        if max_tokens is not None:
             optional_params["max_gen_len"] = max_tokens
-        if temperature:
+        if temperature is not None:
             optional_params["temperature"] = temperature
-        if top_p:
+        if top_p is not None:
             optional_params["top_p"] = top_p
         if stream:
             optional_params["stream"] = stream
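The fix matters because 0 and 0.0 are falsy in Python: with the old `if temperature:` check, an explicit `temperature=0.0` (deterministic sampling) would be silently dropped from `optional_params`, and the provider default would apply instead. A minimal standalone sketch of the failure mode, using hypothetical `build_params_old`/`build_params_new` helpers modeled on the diff (not litellm's actual `get_optional_params`):

def build_params_old(max_tokens=None, temperature=None, top_p=None):
    # Old behavior: truthiness checks silently drop falsy-but-valid values.
    optional_params = {}
    if max_tokens:
        optional_params["max_gen_len"] = max_tokens
    if temperature:
        optional_params["temperature"] = temperature
    if top_p:
        optional_params["top_p"] = top_p
    return optional_params

def build_params_new(max_tokens=None, temperature=None, top_p=None):
    # Fixed behavior: only skip parameters the caller did not pass at all.
    optional_params = {}
    if max_tokens is not None:
        optional_params["max_gen_len"] = max_tokens
    if temperature is not None:
        optional_params["temperature"] = temperature
    if top_p is not None:
        optional_params["top_p"] = top_p
    return optional_params

# temperature=0.0 is a valid setting, but it is falsy:
print(build_params_old(temperature=0.0))  # {}  -- the value is lost
print(build_params_new(temperature=0.0))  # {'temperature': 0.0}

Note that the diff deliberately leaves `if stream:` as a truthiness check: `stream` is a boolean flag, so the two checks only diverge for `stream=False`, where omitting the key has the same effect as sending it.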