Added support for Bedrock Llama models

This commit is contained in:
David Christian 2023-11-13 15:41:21 -08:00
parent d4de55b053
commit fe7e3ff038
4 changed files with 68 additions and 0 deletions

View file

@ -1875,6 +1875,18 @@ def get_optional_params( # use the openai defaults
optional_params["topP"] = top_p
if stream:
optional_params["stream"] = stream
elif "meta" in model: # amazon / meta llms
supported_params = ["max_tokens", "temperature", "top_p", "stream"]
_check_valid_arg(supported_params=supported_params)
            # see https://us-west-2.console.aws.amazon.com/bedrock/home?region=us-west-2#/providers?model=meta.llama2-13b-chat-v1
if max_tokens:
optional_params["max_gen_len"] = max_tokens
if temperature:
optional_params["temperature"] = temperature
if top_p:
optional_params["top_p"] = top_p
if stream:
optional_params["stream"] = stream
elif "cohere" in model: # cohere models on bedrock
supported_params = ["stream", "temperature", "max_tokens", "logit_bias", "top_p", "frequency_penalty", "presence_penalty", "stop"]
_check_valid_arg(supported_params=supported_params)