mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 10:44:24 +00:00)

support optional params for bedrock amazon

commit 29e3b4fdd2 (parent e5fff9bada)
3 changed files with 19 additions and 9 deletions
```diff
@@ -931,6 +931,21 @@ def get_optional_params( # use the openai defaults
             optional_params["temperature"] = temperature
         if top_p != 1:
             optional_params["top_p"] = top_p
+    elif custom_llm_provider == "bedrock":
+        if "ai21" in model or "anthropic" in model:
+            pass
+
+        elif "amazon" in model:  # amazon titan llms
+            # see https://us-west-2.console.aws.amazon.com/bedrock/home?region=us-west-2#/providers?model=titan-large
+            if max_tokens != float("inf"):
+                optional_params["maxTokenCount"] = max_tokens
+            if temperature != 1:
+                optional_params["temperature"] = temperature
+            if stop != None:
+                optional_params["stopSequences"] = stop
+            if top_p != 1:
+                optional_params["topP"] = top_p
+
     elif model in litellm.aleph_alpha_models:
         if max_tokens != float("inf"):
             optional_params["maximum_tokens"] = max_tokens
```
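For reference, a minimal usage sketch of how the OpenAI-style arguments flow through this new branch when calling an Amazon Titan model via LiteLLM. The Titan model id and the AWS credential setup are assumptions for illustration, not part of this commit:

```python
import litellm

# Minimal sketch: assumes AWS credentials are already configured in the
# environment (e.g. AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY / AWS_REGION_NAME).
# The OpenAI-style arguments below are remapped for Amazon Titan by the
# branch added in this commit:
#   max_tokens -> maxTokenCount, stop -> stopSequences, top_p -> topP
response = litellm.completion(
    model="amazon.titan-tg1-large",  # hypothetical Titan model id
    custom_llm_provider="bedrock",
    messages=[{"role": "user", "content": "Write a one-line greeting."}],
    max_tokens=256,
    temperature=0.7,
    top_p=0.9,
    stop=["User:"],
)
print(response)
```

Note that `ai21` and `anthropic` models deliberately fall through (`pass`) here, so only the Titan-specific parameter names are remapped by this change.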