From 64adae6e7fd57a89e7c4693d833c705e169ac579 Mon Sep 17 00:00:00 2001
From: David Manouchehri
Date: Thu, 25 Jul 2024 21:06:58 +0000
Subject: [PATCH] Check for converse support first.

---
 litellm/utils.py | 25 +++++++++++++------------
 1 file changed, 13 insertions(+), 12 deletions(-)

diff --git a/litellm/utils.py b/litellm/utils.py
index e104de958..a597643a6 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -3121,7 +3121,19 @@ def get_optional_params(
         supported_params = get_supported_openai_params(
             model=model, custom_llm_provider=custom_llm_provider
         )
-        if "ai21" in model:
+        if model in litellm.BEDROCK_CONVERSE_MODELS:
+            _check_valid_arg(supported_params=supported_params)
+            optional_params = litellm.AmazonConverseConfig().map_openai_params(
+                model=model,
+                non_default_params=non_default_params,
+                optional_params=optional_params,
+                drop_params=(
+                    drop_params
+                    if drop_params is not None and isinstance(drop_params, bool)
+                    else False
+                ),
+            )
+        elif "ai21" in model:
             _check_valid_arg(supported_params=supported_params)
             # params "maxTokens":200,"temperature":0,"topP":250,"stop_sequences":[],
             # https://us-west-2.console.aws.amazon.com/bedrock/home?region=us-west-2#/providers?model=j2-ultra
@@ -3143,17 +3155,6 @@
                     optional_params=optional_params,
                 )
             )
-        elif model in litellm.BEDROCK_CONVERSE_MODELS:
-            optional_params = litellm.AmazonConverseConfig().map_openai_params(
-                model=model,
-                non_default_params=non_default_params,
-                optional_params=optional_params,
-                drop_params=(
-                    drop_params
-                    if drop_params is not None and isinstance(drop_params, bool)
-                    else False
-                ),
-            )
         else:
             optional_params = litellm.AmazonAnthropicConfig().map_openai_params(
                 non_default_params=non_default_params,
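
Note on why the ordering matters. The Bedrock branch of get_optional_params dispatches through a chain of substring checks ("ai21" in model, etc.) ending in a catch-all else that routes to AmazonAnthropicConfig, while litellm.BEDROCK_CONVERSE_MODELS is an exact-membership list. Testing that list after the substring checks lets an earlier branch shadow a converse-capable model and send it to a legacy config; this patch tests converse support first and also adds the _check_valid_arg() call the converse branch was previously missing. The sketch below is a toy reproduction of the shadowing, not litellm code: the model ID and the two-entry model set are illustrative assumptions, not the actual contents of BEDROCK_CONVERSE_MODELS.

    # Toy reproduction of the dispatch-order bug (illustrative names only).
    BEDROCK_CONVERSE_MODELS = {
        "ai21.jamba-instruct-v1:0",  # assumed converse-capable for this demo
        "anthropic.claude-3-sonnet-20240229-v1:0",
    }

    def pick_config_old(model: str) -> str:
        # Old order: the substring check fires first, so a converse-capable
        # AI21 model never reaches the converse branch.
        if "ai21" in model:
            return "AmazonAI21Config"
        elif model in BEDROCK_CONVERSE_MODELS:
            return "AmazonConverseConfig"
        return "AmazonAnthropicConfig"

    def pick_config_new(model: str) -> str:
        # Patched order: exact membership in the converse list is checked
        # before any provider-substring fallback can shadow it.
        if model in BEDROCK_CONVERSE_MODELS:
            return "AmazonConverseConfig"
        elif "ai21" in model:
            return "AmazonAI21Config"
        return "AmazonAnthropicConfig"

    assert pick_config_old("ai21.jamba-instruct-v1:0") == "AmazonAI21Config"
    assert pick_config_new("ai21.jamba-instruct-v1:0") == "AmazonConverseConfig"

The general design point: exact-membership tests should run before substring heuristics and catch-all fallbacks, since the heuristics cannot distinguish converse-capable models from legacy ones sharing a provider prefix.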