From 97cf32630d77b02473c38aea035fc3c013fa1889 Mon Sep 17 00:00:00 2001
From: Ravi N
Date: Mon, 12 Aug 2024 21:09:50 -0400
Subject: [PATCH] remove aws_sagemaker_allow_zero_temp from the parameters
 passed to inference

---
 litellm/utils.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/litellm/utils.py b/litellm/utils.py
index 46bedb274..29ad99545 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -3134,6 +3134,7 @@ def get_optional_params(
             if max_tokens == 0:
                 max_tokens = 1
             optional_params["max_new_tokens"] = max_tokens
+        passed_params.pop("aws_sagemaker_allow_zero_temp", None)
     elif custom_llm_provider == "bedrock":
         supported_params = get_supported_openai_params(
             model=model, custom_llm_provider=custom_llm_provider