remove aws_sagemaker_allow_zero_temp from the parameters passed to inference

Ravi N 2024-08-12 21:09:50 -04:00
parent b321f2988b
commit 97cf32630d


@@ -3134,6 +3134,7 @@ def get_optional_params(
             if max_tokens == 0:
                 max_tokens = 1
             optional_params["max_new_tokens"] = max_tokens
+        passed_params.pop("aws_sagemaker_allow_zero_temp", None)
     elif custom_llm_provider == "bedrock":
         supported_params = get_supported_openai_params(
             model=model, custom_llm_provider=custom_llm_provider
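
For context, here is a minimal sketch of why the pop matters. aws_sagemaker_allow_zero_temp is a litellm-side control flag that gates the temperature clamp applied for SageMaker-hosted TGI models; once it has been consulted, it should be stripped from passed_params rather than forwarded to the endpoint as an inference parameter. The helper below (build_sagemaker_params) is illustrative only, not the verbatim litellm function this diff patches.

# Illustrative sketch only; build_sagemaker_params is a hypothetical helper,
# not the actual litellm code path.
def build_sagemaker_params(passed_params: dict) -> dict:
    optional_params: dict = {}

    temperature = passed_params.get("temperature")
    if temperature is not None:
        # SageMaker-hosted HF TGI models reject temperature == 0, so the
        # value is bumped unless the caller opts out via the control flag.
        if temperature == 0 and not passed_params.get(
            "aws_sagemaker_allow_zero_temp", False
        ):
            temperature = 0.01
        optional_params["temperature"] = temperature

    max_tokens = passed_params.get("max_tokens")
    if max_tokens is not None:
        # TGI likewise rejects max_new_tokens == 0.
        if max_tokens == 0:
            max_tokens = 1
        optional_params["max_new_tokens"] = max_tokens

    # The change in this commit: the flag is control data, not a model
    # parameter, so drop it before the request payload is assembled.
    passed_params.pop("aws_sagemaker_allow_zero_temp", None)
    return optional_params

params = {"temperature": 0, "max_tokens": 0, "aws_sagemaker_allow_zero_temp": True}
print(build_sagemaker_params(params))  # {'temperature': 0, 'max_new_tokens': 1}
print("aws_sagemaker_allow_zero_temp" in params)  # False

With aws_sagemaker_allow_zero_temp=True the zero temperature survives the clamp, and after the call the flag itself is gone from passed_params, matching the intent of this commit.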