mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-25 18:54:30 +00:00
remove aws_sagemaker_allow_zero_temp from the parameters passed to inference
This commit is contained in:
parent
b321f2988b
commit
97cf32630d
1 changed file with 1 addition and 0 deletions
|
@ -3134,6 +3134,7 @@ def get_optional_params(
|
|||
if max_tokens == 0:
|
||||
max_tokens = 1
|
||||
optional_params["max_new_tokens"] = max_tokens
|
||||
passed_params.pop("aws_sagemaker_allow_zero_temp", None)
|
||||
elif custom_llm_provider == "bedrock":
|
||||
supported_params = get_supported_openai_params(
|
||||
model=model, custom_llm_provider=custom_llm_provider
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue