Add Claude max_tokens_to_sample parameter

This commit is contained in:
ishaan-jaff 2023-09-22 20:57:52 -07:00
parent cd91312252
commit d12fadd032
2 changed files with 4 additions and 5 deletions

View file

@ -910,6 +910,8 @@ def get_optional_params( # use the openai defaults
optional_params["temperature"] = temperature
if top_p != 1:
optional_params["top_p"] = top_p
if max_tokens != float("inf"):
optional_params["max_tokens_to_sample"] = max_tokens
return optional_params
elif model in litellm.cohere_models:
# handle cohere params