add claude max_tokens_to_sample

This commit is contained in:
ishaan-jaff 2023-09-22 20:57:52 -07:00
parent cd91312252
commit d12fadd032
2 changed files with 4 additions and 5 deletions

View file

@@ -59,14 +59,11 @@ def completion(
else:
prompt += f"{AnthropicConstants.HUMAN_PROMPT.value}{message['content']}"
prompt += f"{AnthropicConstants.AI_PROMPT.value}"
if "max_tokens" in optional_params and optional_params["max_tokens"] != float("inf"):
max_tokens = optional_params["max_tokens"]
else:
max_tokens = 256 # required anthropic param, default to 256 if user does not provide an input
max_tokens_to_sample = optional_params.get("max_tokens_to_sample", 256) # required anthropic param, default to 256 if user does not provide an input
data = {
"model": model,
"prompt": prompt,
"max_tokens_to_sample": max_tokens,
"max_tokens_to_sample": max_tokens_to_sample,
**optional_params,
}

View file

@@ -910,6 +910,8 @@ def get_optional_params( # use the openai defaults
optional_params["temperature"] = temperature
if top_p != 1:
optional_params["top_p"] = top_p
if max_tokens != float("inf"):
optional_params["max_tokens_to_sample"] = max_tokens
return optional_params
elif model in litellm.cohere_models:
# handle cohere params