mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-25 02:34:29 +00:00
add claude max_tokens_to_sample
This commit is contained in:
parent
cd91312252
commit
d12fadd032
2 changed files with 4 additions and 5 deletions
|
@@ -59,14 +59,11 @@ def completion(
|
|||
else:
|
||||
prompt += f"{AnthropicConstants.HUMAN_PROMPT.value}{message['content']}"
|
||||
prompt += f"{AnthropicConstants.AI_PROMPT.value}"
|
||||
if "max_tokens" in optional_params and optional_params["max_tokens"] != float("inf"):
|
||||
max_tokens = optional_params["max_tokens"]
|
||||
else:
|
||||
max_tokens = 256 # required anthropic param, default to 256 if user does not provide an input
|
||||
max_tokens_to_sample = optional_params.get("max_tokens_to_sample", 256) # required anthropic param, default to 256 if user does not provide an input
|
||||
data = {
|
||||
"model": model,
|
||||
"prompt": prompt,
|
||||
"max_tokens_to_sample": max_tokens,
|
||||
"max_tokens_to_sample": max_tokens_to_sample,
|
||||
**optional_params,
|
||||
}
|
||||
|
||||
|
|
|
@@ -910,6 +910,8 @@ def get_optional_params( # use the openai defaults
|
|||
optional_params["temperature"] = temperature
|
||||
if top_p != 1:
|
||||
optional_params["top_p"] = top_p
|
||||
if max_tokens != float("inf"):
|
||||
optional_params["max_tokens_to_sample"] = max_tokens
|
||||
return optional_params
|
||||
elif model in litellm.cohere_models:
|
||||
# handle cohere params
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue