Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-25 02:34:29 +00:00
add claude max_tokens_to_sample

parent cd91312252, commit d12fadd032
2 changed files with 4 additions and 5 deletions
@@ -59,14 +59,11 @@ def completion(
             else:
                 prompt += f"{AnthropicConstants.HUMAN_PROMPT.value}{message['content']}"
         prompt += f"{AnthropicConstants.AI_PROMPT.value}"
-        if "max_tokens" in optional_params and optional_params["max_tokens"] != float("inf"):
-            max_tokens = optional_params["max_tokens"]
-        else:
-            max_tokens = 256 # required anthropic param, default to 256 if user does not provide an input
+        max_tokens_to_sample = optional_params.get("max_tokens_to_sample", 256) # required anthropic param, default to 256 if user does not provide an input
         data = {
             "model": model,
             "prompt": prompt,
-            "max_tokens_to_sample": max_tokens,
+            "max_tokens_to_sample": max_tokens_to_sample,
             **optional_params,
         }

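This hunk replaces the four-line if/else on `max_tokens` with a single `dict.get` that reads `max_tokens_to_sample` directly, falling back to 256, the default the diff comment calls out as the required Anthropic parameter. A minimal runnable sketch of the resulting payload construction (the function name `build_anthropic_payload` is illustrative, not from the commit):

```python
# Minimal sketch of the payload construction after this commit, using only
# the standard library. `optional_params` stands in for the dict litellm
# passes down; 256 matches the default named in the diff comment.
def build_anthropic_payload(model: str, prompt: str, optional_params: dict) -> dict:
    # Anthropic's text-completion API requires max_tokens_to_sample, so
    # fall back to 256 when the caller did not supply one.
    max_tokens_to_sample = optional_params.get("max_tokens_to_sample", 256)
    return {
        "model": model,
        "prompt": prompt,
        "max_tokens_to_sample": max_tokens_to_sample,
        # Splatted last: a caller-supplied max_tokens_to_sample overwrites
        # the explicit key above with the identical value it was read from.
        **optional_params,
    }

# With no limit supplied, the payload falls back to the 256 default.
payload = build_anthropic_payload(
    model="claude-instant-1",
    prompt="\n\nHuman: hello\n\nAssistant:",
    optional_params={"temperature": 0.7},
)
assert payload["max_tokens_to_sample"] == 256
```

The second hunk, below, supplies the other half of the change: it makes sure an OpenAI-style `max_tokens` argument actually arrives in `optional_params` under the Anthropic name.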
@@ -910,6 +910,8 @@ def get_optional_params( # use the openai defaults
             optional_params["temperature"] = temperature
         if top_p != 1:
             optional_params["top_p"] = top_p
+        if max_tokens != float("inf"):
+            optional_params["max_tokens_to_sample"] = max_tokens
         return optional_params
     elif model in litellm.cohere_models:
         # handle cohere params
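Here `get_optional_params` learns to translate the OpenAI-style `max_tokens` argument into Anthropic's `max_tokens_to_sample`, using `float("inf")` as the "not set" sentinel, consistent with the `temperature` and `top_p` handling around it. A simplified stand-in for that branch (the real function takes many more parameters than shown):

```python
# Simplified stand-in for the Anthropic branch of get_optional_params;
# float("inf") is the sentinel for "max_tokens not set".
def anthropic_optional_params(
    temperature: float = 1.0,
    top_p: float = 1.0,
    max_tokens: float = float("inf"),
) -> dict:
    optional_params: dict = {}
    if temperature != 1:
        optional_params["temperature"] = temperature
    if top_p != 1:
        optional_params["top_p"] = top_p
    # New in this commit: map the OpenAI-style max_tokens argument onto
    # Anthropic's required max_tokens_to_sample parameter.
    if max_tokens != float("inf"):
        optional_params["max_tokens_to_sample"] = max_tokens
    return optional_params

# completion(..., max_tokens=100) on a Claude model now reaches the
# provider as max_tokens_to_sample=100.
assert anthropic_optional_params(max_tokens=100) == {"max_tokens_to_sample": 100}
```

Taken together, the two hunks mean a caller's `max_tokens` flows through `optional_params["max_tokens_to_sample"]` straight into the request body, instead of relying on the old dead-end `"max_tokens" in optional_params` check.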