From d12fadd032303b2829f93a82499a25d4aa5883a3 Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Fri, 22 Sep 2023 20:57:52 -0700
Subject: [PATCH] add claude max_tokens_to_sample

---
 litellm/llms/anthropic.py | 7 ++-----
 litellm/utils.py          | 2 ++
 2 files changed, 4 insertions(+), 5 deletions(-)

diff --git a/litellm/llms/anthropic.py b/litellm/llms/anthropic.py
index e1634afe0..5f4659d8c 100644
--- a/litellm/llms/anthropic.py
+++ b/litellm/llms/anthropic.py
@@ -59,14 +59,11 @@ def completion(
         else:
             prompt += f"{AnthropicConstants.HUMAN_PROMPT.value}{message['content']}"
     prompt += f"{AnthropicConstants.AI_PROMPT.value}"
-    if "max_tokens" in optional_params and optional_params["max_tokens"] != float("inf"):
-        max_tokens = optional_params["max_tokens"]
-    else:
-        max_tokens = 256 # required anthropic param, default to 256 if user does not provide an input
+    max_tokens_to_sample = optional_params.get("max_tokens_to_sample", 256) # required anthropic param, default to 256 if user does not provide an input
     data = {
         "model": model,
         "prompt": prompt,
-        "max_tokens_to_sample": max_tokens,
+        "max_tokens_to_sample": max_tokens_to_sample,
         **optional_params,
     }
 
diff --git a/litellm/utils.py b/litellm/utils.py
index 12510e01a..0d2ce8c95 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -910,6 +910,8 @@ def get_optional_params( # use the openai defaults
             optional_params["temperature"] = temperature
         if top_p != 1:
             optional_params["top_p"] = top_p
+        if max_tokens != float("inf"):
+            optional_params["max_tokens_to_sample"] = max_tokens
         return optional_params
     elif model in litellm.cohere_models:
         # handle cohere params
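
---

Note (not part of the patch): a minimal runnable sketch of what the two hunks do
together. The function names below are hypothetical stand-ins, not litellm's
actual call surface; the point is the parameter flow: get_optional_params()
translates the OpenAI-style `max_tokens` into Anthropic's required
`max_tokens_to_sample`, and the Anthropic handler falls back to 256 only when
the caller never set a limit.

def map_optional_params(max_tokens=float("inf")):
    # Hypothetical stand-in for litellm.utils.get_optional_params: translate
    # the OpenAI-style `max_tokens` into Anthropic's `max_tokens_to_sample`.
    optional_params = {}
    if max_tokens != float("inf"):
        optional_params["max_tokens_to_sample"] = max_tokens
    return optional_params

def build_anthropic_payload(model, prompt, optional_params):
    # Hypothetical stand-in for the request body built in
    # litellm/llms/anthropic.py: `max_tokens_to_sample` is a required
    # Anthropic param, so default to 256 when no limit was mapped in.
    max_tokens_to_sample = optional_params.get("max_tokens_to_sample", 256)
    return {
        "model": model,
        "prompt": prompt,
        "max_tokens_to_sample": max_tokens_to_sample,
        **optional_params,
    }

# No user limit -> the Anthropic-side default of 256 applies.
assert build_anthropic_payload("claude-instant-1", "Hi",
    map_optional_params())["max_tokens_to_sample"] == 256
# An explicit max_tokens=100 flows through as max_tokens_to_sample=100.
assert build_anthropic_payload("claude-instant-1", "Hi",
    map_optional_params(max_tokens=100))["max_tokens_to_sample"] == 100

Reading the two hunks through this lens also shows why the old branch in
anthropic.py could be dropped: the float("inf") check now lives once in
get_optional_params(), and the handler only needs a single .get() with the 256
default.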