fixing claude max token testing

Krrish Dholakia 2023-09-27 21:52:30 -07:00
parent dd61a5b35e
commit 54409a2a30
5 changed files with 17 additions and 8 deletions


@@ -60,6 +60,8 @@ def completion(
prompt += f"{AnthropicConstants.HUMAN_PROMPT.value}{message['content']}"
prompt += f"{AnthropicConstants.AI_PROMPT.value}"
max_tokens_to_sample = optional_params.get("max_tokens_to_sample", 256) # required anthropic param, default to 256 if user does not provide an input
if max_tokens_to_sample != 256: # not default - print for testing
print_verbose(f"LiteLLM.Anthropic: Max Tokens Set")
data = {
"model": model,
"prompt": prompt,