make default max tokens a controllable param

Krrish Dholakia 2023-08-02 19:59:25 -07:00
parent 6cd1960b82
commit a964e326f1
2 changed files with 2 additions and 2 deletions

@@ -2,7 +2,7 @@ success_callback = []
 failure_callback = []
 set_verbose=False
 telemetry=True
+max_tokens = 256 # OpenAI Defaults
 ####### PROXY PARAMS ################### configurable params if you use proxy models like Helicone
 api_base = None
 headers = None
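
The hunk above turns the previously hard-coded default into a module-level attribute that callers can set once per process. A minimal usage sketch, assuming the package is installed and importable as litellm (the value 512 is an arbitrary example):

import litellm

# Raise the library-wide default from 256 (the value shipped in the
# hunk above) to 512 for all subsequent calls that rely on it.
litellm.max_tokens = 512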

@@ -189,7 +189,7 @@ def completion(
     if max_tokens != float('inf'):
       max_tokens_to_sample = max_tokens
     else:
-      max_tokens_to_sample = 300 # default in Anthropic docs https://docs.anthropic.com/claude/reference/client-libraries
+      max_tokens_to_sample = litellm.max_tokens # default in Anthropic docs https://docs.anthropic.com/claude/reference/client-libraries
     ## LOGGING
     logging(model=model, input=prompt, azure=azure, additional_args={"max_tokens": max_tokens}, logger_fn=logger_fn)
     ## COMPLETION CALL
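
The else-branch now falls back to the configurable module default instead of a literal 300. The resolution order can be sketched as a standalone function; resolve_max_tokens is a hypothetical name for illustration, not part of the library:

import litellm

def resolve_max_tokens(max_tokens):
    # Mirrors the diff: an explicit caller-supplied limit wins;
    # otherwise fall back to the configurable litellm.max_tokens.
    if max_tokens != float('inf'):
        return max_tokens
    return litellm.max_tokens

print(resolve_max_tokens(100))           # -> 100 (caller-supplied cap)
print(resolve_max_tokens(float('inf')))  # -> 256 unless overridden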