[Feat] Add max_completion_tokens param (#5691)
* add max_completion_tokens
* add max_completion_tokens
* add max_completion_tokens support for OpenAI models
* add max_completion_tokens param
* add max_completion_tokens for bedrock converse models
* add test for converse maxTokens
* fix openai o1 param mapping test
* move test optional params
* add max_completion_tokens for anthropic api
* fix conftest
* add max_completion_tokens for vertex ai partner models
* add max_completion_tokens for fireworks ai
* add max_completion_tokens for hf rest api
* add test for param mapping
* add param mapping for vertex, gemini + testing
* predibase is the most unstable and unusable llm api in prod, can't handle our ci/cd
* add max_completion_tokens to openai supported params
* fix fireworks ai param mapping
This commit is contained in:
parent 767a0648e9
commit c93364ab2e

31 changed files with 591 additions and 35 deletions
```diff
@@ -73,6 +73,7 @@ class FireworksAIConfig:
             "stream",
             "tools",
             "tool_choice",
+            "max_completion_tokens",
             "max_tokens",
             "temperature",
             "top_p",
```
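With this entry in the supported-params list, the parameter is advertised through litellm's lookup helper. A quick check, as a sketch (the model id is illustrative, not taken from this commit):

```python
from litellm import get_supported_openai_params

# Ask litellm which OpenAI-style params the Fireworks AI provider accepts.
params = get_supported_openai_params(
    model="llama-v3p1-8b-instruct",  # hypothetical model id for illustration
    custom_llm_provider="fireworks_ai",
)
assert "max_completion_tokens" in params  # added by this commit
```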
```diff
@@ -102,6 +103,8 @@ class FireworksAIConfig:
                 else:
                     # pass through the value of tool choice
                     optional_params["tool_choice"] = value
+            elif param == "max_completion_tokens":
+                optional_params["max_tokens"] = value
             elif param in supported_openai_params:
                 if value is not None:
                     optional_params[param] = value
```
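The net effect for Fireworks AI: the OpenAI-style `max_completion_tokens` is remapped to the provider's `max_tokens`. A minimal usage sketch, assuming a litellm install that includes this change (the model id and prompt are illustrative):

```python
import litellm

# max_completion_tokens is accepted as an OpenAI-compatible parameter and,
# per the mapping above, forwarded to Fireworks AI as max_tokens.
# NOTE: the model id below is a hypothetical example, not part of this commit.
response = litellm.completion(
    model="fireworks_ai/accounts/fireworks/models/llama-v3p1-8b-instruct",
    messages=[{"role": "user", "content": "Say hello in one word."}],
    max_completion_tokens=16,
)
print(response.choices[0].message.content)
```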