[Feat] Add max_completion_tokens param (#5691)

* add max_completion_tokens

* add max_completion_tokens

* add max_completion_tokens support for OpenAI models

* add max_completion_tokens param

* add max_completion_tokens for bedrock converse models

* add test for converse maxTokens

* fix openai o1 param mapping test

* move test optional params

* add max_completion_tokens for anthropic api

* fix conftest

* add max_completion_tokens for vertex ai partner models

* add max_completion_tokens for fireworks ai

* add max_completion_tokens for hf rest api

* add test for param mapping

* add param mapping for vertex, gemini + testing

* predibase is the most unstable and unusable llm api in prod, can't handle our ci/cd

* add max_completion_tokens to openai supported params

* fix fireworks ai param mapping
This commit is contained in:
Ishaan Jaff 2024-09-14 14:57:01 -07:00 committed by GitHub
parent 415a3ede9e
commit 85acdb9193
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
31 changed files with 591 additions and 35 deletions

View file

@ -5,7 +5,7 @@ Common utilities used across bedrock chat/embedding/image generation
import os
import types
from enum import Enum
from typing import List, Optional, Union, Tuple
from typing import List, Optional, Tuple, Union
import httpx
@ -158,6 +158,7 @@ class AmazonAnthropicClaude3Config:
def get_supported_openai_params(self):
return [
"max_tokens",
"max_completion_tokens",
"tools",
"tool_choice",
"stream",
@ -169,7 +170,7 @@ class AmazonAnthropicClaude3Config:
def map_openai_params(self, non_default_params: dict, optional_params: dict):
for param, value in non_default_params.items():
if param == "max_tokens":
if param == "max_tokens" or param == "max_completion_tokens":
optional_params["max_tokens"] = value
if param == "tools":
optional_params["tools"] = value
@ -240,11 +241,18 @@ class AmazonAnthropicConfig:
def get_supported_openai_params(
self,
):
return ["max_tokens", "temperature", "stop", "top_p", "stream"]
return [
"max_tokens",
"max_completion_tokens",
"temperature",
"stop",
"top_p",
"stream",
]
def map_openai_params(self, non_default_params: dict, optional_params: dict):
for param, value in non_default_params.items():
if param == "max_tokens":
if param == "max_tokens" or param == "max_completion_tokens":
optional_params["max_tokens_to_sample"] = value
if param == "temperature":
optional_params["temperature"] = value