mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-25 18:54:30 +00:00
Litellm dev 04 05 2025 p2 (#9774)
* test: move test to just checking async * fix(transformation.py): handle function call with no schema * fix(utils.py): handle pydantic base model in message tool calls Fix https://github.com/BerriAI/litellm/issues/9321 * fix(vertex_and_google_ai_studio.py): handle tools=[] Fixes https://github.com/BerriAI/litellm/issues/9080 * test: remove max token restriction * test: fix basic test * fix(get_supported_openai_params.py): fix check * fix(converse_transformation.py): support fake streaming for meta.llama3-3-70b-instruct-v1:0 * fix: fix test * fix: parse out empty dictionary on dbrx streaming + tool calls * fix(handle-'strict'-param-when-calling-fireworks-ai): fireworks ai does not support 'strict' param * fix: fix ruff check * fix: handle no strict in function * fix: revert bedrock change - handle in separate PR
This commit is contained in:
parent
d8f47fc9e5
commit
fcf17d114f
10 changed files with 214 additions and 11 deletions
|
@ -30,6 +30,7 @@ from litellm.types.llms.openai import (
|
|||
ChatCompletionToolParam,
|
||||
ChatCompletionToolParamFunctionChunk,
|
||||
ChatCompletionUserMessage,
|
||||
OpenAIChatCompletionToolParam,
|
||||
OpenAIMessageContentListBlock,
|
||||
)
|
||||
from litellm.types.utils import ModelResponse, PromptTokensDetailsWrapper, Usage
|
||||
|
@ -211,6 +212,23 @@ class AmazonConverseConfig(BaseConfig):
|
|||
)
|
||||
return _tool
|
||||
|
||||
def _apply_tool_call_transformation(
|
||||
self,
|
||||
tools: List[OpenAIChatCompletionToolParam],
|
||||
model: str,
|
||||
non_default_params: dict,
|
||||
optional_params: dict,
|
||||
):
|
||||
optional_params = self._add_tools_to_optional_params(
|
||||
optional_params=optional_params, tools=tools
|
||||
)
|
||||
|
||||
if (
|
||||
"meta.llama3-3-70b-instruct-v1:0" in model
|
||||
and non_default_params.get("stream", False) is True
|
||||
):
|
||||
optional_params["fake_stream"] = True
|
||||
|
||||
def map_openai_params(
|
||||
self,
|
||||
non_default_params: dict,
|
||||
|
@ -286,8 +304,11 @@ class AmazonConverseConfig(BaseConfig):
|
|||
if param == "top_p":
|
||||
optional_params["topP"] = value
|
||||
if param == "tools" and isinstance(value, list):
|
||||
optional_params = self._add_tools_to_optional_params(
|
||||
optional_params=optional_params, tools=value
|
||||
self._apply_tool_call_transformation(
|
||||
tools=cast(List[OpenAIChatCompletionToolParam], value),
|
||||
model=model,
|
||||
non_default_params=non_default_params,
|
||||
optional_params=optional_params,
|
||||
)
|
||||
if param == "tool_choice":
|
||||
_tool_choice_value = self.map_tool_choice_values(
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue