Litellm dev 04 05 2025 p2 (#9774)

* test: move test to just checking async

* fix(transformation.py): handle function call with no schema
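
A hedged usage sketch of the case this fix targets: a function tool declared without a 'parameters' schema. The provider, model name, and tool here are illustrative assumptions, not taken from the commit.

```python
import litellm

# Tool with no "parameters" schema; the transformation should now tolerate the
# missing schema instead of erroring while building the provider request.
response = litellm.completion(
    model="gemini/gemini-1.5-flash",  # assumed provider/model, for illustration only
    messages=[{"role": "user", "content": "What time is it?"}],
    tools=[
        {
            "type": "function",
            "function": {
                "name": "get_current_time",
                "description": "Return the current time",
                # "parameters" intentionally omitted
            },
        }
    ],
)
print(response.choices[0].message)
```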

* fix(utils.py): handle pydantic base model in message tool calls

Fix https://github.com/BerriAI/litellm/issues/9321
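
A minimal repro sketch for the linked issue, assuming the usual tool-calling round trip where the prior response's assistant message (a pydantic model whose tool_calls are also pydantic models) is appended to 'messages' as-is. The model name and tool are illustrative.

```python
import litellm

weather_tool = {
    "type": "function",
    "function": {
        "name": "get_weather",
        "description": "Get the weather for a city",
        "parameters": {
            "type": "object",
            "properties": {"city": {"type": "string"}},
            "required": ["city"],
        },
    },
}

first = litellm.completion(
    model="gpt-4o-mini",  # assumed model, for illustration only
    messages=[{"role": "user", "content": "What's the weather in Paris?"}],
    tools=[weather_tool],
)
assistant_msg = first.choices[0].message  # pydantic Message; tool_calls are pydantic too

# Assumes the model actually requested the tool; passing the pydantic message back
# unconverted is what previously tripped up tool-call serialization.
followup = litellm.completion(
    model="gpt-4o-mini",
    messages=[
        {"role": "user", "content": "What's the weather in Paris?"},
        assistant_msg,
        {
            "role": "tool",
            "tool_call_id": assistant_msg.tool_calls[0].id,
            "content": "18C and sunny",
        },
    ],
    tools=[weather_tool],
)
print(followup.choices[0].message.content)
```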

* fix(vertex_and_google_ai_studio.py): handle tools=[]

Fixes https://github.com/BerriAI/litellm/issues/9080
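
A hedged usage sketch for the linked issue: an empty 'tools' list sent to a Vertex / Google AI Studio model, which should now behave like omitting 'tools' rather than raising. The model name is an assumption for illustration.

```python
import litellm

response = litellm.completion(
    model="gemini/gemini-1.5-flash",  # assumed model, for illustration only
    messages=[{"role": "user", "content": "Say hello."}],
    tools=[],  # previously triggered the error in the linked issue
)
print(response.choices[0].message.content)
```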

* test: remove max token restriction

* test: fix basic test

* fix(get_supported_openai_params.py): fix check

* fix(converse_transformation.py): support fake streaming for meta.llama3-3-70b-instruct-v1:0
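
A hedged usage sketch: with fake streaming, a stream=True request for this model is served by a single non-streaming Bedrock Converse call whose result is replayed to the caller as chunks. The prompt is illustrative.

```python
import litellm

stream = litellm.completion(
    model="bedrock/meta.llama3-3-70b-instruct-v1:0",
    messages=[{"role": "user", "content": "Write a haiku about rivers."}],
    stream=True,  # satisfied via one non-streaming upstream call, replayed as chunks
)
for chunk in stream:
    print(chunk.choices[0].delta.content or "", end="")
```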

* fix: fix test

* fix: parse out empty dictionary on dbrx streaming + tool calls

* fix(fireworks_ai/transformation.py): handle 'strict' param when calling Fireworks AI - Fireworks AI does not support the 'strict' param
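
A hedged usage sketch of the behavior added in the diff below: a tool carrying OpenAI's 'strict' flag sent through a Fireworks AI model has 'strict' stripped before the upstream request. The model path and tool are illustrative assumptions.

```python
import litellm

response = litellm.completion(
    model="fireworks_ai/accounts/fireworks/models/llama-v3p1-70b-instruct",  # assumed model path
    messages=[{"role": "user", "content": "Add 2 and 2."}],
    tools=[
        {
            "type": "function",
            "function": {
                "name": "add",
                "description": "Add two integers",
                "strict": True,  # unsupported by Fireworks AI; dropped by _transform_tools
                "parameters": {
                    "type": "object",
                    "properties": {"a": {"type": "integer"}, "b": {"type": "integer"}},
                    "required": ["a", "b"],
                },
            },
        }
    ],
)
print(response.choices[0].message.tool_calls)
```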

* fix: fix ruff check


* fix: handle function definitions with no 'strict' key

* fix: revert bedrock change - handle in separate PR
Krish Dholakia 2025-04-07 21:02:52 -07:00 committed by GitHub
parent d8f47fc9e5
commit fcf17d114f
10 changed files with 214 additions and 11 deletions


@@ -2,7 +2,11 @@ from typing import List, Literal, Optional, Tuple, Union, cast
 import litellm
 from litellm.secret_managers.main import get_secret_str
-from litellm.types.llms.openai import AllMessageValues, ChatCompletionImageObject
+from litellm.types.llms.openai import (
+    AllMessageValues,
+    ChatCompletionImageObject,
+    OpenAIChatCompletionToolParam,
+)
 from litellm.types.utils import ProviderSpecificModelInfo
 
 from ...openai.chat.gpt_transformation import OpenAIGPTConfig
@@ -150,6 +154,14 @@ class FireworksAIConfig(OpenAIGPTConfig):
             ] = f"{content['image_url']['url']}#transform=inline"
         return content
 
+    def _transform_tools(
+        self, tools: List[OpenAIChatCompletionToolParam]
+    ) -> List[OpenAIChatCompletionToolParam]:
+        for tool in tools:
+            if tool.get("type") == "function":
+                tool["function"].pop("strict", None)
+        return tools
+
     def _transform_messages_helper(
         self, messages: List[AllMessageValues], model: str, litellm_params: dict
     ) -> List[AllMessageValues]:
@@ -196,6 +208,9 @@ class FireworksAIConfig(OpenAIGPTConfig):
         messages = self._transform_messages_helper(
             messages=messages, model=model, litellm_params=litellm_params
         )
+        if "tools" in optional_params and optional_params["tools"] is not None:
+            tools = self._transform_tools(tools=optional_params["tools"])
+            optional_params["tools"] = tools
         return super().transform_request(
             model=model,
             messages=messages,