Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 02:34:29 +00:00)

rename transform_openai_tool_call_request_to_mcp_tool_call_request

parent 7dd54112cb
commit 815263f7bc
3 changed files with 10 additions and 8 deletions
@@ -76,7 +76,7 @@ def _get_function_arguments(function: FunctionDefinition) -> dict:
     return arguments if isinstance(arguments, dict) else {}


-def _transform_openai_tool_call_to_mcp_tool_call_request(
+def transform_openai_tool_call_request_to_mcp_tool_call_request(
     openai_tool: ChatCompletionMessageToolCall,
 ) -> MCPCallToolRequestParams:
     """Convert an OpenAI ChatCompletionMessageToolCall to an MCP CallToolRequestParams."""
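
For orientation, here is a minimal sketch of what the renamed helper plausibly does, pieced together from the signature, docstring, and the _get_function_arguments context in this hunk. The function body itself is not part of the diff, so treat this as an assumption rather than the verbatim implementation.

# Sketch only; ChatCompletionMessageToolCall, MCPCallToolRequestParams and
# _get_function_arguments are the names already used in this module.
def transform_openai_tool_call_request_to_mcp_tool_call_request(
    openai_tool: ChatCompletionMessageToolCall,
) -> MCPCallToolRequestParams:
    """Convert an OpenAI ChatCompletionMessageToolCall to an MCP CallToolRequestParams."""
    function = openai_tool["function"]  # the tests in this commit pass a plain dict of this shape
    return MCPCallToolRequestParams(
        name=function["name"],                        # MCP tool to invoke
        arguments=_get_function_arguments(function),  # JSON string -> dict, {} on failure
    )
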
@@ -100,8 +100,10 @@ async def call_openai_tool(
     Returns:
         The result of the MCP tool call.
     """
-    mcp_tool_call_request_params = _transform_openai_tool_call_to_mcp_tool_call_request(
-        openai_tool=openai_tool,
+    mcp_tool_call_request_params = (
+        transform_openai_tool_call_request_to_mcp_tool_call_request(
+            openai_tool=openai_tool,
+        )
     )
     return await call_mcp_tool(
         session=session,
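
Since call_openai_tool keeps its public behaviour and only delegates to the renamed transform internally, downstream usage is unchanged. A hedged end-to-end sketch follows; the SSE URL, the session setup, and the literal tool-call dict are illustrative assumptions, not part of this commit.

import asyncio

from mcp import ClientSession
from mcp.client.sse import sse_client

from litellm.experimental_mcp_client.tools import call_openai_tool


async def main() -> None:
    # Assumed MCP server endpoint; replace with a real SSE URL.
    async with sse_client(url="http://localhost:8000/sse") as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            # Normally taken from a chat-completion response's message.tool_calls;
            # a literal dict in the same shape is used here for illustration.
            openai_tool = {
                "function": {"name": "test_tool", "arguments": '{"test": "value"}'}
            }
            result = await call_openai_tool(session=session, openai_tool=openai_tool)
            print(result)


if __name__ == "__main__":
    asyncio.run(main())
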
@@ -19,11 +19,11 @@ from mcp.types import Tool as MCPTool

 from litellm.experimental_mcp_client.tools import (
     _get_function_arguments,
-    _transform_openai_tool_call_to_mcp_tool_call_request,
     call_mcp_tool,
     call_openai_tool,
     load_mcp_tools,
     transform_mcp_tool_to_openai_tool,
+    transform_openai_tool_call_request_to_mcp_tool_call_request,
 )

@@ -76,11 +76,11 @@ def test_transform_mcp_tool_to_openai_tool(mock_mcp_tool):
     }


-def test_transform_openai_tool_call_to_mcp_tool_call_request(mock_mcp_tool):
+def testtransform_openai_tool_call_request_to_mcp_tool_call_request(mock_mcp_tool):
     openai_tool = {
         "function": {"name": "test_tool", "arguments": json.dumps({"test": "value"})}
     }
-    mcp_tool_call_request = _transform_openai_tool_call_to_mcp_tool_call_request(
+    mcp_tool_call_request = transform_openai_tool_call_request_to_mcp_tool_call_request(
         openai_tool
     )
     assert mcp_tool_call_request.name == "test_tool"

@@ -10,7 +10,7 @@ from mcp import ClientSession
 from mcp.client.sse import sse_client
 from litellm.experimental_mcp_client.tools import (
     transform_mcp_tool_to_openai_tool,
-    _transform_openai_tool_call_to_mcp_tool_call_request,
+    transform_openai_tool_call_request_to_mcp_tool_call_request,
 )
 import json

@@ -73,7 +73,7 @@ async def test_mcp_routes_with_vertex_ai():

         # Call the tool using MCP client
         mcp_tool_call_request = (
-            _transform_openai_tool_call_to_mcp_tool_call_request(
+            transform_openai_tool_call_request_to_mcp_tool_call_request(
                 openai_tool.model_dump()
             )
         )
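
The proxy test above passes the OpenAI tool call through .model_dump() before handing it to the transform, so the helper receives a plain dict. Below is a hedged sketch of that pattern; the helper name first_tool_call_to_mcp and the response argument are hypothetical, and only .model_dump() and the transform call come from this diff.

from litellm.experimental_mcp_client.tools import (
    transform_openai_tool_call_request_to_mcp_tool_call_request,
)


def first_tool_call_to_mcp(response):
    """Convert the first tool call of a chat-completion response to MCP call params.

    Hypothetical helper: response is assumed to be a litellm.completion(...) result
    whose message requested at least one tool call.
    """
    openai_tool = response.choices[0].message.tool_calls[0]
    return transform_openai_tool_call_request_to_mcp_tool_call_request(
        openai_tool.model_dump()
    )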