mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-25 02:34:29 +00:00
test fix
This commit is contained in:
parent
194327bb7c
commit
3919e24256
1 changed file with 22 additions and 47 deletions
|
@ -1,60 +1,35 @@
|
|||
# Create server parameters for stdio connection
|
||||
import asyncio
|
||||
from openai import AsyncOpenAI
|
||||
from openai.types.chat import ChatCompletionUserMessageParam
|
||||
import os
|
||||
|
||||
from langchain_mcp_adapters.tools import load_mcp_tools
|
||||
from langchain_openai import ChatOpenAI
|
||||
from langgraph.prebuilt import create_react_agent
|
||||
from mcp import ClientSession
|
||||
from mcp.client.sse import sse_client
|
||||
from litellm.experimental_mcp_client.tools import (
|
||||
transform_mcp_tool_to_openai_tool,
|
||||
transform_openai_tool_call_request_to_mcp_tool_call_request,
|
||||
)
|
||||
|
||||
|
||||
async def main():
    """Connect to a LiteLLM MCP server over SSE, list its tools, and run one
    OpenAI chat completion that may call those tools.

    Flow:
      1. Open a single SSE connection to the MCP endpoint and initialize a session.
      2. List the server's MCP tools and convert them to OpenAI tool schemas.
      3. Send a chat completion request with those tools attached.
      4. If the model emits a tool call, translate it to an MCP tool-call
         request and execute it on the server.

    Raises:
        Whatever the OpenAI or MCP clients raise on connection/protocol errors
        (no handling here — this is a demo script).
    """
    # Initialize clients. Keys/URLs are demo placeholders for a local
    # LiteLLM proxy on port 4000.
    client = AsyncOpenAI(api_key="sk-1234", base_url="http://localhost:4000")
    # `model` is only referenced by the commented-out LangGraph agent demo below.
    model = ChatOpenAI(model="gpt-4o", api_key="sk-12")

    # Connect to MCP.
    # FIX: the original opened two nested sse_client connections to the same
    # URL; the inner (read, write) pair shadowed the outer one, leaving the
    # outer connection opened but never used. A single connection is correct.
    async with sse_client(url="http://localhost:4000/mcp/") as (read, write):
        async with ClientSession(read, write) as session:
            # Initialize the MCP connection (handshake must happen before
            # any other session calls).
            print("Initializing session")
            await session.initialize()
            mcp_tools = await session.list_tools()
            print("List of MCP tools for MCP server:", mcp_tools.tools)
            print("Session initialized")

            # Create the user message for the completion request.
            messages = [
                ChatCompletionUserMessageParam(
                    content="Send an email about LiteLLM supporting MCP", role="user"
                )
            ]

            # Load LangChain-adapted tools (only consumed by the
            # commented-out agent demo below; printed for visibility).
            print("Loading tools")
            tools = await load_mcp_tools(session)
            print("Tools loaded")
            print(tools)

            # Request a completion, advertising every MCP tool in OpenAI
            # function-calling format.
            response = await client.chat.completions.create(
                model="gpt-4o",
                messages=messages,
                tools=[
                    transform_mcp_tool_to_openai_tool(tool) for tool in mcp_tools.tools
                ],
                tool_choice="auto",
            )

            # Handle the first tool call, if the model produced any.
            if response.choices[0].message.tool_calls:
                tool_call = response.choices[0].message.tool_calls[0]
                if tool_call:
                    # Convert the OpenAI tool-call payload into an MCP
                    # tool-call request.
                    mcp_call = (
                        transform_openai_tool_call_request_to_mcp_tool_call_request(
                            openai_tool=tool_call.model_dump()
                        )
                    )

                    # Execute the tool on the MCP server.
                    result = await session.call_tool(
                        name=mcp_call.name, arguments=mcp_call.arguments
                    )

                    print("Result:", result)
            # # Create and run the agent
            # agent = create_react_agent(model, tools)
            # agent_response = await agent.ainvoke({"messages": "what's (3 + 5) x 12?"})
||||
# Run the async entry point only when executed as a script.
# FIX: the original called asyncio.run(main()) twice — once unconditionally at
# module level and again under the __main__ guard — which ran the whole
# workflow twice and made the module unimportable without side effects.
if __name__ == "__main__":
    asyncio.run(main())
|
|
Loading…
Add table
Add a link
Reference in a new issue