mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-25 18:54:30 +00:00
test fix
This commit is contained in:
parent
194327bb7c
commit
3919e24256
1 changed files with 22 additions and 47 deletions
|
@ -1,60 +1,35 @@
|
||||||
|
# Create server parameters for stdio connection
|
||||||
import asyncio
|
import asyncio
|
||||||
from openai import AsyncOpenAI
|
import os
|
||||||
from openai.types.chat import ChatCompletionUserMessageParam
|
|
||||||
|
from langchain_mcp_adapters.tools import load_mcp_tools
|
||||||
|
from langchain_openai import ChatOpenAI
|
||||||
|
from langgraph.prebuilt import create_react_agent
|
||||||
from mcp import ClientSession
|
from mcp import ClientSession
|
||||||
from mcp.client.sse import sse_client
|
from mcp.client.sse import sse_client
|
||||||
from litellm.experimental_mcp_client.tools import (
|
|
||||||
transform_mcp_tool_to_openai_tool,
|
|
||||||
transform_openai_tool_call_request_to_mcp_tool_call_request,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
async def main():
    """Connect to a LiteLLM MCP server over SSE and load its tools.

    Demonstrates the langchain-mcp-adapters integration against a locally
    running LiteLLM proxy: opens an SSE transport to the MCP endpoint,
    performs the MCP session handshake, and loads the server's tools as
    LangChain tools. A react-agent invocation is left commented out as a
    usage example.

    Side effects: network I/O to http://localhost:4000 and progress output
    via print(). Raises whatever the underlying transport/session raises
    if the proxy is unreachable.
    """
    # Initialize the chat model. The API key keeps the original placeholder
    # default but can now be overridden via the environment, so the demo
    # works against a real backend without editing the file.
    model = ChatOpenAI(
        model="gpt-4o",
        api_key=os.environ.get("OPENAI_API_KEY", "sk-12"),
    )

    # Connect to the MCP server's SSE endpoint exposed by the LiteLLM proxy.
    async with sse_client("http://localhost:4000/mcp/") as (read, write):
        async with ClientSession(read, write) as session:
            # The MCP handshake must complete before any tool operations.
            print("Initializing session")
            await session.initialize()
            print("Session initialized")

            # Load the server's MCP tools as LangChain-compatible tools.
            print("Loading tools")
            tools = await load_mcp_tools(session)
            print("Tools loaded")
            print(tools)

            # # Create and run the agent
            # agent = create_react_agent(model, tools)
            # agent_response = await agent.ainvoke({"messages": "what's (3 + 5) x 12?"})
||||||
# Script entry point: run the async demo only when executed directly,
# not when this module is imported.
if __name__ == "__main__":
    asyncio.run(main())
|
Loading…
Add table
Add a link
Reference in a new issue