mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-25 18:54:30 +00:00
litellm MCP client 1

parent d61febc053
commit d3279d114e

1 changed file with 12 additions and 7 deletions
@@ -10,9 +10,10 @@ sys.path.insert(
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client
import os
from langchain_openai import ChatOpenAI

from litellm.mcp_client.tools import load_mcp_tools
import litellm
import pytest
import json


@pytest.mark.asyncio
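For context, these imports are typically wired together by spawning the MCP server over stdio, opening a ClientSession, and converting the server's tools with load_mcp_tools. A minimal sketch, assuming a local ./mcp_server.py, a hypothetical test name, and a session= keyword on load_mcp_tools (none of which are shown in this diff):

# Sketch only (not part of this commit): connect to a stdio MCP server and load its tools.
# The server path, the test name, and the load_mcp_tools keyword are assumptions.
@pytest.mark.asyncio
async def test_load_tools_sketch():
    server_params = StdioServerParameters(command="python3", args=["./mcp_server.py"])
    async with stdio_client(server_params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()  # MCP handshake with the spawned server
            tools = await load_mcp_tools(session=session)  # tool list consumed by the test below
            assert isinstance(tools, list)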
@@ -34,10 +35,14 @@ async def test_mcp_agent():

    # Create and run the agent
    print(os.getenv("OPENAI_API_KEY"))
    model = ChatOpenAI(model="gpt-4o", api_key=os.getenv("OPENAI_API_KEY"))
    agent = create_react_agent(model, tools)
    agent_response = await agent.ainvoke({"messages": "what's (3 + 5) x 12?"})
    llm_response = await litellm.acompletion(
        model="gpt-4o",
        api_key=os.getenv("OPENAI_API_KEY"),
        messages=[{"role": "user", "content": "what's (3 + 5) x 12?"}],
        tools=tools,
    )
    print("LLM RESPONSE: ", json.dumps(llm_response, indent=4, default=str))

    # Add assertions to verify the response
    assert isinstance(agent_response, dict)
    print(agent_response)
    assert isinstance(llm_response, dict)
    print(llm_response)
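Beyond the isinstance checks added above, a caller will usually want to know whether gpt-4o actually requested one of the MCP-derived tools. A hedged follow-up sketch, assuming litellm's usual OpenAI-shaped response objects (choices / message / tool_calls); not part of this commit:

# Sketch only (not part of this commit): inspect the acompletion response for tool calls.
message = llm_response.choices[0].message
if getattr(message, "tool_calls", None):
    for tool_call in message.tool_calls:
        # The model asked to run an MCP-derived tool; show its name and arguments.
        print("tool requested:", tool_call.function.name, tool_call.function.arguments)
else:
    # No tool call was made; the model answered directly.
    print("direct answer:", message.content)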