# langgraph_client.py
import asyncio
import os
from pathlib import Path
from dotenv import load_dotenv
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client
from langchain_mcp_adapters.tools import load_mcp_tools
from langgraph.prebuilt import create_react_agent
from langchain_openai import ChatOpenAI
load_dotenv()
async def main() -> None:
    """Connect to the local MCP server over stdio, load its tools, and run a
    LangGraph ReAct agent over a few demo queries.

    Requires ``OPENAI_API_KEY`` in the environment (typically loaded from a
    ``.env`` file via ``load_dotenv()`` at import time). Prints agent replies
    to stdout; returns nothing.
    """
    # Fail fast with a helpful message rather than erroring deep inside the
    # OpenAI client when the key is missing.
    if not os.getenv("OPENAI_API_KEY"):
        print("❌ Error: OPENAI_API_KEY not found in environment variables")
        print("Please add OPENAI_API_KEY to your .env file")
        return

    # The MCP server script lives next to this client; launch it as a
    # subprocess through `uv run` and talk to it over stdin/stdout.
    server_path = str(Path(__file__).parent / "server.py")
    server_params = StdioServerParameters(
        command="uv",
        args=["run", server_path],
    )

    print("🔌 Connecting to MCP server...")
    async with stdio_client(server_params) as (read, write):
        async with ClientSession(read, write) as session:
            # MCP handshake must complete before tools can be listed.
            await session.initialize()

            print("🛠️ Loading tools from MCP server...")
            tools = await load_mcp_tools(session)
            # NOTE: f-string kept on one line — it was previously split mid-literal,
            # which is a syntax error.
            print(f"✅ Loaded {len(tools)} tools: {[tool.name for tool in tools]}\n")

            model = ChatOpenAI(model="gpt-4o")
            agent = create_react_agent(model, tools)

            queries = [
                "Roll 3d6 dice for me",
                "What's an interesting fact about the number 42?",
                "Tell me a fun fact about cats",
            ]

            for i, query in enumerate(queries, 1):
                print("=" * 60)
                print(f"Query {i}: {query}")
                print("=" * 60)

                # ainvoke returns the full message history; the agent's final
                # answer is the last message in the list.
                response = await agent.ainvoke({"messages": [("user", query)]})
                final_message = response["messages"][-1]
                print(f"\n🤖 Agent: {final_message.content}\n")
if __name__ == "__main__":
asyncio.run(main())