client.py
from langchain_mcp_adapters.client import MultiServerMCPClient
from langgraph.prebuilt import create_react_agent
from langchain_groq import ChatGroq
from dotenv import load_dotenv
import asyncio

load_dotenv()


async def main():
    # One MCP server per entry: "math" is launched locally over stdio,
    # "weather" must already be serving Streamable HTTP on port 8000.
    client = MultiServerMCPClient(
        {
            "math": {
                "command": "python",
                "args": ["math_server.py"],  # Ensure correct absolute path
                "transport": "stdio",
            },
            "weather": {
                "url": "http://localhost:8000/mcp",  # Ensure server is running here
                "transport": "streamable_http",
            },
        }
    )

    # Discover the tools exposed by both servers and wire them into a ReAct agent.
    tools = await client.get_tools()
    model = ChatGroq(model="qwen-qwq-32b")
    agent = create_react_agent(model, tools)

    # Keep the conversation history outside the loop so the agent
    # retains context across turns (the original reset it every iteration).
    messages = []
    while True:
        print("Type 'quit' to exit")
        user_input = input("You: ")
        if user_input.lower() == "quit":
            break
        messages.append({"role": "user", "content": user_input})
        response = await agent.ainvoke({"messages": messages})
        reply = response["messages"][-1]
        print(reply.content)
        messages.append(reply)


asyncio.run(main())
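
The client above expects two MCP servers that are not part of this listing: math_server.py, which it launches itself as a subprocess over stdio, and a weather server already listening at http://localhost:8000/mcp. Below is a minimal sketch of both, assuming the FastMCP helper from the official mcp Python SDK; the tool names (add, multiply, get_weather) and the file name weather_server.py are illustrative assumptions, not taken from the original code.

math_server.py (sketch)

# math_server.py -- minimal stdio MCP server (assumed FastMCP API;
# the add/multiply tools are illustrative).
from mcp.server.fastmcp import FastMCP

mcp = FastMCP("Math")

@mcp.tool()
def add(a: int, b: int) -> int:
    """Add two numbers."""
    return a + b

@mcp.tool()
def multiply(a: int, b: int) -> int:
    """Multiply two numbers."""
    return a * b

if __name__ == "__main__":
    # client.py spawns this script and talks to it over stdio.
    mcp.run(transport="stdio")

weather_server.py (sketch; file name is illustrative)

# weather_server.py -- sketch of the Streamable HTTP server the client looks for
# at http://localhost:8000/mcp. Assumes FastMCP's default host, port, and /mcp path;
# get_weather returns a placeholder instead of calling a real weather API.
from mcp.server.fastmcp import FastMCP

mcp = FastMCP("Weather")

@mcp.tool()
async def get_weather(location: str) -> str:
    """Return the weather for a location (stubbed)."""
    return f"It is always sunny in {location}."

if __name__ == "__main__":
    mcp.run(transport="streamable-http")

Start the weather server first (python weather_server.py) so the Streamable HTTP endpoint is reachable before client.py runs; the math server needs no separate start because the client launches it on demand.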