
Learn_MCP Math Server

client.py (1.03 kB)
from langchain_mcp_adapters.client import MultiServerMCPClient
from langgraph.prebuilt import create_react_agent
from langchain_groq import ChatGroq
from dotenv import load_dotenv
import asyncio

# Loads GROQ_API_KEY from .env so ChatGroq can pick it up from the environment.
load_dotenv()


async def main():
    # Connect to the local math MCP server, launched as a subprocess over stdio.
    client = MultiServerMCPClient(
        {
            "math": {
                "command": "python",
                "args": ["mathserver.py"],
                "transport": "stdio",
            }
        }
    )

    # Discover the tools exposed by the MCP server and build a ReAct agent around them.
    tools = await client.get_tools()
    model = ChatGroq(model="qwen-qwq-32b")
    agent = create_react_agent(model, tools)

    while True:
        user_question = input("Enter a math question (or 'q' to quit): ")
        if user_question.lower() == 'q':
            break
        response = await agent.ainvoke(
            {"messages": [{"role": "user", "content": user_question}]},
        )
        print("Agent response:", response['messages'][-1].content)


asyncio.run(main())
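The client starts mathserver.py as a stdio MCP server, but that file is not shown on this page. Below is a minimal sketch of what such a math server could look like, assuming the FastMCP helper from the official mcp Python SDK; the tool names add and multiply are illustrative, not taken from the original project.

from mcp.server.fastmcp import FastMCP

# Create an MCP server named "Math" that exposes simple arithmetic tools.
mcp = FastMCP("Math")


@mcp.tool()
def add(a: int, b: int) -> int:
    """Add two numbers."""
    return a + b


@mcp.tool()
def multiply(a: int, b: int) -> int:
    """Multiply two numbers."""
    return a * b


if __name__ == "__main__":
    # Serve over stdio so the client can launch this script as a subprocess.
    mcp.run(transport="stdio")

With a server like this in place, running python client.py spawns the server, hands its tools to the ReAct agent, and lets the agent answer prompts such as "what is (3 + 5) * 12?" by calling the exposed tools.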

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/naveenkumarr1812/MCPServer'
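As a rough illustration of consuming the same endpoint from Python (the response schema is not documented on this page, so the example simply prints the raw JSON):

import requests

# Fetch this server's directory entry from the Glama MCP API and print whatever JSON it returns.
resp = requests.get("https://glama.ai/api/mcp/v1/servers/naveenkumarr1812/MCPServer")
resp.raise_for_status()
print(resp.json())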

If you have feedback or need assistance with the MCP directory API, please join our Discord server.