Skip to main content
Glama

Multi-Agent Tools Platform

agent_tools.py (1.71 kB)
"""MCP server exposing research, math, and meteo agent tools, plus a
supervisor tool that uses an LLM to dispatch a query to the right agent.

Each agent tool forwards its query to a local HTTP backend at
http://localhost:8000/<path> and returns the "result" field of the JSON
response.
"""

import json
import os

import requests
from fastmcp import FastMCP
from langchain_openai.chat_models.azure import AzureChatOpenAI

# LLM used only by the supervisor for routing decisions. Deployment and API
# version come from the environment; low temperature keeps routing stable.
llm = AzureChatOpenAI(
    azure_deployment=os.getenv("AZURE_DEPLOYMENT"),
    openai_api_version=os.getenv("OPENAI_API_VERSION"),
    temperature=0.2,
)

mcp = FastMCP(
    "AgentToolbox",
    instructions="Wrap research, math, meteo APIs and dispatch.",
)


def call_api(path: str, payload: dict) -> str:
    """POST *payload* to the local agent backend at *path* and return its result.

    Raises:
        requests.HTTPError: on a non-2xx response.
        requests.Timeout: if the backend does not answer within 60 seconds.
        KeyError: if the response JSON has no "result" field.
    """
    # Timeout prevents a hung backend from blocking the MCP server forever.
    r = requests.post(f"http://localhost:8000/{path}", json=payload, timeout=60)
    # Fail loudly on HTTP errors instead of KeyError-ing on an error body.
    r.raise_for_status()
    return r.json()["result"]


@mcp.tool()
def research_agent(query: str, prompt: str | None = None) -> str:
    """Answer *query* via the research backend; *prompt* optionally overrides the system prompt."""
    return call_api("research", {"query": query, "prompt": prompt})


@mcp.tool()
def math_agent(query: str, prompt: str | None = None) -> str:
    """Answer *query* via the math backend; *prompt* optionally overrides the system prompt."""
    return call_api("math", {"query": query, "prompt": prompt})


@mcp.tool()
def meteo_agent(query: str, prompt: str | None = None) -> str:
    """Answer *query* via the weather (meteo) backend; *prompt* optionally overrides the system prompt."""
    return call_api("meteo", {"query": query, "prompt": prompt})


# Maps the tool name emitted by the routing LLM to its implementation.
_DISPATCH = {
    "math": math_agent,
    "research": research_agent,
    "meteo": meteo_agent,
}


@mcp.tool()
def supervisor(query: str) -> str:
    """Route *query* to the appropriate agent tool.

    Asks the LLM to classify the query as math, research, or weather and to
    emit a JSON object with fields ``tool``, ``prompt``, and ``query``, then
    invokes the matching agent. Returns a fallback message when the LLM
    output is not valid JSON or names no known tool.
    """
    content = llm.invoke([
        {"role": "system", "content": (
            "You are a dispatcher. For incoming queries, decide whether it requires math, research, or weather. "
            "Then output a JSON with fields tool, prompt, query."
        )},
        {"role": "user", "content": query},
    ]).content
    try:
        data = json.loads(content)
    except json.JSONDecodeError:
        # LLMs occasionally emit prose or fenced output; treat as "unknown".
        return "Unable to determine appropriate tool."
    handler = _DISPATCH.get(data.get("tool"))
    if handler is None:
        return "Unable to determine appropriate tool."
    return handler(data["query"], prompt=data.get("prompt"))


if __name__ == "__main__":
    mcp.run(transport="stdio")

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/kingrishabdugar/MCP_Demo'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.