# agent_tools.py
import json
import os

import requests
from fastmcp import FastMCP
from langchain_openai import AzureChatOpenAI

# Azure OpenAI chat model used by the supervisor tool to route queries.
llm = AzureChatOpenAI(
    azure_deployment=os.getenv("AZURE_DEPLOYMENT"),
    openai_api_version=os.getenv("OPENAI_API_VERSION"),
    temperature=0.2,
)
mcp = FastMCP(
    "AgentToolbox",
    instructions="Wraps research, math, and weather (meteo) APIs as agent tools and dispatches queries to them.",
)

def call_api(path: str, payload: dict) -> str:
    """POST a payload to the local agent API and return its "result" field."""
    r = requests.post(f"http://localhost:8000/{path}", json=payload, timeout=30)
    r.raise_for_status()
    return r.json()["result"]

@mcp.tool()
def research_agent(query: str, prompt: str | None = None) -> str:
    """Answer research questions by delegating to the research API."""
    return call_api("research", {"query": query, "prompt": prompt})


@mcp.tool()
def math_agent(query: str, prompt: str | None = None) -> str:
    """Solve math problems by delegating to the math API."""
    return call_api("math", {"query": query, "prompt": prompt})


@mcp.tool()
def meteo_agent(query: str, prompt: str | None = None) -> str:
    """Answer weather questions by delegating to the meteo API."""
    return call_api("meteo", {"query": query, "prompt": prompt})


@mcp.tool()
def supervisor(query: str) -> str:
    """Route an incoming query to the math, research, or meteo agent."""
    content = llm.invoke([
        {"role": "system", "content": (
            "You are a dispatcher. Decide whether the incoming query requires math, "
            "research, or weather (meteo). Respond with only a JSON object containing "
            'the fields "tool" (one of "math", "research", "meteo"), "prompt", and "query".'
        )},
        {"role": "user", "content": query},
    ]).content
    # Expected shape: {"tool": "math", "prompt": "...", "query": "..."}
    data = json.loads(content)
    tool = data.get("tool")
    if tool in ("math", "research", "meteo"):
        # The tool names map directly onto the API paths expected by call_api.
        return call_api(tool, {"query": data["query"], "prompt": data.get("prompt")})
    return "Unable to determine appropriate tool."


if __name__ == "__main__":
    # Serve the tools over stdio so an MCP client can launch this script directly.
    mcp.run(transport="stdio")
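
# --- Example client (illustrative sketch, kept as a comment so this file stays a
# pure server script). It assumes the FastMCP 2.x client API (fastmcp.Client and
# call_tool) and that this file is saved as agent_tools.py; adjust names to match
# your installed version.
#
# import asyncio
# from fastmcp import Client
#
# async def main():
#     # Pointing the client at a .py path launches the server as a stdio subprocess.
#     async with Client("agent_tools.py") as client:
#         result = await client.call_tool("supervisor", {"query": "What's 12 * 34?"})
#         print(result)
#
# asyncio.run(main())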