Skip to main content
Glama

Ops MCP Server

by johnohhh1
server.js (4.01 kB)
// MCP server that bridges an Agent Builder client (over WebSocket) to the
// Vercel-hosted ops API: HotSchedules call-off alerts and Brinker/Chili's
// deadline reports sourced from Gmail.
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
import { WebSocketServerTransport } from "@modelcontextprotocol/sdk/server/websocket/server.js";
import WebSocket, { WebSocketServer } from "ws";
import dotenv from "dotenv";
import fetch from "node-fetch";

dotenv.config();

const PORT = process.env.PORT || 3334;
const VERCEL_BASE = "https://my-ops-runner.vercel.app";
// Bearer token for the ops API; must be set in the environment.
const TOKEN = process.env.AI_SECRET_TOKEN;

// Shared request headers for the ops API endpoints.
const authHeaders = () => ({
  "Authorization": `Bearer ${TOKEN}`,
  "Content-Type": "application/json",
});

/**
 * Render the /api/ops/fetch payload as a markdown report.
 * @param {{counts?: {hs911?: number, deliverables?: number}, report?: string}} data
 *   Response body from the ops API — assumed shape; TODO confirm against the API.
 * @returns {string} Markdown report text.
 */
function formatReport(data) {
  let report = "# Talked to Gmail\n\n";
  if (data.counts?.hs911 > 0) {
    report += "## 🚨 911 - HotSchedules Alert\n";
    report += "Call-offs or coverage issues detected\n\n";
  } else {
    report += "## ✅ No 911 Issues\n";
    // FIX: this literal was broken across two lines in the source (an
    // unterminated string); restored to a single literal.
    report += "No call-offs in last 12 hours\n\n";
  }
  if (data.counts?.deliverables > 0) {
    report += "## 📅 Deadlines & Deliverables\n";
    report += `${data.counts.deliverables} items requiring attention\n\n`;
  }
  report += "## Full Report\n";
  report += data.report || "No additional details";
  return report;
}

/**
 * Tool handler: fetch the 12h call-off / 24h deadline report.
 * @returns {Promise<{content: Array<{type: string, text: string}>}>}
 * @throws {Error} when the ops API responds with a non-2xx status.
 */
async function fetchOpsReport() {
  const response = await fetch(`${VERCEL_BASE}/api/ops/fetch`, {
    method: "POST",
    headers: authHeaders(),
  });
  if (!response.ok) {
    throw new Error(`API returned ${response.status}`);
  }
  const data = await response.json();
  return { content: [{ type: "text", text: formatReport(data) }] };
}

/**
 * Tool handler: mark a task complete by ID.
 * @param {{id?: string}|undefined} args - tool-call arguments.
 * @returns {Promise<{content: Array<{type: string, text: string}>}>}
 * @throws {Error} when `id` is missing or the ops API responds non-2xx.
 */
async function markTaskDone(args) {
  if (!args?.id) {
    throw new Error("Task ID required");
  }
  const response = await fetch(`${VERCEL_BASE}/api/ops/mark`, {
    method: "POST",
    headers: authHeaders(),
    body: JSON.stringify({ id: args.id }),
  });
  // FIX: the original ignored HTTP failures here (and bound an unused `data`),
  // reporting success even on a 4xx/5xx. Check status like fetchOpsReport does.
  if (!response.ok) {
    throw new Error(`API returned ${response.status}`);
  }
  return {
    content: [{ type: "text", text: `✅ Task ${args.id} marked complete` }],
  };
}

// Create WebSocket server and wire up one MCP Server per client connection.
const wss = new WebSocketServer({ port: PORT });
console.log(`🚀 MCP Server running on ws://localhost:${PORT}`);
console.log(` Ready for Agent Builder connection`);

wss.on("connection", (socket) => {
  console.log("✅ Client connected");

  // Each client gets its own MCP server instance.
  const server = new Server(
    { name: "ops-gmail-runner", version: "1.0.0" },
    { capabilities: { tools: {} } }
  );

  // Advertise the two available tools.
  // NOTE(review): string method names here match older MCP SDK versions;
  // current SDKs take request schemas — confirm against the pinned SDK version.
  server.setRequestHandler("tools/list", async () => ({
    tools: [
      {
        name: "fetch_ops_report",
        description:
          "Fetch HotSchedules 12h call-offs and Brinker/Chili's 24h deadlines report",
        inputSchema: { type: "object", properties: {} },
      },
      {
        name: "mark_task_done",
        description: "Mark a task as complete by ID",
        inputSchema: {
          type: "object",
          properties: { id: { type: "string", description: "Task ID" } },
          required: ["id"],
        },
      },
    ],
  }));

  // Dispatch tool calls; all errors are surfaced as isError tool results.
  server.setRequestHandler("tools/call", async (request) => {
    const { name, arguments: args } = request.params;
    try {
      if (name === "fetch_ops_report") {
        return await fetchOpsReport();
      }
      if (name === "mark_task_done") {
        return await markTaskDone(args);
      }
      throw new Error(`Unknown tool: ${name}`);
    } catch (error) {
      return {
        content: [{ type: "text", text: `Error: ${error.message}` }],
        isError: true,
      };
    }
  });

  // Set up transport.
  const transport = new WebSocketServerTransport(socket, server);
  // FIX: connect() returns a Promise; the original left it floating with no
  // rejection handler, so transport failures were silently unhandled.
  server.connect(transport).catch((err) => {
    console.error("Failed to connect MCP transport:", err);
  });

  socket.on("close", () => {
    console.log("❌ Client disconnected");
  });
});

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/johnohhh1/ops-mcp-server'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.