Skip to main content
Glama
server.ts (4.73 kB)
import { WebSocketServer, WebSocket } from 'ws';
import { jsonrpc } from 'jsonrpc-lite';
import { CONFIG } from './config.js';
import { handleMCPRequest } from './handlers.js';
import { MCPRequestSchema } from './validation.js';
// @ts-ignore - JS module in TS project
import { WebDashboardServer } from './web-server.js';

/**
 * MCP WebSocket server entry point.
 *
 * Accepts newline-delimited JSON-RPC 2.0 messages over WebSocket at
 * ws://0.0.0.0:<CONFIG.PORT>/mcp, validates each request against
 * MCPRequestSchema, dispatches it to handleMCPRequest, and writes the
 * response back on the same socket. Notifications (handler returns null)
 * get no response, per JSON-RPC 2.0. Also starts the web dashboard on
 * port 5000.
 */

// Tuning constants for the per-connection burst rate limiter and keepalive.
const RATE_LIMIT_WINDOW_MS = 100; // messages closer together than this count toward the burst
const RATE_LIMIT_MAX_BURST = 10;  // rapid messages tolerated before the socket is closed
const PING_INTERVAL_MS = 30_000;  // keepalive ping cadence

const server = new WebSocketServer({
  port: CONFIG.PORT,
  host: '0.0.0.0', // Accept external connections for AI coordination
  path: '/mcp',    // Add the MCP path - FIXED
});

console.log(`MCP WebSocket server starting on ws://0.0.0.0:${CONFIG.PORT}/mcp`);

// Per-connection state used by the rate limiter.
interface ConnectionInfo {
  lastMessage: number;  // timestamp (ms) of the most recent message
  messageCount: number; // consecutive messages arriving < RATE_LIMIT_WINDOW_MS apart
}

// Connection tracking for rate limiting
const connections = new Map<WebSocket, ConnectionInfo>();

/**
 * Applies basic burst rate limiting for a connection.
 *
 * @param ws - the client socket whose message cadence is being checked
 * @returns true when the client exceeded the burst limit (the socket has
 *          already been closed with code 1008 "policy violation") and the
 *          caller must stop processing the current message.
 */
function isRateLimited(ws: WebSocket): boolean {
  const connInfo = connections.get(ws);
  if (!connInfo) return false; // untracked socket: let it through

  const now = Date.now();
  if (now - connInfo.lastMessage < RATE_LIMIT_WINDOW_MS) {
    connInfo.messageCount++;
    if (connInfo.messageCount > RATE_LIMIT_MAX_BURST) {
      ws.close(1008, 'Rate limit exceeded');
      return true;
    }
  } else {
    // Enough quiet time has passed; reset the burst counter.
    connInfo.messageCount = 0;
  }
  connInfo.lastMessage = now;
  return false;
}

/**
 * Parses, validates, and dispatches a single JSON-RPC message, replying on `ws`.
 *
 * Error mapping follows JSON-RPC 2.0: -32700 (Parse error, id null) for
 * malformed JSON, -32603 (Internal error) with the request's id echoed back
 * for validation/execution failures. Never throws — all failures are
 * reported to the client on the socket.
 */
async function handleSingleMessage(ws: WebSocket, messageStr: string): Promise<void> {
  try {
    const parsedMessage: unknown = JSON.parse(messageStr);

    // Validate MCP request structure (throws on mismatch).
    const validatedRequest = MCPRequestSchema.parse(parsedMessage);

    const response = await handleMCPRequest(validatedRequest);

    // Only send a response if not null (notifications return null).
    if (response !== null) {
      console.log('Sending response:', JSON.stringify(response));
      ws.send(JSON.stringify(response));
    }
  } catch (messageError: unknown) {
    console.error('Error handling individual message:', messageError);

    if (messageError instanceof SyntaxError) {
      // JSON.parse failed — JSON-RPC "Parse error"; id must be null since
      // the request could not be read at all.
      const errorResponse = jsonrpc.error(null, {
        code: -32700,
        message: 'Parse error',
      });
      ws.send(JSON.stringify(errorResponse));
      return;
    }

    // Validation or execution error: echo the request id so the client can
    // correlate. FIX: use `??` (not `||`) so a legitimate id of 0 survives.
    let requestId: string | number | null = null;
    try {
      const reparsed = JSON.parse(messageStr);
      requestId = reparsed?.id ?? null;
    } catch {
      // Unreachable in practice: a parse failure is handled above.
    }
    const detail =
      messageError instanceof Error && messageError.message
        ? messageError.message
        : 'Internal error';
    const errorResponse = jsonrpc.error(requestId, {
      code: -32603,
      message: detail,
    });
    ws.send(JSON.stringify(errorResponse));
  }
}

server.on('connection', (ws: WebSocket) => {
  console.log('New MCP client connected');

  // Initialize connection tracking
  connections.set(ws, { lastMessage: Date.now(), messageCount: 0 });

  // Don't send welcome message - wait for client to initialize

  // Set up ping/pong keepalive
  const pingInterval = setInterval(() => {
    if (ws.readyState === WebSocket.OPEN) {
      ws.ping();
    }
  }, PING_INTERVAL_MS);

  ws.on('pong', () => {
    console.log('Received pong from client');
  });

  ws.on('message', async (data: Buffer) => {
    try {
      const rawMessage = data.toString();
      console.log('Received message:', rawMessage);

      // Drop the frame entirely if the client is flooding us.
      if (isRateLimited(ws)) return;

      // Handle multiple JSON-RPC messages in one frame - FIXED
      const messages = rawMessage
        .trim()
        .split('\n')
        .filter((line) => line.trim());
      for (const messageStr of messages) {
        await handleSingleMessage(ws, messageStr);
      }
    } catch (error: unknown) {
      console.error('Error handling message frame:', error);
      // Send generic error for frame-level issues
      const errorResponse = jsonrpc.error(null, {
        code: -32603,
        message: 'Internal server error',
      });
      ws.send(JSON.stringify(errorResponse));
    }
  });

  // Single cleanup path shared by close and error so the tracking entry and
  // keepalive timer can never leak.
  const cleanup = (): void => {
    connections.delete(ws);
    clearInterval(pingInterval);
  };

  ws.on('close', () => {
    console.log('Client disconnected');
    cleanup();
  });

  ws.on('error', (error) => {
    console.error('WebSocket error:', error);
    cleanup();
  });
});

server.on('error', (error) => {
  console.error('Server error:', error);
});

console.log(`MCP server ready at ws://0.0.0.0:${CONFIG.PORT}/mcp`);
console.log('Configured for:');
console.log(`- Ollama: ${CONFIG.OLLAMA_BASE_URL}`);
console.log(`- LM Studio: ${CONFIG.LMSTUDIO_BASE_URL}`);
console.log(`- Allowed executables: ${CONFIG.ALLOWED_EXECUTABLES.join(', ')}`);

// Start the web dashboard server
const webServer = new WebDashboardServer();
webServer.start(5000); // Start on port 5000 for the dashboard

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/bermingham85/mcp-puppet-pipeline'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.