
MCP Complete Implementation Guide

by saksham0712
chatgpt-proxy.js (8.21 kB)
const express = require('express');
const { OpenAI } = require('openai');
const WebSocket = require('ws');
require('dotenv').config();

const app = express();

// CORS for browser requests
app.use((req, res, next) => {
  res.header('Access-Control-Allow-Origin', '*');
  res.header('Access-Control-Allow-Headers', 'Origin, X-Requested-With, Content-Type, Accept');
  res.header('Access-Control-Allow-Methods', 'GET, POST, OPTIONS');
  if (req.method === 'OPTIONS') {
    res.sendStatus(200);
  } else {
    next();
  }
});

app.use(express.json());

const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
});

// MCP tools converted to OpenAI function format
const tools = [
  {
    type: 'function',
    function: {
      name: 'read_file',
      description: 'Read the contents of a file',
      parameters: {
        type: 'object',
        properties: {
          path: {
            type: 'string',
            description: 'The path to the file to read',
          },
        },
        required: ['path'],
      },
    },
  },
  {
    type: 'function',
    function: {
      name: 'write_file',
      description: 'Write content to a file',
      parameters: {
        type: 'object',
        properties: {
          path: {
            type: 'string',
            description: 'The path to the file to write',
          },
          content: {
            type: 'string',
            description: 'The content to write to the file',
          },
        },
        required: ['path', 'content'],
      },
    },
  },
  {
    type: 'function',
    function: {
      name: 'list_directory',
      description: 'List the contents of a directory',
      parameters: {
        type: 'object',
        properties: {
          path: {
            type: 'string',
            description: 'The path to the directory to list',
          },
        },
        required: ['path'],
      },
    },
  },
  {
    type: 'function',
    function: {
      name: 'get_system_info',
      description: 'Get system information',
      parameters: {
        type: 'object',
        properties: {},
      },
    },
  },
  {
    type: 'function',
    function: {
      name: 'execute_command',
      description: 'Execute a system command (use with caution)',
      parameters: {
        type: 'object',
        properties: {
          command: {
            type: 'string',
            description: 'The command to execute',
          },
          cwd: {
            type: 'string',
            description: 'Working directory for the command',
          },
        },
        required: ['command'],
      },
    },
  },
  {
    type: 'function',
    function: {
      name: 'fetch_url',
      description: 'Fetch content from a URL',
      parameters: {
        type: 'object',
        properties: {
          url: {
            type: 'string',
            description: 'The URL to fetch',
          },
          method: {
            type: 'string',
            description: 'HTTP method (GET, POST, etc.)',
            default: 'GET',
          },
          headers: {
            type: 'object',
            description: 'HTTP headers to include',
          },
        },
        required: ['url'],
      },
    },
  },
];

// Chat endpoint
app.post('/chat', async (req, res) => {
  try {
    console.log('šŸ“Ø Received chat request');
    const { messages } = req.body;

    if (!messages || !Array.isArray(messages)) {
      return res.status(400).json({ error: 'Invalid messages format' });
    }

    console.log('šŸ¤– Calling OpenAI API...');
    const completion = await openai.chat.completions.create({
      model: 'gpt-3.5-turbo',
      messages,
      tools,
      tool_choice: 'auto',
    });

    const responseMessage = completion.choices[0].message;
    console.log('āœ… OpenAI API response received');

    // Handle tool calls
    if (responseMessage.tool_calls) {
      console.log(`šŸ”§ Processing ${responseMessage.tool_calls.length} tool calls`);
      const toolResults = [];

      for (const toolCall of responseMessage.tool_calls) {
        const toolName = toolCall.function.name;
        const toolArgs = JSON.parse(toolCall.function.arguments);

        console.log(`šŸ› ļø Executing tool: ${toolName}`, toolArgs);

        // Call MCP tool
        const result = await callMCPTool(toolName, toolArgs);
        console.log(`āœ… Tool ${toolName} completed:`, result.success ? 'SUCCESS' : 'FAILED');

        toolResults.push({
          tool_call_id: toolCall.id,
          role: 'tool',
          content: JSON.stringify(result),
        });
      }

      console.log('šŸ¤– Getting final response from OpenAI...');
      // Get final response with tool results
      const finalCompletion = await openai.chat.completions.create({
        model: 'gpt-3.5-turbo',
        messages: [
          ...messages,
          responseMessage,
          ...toolResults,
        ],
      });

      console.log('āœ… Final response ready');
      res.json(finalCompletion.choices[0].message);
    } else {
      console.log('šŸ’¬ No tool calls, returning direct response');
      res.json(responseMessage);
    }
  } catch (error) {
    console.error('āŒ Chat error:', error.message);
    console.error('Stack:', error.stack);
    res.status(500).json({ error: error.message, details: error.stack });
  }
});

// Function to call MCP server tools directly
async function callMCPTool(toolName, args) {
  const fs = require('fs').promises;
  const path = require('path');
  const os = require('os');
  const { exec } = require('child_process');
  const { promisify } = require('util');
  const execAsync = promisify(exec);
  const fetch = require('node-fetch');

  try {
    switch (toolName) {
      case 'read_file':
        const content = await fs.readFile(args.path, 'utf-8');
        return { success: true, content };

      case 'write_file':
        await fs.writeFile(args.path, args.content, 'utf-8');
        return { success: true, message: `Successfully wrote to ${args.path}` };

      case 'list_directory':
        const items = await fs.readdir(args.path, { withFileTypes: true });
        const listing = items.map(item => ({
          name: item.name,
          type: item.isDirectory() ? 'directory' : 'file',
          path: path.join(args.path, item.name),
        }));
        return { success: true, listing };

      case 'get_system_info':
        const info = {
          platform: os.platform(),
          arch: os.arch(),
          hostname: os.hostname(),
          cpus: os.cpus().length,
          totalMemory: Math.round(os.totalmem() / 1024 / 1024 / 1024) + ' GB',
          freeMemory: Math.round(os.freemem() / 1024 / 1024 / 1024) + ' GB',
          uptime: Math.round(os.uptime() / 3600) + ' hours',
          nodeVersion: process.version,
          currentDirectory: process.cwd(),
        };
        return { success: true, info };

      case 'execute_command':
        const { stdout, stderr } = await execAsync(args.command, { cwd: args.cwd || process.cwd() });
        return { success: true, stdout, stderr, command: args.command };

      case 'fetch_url':
        const response = await fetch(args.url, {
          method: args.method || 'GET',
          headers: args.headers || {},
        });
        const responseContent = await response.text();
        return {
          success: true,
          status: response.status,
          statusText: response.statusText,
          headers: Object.fromEntries(response.headers),
          content: responseContent,
        };

      default:
        throw new Error(`Unknown tool: ${toolName}`);
    }
  } catch (error) {
    return { success: false, error: error.message };
  }
}

// Health check endpoint
app.get('/health', (req, res) => {
  res.json({ status: 'healthy', timestamp: new Date().toISOString() });
});

const PORT = process.env.CHATGPT_PROXY_PORT || 3001;
app.listen(PORT, () => {
  console.log(`ChatGPT Proxy Server running on port ${PORT}`);
  console.log(`Health check: http://localhost:${PORT}/health`);
  console.log(`Chat interface: file://C:\\Users\\Saksham Verma\\MCP\\chatgpt-interface.html`);
});
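
For reference, this is roughly how a browser page such as chatgpt-interface.html could call the proxy's /chat endpoint. This is a minimal sketch, not part of the file above: it assumes the proxy is running on its default port 3001, and the askProxy helper name and the system prompt text are illustrative only. The request body uses the same OpenAI-style messages array the proxy forwards, and the response is the final assistant message object.

// Minimal sketch of a browser-side call to the proxy's /chat endpoint.
// Assumes the proxy is running locally on its default port (3001);
// askProxy and the system prompt are hypothetical, for illustration.
async function askProxy(userText) {
  const response = await fetch('http://localhost:3001/chat', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      messages: [
        { role: 'system', content: 'You are a helpful assistant with access to local tools.' },
        { role: 'user', content: userText },
      ],
    }),
  });
  if (!response.ok) {
    throw new Error(`Proxy returned ${response.status}`);
  }
  const message = await response.json(); // final assistant message: { role, content, ... }
  return message.content;
}

// Example usage:
// askProxy('List the files in the current directory').then(console.log).catch(console.error);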

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/saksham0712/MCP'
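
The same endpoint can also be queried from Node.js. The sketch below assumes Node.js 18+ (built-in fetch); the response body is JSON metadata about this server, with the exact fields defined by the Glama API.

// Sketch: fetch this server's metadata from the Glama MCP directory API (Node.js 18+).
fetch('https://glama.ai/api/mcp/v1/servers/saksham0712/MCP')
  .then((res) => {
    if (!res.ok) throw new Error(`Request failed: ${res.status}`);
    return res.json();
  })
  .then((server) => console.log(server))
  .catch(console.error);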

If you have feedback or need assistance with the MCP directory API, please join our Discord server.