Skip to main content
Glama

OpenAI MCP Server

by bhjo0930
server.ts (4.48 kB)
import { Server } from '@modelcontextprotocol/sdk/server/index.js'; import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js'; import { CallToolRequestSchema, ErrorCode, ListToolsRequestSchema, McpError } from '@modelcontextprotocol/sdk/types.js'; import { z } from 'zod'; import { config, validateConfig } from './config.js'; import { OpenAIClient } from './openai-client.js'; import { PromptOptimizer } from './prompt-optimizer.js'; import { BaseTool, CallGPT5Tool, ListModelsTool, TokenAnalysisTool, ContextOptimizerTool, BatchProcessorTool } from './tools/index.js'; export class OpenAIStdioMCPServer { private server: Server; private openaiClient: OpenAIClient; private promptOptimizer: PromptOptimizer; private tools: Map<string, BaseTool> = new Map(); constructor() { this.server = new Server( { name: config.server.name, version: config.server.version, }, { capabilities: { tools: {}, }, } ); this.openaiClient = new OpenAIClient(); this.promptOptimizer = new PromptOptimizer(); this.initializeTools(); this.setupToolHandlers(); this.setupErrorHandling(); } private initializeTools(): void { // Initialize all tools const callGpt5Tool = new CallGPT5Tool(this.openaiClient, this.promptOptimizer); const listModelsTools = new ListModelsTool(this.openaiClient); const tokenAnalysisTool = new TokenAnalysisTool(); const contextOptimizerTool = new ContextOptimizerTool(); const batchProcessorTool = new BatchProcessorTool(this.openaiClient, this.promptOptimizer); // Register tools this.tools.set(callGpt5Tool.definition.name, callGpt5Tool); this.tools.set(listModelsTools.definition.name, listModelsTools); this.tools.set(tokenAnalysisTool.definition.name, tokenAnalysisTool); this.tools.set(contextOptimizerTool.definition.name, contextOptimizerTool); this.tools.set(batchProcessorTool.definition.name, batchProcessorTool); } private setupToolHandlers(): void { this.server.setRequestHandler(ListToolsRequestSchema, async () => { const tools = 
Array.from(this.tools.values()).map(tool => ({ name: tool.definition.name, description: tool.definition.description, inputSchema: tool.definition.inputSchema, })); return { tools }; }); this.server.setRequestHandler(CallToolRequestSchema, async (request) => { const tool = this.tools.get(request.params.name); if (!tool) { throw new McpError( ErrorCode.MethodNotFound, `Unknown tool: ${request.params.name}` ); } try { const args = tool.definition.inputSchema.parse(request.params.arguments); const response = await tool.execute(args); return { content: response.content, isError: false }; } catch (error) { if (error instanceof z.ZodError) { throw new McpError( ErrorCode.InvalidParams, `Invalid arguments: ${error.errors.map(e => e.message).join(', ')}` ); } const errorMessage = error instanceof Error ? error.message : 'Unknown error'; throw new McpError(ErrorCode.InternalError, errorMessage); } }); } private setupErrorHandling(): void { this.server.onerror = (error) => { console.error('[MCP Error]', error); }; process.on('SIGINT', async () => { console.error('Shutting down server...'); await this.server.close(); process.exit(0); }); } async start(): Promise<void> { validateConfig(); if (config.server.debug) { console.error('Testing OpenAI connection...'); const connectionOk = await this.openaiClient.testConnection(); if (!connectionOk) { throw new Error('Failed to connect to OpenAI API'); } console.error('OpenAI connection successful'); } const transport = new StdioServerTransport(); await this.server.connect(transport); console.error(`OpenAI MCP Server started (${config.server.name} v${config.server.version})`); console.error(`Using model: ${config.openai.model}`); } } if (require.main === module) { const server = new OpenAIStdioMCPServer(); server.start().catch((error) => { console.error('Failed to start server:', error); process.exit(1); }); }

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/bhjo0930/openai_mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.