egw-chat-cli.js
#!/usr/bin/env node
const { spawn } = require('child_process');
const readline = require('readline');
require('dotenv').config();

// Interactive CLI that routes user questions to the EGW Writings MCP server
// (spawned via Smithery) and uses an OpenAI-compatible LLM to pick tools and
// format the answers.
class EGWChatCLI {
  constructor() {
    this.mcpProcess = null;
    this.isConnected = false;
    this.connectionRetries = 0;
    this.maxRetries = 3;
    this.llmApiKey = process.env.LLM_API_KEY;
    this.llmBaseUrl = process.env.LLM_BASE_URL || 'https://api.openai.com/v1';
    this.llmModel = process.env.LLM_MODEL || 'gpt-3.5-turbo';
    if (!this.llmApiKey) {
      this.promptForApiKey();
      return;
    }
    this.setupMCPConnection();
    this.setupChatInterface();
  }

  // Ask for an API key interactively when LLM_API_KEY is not set.
  promptForApiKey() {
    console.log('\n🔐 DeepSeek API Key Required');
    console.log('┌'.repeat(50));
    console.log('💡 To use EGW Chat CLI, you need a DeepSeek API key');
    console.log('📝 Get your free API key at: https://platform.deepseek.com/');
    console.log('🔑 Your API key is used only for this session and not stored');
    console.log('└'.repeat(50));
    const rl = readline.createInterface({
      input: process.stdin,
      output: process.stdout,
      prompt: '🔑 Enter your DeepSeek API key: '
    });
    rl.question('', (apiKey) => {
      if (!apiKey || apiKey.trim() === '') {
        console.error('❌ No API key provided. Exiting...');
        process.exit(1);
      }
      this.llmApiKey = apiKey.trim();
      rl.close();
      console.log('✅ API key accepted. Starting chat interface...\n');
      this.setupMCPConnection();
      this.setupChatInterface();
    });
  }

  // Spawn the deployed MCP server via npx/Smithery and wire up error handling.
  setupMCPConnection() {
    console.log('🔗 Connecting to EGW MCP Server...');

    // Connect to our deployed MCP server. npx has a different executable name on Windows.
    const npxCommand = process.platform === 'win32' ? 'npx.cmd' : 'npx';

    // Check that npx is available in PATH (lookup command differs by platform).
    const { execSync } = require('child_process');
    try {
      const lookupCommand = process.platform === 'win32' ? 'where npx' : 'which npx';
      execSync(lookupCommand, { stdio: 'ignore' });
    } catch (error) {
      console.log('⚠️ npx not found. Please install Node.js with npm or ensure npx is in your PATH');
      console.log('💡 You can also try: npm install -g @smithery/cli');
      process.exit(1);
    }

    this.mcpProcess = spawn(npxCommand, [
      '-y',
      '@smithery/cli@latest',
      'run',
      '@pythondev-pro/egw_writings_mcp_server',
      '--key',
      '9784f4a0-3ad9-4759-a87f-92e7f1823417'
    ], {
      stdio: ['pipe', 'pipe', 'pipe'],
      shell: true,
      env: {
        ...process.env,
        NODE_OPTIONS: '--max-old-space-size=4096' // Increase memory limit
      }
    });

    this.mcpProcess.on('error', (error) => {
      console.error('❌ Failed to connect to MCP server:', error.message);
      if (this.connectionRetries < this.maxRetries) {
        console.log(`🔄 Retrying connection... (${this.connectionRetries + 1}/${this.maxRetries})`);
        this.connectionRetries++;
        setTimeout(() => this.setupMCPConnection(), 3000);
      } else {
        console.error('❌ Max connection attempts reached. Please check your internet connection and try again.');
        process.exit(1);
      }
    });

    this.mcpProcess.on('close', (code) => {
      if (code !== 0) {
        console.error('❌ MCP server disconnected');
        console.log('🔄 You can try again or type "exit" to quit');
        this.isConnected = false;
      }
    });

    // Add stderr handling for better debugging
    this.mcpProcess.stderr.on('data', (data) => {
      const stderr = data.toString();
      if (stderr.includes('timeout') || stderr.includes('TimeoutError')) {
        console.error('⏰ Connection timeout detected, retrying...');
        this.isConnected = false;
      } else if (!stderr.includes('Debug:')) {
        console.error('🔍 MCP Debug:', stderr);
      }
    });

    // Initialize MCP connection with longer delay and retry logic
    this.initializeMCPWithRetry();
  }

  // Send the MCP initialize/tools-list handshake, parse newline-delimited
  // JSON-RPC responses from stdout, and retry if the connection stalls.
  initializeMCPWithRetry() {
    const initialize = () => {
      if (!this.mcpProcess || this.mcpProcess.killed) {
        return;
      }
      // Send initialize request
      this.sendMCPRequest({
        jsonrpc: '2.0',
        id: 1,
        method: 'initialize',
        params: {
          protocolVersion: '2024-11-05',
          capabilities: {},
          clientInfo: {
            name: 'egw-chat-cli',
            version: '1.0.0'
          }
        }
      });
      // Wait a bit, then get available tools
      setTimeout(() => {
        if (!this.mcpProcess || this.mcpProcess.killed) {
          return;
        }
        this.sendMCPRequest({
          jsonrpc: '2.0',
          id: 2,
          method: 'tools/list'
        });
      }, 2000);
    };

    // Initial connection attempt
    setTimeout(initialize, 2000);

    // Set up response handler
    if (this.mcpProcess) {
      let mcpBuffer = '';
      this.mcpProcess.stdout.on('data', (data) => {
        mcpBuffer += data.toString();
        const lines = mcpBuffer.split('\n');
        mcpBuffer = lines.pop() || '';
        lines.forEach(line => {
          if (line.trim()) {
            try {
              const response = JSON.parse(line);
              this.handleMCPResponse(response);
            } catch (error) {
              // Ignore parsing errors for now
              if (!line.includes('Debug:')) {
                console.error('🔍 Parse error:', error.message);
              }
            }
          }
        });
      });
    }

    // Set timeout for connection
    setTimeout(() => {
      if (!this.isConnected && this.mcpProcess && !this.mcpProcess.killed) {
        console.log('⚠️ Connection taking longer than expected. Retrying...');
        if (this.connectionRetries < this.maxRetries) {
          this.connectionRetries++;
          this.mcpProcess.kill();
          setTimeout(() => this.setupMCPConnection(), 2000);
        } else {
          console.error('❌ Unable to establish MCP connection. Please try again later.');
        }
      }
    }, 15000); // 15 second timeout
  }

  // Write a JSON-RPC request to the MCP server's stdin (newline-delimited).
  sendMCPRequest(request) {
    if (this.mcpProcess && this.mcpProcess.stdin && !this.mcpProcess.stdin.destroyed) {
      try {
        this.mcpProcess.stdin.write(JSON.stringify(request) + '\n');
      } catch (error) {
        console.error('❌ Failed to send MCP request:', error.message);
        this.isConnected = false;
      }
    } else {
      console.error('❌ MCP process not available for writing');
      this.isConnected = false;
    }
  }

  // Read-eval-print loop for the chat session.
  setupChatInterface() {
    const rl = readline.createInterface({
      input: process.stdin,
      output: process.stdout,
      prompt: '📚 EGW Chat > '
    });

    console.log('\n🎉 Welcome to EGW Writings Chat CLI!');
    console.log('💬 Ask questions about Ellen G. White writings, search for content, or get book information');
    console.log('🚪 Type "exit", "quit", or press Ctrl+C to leave\n');
    rl.prompt();

    rl.on('line', async (input) => {
      if (input.toLowerCase() === 'exit' || input.toLowerCase() === 'quit') {
        console.log('👋 Goodbye!');
        if (this.mcpProcess) {
          this.mcpProcess.kill();
        }
        rl.close();
        process.exit(0);
      }
      if (input.trim()) {
        await this.processUserMessage(input.trim());
      }
      rl.prompt();
    });

    rl.on('close', () => {
      if (this.mcpProcess) {
        this.mcpProcess.kill();
      }
      process.exit(0);
    });
  }

  // Handle responses from the MCP handshake; the tools/list reply (id 2)
  // marks the connection as ready.
  async handleMCPResponse(response) {
    // Store tools information when received
    if (response.id === 2 && response.result && response.result.tools) {
      this.availableTools = response.result.tools;
      this.isConnected = true;
      console.log(`✅ Connected! Available tools: ${this.availableTools.map(t => t.name).join(', ')}\n`);
    }
  }

  // Main pipeline for a user message: choose a tool, call it, format the result.
  async processUserMessage(userMessage) {
    try {
      console.log('🤔 Processing your query...');

      // Check if MCP is connected
      if (!this.isConnected) {
        console.log('⚠️ MCP server not connected. Please wait a moment and try again.');
        // Try to reconnect if disconnected
        if (this.connectionRetries < this.maxRetries) {
          console.log('🔄 Attempting to reconnect...');
          this.connectionRetries++;
          setTimeout(() => this.setupMCPConnection(), 2000);
        }
        return;
      }

      // First, let LLM analyze what MCP tool to use
      const toolChoice = await this.chooseToolWithLLM(userMessage);

      if (toolChoice.tool) {
        console.log(`🔍 Using ${toolChoice.tool} tool to search EGW writings...`);
        // Execute chosen MCP tool
        const result = await this.executeMCPTool(toolChoice.tool, toolChoice.params);
        // Let LLM format response based on tool results
        const formattedResponse = await this.formatResponseWithLLM(userMessage, result, toolChoice.tool);
        console.log('\n📖 Answer:');
        console.log('─'.repeat(50));
        console.log(formattedResponse);
        console.log('─'.repeat(50) + '\n');
      } else {
        // Direct chat response without tools
        const response = await this.callLLM(userMessage);
        console.log('\n💬 Response:');
        console.log('─'.repeat(50));
        console.log(response);
        console.log('─'.repeat(50) + '\n');
      }
    } catch (error) {
      console.error('❌ Error processing your request:', error.message);
      this.isConnected = false; // Mark as disconnected on error
      // Try to reconnect on error
      if (this.connectionRetries < this.maxRetries) {
        console.log('🔄 Attempting to reconnect...');
        this.connectionRetries++;
        setTimeout(() => this.setupMCPConnection(), 3000);
      }
    }
  }

  // Ask the LLM to pick an MCP tool and parameters for the user's message.
  async chooseToolWithLLM(userMessage) {
    const systemPrompt = `You are an assistant that helps users interact with the EGW (Ellen G. White) writings database.

Available MCP tools:
1. search_local - Search EGW writings database for specific content (basic search)
2. get_local_book - Get information about a specific book by ID
3. get_local_content - Get content from a specific book (with pagination)
4. list_local_books - List all available books
5. get_database_stats - Get database statistics
6. find_egw_quotes - Find specific EGW quotes containing a search term with proper filtering for genuine EGW content (BEST for finding quotes)

Analyze the user's message and determine if a tool should be used and which one.

IMPORTANT: For quote searches or finding specific EGW writings, prefer "find_egw_quotes" over "search_local" as it provides better filtering and formatting.

SMART SEARCH PRIORITY: When the user asks for quotes on a topic, prioritize the most likely search term:
- Single words: "love" -> search for "love" first, then "loving", then "charity"
- Single words: "faith" -> search for "faith" first, then "faithful", then "belief"
- Single words: "prayer" -> search for "prayer" first, then "praying", then "pray"
- Single words: "hope" -> search for "hope" first, then "hopeful", then "trust"
- Multi-word phrases: "sunday law" -> search for the exact phrase "sunday law" first
- Multi-word phrases: use exact phrase matching for compound terms

For multi-word queries, use the exact phrase as the primary search term. If no results are found for the primary term, the MCP tool will automatically try variations.

Respond with ONLY a JSON object in this format:
{
  "tool": "tool_name or null",
  "params": {"param1": "value1", "param2": "value2"} or {}
}

Examples:
- "find quotes about prayer" -> {"tool": "find_egw_quotes", "params": {"query": "prayer", "numQuotes": 3}}
- "quotes on faith" -> {"tool": "find_egw_quotes", "params": {"query": "faith", "numQuotes": 3}}
- "what did EGW say about faith" -> {"tool": "find_egw_quotes", "params": {"query": "faith", "numQuotes": 3}}
- "tell me about book 5" -> {"tool": "get_local_book", "params": {"bookId": 5}}
- "list all books" -> {"tool": "list_local_books", "params": {"limit": 20}}
- "how many books do you have" -> {"tool": "get_database_stats", "params": {}}
- "hello" -> {"tool": null, "params": {}}
- general questions about EGW -> {"tool": null, "params": {}}`;

    try {
      const response = await this.callLLMAPI(systemPrompt, userMessage);
      return JSON.parse(response);
    } catch (error) {
      console.log('⚠️ Could not determine tool, using direct chat');
      return { tool: null, params: {} };
    }
  }

  // Call an MCP tool and resolve with its result, matching the JSON-RPC id.
  async executeMCPTool(toolName, params) {
    return new Promise((resolve, reject) => {
      const requestId = Date.now();
      const request = {
        jsonrpc: '2.0',
        id: requestId,
        method: 'tools/call',
        params: {
          name: toolName,
          arguments: params
        }
      };

      let responseHandler;
      const timeout = setTimeout(() => {
        if (responseHandler) {
          this.mcpProcess.stdout.off('data', responseHandler);
        }
        reject(new Error('Tool execution timeout (30 seconds)'));
      }, 30000); // Reduced timeout to 30 seconds

      let responseBuffer = '';
      responseHandler = (data) => {
        responseBuffer += data.toString();
        const responses = responseBuffer.split('\n').filter(line => line.trim());
        for (const line of responses) {
          try {
            const response = JSON.parse(line);
            if (response.id === requestId) {
              clearTimeout(timeout);
              this.mcpProcess.stdout.off('data', responseHandler);
              if (response.error) {
                reject(new Error(response.error.message));
              } else {
                console.log('🔍 MCP Response received successfully');
                resolve(response.result);
              }
              break;
            }
          } catch (error) {
            // Continue parsing, don't log every parse error
          }
        }
      };

      this.mcpProcess.stdout.on('data', responseHandler);
      this.sendMCPRequest(request);
    });
  }

  // Turn a tool result into the text shown to the user.
  async formatResponseWithLLM(userMessage, toolResult, toolUsed) {
    // CRITICAL: For find_egw_quotes, ALWAYS display raw formatted quotes without ANY LLM processing
    if (toolUsed === 'find_egw_quotes') {
      // Handle different response structures from MCP
      let actualResult = toolResult;
      // If result is wrapped in content array (MCP response format)
      if (toolResult.content && Array.isArray(toolResult.content) && toolResult.content[0] && toolResult.content[0].text) {
        try {
          actualResult = JSON.parse(toolResult.content[0].text);
        } catch (parseError) {
          console.log('DEBUG: Could not parse nested content, using original');
        }
      }
      if (actualResult.success && actualResult.formatted_output) {
        // Return the exact formatted output from the database tool - NO SUMMARIZATION, NO COMMENTS
        return actualResult.formatted_output;
      } else if (actualResult.success === false) {
        // Return the error message directly without LLM processing
        return `❌ ${actualResult.message || 'No quotes found'}`;
      } else {
        // Fallback for unexpected result format
        return JSON.stringify(actualResult, null, 2);
      }
    }

    // For all other tools, use LLM for formatting
    const systemPrompt = `You are a helpful assistant that provides information about Ellen G. White's writings.

The user asked: "${userMessage}"

Tool used: ${toolUsed}
Tool result: ${JSON.stringify(toolResult)}

Please provide a helpful, friendly response based on the tool results.
If search results were returned, summarize the findings and provide context.
If book information was requested, present it clearly.
If statistics were requested, explain what they mean.
Be conversational and helpful. Focus on spiritual and practical insights from EGW's writings.`;

    return await this.callLLMAPI(systemPrompt, '');
  }

  // Low-level chat-completion call against the configured OpenAI-compatible endpoint.
  async callLLMAPI(systemPrompt, userMessage) {
    // Use OpenAI SDK for Z.AI compatibility
    const OpenAI = require('openai');
    const client = new OpenAI({
      apiKey: this.llmApiKey,
      baseURL: this.llmBaseUrl
    });

    const messages = [
      { role: 'system', content: systemPrompt },
      ...(userMessage ? [{ role: 'user', content: userMessage }] : [])
    ];

    try {
      const completion = await client.chat.completions.create({
        model: this.llmModel,
        messages: messages,
        max_tokens: 1000,
        temperature: 0.7
      });
      return completion.choices[0].message.content;
    } catch (error) {
      throw new Error(`LLM API error: ${error.message}`);
    }
  }

  // Plain chat response for messages that don't need an MCP tool.
  async callLLM(message) {
    return await this.callLLMAPI(
      'You are a helpful assistant knowledgeable about Ellen G. White and her writings. Provide helpful, accurate information about her spiritual insights, books, and teachings.',
      message
    );
  }
}

// Handle graceful shutdown
process.on('SIGINT', () => {
  console.log('\n👋 Goodbye!');
  process.exit(0);
});

// Start CLI
if (require.main === module) {
  new EGWChatCLI();
}

module.exports = EGWChatCLI;
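A minimal local-setup sketch, assuming the script is saved as egw-chat-cli.js next to a .env file and that the openai and dotenv packages it requires are installed. The variable names come from the code above; the values shown are placeholders, and only LLM_API_KEY is required (the other two fall back to the constructor defaults):

# .env -- placeholder values, substitute your own provider settings
LLM_API_KEY=sk-your-key-here
LLM_BASE_URL=https://api.openai.com/v1
LLM_MODEL=gpt-3.5-turbo

# install the runtime dependencies, then start the CLI
npm install openai dotenv
node egw-chat-cli.js

If LLM_API_KEY is not set, the CLI instead prompts for a key interactively before starting.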

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/pythondev-pro/egw_writings_mcp_server'
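The same lookup from Node, as a rough sketch assuming a runtime with the global fetch API (Node 18+) and a JSON response body; the exact fields returned are not spelled out here:

// Query the Glama MCP directory API for this server's entry (Node 18+ global fetch assumed)
const url = 'https://glama.ai/api/mcp/v1/servers/pythondev-pro/egw_writings_mcp_server';

fetch(url)
  .then((res) => {
    if (!res.ok) throw new Error(`HTTP ${res.status}`);
    return res.json();
  })
  .then((server) => console.log(server))
  .catch((err) => console.error('Directory API request failed:', err.message));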

If you have feedback or need assistance with the MCP directory API, please join our Discord server.