Skip to main content
Glama

AI Conversation Logger

by fablefang
auto-log-hook.js (3.45 kB)
#!/usr/bin/env node
/**
 * Claude Code auto-log hook.
 *
 * Reads the hook payload (JSON) from stdin, extracts the most recent
 * complete user/assistant exchange from the transcript, and forwards a
 * trimmed summary to the AI-conversation-logger MCP server as a JSON-RPC
 * `tools/call` request written to the spawned server's stdin.
 *
 * All diagnostics go to stderr so they never pollute the hook protocol,
 * and every error is caught and logged (never rethrown) so a logging
 * failure can never disrupt Claude Code itself.
 *
 * FIX: the original called `require('child_process')` inside an ES module
 * (`import` / `import.meta.url` file), which throws `ReferenceError:
 * require is not defined`; the catch swallowed it, so the hook silently
 * never logged. `spawn` is now imported at the top.
 */
import { dirname } from 'path';
import { fileURLToPath } from 'url';
import { spawn } from 'child_process';

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

// Entry point of the MCP logger server. Overridable via env so the hook
// is not pinned to one machine's home directory; default preserves the
// original behavior.
const MCP_SERVER_PATH =
  process.env.AI_LOGGER_MCP_PATH ??
  '/Users/allanf/Projects/ai/mcp/ai-conversation-logger-mcp/dist/index.js';

/**
 * Collect the whole stdin stream as a UTF-8 string.
 * @returns {Promise<string>} everything written to this process's stdin
 */
async function readStdin() {
  let input = '';
  process.stdin.setEncoding('utf8');
  for await (const chunk of process.stdin) {
    input += chunk;
  }
  return input;
}

/**
 * Walk the transcript backwards and return the last complete exchange.
 * @param {Array<object>} transcript - hook transcript entries; each entry
 *   is expected to carry `type` ('user' | 'assistant'), `text`, and for
 *   assistant entries optionally `tool_uses` — TODO confirm schema.
 * @returns {{userInput: string, aiResponse: string, actions: string[]}}
 *   empty strings when no complete exchange exists
 */
function extractLastExchange(transcript) {
  let userInput = '';
  let aiResponse = '';
  let actions = [];

  for (let i = transcript.length - 1; i >= 0; i--) {
    const entry = transcript[i];
    if (entry.type === 'user' && !userInput) {
      userInput = entry.text || '';
    } else if (entry.type === 'assistant' && !aiResponse) {
      aiResponse = entry.text || '';
      // Record tool invocations as "actions". The label text is a runtime
      // string consumed by the logger and is kept exactly as-is.
      if (entry.tool_uses) {
        actions = entry.tool_uses.map((tool) => {
          const actionName = tool.name || 'unknown-tool';
          return `调用${actionName}工具`;
        });
      }
    }
    // Stop as soon as both halves of the exchange are found.
    if (userInput && aiResponse) break;
  }

  return { userInput, aiResponse, actions };
}

/**
 * Spawn the MCP server and send a `log_conversation` tools/call request.
 * Responses and errors are echoed to stderr; the call is fire-and-forget.
 * @param {object} logData - arguments for the log_conversation tool
 */
function sendToMcp(logData) {
  const mcpProcess = spawn('node', [MCP_SERVER_PATH], {
    stdio: ['pipe', 'pipe', 'pipe'],
  });

  const mcpRequest = {
    jsonrpc: '2.0',
    id: Date.now(),
    method: 'tools/call',
    params: { name: 'log_conversation', arguments: logData },
  };

  mcpProcess.stdin.write(JSON.stringify(mcpRequest) + '\n');
  mcpProcess.stdin.end();

  mcpProcess.stdout.on('data', (data) => {
    console.error('MCP response:', data.toString());
  });
  mcpProcess.stderr.on('data', (data) => {
    console.error('MCP error:', data.toString());
  });
}

async function main() {
  try {
    const hookInput = await readStdin();
    if (!hookInput.trim()) {
      console.error('No input received from hook');
      return;
    }

    const hookData = JSON.parse(hookInput);
    console.error('Hook received data:', JSON.stringify(hookData, null, 2));

    const transcript = hookData.transcript || [];
    const { userInput, aiResponse, actions } = extractLastExchange(transcript);

    if (!userInput || !aiResponse) {
      console.error('No complete conversation found in transcript');
      return;
    }

    // Save-only log record; text is truncated to the logger's size limits.
    const logData = {
      userInput: userInput.substring(0, 200).trim(),
      aiResponse: aiResponse.substring(0, 300).trim(),
      platform: 'claude-code',
      actions: actions.length > 0 ? actions : ['自动记录对话'],
      tags: ['auto-logged'], // `project` field is optional — auto-detected
    };
    console.error('Logging conversation:', JSON.stringify(logData, null, 2));

    sendToMcp(logData);
  } catch (error) {
    // Never throw: a failing hook must not disrupt Claude Code.
    console.error('Auto-log hook error:', error.message);
  }
}

main();

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/fablefang/ai-conversation-logger-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.