
MySQL MCP Server

by adamosk
logger.js (8.92 kB)
import fs from 'fs';
import path from 'path';
import zlib from 'zlib';
import os from 'os';

// Structured logger with:
// - JSON log lines (optional pretty)
// - Levels
// - Size-based rotation
// - Time-based rotation (interval or boundary)
// - Retention (max rotated files)
// - Optional gzip compression of rotated logs
// - Optional stderr mirroring for human visibility (info and higher)
//
// Environment variables:
// MCP_LOG_FILE / MYSQL_MCP_LOG_FILE     -> log file path (default: %TEMP%/mysql-mcp-server-logs/mysql-mcp-server.log)
// MCP_LOG_LEVEL / MYSQL_MCP_LOG_LEVEL   -> error|warn|info|debug|trace (default: info)
// MCP_LOG_STDERR                        -> 'false' to disable mirror (default mirror info+)
// MCP_LOG_MAX_SIZE / MYSQL_MCP_LOG_MAX_SIZE -> max size in bytes for rotation (alias: MCP_LOG_ROTATE_SIZE)
// MCP_LOG_ROTATE_SIZE                   -> same as above
// MCP_LOG_ROTATE_INTERVAL               -> e.g. '1d','24h','6h','15m','60s','daily','midnight','hourly'
// MCP_LOG_MAX_FILES                     -> max rotated files to keep (default 10 when rotation enabled)
// MCP_LOG_COMPRESS                      -> 'true' to gzip rotated logs
// MCP_LOG_JSON_PRETTY                   -> 'true' for pretty JSON
// MCP_LOG_SYNC_FLUSH                    -> 'true' to flush synchronously (performance hit)
// MCP_LOG_DISABLE_GLOBAL_HANDLERS       -> disable uncaught/unhandled capture
//
// Rotated filename pattern: <logfile>.<ISO-8601 timestamp with ':' and '.' replaced by '-'>[.gz]

const levelNames = ['error', 'warn', 'info', 'debug', 'trace'];
const levelMap = levelNames.reduce((acc, name, idx) => { acc[name] = idx; return acc; }, {});
const configuredLevelName = (process.env.MCP_LOG_LEVEL || process.env.MYSQL_MCP_LOG_LEVEL || 'info').toLowerCase();
const activeLevel = levelMap[configuredLevelName] ?? levelMap['info'];

// New: default to user temp directory (avoids cluttering home directory when run globally)
const defaultLogDir = path.join(os.tmpdir(), 'mysql-mcp-server-logs');
const logFileRaw = process.env.MCP_LOG_FILE || process.env.MYSQL_MCP_LOG_FILE || path.join(defaultLogDir, 'mysql-mcp-server.log');
const pretty = (process.env.MCP_LOG_JSON_PRETTY === 'true');
const mirrorToStderr = process.env.MCP_LOG_STDERR !== 'false';
const maxSize = parseInt(process.env.MCP_LOG_MAX_SIZE || process.env.MYSQL_MCP_LOG_MAX_SIZE || process.env.MCP_LOG_ROTATE_SIZE || '0', 10);
const rotateIntervalSpec = process.env.MCP_LOG_ROTATE_INTERVAL || '';
const retentionMaxFiles = parseInt(process.env.MCP_LOG_MAX_FILES || '10', 10);
const compressRotated = process.env.MCP_LOG_COMPRESS === 'true';
const syncFlush = process.env.MCP_LOG_SYNC_FLUSH === 'true';

const logFilePath = path.isAbsolute(logFileRaw) ? logFileRaw : path.resolve(process.cwd(), logFileRaw);
fs.mkdirSync(path.dirname(logFilePath), { recursive: true });

let stream = fs.createWriteStream(logFilePath, { flags: 'a' });
let writeCount = 0;
let nextRotationTime = computeNextRotationTime(rotateIntervalSpec);
let rotating = false; // guard

function timestamp() { return new Date().toISOString(); }

function computeNextRotationTime(spec) {
  if (!spec) return 0; // disabled
  const now = new Date();
  const lower = spec.toLowerCase();
  if (lower === 'daily' || lower === 'midnight') {
    const local = new Date();
    local.setHours(24, 0, 0, 0); // next local midnight
    return local.getTime();
  }
  if (lower === 'hourly') {
    const h = new Date(now);
    h.setMinutes(0, 0, 0);
    h.setHours(h.getHours() + 1);
    return h.getTime();
  }
  const match = lower.match(/^(\d+)(d|h|m|s)$/);
  if (match) {
    const value = parseInt(match[1], 10);
    const unit = match[2];
    const mult = unit === 'd' ? 86400000 : unit === 'h' ? 3600000 : unit === 'm' ? 60000 : 1000;
    return now.getTime() + value * mult;
  }
  return 0; // fallback: disable
}

function shouldRotateForTime() {
  return nextRotationTime > 0 && Date.now() >= nextRotationTime;
}

function formatRecord(level, msg, meta) {
  const rec = { ts: timestamp(), level, msg };
  if (meta && Object.keys(meta).length) rec.meta = meta;
  return pretty ? JSON.stringify(rec, null, 2) : JSON.stringify(rec);
}

function safeMetaPreview(meta) {
  try { return JSON.stringify(meta); } catch { return '[unserializable meta]'; }
}

function log(level, msg, meta = undefined) {
  if (levelMap[level] > activeLevel) return;
  try {
    if (maxSize > 0) { maybeRotateForSize(); }
    if (shouldRotateForTime()) { rotate('time'); }
    const line = formatRecord(level, msg, meta) + '\n';
    if (syncFlush) {
      try { fs.appendFileSync(logFilePath, line); } catch (e) { /* ignore */ }
    } else {
      stream.write(line);
    }
  } catch (e) {
    try { console.error('LOGGER_FAIL', e.message); } catch (_) {}
  }
  if (mirrorToStderr && levelMap[level] <= levelMap['info']) {
    const metaStr = meta ? ' ' + safeMetaPreview(meta) : '';
    console.error(`[${level.toUpperCase()}] ${msg}${metaStr}`);
  }
}

function maybeRotateForSize() {
  if (!maxSize || maxSize <= 0) return;
  if ((++writeCount % 20) !== 0) return; // throttle stat calls
  try {
    const { size } = fs.statSync(logFilePath);
    if (size >= maxSize) { rotate('size'); }
  } catch (_) { /* ignore */ }
}

function rotate(reason) {
  if (rotating) return;
  rotating = true;
  try { stream.end(); } catch (_) {}
  const ts = new Date().toISOString().replace(/[:.]/g, '-');
  const rotatedBase = `${logFilePath}.${ts}`;
  try {
    fs.renameSync(logFilePath, rotatedBase);
  } catch (err) {
    rotating = false;
    stream = fs.createWriteStream(logFilePath, { flags: 'a' });
    log('warn', 'Rotation rename failed', { error: err.message, reason });
    return;
  }
  stream = fs.createWriteStream(logFilePath, { flags: 'a' });
  writeCount = 0;
  nextRotationTime = computeNextRotationTime(rotateIntervalSpec);
  const rotateMeta = { rotatedFile: rotatedBase, reason, nextRotationTime };
  stream.write(formatRecord('info', 'Log rotation complete', rotateMeta) + '\n');
  if (compressRotated) {
    compressFile(rotatedBase).then(() => enforceRetention()).catch(() => enforceRetention());
  } else {
    enforceRetention();
  }
  rotating = false;
}

function compressFile(filePath) {
  return new Promise((resolve, reject) => {
    const gzipPath = filePath + '.gz';
    const source = fs.createReadStream(filePath);
    const dest = fs.createWriteStream(gzipPath);
    const gzip = zlib.createGzip();
    let finished = false;
    function done(err) {
      if (finished) return;
      finished = true;
      if (err) { reject(err); return; }
      try { fs.unlinkSync(filePath); } catch (_) {}
      resolve();
    }
    source.pipe(gzip).pipe(dest);
    dest.on('finish', () => done());
    dest.on('error', (e) => done(e));
    source.on('error', (e) => done(e));
  });
}

function enforceRetention() {
  if (retentionMaxFiles <= 0) return;
  try {
    const dir = path.dirname(logFilePath);
    const base = path.basename(logFilePath);
    const files = fs.readdirSync(dir)
      .filter(f => f.startsWith(base + '.'))
      .map(f => ({ f, full: path.join(dir, f), stat: safeStat(path.join(dir, f)) }))
      .filter(entry => entry.stat)
      .sort((a, b) => b.stat.mtimeMs - a.stat.mtimeMs);
    if (files.length > retentionMaxFiles) {
      for (const d of files.slice(retentionMaxFiles)) {
        try { fs.unlinkSync(d.full); } catch (_) {}
      }
    }
  } catch (_) { }
}

function safeStat(p) {
  try { return fs.statSync(p); } catch { return null; }
}

export const logger = {
  level: configuredLevelName,
  error: (m, meta) => log('error', m, meta),
  warn: (m, meta) => log('warn', m, meta),
  info: (m, meta) => log('info', m, meta),
  debug: (m, meta) => log('debug', m, meta),
  trace: (m, meta) => log('trace', m, meta),
  rotate: (reason = 'manual') => rotate(reason),
  child: (extra) => ({
    error: (m, meta) => log('error', m, { ...extra, ...meta }),
    warn: (m, meta) => log('warn', m, { ...extra, ...meta }),
    info: (m, meta) => log('info', m, { ...extra, ...meta }),
    debug: (m, meta) => log('debug', m, { ...extra, ...meta }),
    trace: (m, meta) => log('trace', m, { ...extra, ...meta }),
  })
};

if (!process.env.MCP_LOG_DISABLE_GLOBAL_HANDLERS) {
  process.on('uncaughtException', (err) => {
    log('error', 'UncaughtException', { message: err.message, stack: err.stack });
  });
  process.on('unhandledRejection', (reason) => {
    log('error', 'UnhandledRejection', {
      reason: reason instanceof Error ? { message: reason.message, stack: reason.stack } : reason
    });
  });
}

log('info', 'Logger initialized', {
  file: logFilePath,
  level: configuredLevelName,
  usedTempDefault: !(process.env.MCP_LOG_FILE || process.env.MYSQL_MCP_LOG_FILE),
  rotation: {
    size: maxSize > 0 ? `${maxSize} bytes` : 'disabled',
    interval: rotateIntervalSpec || 'disabled',
    retention: retentionMaxFiles,
    compress: compressRotated,
  }
});
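
A minimal usage sketch follows, assuming the file above is saved as logger.js next to the calling module. The file name example.js and all environment-variable values are illustrative only; the variables themselves are the ones documented in the header comment, and the calls use only the exported logger API.

// example.js - usage sketch with illustrative values.
// Configuration is read once at import time, so the environment must be set
// before the logger module is loaded (normally via the shell or MCP client config;
// a dynamic import is used here only so the assignments below take effect).
process.env.MCP_LOG_LEVEL = 'debug';                      // log debug and above
process.env.MCP_LOG_MAX_SIZE = String(5 * 1024 * 1024);   // rotate at ~5 MB
process.env.MCP_LOG_ROTATE_INTERVAL = 'daily';            // also rotate at next local midnight
process.env.MCP_LOG_COMPRESS = 'true';                    // gzip rotated files

const { logger } = await import('./logger.js');

logger.info('Server starting', { pid: process.pid });
logger.debug('Connection pool options', { connectionLimit: 10 });

// Child loggers stamp every record with extra metadata.
const queryLog = logger.child({ component: 'query' });
queryLog.trace('Executing statement', { sql: 'SELECT 1' }); // dropped: level is debug, not trace

// Rotation can also be forced manually.
logger.rotate('manual');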
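Rotated files keep the pattern described in the header comment and gain a .gz suffix when MCP_LOG_COMPRESS is enabled. A sketch for reading one back with Node's zlib, assuming the default compact (non-pretty) JSON format; the file path is illustrative:

import fs from 'fs';
import zlib from 'zlib';

// Decompress a rotated log and print one summary line per record.
const rotated = '/tmp/mysql-mcp-server-logs/mysql-mcp-server.log.2025-01-01T00-00-00-000Z.gz';
const text = zlib.gunzipSync(fs.readFileSync(rotated)).toString('utf8');
for (const line of text.split('\n').filter(Boolean)) {
  const rec = JSON.parse(line); // { ts, level, msg, meta? }
  console.log(rec.ts, rec.level, rec.msg);
}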
