/**
* Custom Logger for LearnMCP
* Dedicated logging system with LearnMCP-specific formatting and file handling
*/
import winston from 'winston';
import path from 'path';
import fs from 'fs';
// Custom log levels for LearnMCP.
// NOTE(review): winston treats LOWER numbers as HIGHER priority, so a
// transport filtering at 'info' (2) will NOT emit 'extract' (4),
// 'process' (5) or 'summarize' (6) entries, and a transport at
// 'extract' will also receive error..debug — confirm this layering
// is intended for the per-concern log files below.
const LEARN_LOG_LEVELS = {
  error: 0,
  warn: 1,
  info: 2,
  debug: 3,
  extract: 4, // Content extraction operations
  process: 5, // Background processing
  summarize: 6 // Summarization operations
};
// Colors used by winston.format.colorize() for console output; one entry
// per custom level above.
const LEARN_LOG_COLORS = {
  error: 'red',
  warn: 'yellow',
  info: 'green',
  debug: 'blue',
  extract: 'cyan',
  process: 'magenta',
  summarize: 'white'
};
// Register the color map with winston (module-load side effect).
winston.addColors(LEARN_LOG_COLORS);
/**
 * Create a LearnMCP-specific winston logger with custom levels, branded
 * log formatting, and dedicated rotating log files under
 * `<FOREST_DATA_DIR>/logs/learn-mcp/`.
 *
 * @param {string} [module='LearnMCP'] - Module name stamped on every entry.
 * @param {object} [options={}] - Optional overrides.
 * @param {string} [options.level] - Overall logger level; defaults to the
 *   LOG_LEVEL env var, else 'info' in production / 'debug' otherwise.
 * @param {string} [options.consoleLevel] - Console transport level; defaults
 *   to 'info' in production / 'debug' otherwise.
 * @returns {object} A context-bound logger exposing the standard level
 *   methods (error/warn/info/debug), the custom LearnMCP levels
 *   (extract/process/summarize), operation convenience methods
 *   (extractionStart/Complete/Failed, processingStart/Complete/Failed,
 *   summarizationStart/Complete), and getLogDirectory()/getLogFiles().
 */
export function createLearnLogger(module = 'LearnMCP', options = {}) {
  const dataDir = process.env.FOREST_DATA_DIR || './.forest-data';
  const logDir = path.join(dataDir, 'logs', 'learn-mcp');

  // Ensure the log directory exists before any file transport opens a stream.
  if (!fs.existsSync(logDir)) {
    fs.mkdirSync(logDir, { recursive: true });
  }

  // Context keys rendered as bracketed tags, in display order, with their
  // renderers. Shared by the file and console formats below.
  // String(v) guards extractorType against non-string values, which would
  // previously throw on .toUpperCase().
  const TAG_RENDERERS = [
    ['module', (v) => `[${v}]`],
    ['component', (v) => `[${v}]`],
    ['projectId', (v) => `[Project:${v}]`],
    ['sourceId', (v) => `[Source:${v}]`],
    ['extractorType', (v) => `[${String(v).toUpperCase()}]`],
    ['taskId', (v) => `[Task:${v}]`]
  ];

  // Append the bracketed context tags for the requested keys (truthy values only).
  const renderTags = (meta, keys) => {
    let tags = '';
    for (const [key, render] of TAG_RENDERERS) {
      if (keys.includes(key) && meta[key]) {
        tags += ` ${render(meta[key])}`;
      }
    }
    return tags;
  };

  const FILE_TAG_KEYS = ['module', 'component', 'projectId', 'sourceId', 'extractorType', 'taskId'];

  // Tolerate non-Error throwables (thrown strings, null, plain objects) when
  // logging failures; the previous code crashed on `error.message` for null.
  const errorMessage = (error) => (error instanceof Error ? error.message : String(error));

  // File format: full timestamp, LEARN-branded level, context tags, optional
  // stack trace, and serialized extra metadata.
  const learnFormat = winston.format.combine(
    winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss.SSS' }),
    winston.format.errors({ stack: true }),
    winston.format.printf(({ timestamp, level, message, stack, ...meta }) => {
      let logEntry = `${timestamp} [LEARN-${level.toUpperCase()}]`;
      logEntry += renderTags(meta, FILE_TAG_KEYS);
      logEntry += `: ${message}`;

      // Append the stack trace for errors.
      if (stack) {
        logEntry += `\n${stack}`;
      }

      // Serialize any metadata beyond the known context keys.
      const known = [...FILE_TAG_KEYS, 'timestamp', 'level', 'message'];
      const extraKeys = Object.keys(meta).filter((key) => !known.includes(key));
      if (extraKeys.length > 0) {
        const metaData = {};
        for (const key of extraKeys) {
          const value = meta[key];
          // Objects are collapsed to a placeholder to keep lines compact and
          // to sidestep circular-reference serialization failures.
          metaData[key] = typeof value === 'object' && value !== null ? '[Object]' : value;
        }
        try {
          logEntry += ` | Meta: ${JSON.stringify(metaData)}`;
        } catch (error) {
          logEntry += ` | Meta: [Circular Reference]`;
        }
      }
      return logEntry;
    })
  );

  // Console format: short timestamp, colorized level, abbreviated context.
  const consoleFormat = winston.format.combine(
    winston.format.colorize(),
    winston.format.timestamp({ format: 'HH:mm:ss.SSS' }),
    winston.format.printf(({ timestamp, level, message, ...meta }) => {
      let logEntry = `${timestamp} ${level}`;
      logEntry += renderTags(meta, ['module', 'component', 'extractorType']);
      logEntry += `: ${message}`;
      return logEntry;
    })
  );

  // Transports: one console stream plus per-concern rotating log files.
  const transports = [
    // Console transport for development
    new winston.transports.Console({
      level: options.consoleLevel || (process.env.NODE_ENV === 'production' ? 'info' : 'debug'),
      format: consoleFormat
    }),
    // Main LearnMCP log file
    new winston.transports.File({
      filename: path.join(logDir, 'learn-mcp-main.log'),
      level: 'info',
      format: learnFormat,
      maxsize: 10 * 1024 * 1024, // 10MB
      maxFiles: 5,
      tailable: true
    }),
    // Error log file
    new winston.transports.File({
      filename: path.join(logDir, 'learn-mcp-errors.log'),
      level: 'error',
      format: learnFormat,
      maxsize: 5 * 1024 * 1024, // 5MB
      maxFiles: 3,
      tailable: true
    }),
    // Extraction operations log
    new winston.transports.File({
      filename: path.join(logDir, 'learn-mcp-extraction.log'),
      level: 'extract',
      format: learnFormat,
      maxsize: 10 * 1024 * 1024, // 10MB
      maxFiles: 3,
      tailable: true
    }),
    // Processing operations log
    new winston.transports.File({
      filename: path.join(logDir, 'learn-mcp-processing.log'),
      level: 'process',
      format: learnFormat,
      maxsize: 10 * 1024 * 1024, // 10MB
      maxFiles: 3,
      tailable: true
    }),
    // Debug log file (development only)
    ...(process.env.NODE_ENV !== 'production' ? [
      new winston.transports.File({
        filename: path.join(logDir, 'learn-mcp-debug.log'),
        level: 'debug',
        format: learnFormat,
        maxsize: 20 * 1024 * 1024, // 20MB
        maxFiles: 2,
        tailable: true
      })
    ] : [])
  ];

  // Create the underlying winston logger.
  const logger = winston.createLogger({
    levels: LEARN_LOG_LEVELS,
    level: options.level || (process.env.LOG_LEVEL || (process.env.NODE_ENV === 'production' ? 'info' : 'debug')),
    format: winston.format.combine(
      winston.format.timestamp(),
      winston.format.errors({ stack: true })
    ),
    transports,
    exitOnError: false
  });

  // Context wrapper: stamps the module name onto every call and exposes
  // the custom levels plus operation-lifecycle convenience methods.
  const contextLogger = {
    error: (message, meta = {}) => logger.error(message, { module, ...meta }),
    warn: (message, meta = {}) => logger.warn(message, { module, ...meta }),
    info: (message, meta = {}) => logger.info(message, { module, ...meta }),
    debug: (message, meta = {}) => logger.debug(message, { module, ...meta }),
    // Custom LearnMCP log levels
    extract: (message, meta = {}) => logger.log('extract', message, { module, ...meta }),
    process: (message, meta = {}) => logger.log('process', message, { module, ...meta }),
    summarize: (message, meta = {}) => logger.log('summarize', message, { module, ...meta }),
    // Convenience methods for common operations
    extractionStart: (url, extractorType, meta = {}) => {
      logger.log('extract', `Starting extraction: ${url}`, {
        module,
        extractorType,
        url,
        operation: 'start',
        ...meta
      });
    },
    extractionComplete: (url, extractorType, duration, meta = {}) => {
      logger.log('extract', `Extraction completed: ${url}`, {
        module,
        extractorType,
        url,
        duration: `${duration}ms`,
        operation: 'complete',
        ...meta
      });
    },
    extractionFailed: (url, extractorType, error, meta = {}) => {
      logger.error(`Extraction failed: ${url}`, {
        module,
        extractorType,
        url,
        error: errorMessage(error),
        operation: 'failed',
        ...meta
      });
    },
    processingStart: (taskId, taskType, meta = {}) => {
      logger.log('process', `Processing started: ${taskType}`, {
        module,
        taskId,
        taskType,
        operation: 'start',
        ...meta
      });
    },
    processingComplete: (taskId, taskType, duration, meta = {}) => {
      logger.log('process', `Processing completed: ${taskType}`, {
        module,
        taskId,
        taskType,
        duration: `${duration}ms`,
        operation: 'complete',
        ...meta
      });
    },
    // Added for symmetry with extractionFailed (processing previously had no
    // failure helper).
    processingFailed: (taskId, taskType, error, meta = {}) => {
      logger.error(`Processing failed: ${taskType}`, {
        module,
        taskId,
        taskType,
        error: errorMessage(error),
        operation: 'failed',
        ...meta
      });
    },
    summarizationStart: (sourceId, method, meta = {}) => {
      logger.log('summarize', `Summarization started: ${method}`, {
        module,
        sourceId,
        method,
        operation: 'start',
        ...meta
      });
    },
    summarizationComplete: (sourceId, method, compressionRatio, meta = {}) => {
      logger.log('summarize', `Summarization completed: ${method}`, {
        module,
        sourceId,
        method,
        compressionRatio,
        operation: 'complete',
        ...meta
      });
    },
    // Where this logger writes its files.
    getLogDirectory: () => logDir,
    // List .log files currently present (best-effort; empty on FS errors).
    getLogFiles: () => {
      try {
        return fs.readdirSync(logDir).filter((file) => file.endsWith('.log'));
      } catch (error) {
        return [];
      }
    }
  };
  return contextLogger;
}
/**
 * Collect size and modification-time statistics for every LearnMCP log file.
 *
 * @param {string|null} [dataDir=null] - Base data directory; falls back to
 *   the FOREST_DATA_DIR env var, then './.forest-data'.
 * @returns {{logDirectory: string, totalFiles: number, files: object, error?: string}}
 *   Per-file stats keyed by filename, or an error payload (with empty stats)
 *   when the log directory cannot be read.
 */
export function getLogStats(dataDir = null) {
  const baseDir = dataDir || process.env.FOREST_DATA_DIR || './.forest-data';
  const logDir = path.join(baseDir, 'logs', 'learn-mcp');
  try {
    // Only *.log files count; rotated/auxiliary files are ignored.
    const logFiles = fs.readdirSync(logDir).filter((name) => name.endsWith('.log'));
    const files = {};
    for (const name of logFiles) {
      const { size, mtime } = fs.statSync(path.join(logDir, name));
      files[name] = {
        size,
        modified: mtime,
        sizeHuman: `${Math.round(size / 1024)}KB`
      };
    }
    return {
      logDirectory: logDir,
      totalFiles: logFiles.length,
      files
    };
  } catch (error) {
    // Missing directory or unreadable entries yield an empty, error-tagged result.
    return {
      logDirectory: logDir,
      error: error.message,
      totalFiles: 0,
      files: {}
    };
  }
}