/**
* Memory Alert and Notification System for GEPA
*
* Advanced alerting system with configurable thresholds, escalation policies,
* multi-channel notifications, and automated response capabilities.
*/
import { EventEmitter } from 'events';
import * as fs from 'fs/promises';
import * as path from 'path';
/**
* Alert severity levels
*/
export type AlertSeverity = 'info' | 'low' | 'medium' | 'high' | 'critical';
/**
* Alert types
*/
export type AlertType = 'threshold' | 'leak' | 'gc' | 'trend' | 'anomaly' | 'performance' | 'system';
/**
* Notification channels
*/
export type NotificationChannel = 'console' | 'file' | 'webhook' | 'email' | 'dashboard';
/**
* Alert configuration
*/
export interface AlertConfig {
id: string;
name: string;
description: string;
type: AlertType;
severity: AlertSeverity;
component: string;
enabled: boolean;
thresholds: {
warning: number;
critical: number;
unit?: string;
};
conditions: {
operator: 'gt' | 'lt' | 'eq' | 'gte' | 'lte' | 'change';
value: number;
duration?: number; // ms
frequency?: number; // occurrences per time period
};
actions: {
autoFix?: {
enabled: boolean;
maxAttempts: number;
cooldown: number;
};
notifications: NotificationChannel[];
escalation?: {
enabled: boolean;
delays: number[]; // ms between escalation levels
channels: NotificationChannel[][];
};
};
}
/**
* Alert rule for dynamic alert creation
*/
export interface AlertRule {
id: string;
name: string;
condition: string; // JavaScript expression
severity: AlertSeverity;
message: string;
enabled: boolean;
throttle?: number; // minimum time between alerts (ms)
dependencies?: string[]; // other rule IDs
}
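/*
 * Example (illustrative): a rule whose `condition` string is evaluated against
 * the { component, timestamp, metrics, metadata, Math, Date } context built in
 * evaluateAlertRule(). The rule id and the metric names are assumptions for
 * this sketch, not values defined by this module.
 *
 * const rssSpikeRule: AlertRule = {
 *   id: 'rss-spike',
 *   name: 'RSS Spike',
 *   condition: 'metrics.rssMB > 512 && metrics.growthRate > 1',
 *   severity: 'high',
 *   message: 'RSS high on {{component}}: {{metrics.rssMB}} MB',
 *   enabled: true,
 *   throttle: 60000
 * };
 */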
/**
* Alert instance
*/
export interface AlertInstance {
id: string;
configId: string;
timestamp: number;
severity: AlertSeverity;
type: AlertType;
component: string;
message: string;
data: {
currentValue: number;
threshold?: number;
previousValue?: number;
change?: number;
unit?: string;
metadata?: Record<string, any>;
};
status: 'active' | 'acknowledged' | 'resolved' | 'suppressed';
acknowledgedBy?: string;
acknowledgedAt?: number;
resolvedAt?: number;
autoFixAttempts: number;
notifications: {
channel: NotificationChannel;
sentAt: number;
success: boolean;
error?: string;
}[];
escalationLevel: number;
nextEscalationAt?: number;
}
/**
* Notification template
*/
export interface NotificationTemplate {
channel: NotificationChannel;
subject?: string;
body: string;
priority: 'low' | 'normal' | 'high' | 'urgent';
format: 'text' | 'html' | 'json';
}
/**
* Escalation policy
*/
export interface EscalationPolicy {
id: string;
name: string;
levels: {
level: number;
delay: number; // ms
channels: NotificationChannel[];
targets?: string[]; // email addresses, webhook URLs, etc.
conditions?: {
severity?: AlertSeverity[];
components?: string[];
types?: AlertType[];
};
}[];
maxLevel: number;
repeatInterval?: number; // repeat final level every X ms
}
/**
* Alert statistics
*/
export interface AlertStatistics {
total: number;
bySeverity: Record<AlertSeverity, number>;
byType: Record<AlertType, number>;
byComponent: Record<string, number>;
active: number;
acknowledged: number;
resolved: number;
suppressed: number;
autoFixSuccess: number;
autoFixFailure: number;
averageResolutionTime: number;
topComponents: Array<{ component: string; count: number }>;
recentTrends: {
hourly: number[];
daily: number[];
};
}
/**
* Webhook configuration
*/
export interface WebhookConfig {
url: string;
method: 'POST' | 'PUT' | 'PATCH';
headers?: Record<string, string>;
timeout: number;
retries: number;
template: string; // JSON template
}
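/*
 * Example (illustrative): registering the webhook used by
 * sendWebhookNotification(), which looks up the name 'default'. Quoted
 * placeholders such as "{{alert.id}}" are substituted by
 * interpolateWebhookTemplate(); the URL and token here are assumptions.
 *
 * alertSystem.addWebhookConfig('default', {
 *   url: 'https://hooks.example.com/memory',
 *   method: 'POST',
 *   headers: { Authorization: 'Bearer <token>' },
 *   timeout: 5000,
 *   retries: 2,
 *   template: JSON.stringify({ id: '{{alert.id}}', msg: '{{alert.message}}' })
 * });
 */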
/**
* Main Memory Alert System
*/
export class MemoryAlertSystem extends EventEmitter {
private isEnabled = true;
private alertConfigs = new Map<string, AlertConfig>();
private alertRules = new Map<string, AlertRule>();
private activeAlerts = new Map<string, AlertInstance>();
private alertHistory: AlertInstance[] = [];
private escalationPolicies = new Map<string, EscalationPolicy>();
private webhookConfigs = new Map<string, WebhookConfig>();
private processingInterval?: ReturnType<typeof setInterval>;
private maintenanceInterval?: ReturnType<typeof setInterval>;
private statsInterval?: ReturnType<typeof setInterval>;
// Throttling and rate limiting
private alertThrottles = new Map<string, number>();
private notificationQueues = new Map<NotificationChannel, Array<{ alert: AlertInstance; channel: NotificationChannel; queuedAt: number }>>();
private rateLimits = new Map<NotificationChannel, { count: number; window: number }>();
// Statistics tracking
private statistics: AlertStatistics = {
total: 0,
bySeverity: { info: 0, low: 0, medium: 0, high: 0, critical: 0 },
byType: { threshold: 0, leak: 0, gc: 0, trend: 0, anomaly: 0, performance: 0, system: 0 },
byComponent: {},
active: 0,
acknowledged: 0,
resolved: 0,
suppressed: 0,
autoFixSuccess: 0,
autoFixFailure: 0,
averageResolutionTime: 0,
topComponents: [],
recentTrends: { hourly: [], daily: [] }
};
constructor() {
super();
this.initializeDefaultConfigs();
this.startProcessing();
this.setupMaintenance();
}
/**
* Initialize default alert configurations
*/
private initializeDefaultConfigs(): void {
// High memory usage alert
this.addAlertConfig({
id: 'high-memory-usage',
name: 'High Memory Usage',
description: 'Triggers when heap usage exceeds threshold',
type: 'threshold',
severity: 'high',
component: 'system',
enabled: true,
thresholds: {
warning: 80,
critical: 95,
unit: 'percent'
},
conditions: {
operator: 'gte',
value: 80,
duration: 30000 // 30 seconds
},
actions: {
autoFix: {
enabled: true,
maxAttempts: 3,
cooldown: 60000 // 1 minute
},
notifications: ['console', 'file'],
escalation: {
enabled: true,
delays: [300000, 600000], // 5 min, 10 min
channels: [['console'], ['console', 'webhook']]
}
}
});
// Memory leak detection alert
this.addAlertConfig({
id: 'memory-leak-detected',
name: 'Memory Leak Detected',
description: 'Triggers when memory leak is detected',
type: 'leak',
severity: 'critical',
component: 'leak-detector',
enabled: true,
thresholds: {
warning: 1,
critical: 1,
unit: 'detection'
},
conditions: {
operator: 'gte',
value: 1
},
actions: {
autoFix: {
enabled: true,
maxAttempts: 2,
cooldown: 120000 // 2 minutes
},
notifications: ['console', 'file', 'webhook'],
escalation: {
enabled: true,
delays: [180000, 360000], // 3 min, 6 min
channels: [['console', 'file'], ['console', 'file', 'webhook']]
}
}
});
// High GC frequency alert
this.addAlertConfig({
id: 'high-gc-frequency',
name: 'High GC Frequency',
description: 'Triggers when GC frequency exceeds normal levels',
type: 'gc',
severity: 'medium',
component: 'gc-optimizer',
enabled: true,
thresholds: {
warning: 10,
critical: 20,
unit: 'per-minute'
},
conditions: {
operator: 'gte',
value: 10,
duration: 60000 // 1 minute
},
actions: {
autoFix: {
enabled: false,
maxAttempts: 0,
cooldown: 0
},
notifications: ['console', 'file']
}
});
// Component growth trend alert
this.addAlertConfig({
id: 'component-growth-trend',
name: 'Component Growth Trend',
description: 'Triggers when component shows sustained growth',
type: 'trend',
severity: 'medium',
component: 'any',
enabled: true,
thresholds: {
warning: 5,
critical: 10,
unit: 'MB/min'
},
conditions: {
operator: 'gte',
value: 5,
duration: 300000 // 5 minutes
},
actions: {
notifications: ['console', 'file']
}
});
// Default escalation policy
this.addEscalationPolicy({
id: 'default-escalation',
name: 'Default Escalation Policy',
levels: [
{
level: 1,
delay: 300000, // 5 minutes
channels: ['console']
},
{
level: 2,
delay: 600000, // 10 minutes
channels: ['console', 'file']
},
{
level: 3,
delay: 1200000, // 20 minutes
channels: ['console', 'file', 'webhook']
}
],
maxLevel: 3,
repeatInterval: 1800000 // 30 minutes
});
}
/**
* Add alert configuration
*/
addAlertConfig(config: AlertConfig): void {
this.alertConfigs.set(config.id, config);
this.emit('configAdded', config);
}
/**
* Add alert rule for dynamic alerting
*/
addAlertRule(rule: AlertRule): void {
this.alertRules.set(rule.id, rule);
this.emit('ruleAdded', rule);
}
/**
* Add escalation policy
*/
addEscalationPolicy(policy: EscalationPolicy): void {
this.escalationPolicies.set(policy.id, policy);
this.emit('policyAdded', policy);
}
/**
* Process memory data and check for alert conditions
*/
processMemoryData(data: {
timestamp: number;
component: string;
metrics: Record<string, number>;
metadata?: Record<string, any>;
}): void {
if (!this.isEnabled) return;
// Check configured alerts
for (const config of Array.from(this.alertConfigs.values())) {
if (!config.enabled) continue;
if (config.component !== 'any' && config.component !== data.component) continue;
this.checkAlertCondition(config, data);
}
// Check dynamic rules
for (const rule of Array.from(this.alertRules.values())) {
if (!rule.enabled) continue;
this.evaluateAlertRule(rule, data);
}
}
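/*
 * Example (illustrative): feeding one sample into the system, assuming
 * `alertSystem` is a MemoryAlertSystem instance. The built-in configs read
 * heapUsagePercent, leakDetected, gcFrequency and growthRate; other alert
 * types fall back to metrics.value.
 *
 * alertSystem.processMemoryData({
 *   timestamp: Date.now(),
 *   component: 'system',
 *   metrics: { heapUsagePercent: 92 }
 * });
 */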
/**
* Check if alert condition is met
*/
private checkAlertCondition(
config: AlertConfig,
data: { timestamp: number; component: string; metrics: Record<string, number>; metadata?: Record<string, any> }
): void {
const { conditions, thresholds } = config;
let value: number;
let threshold: number;
// Determine the metric to check based on alert type
switch (config.type) {
case 'threshold':
value = data.metrics.heapUsagePercent || data.metrics.memoryUsage || 0;
threshold = conditions.value;
break;
case 'leak':
value = data.metrics.leakDetected || 0;
threshold = conditions.value;
break;
case 'gc':
value = data.metrics.gcFrequency || 0;
threshold = conditions.value;
break;
case 'trend':
value = data.metrics.growthRate || 0;
threshold = conditions.value;
break;
default:
value = data.metrics.value || 0;
threshold = conditions.value;
}
// Check condition
let conditionMet = false;
switch (conditions.operator) {
case 'gt': conditionMet = value > threshold; break;
case 'gte': conditionMet = value >= threshold; break;
case 'lt': conditionMet = value < threshold; break;
case 'lte': conditionMet = value <= threshold; break;
case 'eq': conditionMet = Math.abs(value - threshold) < 0.001; break;
case 'change': {
const previous = data.metadata?.previousValue || 0;
conditionMet = Math.abs(value - previous) >= threshold;
break;
}
}
if (conditionMet) {
// Check duration if specified
if (conditions.duration) {
const alertKey = `${config.id}-${data.component}`;
const firstTriggered = this.alertThrottles.get(alertKey);
const now = data.timestamp;
if (!firstTriggered) {
this.alertThrottles.set(alertKey, now);
return; // Wait for duration
}
if (now - firstTriggered < conditions.duration) {
return; // Duration not met yet
}
}
// Determine severity from the measured value: escalate to 'critical' above
// the critical threshold; within the warning band, cap a 'critical' config at 'high'
let severity: AlertSeverity = config.severity;
if (value >= thresholds.critical) {
severity = 'critical';
} else if (value >= thresholds.warning && severity === 'critical') {
severity = 'high';
}
this.createAlert(config, data, value, threshold, severity);
} else {
// Clear throttle if condition no longer met
const alertKey = `${config.id}-${data.component}`;
this.alertThrottles.delete(alertKey);
}
}
/**
* Evaluate dynamic alert rule
*/
private evaluateAlertRule(
rule: AlertRule,
data: { timestamp: number; component: string; metrics: Record<string, number>; metadata?: Record<string, any> }
): void {
try {
// Create evaluation context
const context = {
component: data.component,
timestamp: data.timestamp,
metrics: data.metrics,
metadata: data.metadata || {},
Math,
Date
};
// Evaluate condition
const conditionMet = this.evaluateExpression(rule.condition, context);
if (conditionMet) {
// Check throttling
if (rule.throttle) {
const lastAlert = this.alertThrottles.get(rule.id);
if (lastAlert && (data.timestamp - lastAlert) < rule.throttle) {
return; // Still in throttle period
}
}
// Check dependencies
if (rule.dependencies && rule.dependencies.length > 0) {
const dependenciesMet = rule.dependencies.every(depId => {
const activeAlert = Array.from(this.activeAlerts.values())
.find(alert => alert.configId === depId && alert.status === 'active');
return activeAlert !== undefined;
});
if (!dependenciesMet) return;
}
this.createRuleBasedAlert(rule, data);
this.alertThrottles.set(rule.id, data.timestamp);
}
} catch (error) {
this.emit('ruleEvaluationError', { rule, error, data });
}
}
/**
* Create alert from configuration
*/
private createAlert(
config: AlertConfig,
data: { timestamp: number; component: string; metrics: Record<string, number>; metadata?: Record<string, any> },
currentValue: number,
threshold: number,
severity: AlertSeverity
): void {
const alertId = `${config.id}-${data.component}-${Date.now()}`;
const alert: AlertInstance = {
id: alertId,
configId: config.id,
timestamp: data.timestamp,
severity,
type: config.type,
component: data.component,
message: this.generateAlertMessage(config, currentValue, threshold),
data: {
currentValue,
threshold,
...(data.metadata?.previousValue !== undefined && { previousValue: data.metadata.previousValue }),
...(data.metadata?.change !== undefined && { change: data.metadata.change }),
...(config.thresholds.unit && { unit: config.thresholds.unit }),
...(data.metadata && { metadata: data.metadata })
},
status: 'active',
autoFixAttempts: 0,
notifications: [],
escalationLevel: 0
};
this.activeAlerts.set(alertId, alert);
this.alertHistory.push(alert);
this.updateStatistics('created', alert);
// Schedule notifications
this.scheduleNotifications(alert, config);
// Schedule escalation if configured
if (config.actions.escalation?.enabled) {
this.scheduleEscalation(alert, config.actions.escalation);
}
// Attempt auto-fix if configured
if (config.actions.autoFix?.enabled) {
this.attemptAutoFix(alert, config.actions.autoFix);
}
this.emit('alertCreated', alert);
}
/**
* Create alert from rule
*/
private createRuleBasedAlert(
rule: AlertRule,
data: { timestamp: number; component: string; metrics: Record<string, number>; metadata?: Record<string, any> }
): void {
const alertId = `${rule.id}-${data.component}-${Date.now()}`;
const alert: AlertInstance = {
id: alertId,
configId: rule.id,
timestamp: data.timestamp,
severity: rule.severity,
type: 'anomaly',
component: data.component,
message: this.interpolateMessage(rule.message, data),
data: {
currentValue: data.metrics.value || 0,
...(data.metadata && { metadata: data.metadata })
},
status: 'active',
autoFixAttempts: 0,
notifications: [],
escalationLevel: 0
};
this.activeAlerts.set(alertId, alert);
this.alertHistory.push(alert);
this.updateStatistics('created', alert);
// Use default notifications for rule-based alerts
this.scheduleNotifications(alert, { actions: { notifications: ['console', 'file'] } } as AlertConfig);
this.emit('alertCreated', alert);
}
/**
* Schedule notifications for alert
*/
private scheduleNotifications(alert: AlertInstance, config: AlertConfig): void {
for (const channel of config.actions.notifications) {
this.queueNotification(alert, channel);
}
}
/**
* Queue notification for sending
*/
private queueNotification(alert: AlertInstance, channel: NotificationChannel): void {
if (!this.notificationQueues.has(channel)) {
this.notificationQueues.set(channel, []);
}
this.notificationQueues.get(channel)!.push({
alert,
channel,
queuedAt: Date.now()
});
}
/**
* Schedule alert escalation
*/
private scheduleEscalation(alert: AlertInstance, escalation: NonNullable<AlertConfig['actions']['escalation']>): void {
if (escalation.delays.length > 0) {
alert.nextEscalationAt = Date.now() + escalation.delays[0]!;
}
}
/**
* Attempt automatic fix for alert
*/
private async attemptAutoFix(alert: AlertInstance, autoFix: NonNullable<AlertConfig['actions']['autoFix']>): Promise<void> {
if (alert.autoFixAttempts >= autoFix.maxAttempts) return;
try {
alert.autoFixAttempts++;
// Emit auto-fix attempt event for external handlers
const fixResult = await new Promise<boolean>((resolve) => {
this.emit('autoFixAttempt', {
alert,
attempt: alert.autoFixAttempts,
resolve
});
// Default timeout for auto-fix attempts
setTimeout(() => resolve(false), 30000);
});
if (fixResult) {
this.resolveAlert(alert.id);
this.updateStatistics('autoFixSuccess', alert);
this.emit('autoFixSuccess', alert);
} else {
this.updateStatistics('autoFixFailure', alert);
// Schedule retry if attempts remaining
if (alert.autoFixAttempts < autoFix.maxAttempts) {
setTimeout(() => {
this.attemptAutoFix(alert, autoFix);
}, autoFix.cooldown);
}
}
} catch (error) {
this.updateStatistics('autoFixFailure', alert);
this.emit('autoFixError', { alert, error });
}
}
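/*
 * Example (illustrative): an external handler servicing auto-fix attempts.
 * The listener receives the alert plus a `resolve` callback and must answer
 * within the 30s timeout; `tryReclaimMemory` is a hypothetical helper.
 *
 * alertSystem.on('autoFixAttempt', async ({ alert, attempt, resolve }) => {
 *   const freed = await tryReclaimMemory(alert.component);
 *   resolve(freed); // true resolves the alert, false schedules a retry
 * });
 */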
/**
* Process notification queues
*/
private async processNotifications(): Promise<void> {
for (const [channel, queue] of Array.from(this.notificationQueues.entries())) {
if (queue.length === 0) continue;
// Check rate limits
if (this.isRateLimited(channel)) continue;
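// Deliver at most one notification per channel per processing cycle to pace output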
const notification = queue.shift()!;
await this.sendNotification(notification);
}
}
/**
* Send notification
*/
private async sendNotification(notification: { alert: AlertInstance; channel: NotificationChannel; queuedAt: number }): Promise<void> {
const { alert, channel } = notification;
const startTime = Date.now();
try {
switch (channel) {
case 'console':
await this.sendConsoleNotification(alert);
break;
case 'file':
await this.sendFileNotification(alert);
break;
case 'webhook':
await this.sendWebhookNotification(alert);
break;
case 'dashboard':
await this.sendDashboardNotification(alert);
break;
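// 'email' has no transport implemented yet, so it falls through to the default error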
default:
throw new Error(`Unsupported notification channel: ${channel}`);
}
alert.notifications.push({
channel,
sentAt: startTime,
success: true
});
this.updateRateLimit(channel);
this.emit('notificationSent', { alert, channel, duration: Date.now() - startTime });
} catch (error) {
alert.notifications.push({
channel,
sentAt: startTime,
success: false,
error: error instanceof Error ? error.message : String(error)
});
this.emit('notificationError', { alert, channel, error });
}
}
/**
* Send console notification
*/
private async sendConsoleNotification(alert: AlertInstance): Promise<void> {
const severityEmoji = {
info: 'ℹ️',
low: '🔵',
medium: '🟡',
high: '🟠',
critical: '🔴'
};
const emoji = severityEmoji[alert.severity];
// eslint-disable-next-line no-console
console.log(`\n${emoji} MEMORY ALERT [${alert.severity.toUpperCase()}]`);
// eslint-disable-next-line no-console
console.log(` Component: ${alert.component}`);
// eslint-disable-next-line no-console
console.log(` Message: ${alert.message}`);
// eslint-disable-next-line no-console
console.log(` Time: ${new Date(alert.timestamp).toISOString()}`);
// eslint-disable-next-line no-console
console.log(` Current Value: ${alert.data.currentValue}${alert.data.unit ? ' ' + alert.data.unit : ''}`);
if (alert.data.threshold !== undefined) {
// eslint-disable-next-line no-console
console.log(` Threshold: ${alert.data.threshold}${alert.data.unit ? ' ' + alert.data.unit : ''}`);
}
// eslint-disable-next-line no-console
console.log(` Alert ID: ${alert.id}\n`);
}
/**
* Send file notification
*/
private async sendFileNotification(alert: AlertInstance): Promise<void> {
const logDir = path.join(process.cwd(), 'logs', 'alerts');
await fs.mkdir(logDir, { recursive: true });
const logFile = path.join(logDir, `memory-alerts-${new Date().toISOString().split('T')[0]}.log`);
const logEntry = {
timestamp: new Date(alert.timestamp).toISOString(),
alertId: alert.id,
severity: alert.severity,
type: alert.type,
component: alert.component,
message: alert.message,
data: alert.data
};
await fs.appendFile(logFile, JSON.stringify(logEntry) + '\n');
}
/**
* Send webhook notification
*/
private async sendWebhookNotification(alert: AlertInstance): Promise<void> {
const webhookConfig = this.webhookConfigs.get('default') || {
url: process.env.MEMORY_ALERT_WEBHOOK_URL || 'http://localhost:3000/alerts',
method: 'POST' as const,
timeout: 5000,
retries: 2,
template: JSON.stringify({
alertId: '{{alert.id}}',
severity: '{{alert.severity}}',
component: '{{alert.component}}',
message: '{{alert.message}}',
timestamp: '{{alert.timestamp}}',
data: '{{alert.data}}'
})
};
const payload = this.interpolateWebhookTemplate(webhookConfig.template, alert);
// Honor the configured retry budget: one initial attempt plus `retries` retries
let lastError: Error | undefined;
for (let attempt = 0; attempt <= webhookConfig.retries; attempt++) {
try {
const response = await fetch(webhookConfig.url, {
method: webhookConfig.method,
headers: {
'Content-Type': 'application/json',
...webhookConfig.headers
},
body: payload,
signal: AbortSignal.timeout(webhookConfig.timeout)
});
if (response.ok) return;
lastError = new Error(`Webhook failed: ${response.status} ${response.statusText}`);
} catch (error) {
lastError = error instanceof Error ? error : new Error(String(error));
}
}
throw lastError ?? new Error('Webhook failed');
}
/**
* Send dashboard notification
*/
private async sendDashboardNotification(alert: AlertInstance): Promise<void> {
// Emit event for dashboard to pick up
this.emit('dashboardAlert', alert);
}
/**
* Process escalations
*/
private processEscalations(): void {
const now = Date.now();
for (const alert of Array.from(this.activeAlerts.values())) {
if (alert.status !== 'active' || !alert.nextEscalationAt) continue;
if (now < alert.nextEscalationAt) continue;
this.escalateAlert(alert);
}
}
/**
* Escalate alert to next level
*/
private escalateAlert(alert: AlertInstance): void {
const config = this.alertConfigs.get(alert.configId);
if (!config?.actions.escalation) return;
const escalation = config.actions.escalation;
alert.escalationLevel++;
if (escalation.delays && alert.escalationLevel <= escalation.delays.length) {
// Send notifications for this escalation level
const channels = escalation.channels?.[alert.escalationLevel - 1] || escalation.channels?.[0] || [];
for (const channel of channels) {
this.queueNotification(alert, channel);
}
// Schedule next escalation
if (alert.escalationLevel < escalation.delays.length) {
const delay = escalation.delays[alert.escalationLevel];
if (delay !== undefined) {
alert.nextEscalationAt = Date.now() + delay;
}
} else {
// Use escalation policy repeat interval if available
const policy = this.escalationPolicies.get('default-escalation');
if (policy?.repeatInterval) {
alert.nextEscalationAt = Date.now() + policy.repeatInterval;
}
}
this.emit('alertEscalated', { alert, level: alert.escalationLevel });
}
}
/**
* Acknowledge alert
*/
acknowledgeAlert(alertId: string, acknowledgedBy?: string): boolean {
const alert = this.activeAlerts.get(alertId);
if (!alert || alert.status !== 'active') return false;
alert.status = 'acknowledged';
if (acknowledgedBy !== undefined) {
alert.acknowledgedBy = acknowledgedBy;
}
alert.acknowledgedAt = Date.now();
delete alert.nextEscalationAt; // Stop escalation
this.updateStatistics('acknowledged', alert);
this.emit('alertAcknowledged', alert);
return true;
}
/**
* Resolve alert
*/
resolveAlert(alertId: string): boolean {
const alert = this.activeAlerts.get(alertId);
if (!alert) return false;
alert.status = 'resolved';
alert.resolvedAt = Date.now();
delete alert.nextEscalationAt; // Stop escalation
this.activeAlerts.delete(alertId);
this.updateStatistics('resolved', alert);
this.emit('alertResolved', alert);
return true;
}
/**
* Suppress alert
*/
suppressAlert(alertId: string, duration?: number): boolean {
const alert = this.activeAlerts.get(alertId);
if (!alert) return false;
alert.status = 'suppressed';
delete alert.nextEscalationAt; // Stop escalation
if (duration) {
setTimeout(() => {
if (alert.status === 'suppressed') {
alert.status = 'active';
this.emit('alertUnsuppressed', alert);
}
}, duration);
}
this.updateStatistics('suppressed', alert);
this.emit('alertSuppressed', alert);
return true;
}
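/*
 * Example (illustrative) lifecycle, assuming `alertSystem` is a
 * MemoryAlertSystem instance and `id` is an active alert id:
 *
 * alertSystem.acknowledgeAlert(id, 'oncall@example.com'); // stops escalation
 * alertSystem.suppressAlert(id, 15 * 60 * 1000);          // mute for 15 minutes
 * alertSystem.resolveAlert(id);                           // close out the alert
 */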
/**
* Get alert statistics
*/
getStatistics(): AlertStatistics {
// Update real-time stats
this.statistics.active = Array.from(this.activeAlerts.values()).filter(a => a.status === 'active').length;
// Calculate average resolution time
const resolvedAlerts = this.alertHistory.filter(a => a.status === 'resolved' && a.resolvedAt);
if (resolvedAlerts.length > 0) {
const totalResolutionTime = resolvedAlerts.reduce((sum, alert) => {
return sum + (alert.resolvedAt! - alert.timestamp);
}, 0);
this.statistics.averageResolutionTime = totalResolutionTime / resolvedAlerts.length;
}
// Update top components
const componentCounts = Object.entries(this.statistics.byComponent)
.sort(([,a], [,b]) => b - a)
.slice(0, 5)
.map(([component, count]) => ({ component, count }));
this.statistics.topComponents = componentCounts;
return { ...this.statistics };
}
/**
* Get active alerts
*/
getActiveAlerts(): AlertInstance[] {
return Array.from(this.activeAlerts.values()).filter(a => a.status === 'active');
}
/**
* Get alert history
*/
getAlertHistory(limit?: number): AlertInstance[] {
const history = [...this.alertHistory].reverse(); // Most recent first
return limit ? history.slice(0, limit) : history;
}
// Utility methods
private startProcessing(): void {
this.processingInterval = setInterval(async () => {
await this.processNotifications();
this.processEscalations();
}, 5000); // Process every 5 seconds
}
private setupMaintenance(): void {
this.maintenanceInterval = setInterval(() => {
this.cleanupOldAlerts();
this.updateTrendStatistics();
}, 3600000); // Every hour
this.statsInterval = setInterval(() => {
this.emit('statisticsUpdate', this.getStatistics());
}, 60000); // Every minute
}
private cleanupOldAlerts(): void {
const cutoff = Date.now() - (7 * 24 * 60 * 60 * 1000); // 7 days
this.alertHistory = this.alertHistory.filter(alert => alert.timestamp > cutoff);
}
private updateTrendStatistics(): void {
const now = Date.now();
const hourAgo = now - (60 * 60 * 1000);
const dayAgo = now - (24 * 60 * 60 * 1000);
const hourlyCount = this.alertHistory.filter(a => a.timestamp > hourAgo).length;
const dailyCount = this.alertHistory.filter(a => a.timestamp > dayAgo).length;
this.statistics.recentTrends.hourly.push(hourlyCount);
this.statistics.recentTrends.daily.push(dailyCount);
// Keep only last 24 hours and 7 days
if (this.statistics.recentTrends.hourly.length > 24) {
this.statistics.recentTrends.hourly = this.statistics.recentTrends.hourly.slice(-24);
}
if (this.statistics.recentTrends.daily.length > 7) {
this.statistics.recentTrends.daily = this.statistics.recentTrends.daily.slice(-7);
}
}
private updateStatistics(action: string, alert: AlertInstance): void {
switch (action) {
case 'created':
this.statistics.total++;
this.statistics.bySeverity[alert.severity]++;
this.statistics.byType[alert.type]++;
this.statistics.byComponent[alert.component] = (this.statistics.byComponent[alert.component] || 0) + 1;
break;
case 'acknowledged':
this.statistics.acknowledged++;
break;
case 'resolved':
this.statistics.resolved++;
break;
case 'suppressed':
this.statistics.suppressed++;
break;
case 'autoFixSuccess':
this.statistics.autoFixSuccess++;
break;
case 'autoFixFailure':
this.statistics.autoFixFailure++;
break;
}
}
private generateAlertMessage(config: AlertConfig, currentValue: number, threshold: number): string {
const unit = config.thresholds.unit || '';
return `${config.name}: ${currentValue}${unit} breached threshold of ${threshold}${unit}`;
}
private interpolateMessage(template: string, data: any): string {
return template.replace(/\{\{(\w+(?:\.\w+)*)\}\}/g, (match, path) => {
const value = this.getNestedValue(data, path);
return value !== undefined ? String(value) : match;
});
}
private interpolateWebhookTemplate(template: string, alert: AlertInstance): string {
// Match each placeholder together with its surrounding quotes: JSON.stringify
// supplies its own quoting, so this keeps the rendered payload valid JSON
return template.replace(/"\{\{(alert\.\w+(?:\.\w+)*)\}\}"/g, (match, path) => {
const value = this.getNestedValue({ alert }, path);
return value !== undefined ? JSON.stringify(value) : match;
});
}
private getNestedValue(obj: any, path: string): any {
return path.split('.').reduce((current, prop) => current?.[prop], obj);
}
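// NOTE: rule conditions run through the Function constructor and execute with
// full process privileges; only register rule expressions from trusted sources.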
private evaluateExpression(expression: string, context: any): boolean {
try {
const func = new Function(...Object.keys(context), `return ${expression}`);
return func(...Object.values(context));
} catch {
return false;
}
}
private isRateLimited(channel: NotificationChannel): boolean {
const limit = this.rateLimits.get(channel);
if (!limit) return false;
const now = Date.now();
if (now - limit.window > 60000) { // Reset every minute
this.rateLimits.set(channel, { count: 0, window: now });
return false;
}
return limit.count >= 10; // Max 10 notifications per minute per channel
}
private updateRateLimit(channel: NotificationChannel): void {
const limit = this.rateLimits.get(channel) || { count: 0, window: Date.now() };
limit.count++;
this.rateLimits.set(channel, limit);
}
/**
* Enable/disable alert system
*/
setEnabled(enabled: boolean): void {
this.isEnabled = enabled;
this.emit('enabledChanged', enabled);
}
/**
* Add webhook configuration
*/
addWebhookConfig(name: string, config: WebhookConfig): void {
this.webhookConfigs.set(name, config);
}
/**
* Shutdown alert system
*/
shutdown(): void {
this.isEnabled = false;
if (this.processingInterval) clearInterval(this.processingInterval);
if (this.maintenanceInterval) clearInterval(this.maintenanceInterval);
if (this.statsInterval) clearInterval(this.statsInterval);
this.removeAllListeners();
}
}
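/*
 * Minimal usage sketch (illustrative): construct the system, subscribe to its
 * events, and shut down cleanly. `forwardToPager` and `recordStats` are
 * hypothetical consumers, not part of this module.
 *
 * const alertSystem = new MemoryAlertSystem();
 * alertSystem.on('alertCreated', (alert) => forwardToPager(alert));
 * alertSystem.on('statisticsUpdate', (stats) => recordStats(stats));
 * process.on('SIGTERM', () => alertSystem.shutdown());
 */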