memory_temporal_analysis

Analyze temporal patterns and trends in memory data to identify usage patterns, extract metrics, generate predictions, and derive insights for system optimization.

Instructions

Analyze temporal patterns and trends in memory data

Input Schema

  • query (object, optional) — Temporal query: timeRange ({ start, end } as ISO date-time strings), granularity (hour | day | week | month | year; default day), aggregation (count | success_rate | activity_level | diversity; default count), and filters ({ types, projects, outcomes, tags } as string arrays).
  • analysisType (string, optional) — Which analysis to run: patterns | metrics | predictions | insights. Default: patterns.

Neither parameter is required; defaults are applied when a field is omitted.
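
As a minimal illustration (the date range and filter values below are placeholders, and the surrounding MCP tools/call envelope depends on your client), a valid arguments object might look like:

    // Illustrative arguments object for memory_temporal_analysis.
    // Values are placeholders, not taken from a real project.
    const exampleArgs = {
      query: {
        timeRange: {
          start: "2025-01-01T00:00:00Z",
          end: "2025-01-31T23:59:59Z",
        },
        granularity: "day",
        aggregation: "count",
        filters: { tags: ["deployment"] },
      },
      analysisType: "patterns",
    };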

Implementation Reference

  • Main handler implementation: the TemporalMemoryAnalysis class provides the core logic for temporal analysis, matching the tool's input schema with dedicated methods for patterns, metrics, predictions, and insights (a usage sketch follows this list).
    export class TemporalMemoryAnalysis extends EventEmitter { private storage: JSONLStorage; private manager: MemoryManager; private learningSystem: IncrementalLearningSystem; private knowledgeGraph: KnowledgeGraph; private patternCache: Map<string, TemporalPattern[]>; private metricsCache: Map<string, TemporalMetrics>; private predictionCache: Map<string, PredictionResult>; constructor( storage: JSONLStorage, manager: MemoryManager, learningSystem: IncrementalLearningSystem, knowledgeGraph: KnowledgeGraph, ) { super(); this.storage = storage; this.manager = manager; this.learningSystem = learningSystem; this.knowledgeGraph = knowledgeGraph; this.patternCache = new Map(); this.metricsCache = new Map(); this.predictionCache = new Map(); this.setupPeriodicAnalysis(); } /** * Analyze temporal patterns in memory data */ async analyzeTemporalPatterns( query?: TemporalQuery, ): Promise<TemporalPattern[]> { const defaultQuery: TemporalQuery = { granularity: "day", aggregation: "count", timeRange: this.getDefaultTimeRange(), smoothing: { enabled: true, method: "moving_average", window: 7, }, }; const activeQuery = { ...defaultQuery, ...query }; const cacheKey = this.generateCacheKey("patterns", activeQuery); // Check cache first if (this.patternCache.has(cacheKey)) { return this.patternCache.get(cacheKey)!; } try { // Get time series data const timeSeries = await this.buildTimeSeries(activeQuery); // Detect different types of patterns const patterns: TemporalPattern[] = []; // Periodic patterns patterns.push( ...(await this.detectPeriodicPatterns(timeSeries, activeQuery)), ); // Trend patterns patterns.push( ...(await this.detectTrendPatterns(timeSeries, activeQuery)), ); // Seasonal patterns patterns.push( ...(await this.detectSeasonalPatterns(timeSeries, activeQuery)), ); // Burst patterns patterns.push( ...(await this.detectBurstPatterns(timeSeries, activeQuery)), ); // Decay patterns patterns.push( ...(await this.detectDecayPatterns(timeSeries, activeQuery)), ); // Sort by confidence patterns.sort((a, b) => b.confidence - a.confidence); // Cache results this.patternCache.set(cacheKey, patterns); this.emit("patterns_analyzed", { query: activeQuery, patterns: patterns.length, highConfidence: patterns.filter((p) => p.confidence > 0.7).length, }); return patterns; } catch (error) { this.emit("analysis_error", { error: error instanceof Error ? error.message : String(error), }); throw error; } } /** * Get temporal metrics for a time range */ async getTemporalMetrics(query?: TemporalQuery): Promise<TemporalMetrics> { const defaultQuery: TemporalQuery = { granularity: "day", aggregation: "count", timeRange: this.getDefaultTimeRange(), }; const activeQuery = { ...defaultQuery, ...query }; const cacheKey = this.generateCacheKey("metrics", activeQuery); if (this.metricsCache.has(cacheKey)) { return this.metricsCache.get(cacheKey)!; } const timeSeries = await this.buildTimeSeries(activeQuery); // Calculate activity level const totalActivity = timeSeries.reduce( (sum, point) => sum + point.value, 0, ); const maxPossibleActivity = timeSeries.length * Math.max(...timeSeries.map((p) => p.value)); const activityLevel = maxPossibleActivity > 0 ? 
totalActivity / maxPossibleActivity : 0; // Calculate growth rate const firstHalf = timeSeries.slice(0, Math.floor(timeSeries.length / 2)); const secondHalf = timeSeries.slice(Math.floor(timeSeries.length / 2)); const firstHalfAvg = firstHalf.reduce((sum, p) => sum + p.value, 0) / firstHalf.length; const secondHalfAvg = secondHalf.reduce((sum, p) => sum + p.value, 0) / secondHalf.length; const growthRate = firstHalfAvg > 0 ? ((secondHalfAvg - firstHalfAvg) / firstHalfAvg) * 100 : 0; // Find peak activity const peakPoint = timeSeries.reduce((max, point) => point.value > max.value ? point : max, ); const peakActivity = { timestamp: peakPoint.timestamp, count: peakPoint.value, }; // Calculate average interval const intervals = []; for (let i = 1; i < timeSeries.length; i++) { intervals.push( timeSeries[i].timestamp.getTime() - timeSeries[i - 1].timestamp.getTime(), ); } const averageInterval = intervals.length > 0 ? intervals.reduce((sum, interval) => sum + interval, 0) / intervals.length : 0; // Calculate consistency (inverse of coefficient of variation) const values = timeSeries.map((p) => p.value); const mean = values.reduce((sum, val) => sum + val, 0) / values.length; const variance = values.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) / values.length; const stdDev = Math.sqrt(variance); const consistency = mean > 0 ? Math.max(0, 1 - stdDev / mean) : 0; // Calculate cyclical strength using autocorrelation const cyclicalStrength = this.calculateCyclicalStrength(values); const metrics: TemporalMetrics = { activityLevel, growthRate, peakActivity, averageInterval, consistency, cyclicalStrength, }; this.metricsCache.set(cacheKey, metrics); return metrics; } /** * Make predictions based on temporal patterns */ async predictFutureActivity( query?: TemporalQuery, ): Promise<PredictionResult> { const defaultQuery: TemporalQuery = { granularity: "day", aggregation: "count", timeRange: this.getDefaultTimeRange(), }; const activeQuery = { ...defaultQuery, ...query }; const cacheKey = this.generateCacheKey("predictions", activeQuery); if (this.predictionCache.has(cacheKey)) { return this.predictionCache.get(cacheKey)!; } // Get historical patterns and metrics const patterns = await this.analyzeTemporalPatterns(activeQuery); const metrics = await this.getTemporalMetrics(activeQuery); const timeSeries = await this.buildTimeSeries(activeQuery); // Predict next activity window const nextActivity = await this.predictNextActivity( timeSeries, patterns, metrics, ); // Categorize trends const shortTermPatterns = patterns.filter((p) => this.isShortTerm(p, activeQuery), ); const longTermPatterns = patterns.filter((p) => this.isLongTerm(p, activeQuery), ); // Detect anomalies const anomalies = await this.detectAnomalies(timeSeries, patterns); // Generate recommendations const recommendations = this.generateRecommendations( patterns, metrics, anomalies, ); const result: PredictionResult = { nextActivity, trends: { shortTerm: shortTermPatterns, longTerm: longTermPatterns, }, anomalies, recommendations, }; this.predictionCache.set(cacheKey, result); return result; } /** * Get temporal insights and actionable recommendations */ async getTemporalInsights(query?: TemporalQuery): Promise<TemporalInsight[]> { const patterns = await this.analyzeTemporalPatterns(query); const metrics = await this.getTemporalMetrics(query); const predictions = await this.predictFutureActivity(query); const insights: TemporalInsight[] = []; // Pattern-based insights for (const pattern of patterns.filter((p) => p.confidence > 0.6)) 
{ insights.push({ type: "pattern", title: `${ pattern.type.charAt(0).toUpperCase() + pattern.type.slice(1) } Pattern Detected`, description: pattern.description, confidence: pattern.confidence, timeframe: this.getPatternTimeframe(pattern), actionable: this.isActionablePattern(pattern), recommendations: this.getPatternRecommendations(pattern), }); } // Trend insights if (metrics.growthRate > 20) { insights.push({ type: "trend", title: "Increasing Activity Trend", description: `Memory activity has increased by ${metrics.growthRate.toFixed( 1, )}% over the analysis period`, confidence: 0.8, timeframe: query?.timeRange || this.getDefaultTimeRange(), actionable: true, recommendations: [ "Consider optimizing memory storage for increased load", "Monitor system performance as activity grows", "Evaluate current pruning policies", ], }); } // Anomaly insights for (const anomaly of predictions.anomalies.filter( (a) => a.severity > 0.7, )) { insights.push({ type: "anomaly", title: `${ anomaly.type.charAt(0).toUpperCase() + anomaly.type.slice(1) } Anomaly`, description: anomaly.description, confidence: anomaly.severity, timeframe: { start: anomaly.timestamp, end: anomaly.timestamp, duration: 0, label: "Point Anomaly", }, actionable: true, recommendations: this.getAnomalyRecommendations(anomaly), }); } // Prediction insights if (predictions.nextActivity.probability > 0.7) { insights.push({ type: "prediction", title: "High Probability Activity Window", description: `${(predictions.nextActivity.probability * 100).toFixed( 1, )}% chance of ${predictions.nextActivity.expectedCount} activities`, confidence: predictions.nextActivity.confidence, timeframe: predictions.nextActivity.timeRange, actionable: true, recommendations: [ "Prepare system for predicted activity surge", "Consider pre-emptive optimization", "Monitor resource utilization during predicted window", ], }); } // Sort by confidence and actionability insights.sort((a, b) => { if (a.actionable !== b.actionable) { return a.actionable ? -1 : 1; } return b.confidence - a.confidence; }); return insights; } /** * Build time series data from memory entries */ private async buildTimeSeries( query: TemporalQuery, ): Promise<Array<{ timestamp: Date; value: number; metadata?: any }>> { const entries = await this.getFilteredEntries(query); const timeRange = query.timeRange || this.getDefaultTimeRange(); // Create time buckets based on granularity const buckets = this.createTimeBuckets(timeRange, query.granularity); const timeSeries: Array<{ timestamp: Date; value: number; metadata?: any; }> = []; for (const bucket of buckets) { const bucketEntries = entries.filter((entry) => { const entryTime = new Date(entry.timestamp); return entryTime >= bucket.start && entryTime < bucket.end; }); let value = 0; const metadata: any = {}; switch (query.aggregation) { case "count": value = bucketEntries.length; break; case "success_rate": { const successful = bucketEntries.filter( (e) => e.data.outcome === "success" || e.data.success === true, ).length; value = bucketEntries.length > 0 ? 
successful / bucketEntries.length : 0; break; } case "activity_level": // Custom metric based on entry types and interactions value = this.calculateActivityLevel(bucketEntries); break; case "diversity": { const uniqueTypes = new Set(bucketEntries.map((e) => e.type)); value = uniqueTypes.size; break; } } // Add metadata metadata.entryCount = bucketEntries.length; metadata.types = [...new Set(bucketEntries.map((e) => e.type))]; timeSeries.push({ timestamp: bucket.start, value, metadata, }); } // Apply smoothing if requested if (query.smoothing?.enabled) { return this.applySmoothingToTimeSeries(timeSeries, query.smoothing); } return timeSeries; } /** * Get filtered entries based on query */ private async getFilteredEntries( query: TemporalQuery, ): Promise<MemoryEntry[]> { let entries = await this.storage.getAll(); // Apply time range filter if (query.timeRange) { entries = entries.filter((entry) => { const entryTime = new Date(entry.timestamp); return ( entryTime >= query.timeRange!.start && entryTime <= query.timeRange!.end ); }); } // Apply filters if (query.filters) { if (query.filters.types) { entries = entries.filter((entry) => query.filters!.types!.includes(entry.type), ); } if (query.filters.projects) { entries = entries.filter((entry) => query.filters!.projects!.some( (project) => entry.data.projectPath?.includes(project) || entry.data.projectId === project, ), ); } if (query.filters.outcomes) { entries = entries.filter( (entry) => query.filters!.outcomes!.includes(entry.data.outcome) || (entry.data.success === true && query.filters!.outcomes!.includes("success")) || (entry.data.success === false && query.filters!.outcomes!.includes("failure")), ); } if (query.filters.tags) { entries = entries.filter( (entry) => entry.tags?.some((tag) => query.filters!.tags!.includes(tag)), ); } } return entries; } /** * Create time buckets for analysis */ private createTimeBuckets( timeRange: TimeWindow, granularity: string, ): TimeWindow[] { const buckets: TimeWindow[] = []; let current = new Date(timeRange.start); const end = new Date(timeRange.end); while (current < end) { const bucketStart = new Date(current); let bucketEnd: Date; switch (granularity) { case "hour": bucketEnd = new Date(current.getTime() + 60 * 60 * 1000); break; case "day": bucketEnd = new Date(current.getTime() + 24 * 60 * 60 * 1000); break; case "week": bucketEnd = new Date(current.getTime() + 7 * 24 * 60 * 60 * 1000); break; case "month": bucketEnd = new Date( current.getFullYear(), current.getMonth() + 1, 1, ); break; case "year": bucketEnd = new Date(current.getFullYear() + 1, 0, 1); break; default: bucketEnd = new Date(current.getTime() + 24 * 60 * 60 * 1000); } if (bucketEnd > end) { bucketEnd = new Date(end); } buckets.push({ start: bucketStart, end: bucketEnd, duration: bucketEnd.getTime() - bucketStart.getTime(), label: this.formatTimeLabel(bucketStart, granularity), }); current = bucketEnd; } return buckets; } /** * Detect periodic patterns in time series */ private async detectPeriodicPatterns( timeSeries: Array<{ timestamp: Date; value: number; metadata?: any }>, query: TemporalQuery, ): Promise<TemporalPattern[]> { const patterns: TemporalPattern[] = []; const values = timeSeries.map((p) => p.value); // Check for different periods (daily, weekly, monthly cycles) const periods = [1, 7, 30, 365]; // days for (const period of periods) { const adjustedPeriod = this.adjustPeriodForGranularity( period, query.granularity, ); if (adjustedPeriod >= values.length / 3) continue; // Need at least 3 cycles const correlation = 
this.calculateAutocorrelation(values, adjustedPeriod); if (correlation > 0.6) { patterns.push({ type: "periodic", confidence: correlation, period: period * 24 * 60 * 60 * 1000, // Convert to milliseconds description: `${period}-${query.granularity} cycle detected with ${( correlation * 100 ).toFixed(1)}% correlation`, dataPoints: timeSeries, }); } } return patterns; } /** * Detect trend patterns */ private async detectTrendPatterns( timeSeries: Array<{ timestamp: Date; value: number; metadata?: any }>, _query: TemporalQuery, ): Promise<TemporalPattern[]> { const patterns: TemporalPattern[] = []; const values = timeSeries.map((p) => p.value); if (values.length < 5) return patterns; // Calculate linear regression const { slope, rSquared } = this.calculateLinearRegression(values); if (rSquared > 0.5) { // Good fit const trend = slope > 0.01 ? "increasing" : slope < -0.01 ? "decreasing" : "stable"; if (trend !== "stable") { patterns.push({ type: "trending", confidence: rSquared, trend, description: `${trend} trend detected with R² = ${rSquared.toFixed( 3, )}`, dataPoints: timeSeries, }); } } return patterns; } /** * Detect seasonal patterns */ private async detectSeasonalPatterns( timeSeries: Array<{ timestamp: Date; value: number; metadata?: any }>, query: TemporalQuery, ): Promise<TemporalPattern[]> { const patterns: TemporalPattern[] = []; // Check for daily patterns (hour of day) if (query.granularity === "hour") { const hourlyPattern = this.analyzeHourlyPattern(timeSeries); if (hourlyPattern.confidence > 0.6) { patterns.push({ type: "seasonal", confidence: hourlyPattern.confidence, seasonality: "daily", description: `Daily pattern: peak activity at ${hourlyPattern.peakHour}:00`, dataPoints: timeSeries, }); } } // Check for weekly patterns (day of week) if (["hour", "day"].includes(query.granularity)) { const weeklyPattern = this.analyzeWeeklyPattern(timeSeries); if (weeklyPattern.confidence > 0.6) { patterns.push({ type: "seasonal", confidence: weeklyPattern.confidence, seasonality: "weekly", description: `Weekly pattern: peak activity on ${weeklyPattern.peakDay}`, dataPoints: timeSeries, }); } } return patterns; } /** * Detect burst patterns (sudden spikes) */ private async detectBurstPatterns( timeSeries: Array<{ timestamp: Date; value: number; metadata?: any }>, _query: TemporalQuery, ): Promise<TemporalPattern[]> { const patterns: TemporalPattern[] = []; const values = timeSeries.map((p) => p.value); const mean = values.reduce((sum, val) => sum + val, 0) / values.length; const stdDev = Math.sqrt( values.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) / values.length, ); const threshold = mean + 2 * stdDev; // 2 standard deviations above mean const bursts = []; for (let i = 0; i < values.length; i++) { if (values[i] > threshold) { bursts.push(i); } } if (bursts.length > 0 && bursts.length < values.length * 0.1) { // Bursts are rare const confidence = Math.min(0.9, bursts.length / (values.length * 0.05)); patterns.push({ type: "burst", confidence, description: `${bursts.length} burst events detected (${( threshold / mean ).toFixed(1)}x normal activity)`, dataPoints: bursts.map((i) => timeSeries[i]), }); } return patterns; } /** * Detect decay patterns (gradual decline) */ private async detectDecayPatterns( timeSeries: Array<{ timestamp: Date; value: number; metadata?: any }>, _query: TemporalQuery, ): Promise<TemporalPattern[]> { const patterns: TemporalPattern[] = []; const values = timeSeries.map((p) => p.value); if (values.length < 10) return patterns; // Look for exponential 
decay pattern const logValues = values.map((v) => Math.log(Math.max(v, 0.1))); // Avoid log(0) const { slope, rSquared } = this.calculateLinearRegression(logValues); if (slope < -0.05 && rSquared > 0.7) { // Significant decay with good fit patterns.push({ type: "decay", confidence: rSquared, description: `Exponential decay detected (half-life ≈ ${( -0.693 / slope ).toFixed(1)} periods)`, dataPoints: timeSeries, }); } return patterns; } /** * Calculate activity level for a set of entries */ private calculateActivityLevel(entries: MemoryEntry[]): number { if (entries.length === 0) return 0; let score = 0; // Base score from count score += Math.min(1, entries.length / 10); // Cap at 10 entries = 1.0 // Bonus for diversity const uniqueTypes = new Set(entries.map((e) => e.type)); score += uniqueTypes.size * 0.1; // Bonus for successful outcomes const successful = entries.filter( (e) => e.data.outcome === "success" || e.data.success === true, ).length; score += (successful / entries.length) * 0.3; return Math.min(1, score); } /** * Apply smoothing to time series data */ private applySmoothingToTimeSeries( timeSeries: Array<{ timestamp: Date; value: number; metadata?: any }>, smoothing: { method: string; window: number }, ): Array<{ timestamp: Date; value: number; metadata?: any }> { const values = timeSeries.map((p) => p.value); let smoothedValues: number[]; switch (smoothing.method) { case "moving_average": smoothedValues = this.applyMovingAverage(values, smoothing.window); break; case "exponential": smoothedValues = this.applyExponentialSmoothing(values, 0.3); break; default: smoothedValues = values; } return timeSeries.map((point, i) => ({ ...point, value: smoothedValues[i], })); } /** * Apply moving average smoothing */ private applyMovingAverage(values: number[], window: number): number[] { const smoothed: number[] = []; for (let i = 0; i < values.length; i++) { const start = Math.max(0, i - Math.floor(window / 2)); const end = Math.min(values.length, i + Math.ceil(window / 2)); const windowValues = values.slice(start, end); const average = windowValues.reduce((sum, val) => sum + val, 0) / windowValues.length; smoothed.push(average); } return smoothed; } /** * Apply exponential smoothing */ private applyExponentialSmoothing(values: number[], alpha: number): number[] { const smoothed: number[] = [values[0]]; for (let i = 1; i < values.length; i++) { smoothed.push(alpha * values[i] + (1 - alpha) * smoothed[i - 1]); } return smoothed; } /** * Calculate autocorrelation for periodic pattern detection */ private calculateAutocorrelation(values: number[], lag: number): number { if (lag >= values.length) return 0; const n = values.length - lag; const mean = values.reduce((sum, val) => sum + val, 0) / values.length; let numerator = 0; let denominator = 0; for (let i = 0; i < n; i++) { numerator += (values[i] - mean) * (values[i + lag] - mean); } for (let i = 0; i < values.length; i++) { denominator += Math.pow(values[i] - mean, 2); } return denominator > 0 ? 
numerator / denominator : 0; } /** * Calculate cyclical strength using autocorrelation */ private calculateCyclicalStrength(values: number[]): number { const maxLag = Math.min(values.length / 3, 30); let maxCorrelation = 0; for (let lag = 1; lag < maxLag; lag++) { const correlation = Math.abs(this.calculateAutocorrelation(values, lag)); maxCorrelation = Math.max(maxCorrelation, correlation); } return maxCorrelation; } /** * Calculate linear regression */ private calculateLinearRegression(values: number[]): { slope: number; intercept: number; rSquared: number; } { const n = values.length; const x = Array.from({ length: n }, (_, i) => i); const sumX = x.reduce((sum, val) => sum + val, 0); const sumY = values.reduce((sum, val) => sum + val, 0); const sumXY = x.reduce((sum, val, i) => sum + val * values[i], 0); const sumXX = x.reduce((sum, val) => sum + val * val, 0); const slope = (n * sumXY - sumX * sumY) / (n * sumXX - sumX * sumX); const intercept = (sumY - slope * sumX) / n; // Calculate R² const meanY = sumY / n; const ssRes = values.reduce((sum, val, i) => { const predicted = slope * i + intercept; return sum + Math.pow(val - predicted, 2); }, 0); const ssTot = values.reduce( (sum, val) => sum + Math.pow(val - meanY, 2), 0, ); const rSquared = ssTot > 0 ? 1 - ssRes / ssTot : 0; return { slope, intercept, rSquared }; } /** * Predict next activity window */ private async predictNextActivity( timeSeries: Array<{ timestamp: Date; value: number; metadata?: any }>, patterns: TemporalPattern[], _metrics: TemporalMetrics, ): Promise<PredictionResult["nextActivity"]> { const lastPoint = timeSeries[timeSeries.length - 1]; const averageValue = timeSeries.reduce((sum, p) => sum + p.value, 0) / timeSeries.length; // Base prediction on recent trend let expectedCount = averageValue; let probability = 0.5; // Adjust based on trends const trendPattern = patterns.find((p) => p.type === "trending"); if (trendPattern && trendPattern.trend === "increasing") { expectedCount *= 1.2; probability += 0.2; } else if (trendPattern && trendPattern.trend === "decreasing") { expectedCount *= 0.8; probability -= 0.1; } // Adjust based on periodic patterns const periodicPattern = patterns.find( (p) => p.type === "periodic" && p.confidence > 0.7, ); if (periodicPattern) { probability += 0.3; } // Determine time range for next activity (next period based on granularity) const nextStart = new Date( lastPoint.timestamp.getTime() + 24 * 60 * 60 * 1000, ); // Next day const nextEnd = new Date(nextStart.getTime() + 24 * 60 * 60 * 1000); return { probability: Math.min(0.95, Math.max(0.05, probability)), timeRange: { start: nextStart, end: nextEnd, duration: 24 * 60 * 60 * 1000, label: "Next 24 hours", }, expectedCount: Math.round(expectedCount), confidence: Math.min( 0.9, patterns.reduce((sum, p) => sum + p.confidence, 0) / patterns.length, ), }; } /** * Detect anomalies in time series */ private async detectAnomalies( timeSeries: Array<{ timestamp: Date; value: number; metadata?: any }>, _patterns: TemporalPattern[], ): Promise<PredictionResult["anomalies"]> { const anomalies: PredictionResult["anomalies"] = []; const values = timeSeries.map((p) => p.value); const mean = values.reduce((sum, val) => sum + val, 0) / values.length; const stdDev = Math.sqrt( values.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) / values.length, ); for (let i = 0; i < timeSeries.length; i++) { const point = timeSeries[i]; const value = point.value; // Spike detection if (value > mean + 3 * stdDev) { anomalies.push({ timestamp: 
point.timestamp, type: "spike", severity: Math.min(1, (value - mean) / (3 * stdDev)), description: `Activity spike: ${value} (${( (value / mean - 1) * 100 ).toFixed(0)}% above normal)`, }); } // Drought detection if (value < mean - 2 * stdDev && mean > 1) { anomalies.push({ timestamp: point.timestamp, type: "drought", severity: Math.min(1, (mean - value) / (2 * stdDev)), description: `Activity drought: ${value} (${( (1 - value / mean) * 100 ).toFixed(0)}% below normal)`, }); } } // Detect regime shifts (significant changes in mean) const shifts = this.detectRegimeShifts(timeSeries); anomalies.push(...shifts); return anomalies.sort((a, b) => b.severity - a.severity); } /** * Detect regime shifts in time series */ private detectRegimeShifts( timeSeries: Array<{ timestamp: Date; value: number; metadata?: any }>, ): Array<{ timestamp: Date; type: "shift"; severity: number; description: string; }> { const shifts: Array<{ timestamp: Date; type: "shift"; severity: number; description: string; }> = []; const values = timeSeries.map((p) => p.value); if (values.length < 20) return shifts; // Need sufficient data const windowSize = Math.floor(values.length / 4); for (let i = windowSize; i < values.length - windowSize; i++) { const before = values.slice(i - windowSize, i); const after = values.slice(i, i + windowSize); const meanBefore = before.reduce((sum, val) => sum + val, 0) / before.length; const meanAfter = after.reduce((sum, val) => sum + val, 0) / after.length; const changeMagnitude = Math.abs(meanAfter - meanBefore); const relativeChange = meanBefore > 0 ? changeMagnitude / meanBefore : 0; if (relativeChange > 0.5) { // 50% change shifts.push({ timestamp: timeSeries[i].timestamp, type: "shift", severity: Math.min(1, relativeChange), description: `Regime shift: ${meanBefore.toFixed( 1, )} → ${meanAfter.toFixed(1)} (${(relativeChange * 100).toFixed( 0, )}% change)`, }); } } return shifts; } /** * Generate recommendations based on analysis */ private generateRecommendations( patterns: TemporalPattern[], metrics: TemporalMetrics, anomalies: PredictionResult["anomalies"], ): string[] { const recommendations: string[] = []; // Pattern-based recommendations const periodicPattern = patterns.find( (p) => p.type === "periodic" && p.confidence > 0.7, ); if (periodicPattern) { recommendations.push( "Schedule maintenance and optimizations during low-activity periods based on detected cycles", ); } const trendPattern = patterns.find((p) => p.type === "trending"); if (trendPattern?.trend === "increasing") { recommendations.push( "Plan for increased storage and processing capacity based on growing activity trend", ); } else if (trendPattern?.trend === "decreasing") { recommendations.push( "Investigate causes of declining activity and consider engagement strategies", ); } // Metrics-based recommendations if (metrics.consistency < 0.5) { recommendations.push( "High variability detected - consider implementing activity smoothing mechanisms", ); } if (metrics.growthRate > 50) { recommendations.push( "Rapid growth detected - implement proactive scaling measures", ); } // Anomaly-based recommendations const spikes = anomalies.filter( (a) => a.type === "spike" && a.severity > 0.7, ); if (spikes.length > 0) { recommendations.push( "Implement burst handling to manage activity spikes effectively", ); } const droughts = anomalies.filter( (a) => a.type === "drought" && a.severity > 0.7, ); if (droughts.length > 0) { recommendations.push( "Investigate causes of activity droughts and implement retention strategies", ); } 
return recommendations; } /** * Utility methods */ private getDefaultTimeRange(): TimeWindow { const end = new Date(); const start = new Date(end.getTime() - 30 * 24 * 60 * 60 * 1000); // 30 days ago return { start, end, duration: end.getTime() - start.getTime(), label: "Last 30 days", }; } private generateCacheKey(type: string, query: TemporalQuery): string { return `${type}_${JSON.stringify(query)}`; } private adjustPeriodForGranularity( period: number, granularity: string, ): number { switch (granularity) { case "hour": return period * 24; case "day": return period; case "week": return Math.ceil(period / 7); case "month": return Math.ceil(period / 30); case "year": return Math.ceil(period / 365); default: return period; } } private formatTimeLabel(date: Date, granularity: string): string { switch (granularity) { case "hour": return date.toISOString().slice(0, 13) + ":00"; case "day": return date.toISOString().slice(0, 10); case "week": return `Week of ${date.toISOString().slice(0, 10)}`; case "month": return `${date.getFullYear()}-${(date.getMonth() + 1) .toString() .padStart(2, "0")}`; case "year": return date.getFullYear().toString(); default: return date.toISOString().slice(0, 10); } } private isShortTerm( pattern: TemporalPattern, _query: TemporalQuery, ): boolean { if (pattern.period) { const days = pattern.period / (24 * 60 * 60 * 1000); return days <= 7; } return true; } private isLongTerm(pattern: TemporalPattern, _query: TemporalQuery): boolean { if (pattern.period) { const days = pattern.period / (24 * 60 * 60 * 1000); return days > 30; } return false; } private getPatternTimeframe(pattern: TemporalPattern): TimeWindow { if (pattern.dataPoints.length > 0) { const start = pattern.dataPoints[0].timestamp; const end = pattern.dataPoints[pattern.dataPoints.length - 1].timestamp; return { start, end, duration: end.getTime() - start.getTime(), label: `${start.toISOString().slice(0, 10)} to ${end .toISOString() .slice(0, 10)}`, }; } return this.getDefaultTimeRange(); } private isActionablePattern(pattern: TemporalPattern): boolean { return ( pattern.confidence > 0.7 && ["periodic", "trending", "seasonal"].includes(pattern.type) ); } private getPatternRecommendations(pattern: TemporalPattern): string[] { const recommendations: string[] = []; switch (pattern.type) { case "periodic": recommendations.push( "Schedule regular maintenance during low-activity periods", ); recommendations.push( "Optimize resource allocation based on predictable cycles", ); break; case "trending": if (pattern.trend === "increasing") { recommendations.push("Plan for capacity expansion"); recommendations.push("Implement proactive monitoring"); } else if (pattern.trend === "decreasing") { recommendations.push("Investigate root causes of decline"); recommendations.push("Consider engagement interventions"); } break; case "seasonal": recommendations.push( "Adjust system configuration for seasonal patterns", ); recommendations.push( "Plan marketing and engagement around peak periods", ); break; } return recommendations; } private getAnomalyRecommendations(anomaly: { type: string; severity: number; }): string[] { const recommendations: string[] = []; switch (anomaly.type) { case "spike": recommendations.push("Implement burst protection mechanisms"); recommendations.push("Investigate spike triggers for prevention"); recommendations.push("Consider auto-scaling capabilities"); break; case "drought": recommendations.push("Implement activity monitoring alerts"); recommendations.push("Investigate user engagement issues"); 
recommendations.push("Consider proactive outreach strategies"); break; case "shift": recommendations.push("Investigate underlying system changes"); recommendations.push("Update baseline metrics and thresholds"); recommendations.push("Review configuration changes during this period"); break; } return recommendations; } private analyzeHourlyPattern( timeSeries: Array<{ timestamp: Date; value: number; metadata?: any }>, ): { confidence: number; peakHour: number } { const hourlyActivity = new Array(24).fill(0); const hourlyCounts = new Array(24).fill(0); for (const point of timeSeries) { const hour = point.timestamp.getHours(); hourlyActivity[hour] += point.value; hourlyCounts[hour]++; } // Calculate average activity per hour const hourlyAverages = hourlyActivity.map((total, i) => hourlyCounts[i] > 0 ? total / hourlyCounts[i] : 0, ); // Find peak hour const peakHour = hourlyAverages.indexOf(Math.max(...hourlyAverages)); // Calculate confidence based on variance const mean = hourlyAverages.reduce((sum, val) => sum + val, 0) / 24; const variance = hourlyAverages.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) / 24; const stdDev = Math.sqrt(variance); const confidence = mean > 0 ? Math.min(0.9, stdDev / mean) : 0; return { confidence, peakHour }; } private analyzeWeeklyPattern( timeSeries: Array<{ timestamp: Date; value: number; metadata?: any }>, ): { confidence: number; peakDay: string } { const weeklyActivity = new Array(7).fill(0); const weeklyCounts = new Array(7).fill(0); const dayNames = [ "Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", ]; for (const point of timeSeries) { const day = point.timestamp.getDay(); weeklyActivity[day] += point.value; weeklyCounts[day]++; } // Calculate average activity per day const weeklyAverages = weeklyActivity.map((total, i) => weeklyCounts[i] > 0 ? total / weeklyCounts[i] : 0, ); // Find peak day const peakDayIndex = weeklyAverages.indexOf(Math.max(...weeklyAverages)); const peakDay = dayNames[peakDayIndex]; // Calculate confidence const mean = weeklyAverages.reduce((sum, val) => sum + val, 0) / 7; const variance = weeklyAverages.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) / 7; const stdDev = Math.sqrt(variance); const confidence = mean > 0 ? Math.min(0.9, stdDev / mean) : 0; return { confidence, peakDay }; } /** * Setup periodic analysis */ private setupPeriodicAnalysis(): void { // Run analysis every 6 hours setInterval( async () => { try { const insights = await this.getTemporalInsights(); this.emit("periodic_analysis_completed", { insights: insights.length, }); } catch (error) { this.emit("periodic_analysis_error", { error: error instanceof Error ? error.message : String(error), }); } }, 6 * 60 * 60 * 1000, ); } }
  • Tool schema definition, including input validation for query (timeRange, granularity, aggregation, filters) and analysisType.
    { name: "memory_temporal_analysis", description: "Analyze temporal patterns and trends in memory data", inputSchema: { type: "object", properties: { query: { type: "object", properties: { timeRange: { type: "object", properties: { start: { type: "string", format: "date-time" }, end: { type: "string", format: "date-time" }, }, }, granularity: { type: "string", enum: ["hour", "day", "week", "month", "year"], default: "day", }, aggregation: { type: "string", enum: ["count", "success_rate", "activity_level", "diversity"], default: "count", }, filters: { type: "object", properties: { types: { type: "array", items: { type: "string" } }, projects: { type: "array", items: { type: "string" } }, outcomes: { type: "array", items: { type: "string" } }, tags: { type: "array", items: { type: "string" } }, }, }, }, }, analysisType: { type: "string", enum: ["patterns", "metrics", "predictions", "insights"], default: "patterns", }, }, }, },
  • Tool registration in the memoryTools array exported for MCP integration (a dispatch sketch follows this list).
    // Memory Tools for MCP export const memoryTools = [ { name: "memory_recall", description: "Recall memories about a project or topic", inputSchema: { type: "object", properties: { query: { type: "string", description: "Search query or project ID", }, type: { type: "string", enum: [ "analysis", "recommendation", "deployment", "configuration", "interaction", "all", ], description: "Type of memory to recall", }, limit: { type: "number", description: "Maximum number of memories to return", default: 10, }, }, required: ["query"], }, }, { name: "memory_intelligent_analysis", description: "Get intelligent analysis with patterns, predictions, and recommendations", inputSchema: { type: "object", properties: { projectPath: { type: "string", description: "Path to the project for analysis", }, baseAnalysis: { type: "object", description: "Base analysis data to enhance", }, }, required: ["projectPath", "baseAnalysis"], }, }, { name: "memory_enhanced_recommendation", description: "Get enhanced recommendations using learning and knowledge graph", inputSchema: { type: "object", properties: { projectPath: { type: "string", description: "Path to the project", }, baseRecommendation: { type: "object", description: "Base recommendation to enhance", }, projectFeatures: { type: "object", properties: { language: { type: "string" }, framework: { type: "string" }, size: { type: "string", enum: ["small", "medium", "large"] }, complexity: { type: "string", enum: ["simple", "moderate", "complex"], }, hasTests: { type: "boolean" }, hasCI: { type: "boolean" }, hasDocs: { type: "boolean" }, isOpenSource: { type: "boolean" }, }, required: ["language"], }, }, required: ["projectPath", "baseRecommendation", "projectFeatures"], }, }, { name: "memory_learning_stats", description: "Get comprehensive learning and knowledge graph statistics", inputSchema: { type: "object", properties: { includeDetails: { type: "boolean", description: "Include detailed statistics", default: true, }, }, }, }, { name: "memory_knowledge_graph", description: "Query the knowledge graph for relationships and paths", inputSchema: { type: "object", properties: { query: { type: "object", properties: { nodeTypes: { type: "array", items: { type: "string" }, description: "Filter by node types", }, edgeTypes: { type: "array", items: { type: "string" }, description: "Filter by edge types", }, startNode: { type: "string", description: "Starting node for path queries", }, maxDepth: { type: "number", description: "Maximum path depth", default: 3, }, }, }, }, required: ["query"], }, }, { name: "memory_contextual_search", description: "Perform contextual memory retrieval with intelligent ranking", inputSchema: { type: "object", properties: { query: { type: "string", description: "Search query", }, context: { type: "object", properties: { currentProject: { type: "object", properties: { path: { type: "string" }, language: { type: "string" }, framework: { type: "string" }, size: { type: "string", enum: ["small", "medium", "large"] }, }, }, userIntent: { type: "object", properties: { action: { type: "string", enum: [ "analyze", "recommend", "deploy", "troubleshoot", "learn", ], }, urgency: { type: "string", enum: ["low", "medium", "high"] }, experience: { type: "string", enum: ["novice", "intermediate", "expert"], }, }, }, temporalContext: { type: "object", properties: { recency: { type: "string", enum: ["recent", "all", "historical"], }, timeRange: { type: "object", properties: { start: { type: "string" }, end: { type: "string" }, }, }, }, }, }, }, options: { 
type: "object", properties: { maxResults: { type: "number", default: 10 }, minRelevance: { type: "number", default: 0.3 }, includeReasoning: { type: "boolean", default: true }, }, }, }, required: ["query", "context"], }, }, { name: "memory_agent_network", description: "Manage multi-agent memory sharing and collaboration", inputSchema: { type: "object", properties: { action: { type: "string", enum: [ "register_agent", "share_memory", "sync_request", "get_insights", "network_status", ], description: "Action to perform", }, agentInfo: { type: "object", properties: { name: { type: "string" }, capabilities: { type: "array", items: { type: "string" } }, specializations: { type: "array", items: { type: "string" } }, trustLevel: { type: "string", enum: ["low", "medium", "high", "trusted"], }, }, }, memoryId: { type: "string", description: "Memory ID for sharing operations", }, targetAgent: { type: "string", description: "Target agent for sync operations", }, options: { type: "object", properties: { anonymize: { type: "boolean", default: false }, requireValidation: { type: "boolean", default: false }, }, }, }, required: ["action"], }, }, { name: "memory_insights", description: "Get insights and patterns from memory", inputSchema: { type: "object", properties: { projectId: { type: "string", description: "Project ID to analyze", }, timeRange: { type: "object", properties: { start: { type: "string", format: "date-time" }, end: { type: "string", format: "date-time" }, }, description: "Time range for analysis", }, }, }, }, { name: "memory_similar", description: "Find similar projects from memory", inputSchema: { type: "object", properties: { analysisId: { type: "string", description: "Analysis ID to find similar projects for", }, limit: { type: "number", description: "Maximum number of similar projects", default: 5, }, }, required: ["analysisId"], }, }, { name: "memory_export", description: "Export memories to JSON or CSV", inputSchema: { type: "object", properties: { format: { type: "string", enum: ["json", "csv"], description: "Export format", default: "json", }, filter: { type: "object", properties: { type: { type: "string" }, projectId: { type: "string" }, startDate: { type: "string", format: "date-time" }, endDate: { type: "string", format: "date-time" }, }, description: "Filter memories to export", }, }, }, }, { name: "memory_cleanup", description: "Clean up old memories", inputSchema: { type: "object", properties: { daysToKeep: { type: "number", description: "Number of days of memories to keep", default: 30, }, dryRun: { type: "boolean", description: "Preview what would be deleted without actually deleting", default: false, }, }, }, }, { name: "memory_pruning", description: "Intelligent memory pruning and optimization", inputSchema: { type: "object", properties: { policy: { type: "object", properties: { maxAge: { type: "number", description: "Maximum age in days", default: 180, }, maxSize: { type: "number", description: "Maximum storage size in MB", default: 500, }, maxEntries: { type: "number", description: "Maximum number of entries", default: 50000, }, preservePatterns: { type: "array", items: { type: "string" }, description: "Pattern types to preserve", }, compressionThreshold: { type: "number", description: "Compress entries older than X days", default: 30, }, redundancyThreshold: { type: "number", description: "Remove similar entries with similarity > X", default: 0.85, }, }, }, dryRun: { type: "boolean", description: "Preview pruning without executing", default: false, }, }, }, }, { name: 
"memory_temporal_analysis", description: "Analyze temporal patterns and trends in memory data", inputSchema: { type: "object", properties: { query: { type: "object", properties: { timeRange: { type: "object", properties: { start: { type: "string", format: "date-time" }, end: { type: "string", format: "date-time" }, }, }, granularity: { type: "string", enum: ["hour", "day", "week", "month", "year"], default: "day", }, aggregation: { type: "string", enum: ["count", "success_rate", "activity_level", "diversity"], default: "count", }, filters: { type: "object", properties: { types: { type: "array", items: { type: "string" } }, projects: { type: "array", items: { type: "string" } }, outcomes: { type: "array", items: { type: "string" } }, tags: { type: "array", items: { type: "string" } }, }, }, }, }, analysisType: { type: "string", enum: ["patterns", "metrics", "predictions", "insights"], default: "patterns", }, }, }, }, { name: "memory_visualization", description: "Generate visual representations of memory data", inputSchema: { type: "object", properties: { visualizationType: { type: "string", enum: [ "dashboard", "timeline", "network", "heatmap", "distribution", "trends", "custom", ], default: "dashboard", }, options: { type: "object", properties: { timeRange: { type: "object", properties: { start: { type: "string", format: "date-time" }, end: { type: "string", format: "date-time" }, }, }, includeCharts: { type: "array", items: { type: "string" } }, config: { type: "object", properties: { width: { type: "number", default: 800 }, height: { type: "number", default: 600 }, theme: { type: "string", enum: ["light", "dark", "auto"], default: "light", }, exportFormat: { type: "string", enum: ["svg", "png", "json", "html"], default: "svg", }, interactive: { type: "boolean", default: true }, }, }, }, }, customVisualization: { type: "object", properties: { type: { type: "string", enum: [ "line", "bar", "scatter", "heatmap", "network", "sankey", "treemap", "timeline", ], }, query: { type: "object", properties: { filters: { type: "object" }, groupBy: { type: "string" }, aggregation: { type: "string" }, }, }, }, }, }, }, }, { name: "memory_export_advanced", description: "Advanced memory export with multiple formats and options", inputSchema: { type: "object", properties: { outputPath: { type: "string", description: "Output file path" }, options: { type: "object", properties: { format: { type: "string", enum: [ "json", "jsonl", "csv", "xml", "yaml", "sqlite", "archive", ], default: "json", }, compression: { type: "string", enum: ["gzip", "zip", "none"], default: "none", }, includeMetadata: { type: "boolean", default: true }, includeLearning: { type: "boolean", default: true }, includeKnowledgeGraph: { type: "boolean", default: true }, filters: { type: "object", properties: { types: { type: "array", items: { type: "string" } }, dateRange: { type: "object", properties: { start: { type: "string", format: "date-time" }, end: { type: "string", format: "date-time" }, }, }, projects: { type: "array", items: { type: "string" } }, tags: { type: "array", items: { type: "string" } }, outcomes: { type: "array", items: { type: "string" } }, }, }, anonymize: { type: "object", properties: { enabled: { type: "boolean", default: false }, fields: { type: "array", items: { type: "string" } }, method: { type: "string", enum: ["hash", "remove", "pseudonymize"], default: "hash", }, }, }, encryption: { type: "object", properties: { enabled: { type: "boolean", default: false }, algorithm: { type: "string", enum: ["aes-256-gcm", 
"aes-192-gcm", "aes-128-gcm"], default: "aes-256-gcm", }, password: { type: "string" }, }, }, }, }, }, required: ["outputPath"], }, }, { name: "memory_import_advanced", description: "Advanced memory import with validation and conflict resolution", inputSchema: { type: "object", properties: { inputPath: { type: "string", description: "Input file path" }, options: { type: "object", properties: { format: { type: "string", enum: [ "json", "jsonl", "csv", "xml", "yaml", "sqlite", "archive", ], default: "json", }, mode: { type: "string", enum: ["merge", "replace", "append", "update"], default: "merge", }, validation: { type: "string", enum: ["strict", "loose", "none"], default: "strict", }, conflictResolution: { type: "string", enum: ["skip", "overwrite", "merge", "rename"], default: "skip", }, backup: { type: "boolean", default: true }, dryRun: { type: "boolean", default: false }, mapping: { type: "object", description: "Field mapping for different schemas", }, transformation: { type: "object", properties: { enabled: { type: "boolean", default: false }, rules: { type: "array", items: { type: "object", properties: { field: { type: "string" }, operation: { type: "string", enum: ["convert", "transform", "validate"], }, params: { type: "object" }, }, }, }, }, }, }, }, }, required: ["inputPath"], }, }, { name: "memory_migration", description: "Create and execute migration plans between different memory systems", inputSchema: { type: "object", properties: { action: { type: "string", enum: ["create_plan", "execute_migration", "validate_compatibility"], default: "create_plan", }, sourcePath: { type: "string", description: "Source data path" }, migrationPlan: { type: "object", properties: { sourceSystem: { type: "string" }, targetSystem: { type: "string", default: "DocuMCP" }, mapping: { type: "object" }, transformations: { type: "array" }, validation: { type: "array" }, postProcessing: { type: "array", items: { type: "string" } }, }, }, sourceSchema: { type: "object", description: "Source system schema" }, targetSchema: { type: "object", description: "Target system schema" }, options: { type: "object", properties: { autoMap: { type: "boolean", default: true }, preserveStructure: { type: "boolean", default: true }, customMappings: { type: "object" }, }, }, }, }, }, { name: "memory_optimization_metrics", description: "Get comprehensive optimization metrics and recommendations", inputSchema: { type: "object", properties: { includeRecommendations: { type: "boolean", default: true }, timeRange: { type: "object", properties: { start: { type: "string", format: "date-time" }, end: { type: "string", format: "date-time" }, }, }, }, }, }, ];
  • Re-exports the TemporalMemoryAnalysis class and its result types for use in handlers and other modules.
    export { TemporalMemoryAnalysis, type TemporalPattern, type TemporalMetrics, type PredictionResult, type TemporalInsight, } from "./temporal-analysis.js";
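
For orientation, a minimal usage sketch of the class follows. It is not taken from the repository: it assumes the four constructor dependencies are built elsewhere in the host project, and it infers the query and TimeWindow shapes from the class source above.

    // Import path taken from the re-export above.
    import { TemporalMemoryAnalysis } from "./temporal-analysis.js";

    // Assumed to be constructed elsewhere (JSONLStorage, MemoryManager,
    // IncrementalLearningSystem, KnowledgeGraph); not shown in this excerpt.
    declare const storage: any;
    declare const manager: any;
    declare const learningSystem: any;
    declare const knowledgeGraph: any;

    const analysis = new TemporalMemoryAnalysis(
      storage,
      manager,
      learningSystem,
      knowledgeGraph,
    );

    // TimeWindow shape inferred from getDefaultTimeRange():
    // { start, end, duration, label }.
    const end = new Date();
    const start = new Date(end.getTime() - 90 * 24 * 60 * 60 * 1000); // 90 days ago
    const query = {
      granularity: "week" as const,
      aggregation: "success_rate" as const,
      timeRange: {
        start,
        end,
        duration: end.getTime() - start.getTime(),
        label: "Last 90 days",
      },
    };

    // Results are cached per query (see generateCacheKey); top-level await
    // assumes an ES module context.
    const patterns = await analysis.analyzeTemporalPatterns(query);
    const metrics = await analysis.getTemporalMetrics(query);
    const prediction = await analysis.predictFutureActivity(query);
    const insights = await analysis.getTemporalInsights(query);

Because the class extends EventEmitter, a host can also subscribe to its "patterns_analyzed", "analysis_error", and "periodic_analysis_completed" events.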

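This page does not show the repository's actual request router, so the following sketch is hypothetical (the handleTemporalAnalysis name and signature are illustrative). It maps the tool's analysisType argument onto the four public methods, and converts timeRange bounds from the ISO date-time strings the JSON schema delivers into the Date objects the class works with:

    import { TemporalMemoryAnalysis } from "./temporal-analysis.js";

    // Hypothetical dispatch helper; not part of the source shown above.
    async function handleTemporalAnalysis(
      analysis: TemporalMemoryAnalysis,
      args: {
        query?: any;
        analysisType?: "patterns" | "metrics" | "predictions" | "insights";
      },
    ) {
      const query = args.query;
      if (query?.timeRange) {
        // The schema delivers ISO strings; the class filters with Date objects.
        query.timeRange.start = new Date(query.timeRange.start);
        query.timeRange.end = new Date(query.timeRange.end);
      }
      switch (args.analysisType ?? "patterns") {
        case "metrics":
          return analysis.getTemporalMetrics(query);
        case "predictions":
          return analysis.predictFutureActivity(query);
        case "insights":
          return analysis.getTemporalInsights(query);
        default:
          return analysis.analyzeTemporalPatterns(query);
      }
    }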