// Copyright 2025 Chris Bunting
// Brief: Caching and performance optimization layer for MCP Code Analysis & Quality Server
// Scope: Multi-backend caching system with performance optimization
import { CacheInterface, LoggerInterface } from '@mcp-code-analysis/shared-types';
import * as fs from 'fs';
import * as path from 'path';
import * as crypto from 'crypto';
export interface CacheEntry<T = unknown> {
value: T;
timestamp: number; // creation time in ms since epoch, used for TTL expiry
lastAccessed: number; // last read time in ms since epoch, used for LRU eviction
ttl: number; // time to live in seconds
hits: number;
}
export interface CacheStats {
hits: number;
misses: number;
size: number;
hitRate: number;
}
export interface CacheOptions {
ttl?: number; // default TTL in seconds
maxSize?: number; // maximum number of entries held in memory
cleanupInterval?: number; // expired-entry sweep interval in milliseconds
}
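// Note on units: ttl is expressed in seconds, cleanupInterval in milliseconds, and maxSize in
// number of entries. An illustrative (not prescriptive) configuration:
//   const options: CacheOptions = { ttl: 1800, maxSize: 5000, cleanupInterval: 60000 };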
export class MemoryCache implements CacheInterface {
private cache: Map<string, CacheEntry> = new Map();
private stats: CacheStats = { hits: 0, misses: 0, size: 0, hitRate: 0 };
private options: Required<CacheOptions>;
private cleanupTimer?: ReturnType<typeof setInterval>;
private logger: LoggerInterface;
constructor(options: CacheOptions = {}, logger: LoggerInterface) {
this.logger = logger;
this.options = {
ttl: options.ttl ?? 3600, // 1 hour default, in seconds
maxSize: options.maxSize ?? 1000,
cleanupInterval: options.cleanupInterval ?? 300000 // 5 minutes, in milliseconds
};
this.startCleanupTimer();
}
async get<T>(key: string): Promise<T | undefined> {
const entry = this.cache.get(key);
if (!entry) {
this.stats.misses++;
this.updateHitRate();
return undefined;
}
// Check if entry has expired
if (this.isExpired(entry)) {
this.cache.delete(key);
this.stats.size--;
this.stats.misses++;
this.updateHitRate();
return undefined;
}
// Record the hit and refresh recency for LRU eviction; the creation timestamp
// is left untouched so the TTL stays absolute rather than sliding
entry.hits++;
entry.lastAccessed = Date.now();
this.stats.hits++;
this.updateHitRate();
this.logger.debug(`Cache hit for key: ${key}`);
return entry.value as T;
}
async set<T>(key: string, value: T, ttl?: number): Promise<void> {
// Evict before inserting a brand-new key into a full cache
if (this.cache.size >= this.options.maxSize && !this.cache.has(key)) {
this.evictEntries();
}
const isNewKey = !this.cache.has(key);
const entry: CacheEntry<T> = {
value,
timestamp: Date.now(),
lastAccessed: Date.now(),
ttl: ttl ?? this.options.ttl,
hits: 0
};
this.cache.set(key, entry);
if (isNewKey) {
this.stats.size++;
}
this.logger.debug(`Cache set for key: ${key}, TTL: ${entry.ttl}s`);
}
async delete(key: string): Promise<void> {
if (this.cache.delete(key)) {
this.stats.size--;
this.logger.debug(`Cache delete for key: ${key}`);
}
}
async clear(): Promise<void> {
this.cache.clear();
this.stats = { hits: 0, misses: 0, size: 0, hitRate: 0 };
this.logger.info('Cache cleared');
}
async exists(key: string): Promise<boolean> {
const entry = this.cache.get(key);
if (!entry) return false;
if (this.isExpired(entry)) {
this.cache.delete(key);
this.stats.size--;
return false;
}
return true;
}
getStats(): CacheStats {
return { ...this.stats };
}
private isExpired(entry: CacheEntry): boolean {
// ttl is stored in seconds; timestamps are milliseconds since the epoch
return Date.now() - entry.timestamp > entry.ttl * 1000;
}
private evictEntries(): void {
// Evict least recently used entries (oldest lastAccessed first)
const entries = Array.from(this.cache.entries())
.sort(([, a], [, b]) => a.lastAccessed - b.lastAccessed);
const evictCount = Math.ceil(this.options.maxSize * 0.2); // Evict 20% of capacity
let evicted = 0;
for (let i = 0; i < evictCount && i < entries.length; i++) {
const [key] = entries[i];
this.cache.delete(key);
this.stats.size--;
evicted++;
}
this.logger.debug(`Evicted ${evicted} entries from cache`);
}
private updateHitRate(): void {
const total = this.stats.hits + this.stats.misses;
this.stats.hitRate = total > 0 ? this.stats.hits / total : 0;
}
private startCleanupTimer(): void {
this.cleanupTimer = setInterval(() => {
this.cleanupExpiredEntries();
}, this.options.cleanupInterval);
}
private cleanupExpiredEntries(): void {
let cleanedCount = 0;
for (const [key, entry] of this.cache.entries()) {
if (this.isExpired(entry)) {
this.cache.delete(key);
this.stats.size--;
cleanedCount++;
}
}
if (cleanedCount > 0) {
this.logger.debug(`Cleaned up ${cleanedCount} expired entries`);
}
}
dispose(): void {
if (this.cleanupTimer) {
clearInterval(this.cleanupTimer);
this.cleanupTimer = undefined;
}
void this.clear(); // clear() is async but completes synchronously for the in-memory map
}
}
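// Illustrative usage sketch (comment only, not executed): `logger`, `lintResult`, `parsedAst`
// and `LintResult` are placeholder names supplied by the host application.
//
//   const cache = new MemoryCache({ ttl: 600, maxSize: 500 }, logger);
//   await cache.set('lint:src/index.ts', lintResult);        // uses the 600s default TTL
//   await cache.set('ast:src/index.ts', parsedAst, 60);      // per-entry TTL override, in seconds
//   const hit = await cache.get<LintResult>('lint:src/index.ts');
//   console.log(cache.getStats().hitRate);
//   cache.dispose();                                         // stops the periodic cleanup timer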
export class FileCache implements CacheInterface {
private cacheDir: string;
private logger: LoggerInterface;
constructor(cacheDir: string, logger: LoggerInterface) {
this.cacheDir = cacheDir;
this.logger = logger;
this.ensureCacheDir();
}
private ensureCacheDir(): void {
if (!fs.existsSync(this.cacheDir)) {
fs.mkdirSync(this.cacheDir, { recursive: true });
}
}
async get<T>(key: string): Promise<T | undefined> {
const filePath = this.getFilePath(key);
try {
if (!fs.existsSync(filePath)) {
return undefined;
}
const data = fs.readFileSync(filePath, 'utf8');
const entry: CacheEntry<T> = JSON.parse(data);
// Check if entry has expired
if (this.isExpired(entry)) {
fs.unlinkSync(filePath);
return undefined;
}
this.logger.debug(`File cache hit for key: ${key}`);
return entry.value;
} catch (error) {
this.logger.error(`Error reading from file cache for key ${key}:`, error);
return undefined;
}
}
async set<T>(key: string, value: T, ttl?: number): Promise<void> {
const filePath = this.getFilePath(key);
const entry: CacheEntry<T> = {
value,
timestamp: Date.now(),
lastAccessed: Date.now(),
ttl: ttl ?? 3600, // 1 hour default, in seconds
hits: 0
};
try {
fs.writeFileSync(filePath, JSON.stringify(entry, null, 2));
this.logger.debug(`File cache set for key: ${key}`);
} catch (error) {
this.logger.error(`Error writing to file cache for key ${key}:`, error);
}
}
async delete(key: string): Promise<void> {
const filePath = this.getFilePath(key);
try {
if (fs.existsSync(filePath)) {
fs.unlinkSync(filePath);
this.logger.debug(`File cache delete for key: ${key}`);
}
} catch (error) {
this.logger.error(`Error deleting from file cache for key ${key}:`, error);
}
}
async clear(): Promise<void> {
try {
if (fs.existsSync(this.cacheDir)) {
// Only remove files this cache created (hashed names with a .cache extension)
const files = fs.readdirSync(this.cacheDir).filter(file => file.endsWith('.cache'));
for (const file of files) {
fs.unlinkSync(path.join(this.cacheDir, file));
}
this.logger.info('File cache cleared');
}
} catch (error) {
this.logger.error('Error clearing file cache:', error);
}
}
async exists(key: string): Promise<boolean> {
const filePath = this.getFilePath(key);
try {
if (!fs.existsSync(filePath)) {
return false;
}
const data = fs.readFileSync(filePath, 'utf8');
const entry: CacheEntry = JSON.parse(data);
if (this.isExpired(entry)) {
fs.unlinkSync(filePath);
return false;
}
return true;
} catch {
return false;
}
}
private getFilePath(key: string): string {
// Hash the key to get a stable, filesystem-safe filename (MD5 is used for naming, not security)
const hash = crypto.createHash('md5').update(key).digest('hex');
return path.join(this.cacheDir, `${hash}.cache`);
}
private isExpired(entry: CacheEntry): boolean {
return Date.now() - entry.timestamp > entry.ttl * 1000;
}
}
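// Illustrative usage sketch (comment only, not executed): FileCache persists each entry as a
// JSON file under cacheDir using synchronous fs calls, so it suits small, infrequently written
// payloads. `logger`, `metricsReport` and `MetricsReport` are placeholder names.
//
//   const fileCache = new FileCache(path.join(process.cwd(), '.analysis-cache'), logger);
//   await fileCache.set('metrics:project', metricsReport, 86400); // 24h TTL, in seconds
//   if (await fileCache.exists('metrics:project')) {
//     const report = await fileCache.get<MetricsReport>('metrics:project');
//   }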
export class CacheManager implements CacheInterface {
private caches: Map<string, CacheInterface> = new Map();
private defaultCache: CacheInterface;
private logger: LoggerInterface;
constructor(defaultCache: CacheInterface, logger: LoggerInterface) {
this.defaultCache = defaultCache;
this.logger = logger;
}
registerCache(name: string, cache: CacheInterface): void {
this.caches.set(name, cache);
this.logger.info(`Registered cache: ${name}`);
}
getCache(name?: string): CacheInterface {
if (name && this.caches.has(name)) {
return this.caches.get(name)!;
}
return this.defaultCache;
}
async get<T>(key: string, cacheName?: string): Promise<T | undefined> {
const cache = this.getCache(cacheName);
return cache.get<T>(key);
}
async set<T>(key: string, value: T, ttl?: number, cacheName?: string): Promise<void> {
const cache = this.getCache(cacheName);
return cache.set(key, value, ttl);
}
async delete(key: string, cacheName?: string): Promise<void> {
const cache = this.getCache(cacheName);
return cache.delete(key);
}
async clear(cacheName?: string): Promise<void> {
if (cacheName) {
const cache = this.getCache(cacheName);
return cache.clear();
} else {
// Clear all caches
await this.defaultCache.clear();
for (const cache of this.caches.values()) {
await cache.clear();
}
}
}
async exists(key: string, cacheName?: string): Promise<boolean> {
const cache = this.getCache(cacheName);
return cache.exists(key);
}
async dispose(): Promise<void> {
// Dispose memory caches so their cleanup timers stop; other backends are just cleared
if (this.defaultCache instanceof MemoryCache) {
this.defaultCache.dispose();
} else {
await this.defaultCache.clear();
}
for (const cache of this.caches.values()) {
if (cache instanceof MemoryCache) {
cache.dispose();
} else {
await cache.clear();
}
}
this.caches.clear();
}
}
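// Illustrative usage sketch (comment only, not executed): CacheManager routes each call to a
// named backend when one is registered and falls back to the default cache otherwise.
// `memoryCache`, `fileCache`, `logger`, `result` and `AnalysisResult` are placeholder names.
//
//   const manager = new CacheManager(memoryCache, logger);
//   manager.registerCache('persistent', fileCache);
//   await manager.set('analysis:src/app.ts', result);                      // default cache
//   await manager.set('analysis:src/app.ts', result, 86400, 'persistent'); // named file cache
//   const cached = await manager.get<AnalysisResult>('analysis:src/app.ts', 'persistent');
//   await manager.dispose();                                               // stops memory-cache timers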
// Performance optimization utilities
export class PerformanceOptimizer {
private cache: CacheInterface;
private logger: LoggerInterface;
constructor(cache: CacheInterface, logger: LoggerInterface) {
this.cache = cache;
this.logger = logger;
}
async memoize<T>(
key: string,
fn: () => Promise<T>,
ttl?: number
): Promise<T> {
// Try the cache first; undefined is the miss sentinel, so undefined results are never memoized
const cached = await this.cache.get<T>(key);
if (cached !== undefined) {
return cached;
}
// Execute function and cache result
const result = await fn();
await this.cache.set(key, result, ttl);
this.logger.debug(`Memoized result for key: ${key}`);
return result;
}
async batchProcess<T, R>(
items: T[],
processor: (item: T) => Promise<R>,
batchSize: number = 10
): Promise<R[]> {
const results: R[] = [];
for (let i = 0; i < items.length; i += batchSize) {
const batch = items.slice(i, i + batchSize);
const batchResults = await Promise.all(
batch.map(item => processor(item))
);
results.push(...batchResults);
this.logger.debug(`Processed batch ${Math.floor(i / batchSize) + 1}/${Math.ceil(items.length / batchSize)}`);
}
return results;
}
// Args is generic over the wrapped function's parameter tuple so callbacks with typed
// parameters (e.g. (file: string) => void) type-check under strictFunctionTypes
debounce<Args extends unknown[]>(
func: (...args: Args) => unknown,
wait: number
): (...args: Args) => void {
let timeout: ReturnType<typeof setTimeout> | undefined;
return (...args: Args) => {
if (timeout !== undefined) {
clearTimeout(timeout);
}
timeout = setTimeout(() => func(...args), wait);
};
}
throttle<Args extends unknown[]>(
func: (...args: Args) => unknown,
limit: number
): (...args: Args) => void {
let inThrottle = false;
return (...args: Args) => {
if (!inThrottle) {
func(...args);
inThrottle = true;
setTimeout(() => { inThrottle = false; }, limit);
}
};
}
}
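// Illustrative usage sketch (comment only, not executed): memoize caches the result of an async
// computation under a caller-chosen key, batchProcess runs items in chunks of batchSize, and
// debounce/throttle wrap event handlers. `cache`, `logger`, `files`, `analyzeDependencies`,
// `analyzeFile` and `reanalyze` are placeholder names.
//
//   const optimizer = new PerformanceOptimizer(cache, logger);
//   const deps = await optimizer.memoize('deps:package.json', () => analyzeDependencies(), 600);
//   const reports = await optimizer.batchProcess(files, file => analyzeFile(file), 5);
//   const onChange = optimizer.debounce((file: string) => reanalyze(file), 250);
//   onChange('src/index.ts');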
// Factory functions
export function createMemoryCache(
options: CacheOptions = {},
logger: LoggerInterface
): MemoryCache {
return new MemoryCache(options, logger);
}
export function createFileCache(
cacheDir: string,
logger: LoggerInterface
): FileCache {
return new FileCache(cacheDir, logger);
}
export function createCacheManager(
defaultCache: CacheInterface,
logger: LoggerInterface
): CacheManager {
return new CacheManager(defaultCache, logger);
}
export function createPerformanceOptimizer(
cache: CacheInterface,
logger: LoggerInterface
): PerformanceOptimizer {
return new PerformanceOptimizer(cache, logger);
}
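// Illustrative wiring sketch (comment only, not executed), assuming the host application supplies
// a LoggerInterface implementation named `logger`:
//
//   const memory = createMemoryCache({ ttl: 600, maxSize: 2000 }, logger);
//   const files = createFileCache('.analysis-cache', logger);
//   const manager = createCacheManager(memory, logger);
//   manager.registerCache('persistent', files);
//   const optimizer = createPerformanceOptimizer(manager, logger);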