/**
 * Cache statistics tracking cache performance and resource usage.
 * Used by ICacheLayer implementations to report operational metrics.
 * Hit rate can be derived as `hits / (hits + misses)` (guard against zero lookups).
 */
export interface CacheStats {
/**
 * Number of successful cache lookups (key found).
 * Implementations with TTL support count expired entries as misses, not hits.
 */
hits: number;
/**
 * Number of failed cache lookups (key not found, or found but expired in
 * TTL-aware implementations).
 */
misses: number;
/**
 * Current number of entries stored in the cache.
 * NOTE: with lazy expiry this may include entries that are already past their
 * TTL but have not yet been removed by a lookup.
 */
size: number;
/**
 * Maximum number of entries the cache can hold before eviction occurs.
 */
maxSize: number;
/**
 * Total number of entries that have been evicted due to capacity limits.
 * Explicit `delete()` calls and TTL expiry are not counted here.
 */
evictions: number;
}
/**
 * Generic cache layer interface providing key-value storage with eviction policies.
 * Implementations should handle storage, retrieval, and automatic eviction when capacity is reached.
 *
 * @template K - Type of cache keys (must be comparable for lookups)
 * @template V - Type of cached values
 *
 * @example
 * ```typescript
 * // Create a cache for host configurations
 * const cache: ICacheLayer<string, HostConfig> = new LRUCacheLayer(100);
 *
 * // Store a value
 * cache.set('prod-host', hostConfig);
 *
 * // Retrieve a value
 * const config = cache.get('prod-host');
 *
 * // Check cache performance
 * const stats = cache.getStats();
 * console.log(`Hit rate: ${stats.hits / (stats.hits + stats.misses)}`);
 * ```
 */
export interface ICacheLayer<K, V> {
/**
 * Retrieve a value from the cache by its key.
 * Updates access tracking for eviction policy (e.g., LRU tracking).
 * TTL-aware implementations treat expired entries as absent (a miss).
 *
 * @param key - The cache key to lookup
 * @returns The cached value if found, undefined if not in cache
 */
get(key: K): V | undefined;
/**
 * Store a value in the cache with the specified key.
 * If the cache is at capacity, evicts entries according to the eviction policy.
 * If the key already exists, updates the value and resets access tracking.
 *
 * @param key - The cache key to store under
 * @param value - The value to cache
 */
set(key: K, value: V): void;
/**
 * Remove a specific entry from the cache by its key.
 * Does nothing if the key is not in the cache.
 * Explicit removal is not counted as an eviction in the statistics.
 *
 * @param key - The cache key to remove
 */
delete(key: K): void;
/**
 * Remove all entries from the cache and reset statistics.
 * Resets the cache to its initial empty state.
 */
clear(): void;
/**
 * Get current cache statistics for monitoring and debugging.
 * Statistics should reflect the current state of the cache accurately.
 *
 * @returns Cache statistics including hits, misses, size, capacity, and evictions
 */
getStats(): CacheStats;
}
/**
 * Internal cache entry structure for LRU cache.
 * Stores both the cached value and timestamp for TTL tracking.
 *
 * @template V - Type of cached value
 */
interface CacheEntry<V> {
/** The cached value as supplied to `set()`. */
value: V;
/** Insertion/update time from `Date.now()` (ms since epoch), used for TTL expiry. */
timestamp: number;
}
/**
 * Least Recently Used (LRU) cache implementation with TTL support.
 * Evicts the least recently accessed item when the cache reaches max capacity,
 * and lazily drops entries older than the TTL on lookup.
 * Uses a Map for O(1) lookups and relies on Map's guaranteed insertion-order
 * iteration for LRU tracking (the first key is always the least recently used).
 *
 * @template K - Type of cache keys (must be usable as Map keys)
 * @template V - Type of cached values
 *
 * @example
 * ```typescript
 * // Create cache with 100 max entries and 1 hour TTL
 * const cache = new LRUCacheLayer<string, UserData>(100, 3600000);
 *
 * // Store and retrieve
 * cache.set('user-123', userData);
 * const data = cache.get('user-123');
 *
 * // Monitor performance
 * const stats = cache.getStats();
 * console.log(`Hit rate: ${stats.hits / (stats.hits + stats.misses)}`);
 * ```
 */
export class LRUCacheLayer<K, V> implements ICacheLayer<K, V> {
  /** Backing store; Map insertion order doubles as LRU order (oldest first). */
  private readonly cache: Map<K, CacheEntry<V>>;
  private readonly maxSize: number;
  private readonly ttlMs: number;
  /** Mutable counters; `size`/`maxSize` are derived on demand in getStats(). */
  private stats: {
    hits: number;
    misses: number;
    evictions: number;
  };

  /**
   * Create a new LRU cache layer.
   *
   * @param maxSize - Maximum number of entries; must be a positive integer (default: 50)
   * @param ttlMs - Time-to-live in milliseconds; must be positive (default: 24 hours).
   *   `Infinity` is accepted and disables expiry.
   * @throws RangeError if maxSize is not a positive integer or ttlMs is not positive
   */
  constructor(maxSize = 50, ttlMs: number = 24 * 60 * 60 * 1000) {
    // Validate up front: previously maxSize <= 0 was silently accepted, and the
    // eviction branch then found no key to evict, letting the cache hold one
    // entry beyond its stated capacity.
    if (!Number.isInteger(maxSize) || maxSize < 1) {
      throw new RangeError(`maxSize must be a positive integer, got ${maxSize}`);
    }
    // `!(ttlMs > 0)` rejects 0, negatives, and NaN in a single check.
    if (!(ttlMs > 0)) {
      throw new RangeError(`ttlMs must be a positive number, got ${ttlMs}`);
    }
    this.cache = new Map();
    this.maxSize = maxSize;
    this.ttlMs = ttlMs;
    this.stats = { hits: 0, misses: 0, evictions: 0 };
  }

  /**
   * Retrieve a value from the cache by its key.
   * Moves the accessed entry to the end of the Map (marks as most recently used).
   * Returns undefined if the key doesn't exist or the entry has expired;
   * expired entries are removed and counted as misses (lazy expiry).
   *
   * @param key - The cache key to lookup
   * @returns The cached value if found and not expired, undefined otherwise
   */
  get(key: K): V | undefined {
    const entry = this.cache.get(key);
    if (!entry) {
      this.stats.misses++;
      return undefined;
    }
    // Check if entry has expired.
    if (Date.now() - entry.timestamp > this.ttlMs) {
      // Entry expired — remove it and count as miss (not as an eviction).
      this.cache.delete(key);
      this.stats.misses++;
      return undefined;
    }
    // Entry is valid — move to end (most recently used).
    // Note: delete-and-reinsert is O(1) but performs 2 Map operations per hit.
    // For higher throughput (>10k ops/sec), consider doubly-linked-list + Map
    // approach (e.g., lru-cache npm package) for O(1) single-operation reordering.
    // Current implementation is sufficient for typical workloads (<1k ops/sec).
    this.cache.delete(key);
    this.cache.set(key, entry);
    this.stats.hits++;
    return entry.value;
  }

  /**
   * Store a value in the cache with the specified key.
   * If the key exists, updates the value, refreshes its TTL timestamp, and
   * moves it to the end (most recently used).
   * If the cache is full, evicts the least recently used entry first.
   *
   * @param key - The cache key to store under
   * @param value - The value to cache
   */
  set(key: K, value: V): void {
    const now = Date.now();
    if (this.cache.has(key)) {
      // Existing key: remove so the re-insert below lands at the MRU position.
      this.cache.delete(key);
    } else if (this.cache.size >= this.maxSize) {
      // Cache is full and this is a new key — evict the LRU (first) entry.
      // Check the iterator's `done` flag rather than comparing the key to
      // `undefined`, so eviction still works when `undefined` is a valid key.
      const oldest = this.cache.keys().next();
      if (!oldest.done) {
        this.cache.delete(oldest.value);
        this.stats.evictions++;
      }
    }
    // Add entry at the end (most recently used position) with a fresh timestamp.
    this.cache.set(key, { value, timestamp: now });
  }

  /**
   * Remove a specific entry from the cache by its key.
   * Does nothing if the key is not in the cache; not counted as an eviction.
   *
   * @param key - The cache key to remove
   */
  delete(key: K): void {
    this.cache.delete(key);
  }

  /**
   * Remove all entries from the cache and reset statistics.
   * Resets the cache to its initial empty state.
   */
  clear(): void {
    this.cache.clear();
    this.stats = { hits: 0, misses: 0, evictions: 0 };
  }

  /**
   * Get current cache statistics for monitoring and debugging.
   * NOTE: `size` may include expired entries that have not yet been lazily
   * removed by a lookup.
   *
   * @returns Cache statistics including hits, misses, size, capacity, and evictions
   */
  getStats(): CacheStats {
    return {
      hits: this.stats.hits,
      misses: this.stats.misses,
      size: this.cache.size,
      maxSize: this.maxSize,
      evictions: this.stats.evictions,
    };
  }
}