
Prompt Auto-Optimizer MCP

by sloth-wq
cache-manager.test.ts (11.6 kB)
/**
 * Comprehensive tests for CacheManager
 */

import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { CacheManager, type CacheConfig } from './cache-manager';
import { promises as fs } from 'fs';
import { join } from 'path';
import { tmpdir } from 'os';

describe('CacheManager', () => {
  let cache: CacheManager;
  let tempDir: string;

  beforeEach(async () => {
    tempDir = join(tmpdir(), `cache-test-${Date.now()}-${Math.random().toString(36).substring(7)}`);

    const config: Partial<CacheConfig> = {
      l1MaxSize: 1024 * 1024, // 1MB
      l1MaxEntries: 100,
      l1DefaultTtl: 1000, // 1 second
      l2Enabled: true,
      l2Directory: tempDir,
      l2MaxSize: 10 * 1024 * 1024, // 10MB
      l2MaxEntries: 1000,
      l2DefaultTtl: 5000, // 5 seconds
      l2CompressionEnabled: true,
      enableStatistics: true,
      autoCleanupInterval: 100 // 100ms for testing
    };

    cache = new CacheManager(config);
    await new Promise(resolve => cache.once('initialized', resolve));
  });

  afterEach(async () => {
    await cache.shutdown();
    try {
      await fs.rm(tempDir, { recursive: true, force: true });
    } catch {
      // Ignore cleanup errors
    }
  });

  describe('Basic Cache Operations', () => {
    it('should store and retrieve values', async () => {
      const key = 'test-key';
      const value = { data: 'test-value', number: 42 };

      const setResult = await cache.set(key, value);
      expect(setResult).toBe(true);

      const retrievedValue = await cache.get(key);
      expect(retrievedValue).toEqual(value);
    });

    it('should return null for non-existent keys', async () => {
      const value = await cache.get('non-existent-key');
      expect(value).toBeNull();
    });

    it('should handle complex objects', async () => {
      const key = 'complex-object';
      const value = {
        string: 'test',
        number: 123,
        boolean: true,
        array: [1, 2, 3],
        nested: { prop: 'value' }
      };

      await cache.set(key, value);
      const retrieved = await cache.get(key);
      expect(retrieved).toEqual(value);
    });

    it('should delete values', async () => {
      const key = 'to-delete';
      const value = 'test-value';

      await cache.set(key, value);
      const deleteResult = await cache.delete(key);
      expect(deleteResult).toBe(true);

      const retrieved = await cache.get(key);
      expect(retrieved).toBeNull();
    });

    it('should clear all cache levels', async () => {
      await cache.set('key1', 'value1');
      await cache.set('key2', 'value2');

      await cache.clear();

      expect(await cache.get('key1')).toBeNull();
      expect(await cache.get('key2')).toBeNull();
    });
  });

  describe('TTL and Expiration', () => {
    it('should respect TTL for L1 cache', async () => {
      const key = 'ttl-test';
      const value = 'expires-soon';

      await cache.set(key, value, { ttl: 100 }); // 100ms TTL

      // Should be available immediately
      expect(await cache.get(key)).toBe(value);

      // Wait for expiration
      await new Promise(resolve => setTimeout(resolve, 150));

      // Should be expired
      expect(await cache.get(key)).toBeNull();
    });

    it('should handle custom TTL values', async () => {
      const key = 'custom-ttl';
      const value = 'test-value';

      await cache.set(key, value, { ttl: 500 });

      // Should be available within TTL
      expect(await cache.get(key)).toBe(value);

      await new Promise(resolve => setTimeout(resolve, 600));

      // Should be expired after TTL
      expect(await cache.get(key)).toBeNull();
    });
  });

  describe('Multi-Level Caching', () => {
    it('should promote L2 hits to L1', async () => {
      const key = 'promotion-test';
      const value = 'test-value';

      // Set with L2 storage
      await cache.set(key, value, { storeInL2: true, promoteToL1: false });

      // First get should come from L2
      const firstGet = await cache.get(key);
      expect(firstGet).toBe(value);

      // Second get should come from L1 (promoted)
      const secondGet = await cache.get(key);
      expect(secondGet).toBe(value);

      const stats = cache.getStatistics();
      expect(stats.l1.hits).toBeGreaterThan(0);
      expect(stats.l2.hits).toBeGreaterThan(0);
    });

    it('should handle L2 persistence', async () => {
      const key = 'persistence-test';
      const value = { large: 'data'.repeat(1000) };

      await cache.set(key, value, { storeInL2: true });

      // Create new cache instance with same directory
      const newCache = new CacheManager({
        l2Enabled: true,
        l2Directory: tempDir,
        enableStatistics: true
      });
      await new Promise(resolve => newCache.once('initialized', resolve));

      const retrieved = await newCache.get(key);
      expect(retrieved).toEqual(value);

      await newCache.shutdown();
    });
  });

  describe('Cache Eviction', () => {
    it('should evict LRU entries when L1 is full', async () => {
      // Create cache with very small L1 size
      const smallCache = new CacheManager({
        l1MaxSize: 1024, // 1KB
        l1MaxEntries: 3,
        l2Enabled: false,
        enableStatistics: true
      });
      await new Promise(resolve => smallCache.once('initialized', resolve));

      // Fill cache beyond capacity
      await smallCache.set('key1', 'value1'.repeat(100));
      await smallCache.set('key2', 'value2'.repeat(100));
      await smallCache.set('key3', 'value3'.repeat(100));
      await smallCache.set('key4', 'value4'.repeat(100));

      // Should evict key1
      expect(await smallCache.get('key1')).toBeNull();
      expect(await smallCache.get('key2')).not.toBeNull();
      expect(await smallCache.get('key3')).not.toBeNull();
      expect(await smallCache.get('key4')).not.toBeNull();

      await smallCache.shutdown();
    });

    it('should update access order for LRU', async () => {
      const smallCache = new CacheManager({
        l1MaxSize: 1024,
        l1MaxEntries: 2,
        l2Enabled: false,
        enableStatistics: true
      });
      await new Promise(resolve => smallCache.once('initialized', resolve));

      await smallCache.set('key1', 'value1');
      await smallCache.set('key2', 'value2');

      // Access key1 to make it more recently used
      await smallCache.get('key1');

      // Add key3, should evict key2 (least recently used)
      await smallCache.set('key3', 'value3');

      expect(await smallCache.get('key1')).not.toBeNull();
      expect(await smallCache.get('key2')).toBeNull();
      expect(await smallCache.get('key3')).not.toBeNull();

      await smallCache.shutdown();
    });
  });

  describe('Cache Warming', () => {
    it('should warm cache with provided data', async () => {
      const strategy = {
        enabled: true,
        keys: ['key1', 'key2', 'key3'],
        dataLoader: async (key: string) => `value-for-${key}`,
        priority: 'high' as const
      };

      const warmedCount = await cache.warmCache(strategy);
      expect(warmedCount).toBe(3);

      // Check that all keys are cached
      expect(await cache.get('key1')).toBe('value-for-key1');
      expect(await cache.get('key2')).toBe('value-for-key2');
      expect(await cache.get('key3')).toBe('value-for-key3');
    });

    it('should handle warming failures gracefully', async () => {
      const strategy = {
        enabled: true,
        keys: ['key1', 'key2', 'key3'],
        dataLoader: async (key: string) => {
          if (key === 'key2') {
            throw new Error('Failed to load key2');
          }
          return `value-for-${key}`;
        },
        priority: 'medium' as const
      };

      const warmedCount = await cache.warmCache(strategy);
      expect(warmedCount).toBe(2); // key1 and key3 should succeed

      expect(await cache.get('key1')).toBe('value-for-key1');
      expect(await cache.get('key2')).toBeNull();
      expect(await cache.get('key3')).toBe('value-for-key3');
    });
  });

  describe('Statistics and Monitoring', () => {
    it('should track cache statistics', async () => {
      await cache.set('key1', 'value1');
      await cache.set('key2', 'value2');

      await cache.get('key1'); // Hit
      await cache.get('key3'); // Miss

      const stats = cache.getStatistics();
      expect(stats.l1.hits).toBeGreaterThan(0);
      expect(stats.overall.misses).toBeGreaterThan(0);
      expect(stats.overall.hitRate).toBeGreaterThan(0);
      expect(stats.overall.hitRate).toBeLessThan(1);
    });

    it('should calculate hit rates correctly', async () => {
      // Clear any existing stats
      await cache.clear();

      await cache.set('key1', 'value1');

      // 2 hits, 1 miss
      await cache.get('key1');
      await cache.get('key1');
      await cache.get('missing-key');

      const stats = cache.getStatistics();
      expect(stats.overall.hits).toBe(2);
      expect(stats.overall.misses).toBe(1);
      expect(stats.overall.hitRate).toBeCloseTo(2 / 3, 2);
    });
  });

  describe('Error Handling', () => {
    it('should handle invalid cache operations gracefully', async () => {
      // Test with invalid key
      const result = await cache.delete('non-existent-key');
      expect(result).toBe(false);
    });

    it('should handle shutdown gracefully', async () => {
      await cache.set('key1', 'value1');
      await cache.shutdown();

      // Operations after shutdown should fail gracefully
      const result = await cache.get('key1');
      expect(result).toBeNull();
    });
  });

  describe('Compression', () => {
    it('should compress large values in L2', async () => {
      const key = 'large-data';
      const value = 'a'.repeat(10000); // 10KB of repeated data

      await cache.set(key, value, { storeInL2: true });

      const retrieved = await cache.get(key);
      expect(retrieved).toBe(value);
    });

    it('should handle compressed and uncompressed data', async () => {
      const smallKey = 'small-data';
      const smallValue = 'small';
      const largeKey = 'large-data';
      const largeValue = 'large-data-that-should-be-compressed'.repeat(100);

      await cache.set(smallKey, smallValue, { storeInL2: true });
      await cache.set(largeKey, largeValue, { storeInL2: true });

      expect(await cache.get(smallKey)).toBe(smallValue);
      expect(await cache.get(largeKey)).toBe(largeValue);
    });
  });

  describe('Event Handling', () => {
    it('should emit events for cache operations', async () => {
      const events: string[] = [];

      cache.on('set', () => events.push('set'));
      cache.on('hit', () => events.push('hit'));
      cache.on('miss', () => events.push('miss'));
      cache.on('evicted', () => events.push('evicted'));

      await cache.set('key1', 'value1');
      await cache.get('key1'); // hit
      await cache.get('missing'); // miss

      expect(events).toContain('set');
      expect(events).toContain('hit');
      expect(events).toContain('miss');
    });
  });

  describe('Cleanup and Maintenance', () => {
    it('should clean up expired entries', async () => {
      await cache.set('short-lived', 'value', { ttl: 50 });
      await cache.set('long-lived', 'value', { ttl: 5000 });

      // Wait for short-lived to expire
      await new Promise(resolve => setTimeout(resolve, 100));

      // Trigger cleanup
      await new Promise(resolve => setTimeout(resolve, 150));

      expect(await cache.get('short-lived')).toBeNull();
      expect(await cache.get('long-lived')).not.toBeNull();
    });
  });
});
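For reference, the following is a minimal sketch of the CacheManager surface these tests exercise. It is inferred purely from the calls above; the real ./cache-manager module is not shown on this page, so field names beyond what the tests touch, event payloads, and the exact type parameters are assumptions.

// Hypothetical declaration sketch of the API the tests above rely on.
// Inferred from the test file, not taken from ./cache-manager itself.
import { EventEmitter } from 'events';

export interface CacheConfig {
  l1MaxSize: number;           // bytes
  l1MaxEntries: number;
  l1DefaultTtl: number;        // ms
  l2Enabled: boolean;
  l2Directory: string;
  l2MaxSize: number;           // bytes
  l2MaxEntries: number;
  l2DefaultTtl: number;        // ms
  l2CompressionEnabled: boolean;
  enableStatistics: boolean;
  autoCleanupInterval: number; // ms
}

export interface SetOptions {
  ttl?: number;         // ms, overrides the level's default TTL
  storeInL2?: boolean;  // persist the entry to the on-disk L2 cache
  promoteToL1?: boolean; // copy an L2 hit into L1 on read
}

export interface WarmingStrategy {
  enabled: boolean;
  keys: string[];
  dataLoader: (key: string) => Promise<unknown>;
  priority: 'high' | 'medium' | 'low';
}

export interface CacheStatistics {
  l1: { hits: number; misses: number };
  l2: { hits: number; misses: number };
  overall: { hits: number; misses: number; hitRate: number };
}

// Emits 'initialized', 'set', 'hit', 'miss', and 'evicted'.
export declare class CacheManager extends EventEmitter {
  constructor(config?: Partial<CacheConfig>);
  set(key: string, value: unknown, options?: SetOptions): Promise<boolean>;
  get<T = unknown>(key: string): Promise<T | null>;   // resolves to null on miss or expiry
  delete(key: string): Promise<boolean>;              // false if the key did not exist
  clear(): Promise<void>;                              // empties both cache levels
  warmCache(strategy: WarmingStrategy): Promise<number>; // number of keys warmed successfully
  getStatistics(): CacheStatistics;
  shutdown(): Promise<void>;
}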
