
Prompt Auto-Optimizer MCP

by sloth-wq
memory-leak-validation.test.ts (15.4 kB)
/**
 * Memory Leak Detection Validation Tests
 *
 * Comprehensive integration tests for memory leak detection across
 * all GEPA components with stress testing and real-world scenarios.
 */

import { MemoryLeakDetector, MemoryLeakIntegration } from '../../core/memory-leak-detector';
import { CacheManager } from '../../core/cache/cache-manager';
import { ParetoFrontier } from '../../core/pareto-frontier';
import { LLMAdapter } from '../../services/llm-adapter';
import { PromptCandidate } from '../../types/gepa';

describe('Memory Leak Detection Integration Tests', () => {
  let detector: MemoryLeakDetector;
  let cacheManager: CacheManager;
  let paretoFrontier: ParetoFrontier;
  let llmAdapter: LLMAdapter;

  beforeEach(async () => {
    // Initialize memory leak detection
    detector = MemoryLeakIntegration.initialize({
      heapGrowthRate: 2, // Lower threshold for testing
      maxObjectCount: 100,
      maxHeapSize: 50,
      monitoringWindow: 5000,
    });

    // Initialize GEPA components
    cacheManager = new CacheManager({
      l1MaxSize: 1024 * 1024, // 1MB
      l1MaxEntries: 100,
      l2Enabled: false, // Disable for testing
    });

    paretoFrontier = new ParetoFrontier({
      objectives: [
        {
          name: 'score',
          weight: 1,
          direction: 'maximize',
          extractor: (candidate: PromptCandidate) => candidate.averageScore,
        },
        {
          name: 'efficiency',
          weight: 0.5,
          direction: 'minimize',
          extractor: (candidate: PromptCandidate) => candidate.rolloutCount || 1,
        },
      ],
      maxSize: 50,
    });

    llmAdapter = new LLMAdapter({
      maxConcurrentProcesses: 2,
      processTimeout: 5000, // 5 seconds for testing
      executable: 'echo', // Use echo instead of claude for testing
    });
  });

  afterEach(async () => {
    // Cleanup
    await cacheManager.shutdown();
    paretoFrontier.clear();
    llmAdapter.shutdown();
    detector.shutdown();
    MemoryLeakIntegration.shutdown();
  });

  describe('Cache Manager Memory Leak Detection', () => {
    it('should detect cache memory leaks', async () => {
      let leakDetected = false;
      detector.on('memoryLeakDetected', (detection) => {
        if (detection.component === 'cache-manager' && detection.leakType === 'object_accumulation') {
          leakDetected = true;
        }
      });

      // Fill cache beyond threshold
      for (let i = 0; i < 150; i++) {
        await cacheManager.set(`key-${i}`, { data: `value-${i}`.repeat(100) });
      }

      // Trigger leak detection
      await detector.detectMemoryLeaks();

      expect(leakDetected).toBe(true);
    });

    it('should auto-fix cache overflows', async () => {
      let autoFixApplied = false;
      detector.on('autoFixApplied', (detection) => {
        if (detection.component === 'cache-manager') {
          autoFixApplied = true;
        }
      });

      // Trigger cache overflow
      for (let i = 0; i < 200; i++) {
        await cacheManager.set(`overflow-${i}`, { data: 'x'.repeat(1000) });
      }

      // Wait for auto-fix
      await new Promise(resolve => setTimeout(resolve, 1000));
      await detector.detectMemoryLeaks();

      // Check if cache was cleaned up
      const stats = cacheManager.getStatistics();
      expect(stats.l1.entries).toBeLessThan(200);
    });

    it('should track cache operations correctly', () => {
      const componentStats = detector.getStatistics().components;
      const cacheComponent = componentStats.find(c => c.name === 'cache-manager');

      expect(cacheComponent).toBeDefined();
      const initialCount = cacheComponent?.objectCount || 0;

      // Add cache entries
      cacheManager.set('test-1', { data: 'test' });
      cacheManager.set('test-2', { data: 'test' });

      const updatedStats = detector.getStatistics().components;
      const updatedCacheComponent = updatedStats.find(c => c.name === 'cache-manager');
      expect(updatedCacheComponent?.objectCount).toBeGreaterThan(initialCount);
    });
  });

  describe('Pareto Frontier Memory Leak Detection', () => {
    it('should detect frontier memory leaks', async () => {
      let leakDetected = false;
      detector.on('memoryLeakDetected', (detection) => {
        if (detection.component === 'pareto-frontier') {
          leakDetected = true;
        }
      });

      // Add many candidates beyond threshold
      for (let i = 0; i < 120; i++) {
        const candidate: PromptCandidate = {
          id: `candidate-${i}`,
          generation: 1,
          content: `Test prompt ${i}`.repeat(50), // Large content
          averageScore: Math.random(),
          rolloutCount: i + 1,
          timestamp: new Date(),
        };
        await paretoFrontier.addCandidate(candidate);
      }

      await detector.detectMemoryLeaks();

      expect(leakDetected).toBe(true);
    });

    it('should perform memory cleanup on frontier', async () => {
      // Add candidates
      for (let i = 0; i < 50; i++) {
        const candidate: PromptCandidate = {
          id: `cleanup-${i}`,
          generation: 1,
          content: `Cleanup test ${i}`,
          averageScore: Math.random(),
          rolloutCount: 1,
          timestamp: new Date(),
        };
        await paretoFrontier.addCandidate(candidate);
      }

      const beforeCleanup = paretoFrontier.size();
      const cleanupResult = await paretoFrontier.performMemoryCleanup();

      expect(cleanupResult.cleanedObjects).toBeGreaterThan(0);
    });

    it('should track frontier operations', () => {
      const stats = detector.getStatistics();
      const frontierComponent = stats.components.find(c => c.name === 'pareto-frontier');

      expect(frontierComponent).toBeDefined();
      const initialCount = frontierComponent?.objectCount || 0;

      // Add candidate
      const candidate: PromptCandidate = {
        id: 'track-test',
        generation: 1,
        content: 'Track test candidate',
        averageScore: 0.8,
        rolloutCount: 1,
        timestamp: new Date(),
      };
      paretoFrontier.addCandidate(candidate);

      const updatedStats = detector.getStatistics();
      const updatedComponent = updatedStats.components.find(c => c.name === 'pareto-frontier');
      expect(updatedComponent?.objectCount).toBeGreaterThan(initialCount);
    });
  });

  describe('LLM Adapter Memory Leak Detection', () => {
    it('should detect process leaks', async () => {
      let processLeakDetected = false;
      detector.on('memoryLeakDetected', (detection) => {
        if (detection.component === 'llm-adapter' || detection.leakType === 'process_leak') {
          processLeakDetected = true;
        }
      });

      // Simulate many LLM calls (using echo for testing)
      const promises = Array.from({ length: 10 }, (_, i) =>
        llmAdapter.generateResponse(`test prompt ${i}`)
          .catch(() => ({ response: 'mock', processingTime: 100 }))
      );

      await Promise.allSettled(promises);
      await detector.detectMemoryLeaks();

      // In real scenarios with actual Claude processes, this would detect leaks
    });

    it('should clean up long-running processes', async () => {
      const beforeCleanup = llmAdapter['activeProcesses'].size;
      const cleanupResult = await llmAdapter.performMemoryCleanup();

      expect(cleanupResult.killedProcesses).toBeGreaterThanOrEqual(0);
      expect(cleanupResult.freedMemory).toBeGreaterThanOrEqual(0);
    });

    it('should track process operations', () => {
      const stats = detector.getStatistics();
      const llmComponent = stats.components.find(c => c.name === 'llm-adapter');

      expect(llmComponent).toBeDefined();
    });
  });

  describe('System-Wide Memory Leak Detection', () => {
    it('should detect heap growth leaks', async () => {
      let heapLeakDetected = false;
      detector.on('memoryLeakDetected', (detection) => {
        if (detection.leakType === 'heap_growth') {
          heapLeakDetected = true;
        }
      });

      // Simulate memory pressure
      const buffers: Buffer[] = [];
      for (let i = 0; i < 100; i++) {
        buffers.push(Buffer.alloc(1024 * 1024)); // 1MB each
      }

      await detector.detectMemoryLeaks();

      // Clean up
      buffers.length = 0;
      if (global.gc) global.gc();
    });

    it('should perform comprehensive memory monitoring', async () => {
      // Add load to all components
      const promises = [
        // Cache load
        (async () => {
          for (let i = 0; i < 20; i++) {
            await cacheManager.set(`load-${i}`, { data: 'x'.repeat(500) });
          }
        })(),
        // Frontier load
        (async () => {
          for (let i = 0; i < 20; i++) {
            const candidate: PromptCandidate = {
              id: `load-${i}`,
              generation: 1,
              content: `Load test ${i}`,
              averageScore: Math.random(),
              rolloutCount: 1,
              timestamp: new Date(),
            };
            await paretoFrontier.addCandidate(candidate);
          }
        })(),
        // LLM load (simulated)
        (async () => {
          const llmPromises = Array.from({ length: 5 }, (_, i) =>
            llmAdapter.generateResponse(`load test ${i}`)
              .catch(() => ({ response: 'mock', processingTime: 100 }))
          );
          await Promise.allSettled(llmPromises);
        })(),
      ];

      await Promise.all(promises);

      // Run comprehensive detection
      const detections = await detector.detectMemoryLeaks();
      const stats = detector.getStatistics();

      expect(stats.components.length).toBeGreaterThan(0);
      expect(stats.memoryTrend.length).toBeGreaterThan(0);
    });

    it('should handle memory pressure simulation', async () => {
      const events: string[] = [];
      detector.on('memoryPressureStart', () => events.push('start'));
      detector.on('memoryPressureStep', () => events.push('step'));
      detector.on('memoryPressureEnd', () => events.push('end'));

      await detector.simulateMemoryPressure({
        enabled: true,
        targetMemoryMB: 5,
        duration: 500, // 0.5 seconds
        escalationSteps: 2,
      });

      expect(events).toContain('start');
      expect(events).toContain('end');
    });
  });

  describe('Stress Testing', () => {
    it('should handle concurrent operations without memory leaks', async () => {
      const concurrentOperations = Array.from({ length: 10 }, async (_, i) => {
        // Concurrent cache operations
        await cacheManager.set(`concurrent-${i}`, { data: `test-${i}` });

        // Concurrent frontier operations
        const candidate: PromptCandidate = {
          id: `concurrent-${i}`,
          generation: 1,
          content: `Concurrent test ${i}`,
          averageScore: Math.random(),
          rolloutCount: 1,
          timestamp: new Date(),
        };
        await paretoFrontier.addCandidate(candidate);

        // Run detection
        await detector.detectMemoryLeaks();
      });

      await Promise.all(concurrentOperations);

      // Verify system stability
      const stats = detector.getStatistics();
      expect(stats.detections.totalDetections).toBeDefined();
    });

    it('should maintain performance under load', async () => {
      const startTime = performance.now();

      // Simulate high load
      for (let i = 0; i < 100; i++) {
        await cacheManager.set(`perf-${i}`, { data: 'performance test' });
        if (i % 10 === 0) {
          await detector.detectMemoryLeaks();
        }
      }

      const duration = performance.now() - startTime;

      // Should complete within reasonable time
      expect(duration).toBeLessThan(5000); // 5 seconds
    });

    it('should recover from memory pressure', async () => {
      // Create memory pressure
      const buffers: Buffer[] = [];
      for (let i = 0; i < 50; i++) {
        buffers.push(Buffer.alloc(1024 * 1024)); // 1MB each
      }

      // Trigger detection and cleanup
      await detector.detectMemoryLeaks();
      const cleanupResult = await detector.forceCleanup();

      // Release memory
      buffers.length = 0;
      if (global.gc) global.gc();

      // Verify recovery
      expect(cleanupResult.cleaned).toBeGreaterThanOrEqual(0);
    });
  });

  describe('Real-World Scenarios', () => {
    it('should detect and prevent memory leaks in long-running sessions', async () => {
      const sessionDuration = 2000; // 2 seconds
      const startTime = Date.now();
      const detections: any[] = [];

      detector.on('memoryLeakDetected', (detection) => {
        detections.push(detection);
      });

      // Simulate long-running session
      while (Date.now() - startTime < sessionDuration) {
        // Continuous operations
        await cacheManager.set(`session-${Date.now()}`, { data: 'session data' });

        const candidate: PromptCandidate = {
          id: `session-${Date.now()}`,
          generation: 1,
          content: 'Session candidate',
          averageScore: Math.random(),
          rolloutCount: 1,
          timestamp: new Date(),
        };
        await paretoFrontier.addCandidate(candidate);

        // Periodic cleanup
        if (Math.random() < 0.1) { // 10% chance
          await detector.forceCleanup();
        }

        await new Promise(resolve => setTimeout(resolve, 50));
      }

      // Final verification
      const finalStats = detector.getStatistics();
      expect(finalStats.components.length).toBeGreaterThan(0);
    });

    it('should integrate with component-specific cleanup', async () => {
      // Fill components with data
      for (let i = 0; i < 30; i++) {
        await cacheManager.set(`integration-${i}`, { data: 'integration test' });
      }

      // Trigger component-specific cleanup
      await cacheManager['performCleanup']();
      await paretoFrontier.performMemoryCleanup();
      await llmAdapter.performMemoryCleanup();

      // Verify cleanup effectiveness
      const stats = detector.getStatistics();
      expect(stats.components.every(c => c.objectCount >= 0)).toBe(true);
    });
  });

  describe('Error Handling and Edge Cases', () => {
    it('should handle detector shutdown gracefully', async () => {
      // Perform operations
      await cacheManager.set('shutdown-test', { data: 'test' });

      // Shutdown detector
      detector.shutdown();

      // Operations should continue without error
      await cacheManager.set('post-shutdown', { data: 'test' });
      expect(true).toBe(true); // Should not throw
    });

    it('should handle malformed objects', () => {
      expect(() => {
        const circularObj: any = {};
        circularObj.self = circularObj;
        detector.trackObjectAllocation('test-component', circularObj);
      }).not.toThrow();
    });

    it('should handle component cleanup during detection', async () => {
      // Start detection
      const detectionPromise = detector.detectMemoryLeaks();

      // Cleanup components concurrently
      await cacheManager.clear();
      paretoFrontier.clear();

      // Wait for detection to complete
      const detections = await detectionPromise;
      expect(Array.isArray(detections)).toBe(true);
    });
  });
});
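
Outside of a test harness, the detector is wired up in essentially the same way as in the beforeEach block above. The sketch below is not part of the repository; it reuses only the calls exercised in this suite (MemoryLeakIntegration.initialize, the memoryLeakDetected event, detectMemoryLeaks, and the shutdown pair). The import path is copied from the test file and every configuration value is illustrative, not a recommended setting.

// Minimal wiring sketch (illustrative only, not part of the repository).
// Import path copied from the test file; adjust it to the caller's location.
import { MemoryLeakIntegration } from '../../core/memory-leak-detector';

const detector = MemoryLeakIntegration.initialize({
  heapGrowthRate: 5,        // illustrative; the tests above use 2 to trigger detections quickly
  maxObjectCount: 10_000,   // illustrative; the tests above use 100
  maxHeapSize: 512,         // illustrative; the tests above use 50
  monitoringWindow: 60_000, // illustrative; the tests above use 5000
});

// React to detections instead of asserting on them.
detector.on('memoryLeakDetected', (detection) => {
  console.warn(`memory leak suspected in ${detection.component}: ${detection.leakType}`);
});

// Run a detection pass on whatever cadence the host application prefers.
setInterval(() => {
  void detector.detectMemoryLeaks();
}, 60_000);

// On shutdown, mirror the afterEach teardown above:
// detector.shutdown();
// MemoryLeakIntegration.shutdown();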

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/sloth-wq/prompt-auto-optimizer-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.