import { describe, test, expect, beforeAll, afterAll, beforeEach } from '@jest/globals';
import { AudioInspector } from '../lib/inspector.js';
import { promises as fs } from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
const __dirname = path.dirname(fileURLToPath(import.meta.url));
describe('Audio Inspector Performance and Stress Tests', () => {
let inspector;
let testDir;
// Create a scratch directory for generated audio fixtures before any test runs.
beforeAll(async () => {
  testDir = path.join(__dirname, 'performance-test');
  await fs.mkdir(testDir, { recursive: true });
});

// Remove the scratch directory and anything the tests left behind.
// BUG FIX: fs.rmdir({ recursive: true }) is deprecated (and rejects on
// recent Node releases); fs.rm with recursive+force is the supported
// replacement and also tolerates a missing directory.
afterAll(async () => {
  await fs.rm(testDir, { recursive: true, force: true }).catch(() => {});
});

// Fresh inspector per test so no analyzer state leaks between cases.
beforeEach(() => {
  inspector = new AudioInspector();
});
describe('Memory Usage Tests', () => {
  // Analyzing one large file should not balloon the heap.
  test('should handle large file analysis without memory leaks', async () => {
    const TEN_MB = 10 * 1024 * 1024;
    const largeFile = path.join(testDir, 'large-file.wav');
    // Write a 10MB placeholder "audio" file filled with a repeated byte.
    await fs.writeFile(largeFile, Buffer.alloc(TEN_MB, 'A'));
    const heapBefore = process.memoryUsage().heapUsed;
    try {
      const analysis = await inspector.analyzeFile(largeFile);
      expect(analysis).toBeDefined();
      // Only effective when node runs with --expose-gc; harmless otherwise.
      global.gc?.();
      const heapAfter = process.memoryUsage().heapUsed;
      // Heap growth should stay under 50MB for a single 10MB input.
      expect(heapAfter - heapBefore).toBeLessThan(50 * 1024 * 1024);
    } finally {
      await fs.unlink(largeFile).catch(() => {});
    }
  });

  // Ten concurrent analyses should keep memory growth bounded.
  test('should handle multiple concurrent analyses without excessive memory usage', async () => {
    const FILE_COUNT = 10;
    const filePaths = [];
    for (let i = 0; i < FILE_COUNT; i++) {
      const filePath = path.join(testDir, `concurrent-${i}.mp3`);
      await fs.writeFile(filePath, `test content ${i}`.repeat(1000));
      filePaths.push(filePath);
    }
    const heapBefore = process.memoryUsage().heapUsed;
    try {
      // Kick off every analysis at once and wait for all of them.
      const analyses = await Promise.all(
        filePaths.map((filePath) => inspector.analyzeFile(filePath))
      );
      expect(analyses.length).toBe(FILE_COUNT);
      // Only effective when node runs with --expose-gc; harmless otherwise.
      global.gc?.();
      const heapAfter = process.memoryUsage().heapUsed;
      // Concurrent processing gets a larger, but still bounded, budget.
      expect(heapAfter - heapBefore).toBeLessThan(100 * 1024 * 1024);
    } finally {
      for (const filePath of filePaths) {
        await fs.unlink(filePath).catch(() => {});
      }
    }
  });
});
describe('Processing Speed Tests', () => {
  // A single small file should be analyzed near-instantly.
  test('should process small files quickly', async () => {
    const smallFile = path.join(testDir, 'small-file.mp3');
    await fs.writeFile(smallFile, 'small test content');
    const startTime = Date.now();
    const result = await inspector.analyzeFile(smallFile);
    const processingTime = Date.now() - startTime;
    expect(result).toBeDefined();
    expect(processingTime).toBeLessThan(1000); // Should complete in under 1 second
    await fs.unlink(smallFile).catch(() => {});
  });

  // Batch analysis of 50 files should average under 500ms per file.
  test('should handle batch processing efficiently', async () => {
    const batchSize = 50;
    const files = [];
    for (let i = 0; i < batchSize; i++) {
      const fileName = `batch-${i}.mp3`;
      await fs.writeFile(path.join(testDir, fileName), `batch content ${i}`);
      files.push(fileName);
    }
    const startTime = Date.now();
    const batchResult = await inspector.analyzeBatch(testDir);
    const totalTime = Date.now() - startTime;
    const avgTimePerFile = totalTime / batchResult.summary.totalFiles;
    expect(batchResult.summary.totalFiles).toBeGreaterThanOrEqual(batchSize);
    expect(avgTimePerFile).toBeLessThan(500); // Average under 500ms per file
    // Clean up
    for (const fileName of files) {
      await fs.unlink(path.join(testDir, fileName)).catch(() => {});
    }
  });

  // Processing time should grow roughly linearly with file count.
  test('should scale linearly with file count', async () => {
    const fileCounts = [5, 10, 20];
    const timings = [];
    for (const count of fileCounts) {
      const testFiles = [];
      for (let i = 0; i < count; i++) {
        const filePath = path.join(testDir, `scale-test-${count}-${i}.mp3`);
        await fs.writeFile(filePath, `scale test content ${i}`);
        testFiles.push(filePath);
      }
      const startTime = Date.now();
      await inspector.analyzeBatch(testDir);
      timings.push({ count, time: Date.now() - startTime });
      // Clean up this round's files before the next round.
      for (const file of testFiles) {
        await fs.unlink(file).catch(() => {});
      }
    }
    expect(timings.length).toBe(fileCounts.length);
    // Verify scaling is not super-linear: each step's time ratio must stay
    // within 2x of the file-count ratio.
    for (let i = 1; i < timings.length; i++) {
      // BUG FIX: a previous batch can finish in 0ms on a fast machine,
      // which made this ratio Infinity/NaN and failed the test spuriously.
      // Clamp the denominator to 1ms.
      const currentRatio = timings[i].time / Math.max(timings[i - 1].time, 1);
      const fileRatio = timings[i].count / timings[i - 1].count;
      expect(currentRatio).toBeLessThan(fileRatio * 2);
    }
  });
});
describe('Error Handling Performance', () => {
  // A directory full of corrupt/empty files should still be batch-processed quickly.
  test('should handle multiple file errors efficiently', async () => {
    const errorFileCount = 20;
    const errorFiles = [];
    for (let i = 0; i < errorFileCount; i++) {
      const filePath = path.join(testDir, `error-file-${i}.mp3`);
      // Alternate between corrupted (null bytes) and empty files.
      const contents = i % 2 === 0 ? '\x00\x00\x00\x00' : '';
      await fs.writeFile(filePath, contents);
      errorFiles.push(filePath);
    }
    const startedAt = Date.now();
    const batchResult = await inspector.analyzeBatch(testDir);
    const avgTimePerFile = (Date.now() - startedAt) / errorFileCount;
    expect(batchResult.summary.totalFiles).toBeGreaterThanOrEqual(errorFileCount);
    expect(avgTimePerFile).toBeLessThan(2000); // errors must not stall the batch
    for (const file of errorFiles) {
      await fs.unlink(file).catch(() => {});
    }
  });

  // A 1MB file of junk must finish well inside the 30s Jest timeout.
  test('should timeout gracefully on problematic files', async () => {
    const timeoutFile = path.join(testDir, 'timeout-test.mp3');
    await fs.writeFile(timeoutFile, Buffer.alloc(1024 * 1024, 'problematic content'));
    const startedAt = Date.now();
    const result = await inspector.analyzeFile(timeoutFile);
    const elapsed = Date.now() - startedAt;
    // 30 seconds matches the Jest timeout configuration.
    expect(elapsed).toBeLessThan(30000);
    expect(result).toBeDefined();
    await fs.unlink(timeoutFile).catch(() => {});
  });
});
describe('Resource Management', () => {
  // Sequentially analyze many files; none should fail from fd exhaustion.
  test('should handle file descriptor limits properly', async () => {
    const fdTestCount = 100;
    const files = [];
    for (let i = 0; i < fdTestCount; i++) {
      const filePath = path.join(testDir, `fd-test-${i}.mp3`);
      await fs.writeFile(filePath, `fd test content ${i}`);
      files.push(filePath);
    }
    try {
      // Sequential processing exercises open/close of one descriptor at a time.
      const results = [];
      for (const file of files) {
        results.push(await inspector.analyzeFile(file));
      }
      expect(results.length).toBe(fdTestCount);
      // Every analysis must have produced a result (no fd exhaustion).
      for (const result of results) {
        expect(result).toBeDefined();
      }
    } finally {
      for (const file of files) {
        await fs.unlink(file).catch(() => {});
      }
    }
  });

  // Repeated analyses of one file should not leak file descriptors.
  test('should clean up temporary resources properly', async () => {
    const resourceFile = path.join(testDir, 'resource-test.mp3');
    await fs.writeFile(resourceFile, 'resource test content');
    // Counting open fds is only possible via /proc on Linux; report 0 elsewhere.
    const countOpenFds = () =>
      process.platform === 'linux'
        ? fs.readdir('/proc/self/fd').then((fds) => fds.length).catch(() => 0)
        : Promise.resolve(0);
    const initialFDs = await countOpenFds();
    for (let i = 0; i < 10; i++) {
      const result = await inspector.analyzeFile(resourceFile);
      expect(result).toBeDefined();
    }
    // Leak check only applies where the initial count could be sampled.
    if (process.platform === 'linux' && initialFDs > 0) {
      const finalFDs = await countOpenFds();
      expect(finalFDs).toBeLessThanOrEqual(initialFDs + 5); // Allow for some variation
    }
    await fs.unlink(resourceFile).catch(() => {});
  });
});
describe('Stress Testing', () => {
  // One file per supported extension, analyzed in a single batch pass.
  test('should handle maximum supported file formats simultaneously', async () => {
    const formats = inspector.supportedFormats;
    const stressFiles = [];
    for (const format of formats) {
      const filePath = path.join(testDir, `stress-test${format}`);
      await fs.writeFile(filePath, `stress test content for ${format}`);
      stressFiles.push(filePath);
    }
    const startedAt = Date.now();
    const batchResult = await inspector.analyzeBatch(testDir);
    const elapsed = Date.now() - startedAt;
    expect(batchResult.summary.totalFiles).toBeGreaterThanOrEqual(formats.length);
    expect(elapsed).toBeLessThan(60000); // whole batch within one minute
    for (const file of stressFiles) {
      await fs.unlink(file).catch(() => {});
    }
  });

  // Concurrent analyses of identical files must agree with a reference result.
  test('should maintain accuracy under high load', async () => {
    const highLoadCount = 30;
    const referenceFile = path.join(testDir, 'reference.mp3');
    await fs.writeFile(referenceFile, 'reference content for accuracy test');
    // Analyze once, serially, to establish the expected answer.
    const referenceResult = await inspector.analyzeFile(referenceFile);
    const identicalFiles = [];
    for (let i = 0; i < highLoadCount; i++) {
      const filePath = path.join(testDir, `identical-${i}.mp3`);
      await fs.writeFile(filePath, 'reference content for accuracy test');
      identicalFiles.push(filePath);
    }
    // Analyze all copies concurrently.
    const results = await Promise.all(
      identicalFiles.map((file) => inspector.analyzeFile(file))
    );
    // Under load, every result must still match the serial reference.
    for (const result of results) {
      expect(result.file.size).toBe(referenceResult.file.size);
      expect(result.format.container).toBe(referenceResult.format.container);
    }
    await fs.unlink(referenceFile).catch(() => {});
    for (const file of identicalFiles) {
      await fs.unlink(file).catch(() => {});
    }
  });

  // A 15-level-deep tree with one file per level, scanned recursively.
  test('should handle deep directory structures efficiently', async () => {
    const depth = 15;
    const deepFiles = [];
    let currentPath = testDir;
    for (let i = 0; i < depth; i++) {
      currentPath = path.join(currentPath, `level-${i}`);
      await fs.mkdir(currentPath, { recursive: true });
      const filePath = path.join(currentPath, `deep-file-${i}.mp3`);
      await fs.writeFile(filePath, `deep file content at level ${i}`);
      deepFiles.push(filePath);
    }
    const startedAt = Date.now();
    const batchResult = await inspector.analyzeBatch(testDir, true); // recursive
    const elapsed = Date.now() - startedAt;
    expect(batchResult.summary.totalFiles).toBeGreaterThanOrEqual(depth);
    expect(elapsed).toBeLessThan(30000);
    // Deep tree cleanup is handled by afterAll's recursive removal.
  });
});
describe('Platform-Specific Performance', () => {
  // Timing should not vary wildly with the shape of the file name.
  test('should perform consistently across different path formats', async () => {
    const pathFormats = [
      'simple-file.mp3',
      'file with spaces.mp3',
      'file-with-dashes.mp3',
      'file_with_underscores.mp3'
    ];
    const timings = [];
    for (const fileName of pathFormats) {
      const filePath = path.join(testDir, fileName);
      await fs.writeFile(filePath, 'path format test content');
      const startTime = Date.now();
      const result = await inspector.analyzeFile(filePath);
      timings.push(Date.now() - startTime);
      expect(result).toBeDefined();
      await fs.unlink(filePath).catch(() => {});
    }
    const avgTiming = timings.reduce((a, b) => a + b, 0) / timings.length;
    // No single timing should exceed 3x the average.
    // BUG FIX: when every analysis completes in 0ms the average is 0 and
    // expect(0).toBeLessThan(0) failed spuriously; floor the bound at 1ms.
    const bound = Math.max(avgTiming * 3, 1);
    timings.forEach((timing) => {
      expect(timing).toBeLessThan(bound);
    });
  });

  // Unicode file names should be analyzable wherever the filesystem allows them.
  test('should handle Unicode filenames efficiently', async () => {
    const unicodeNames = [
      '测试文件.mp3', // Chinese
      'файл-тест.mp3', // Cyrillic
      'ファイル-テスト.mp3', // Japanese
      'archivo-prueba.mp3' // Spanish
    ];
    const results = [];
    for (const fileName of unicodeNames) {
      try {
        const filePath = path.join(testDir, fileName);
        await fs.writeFile(filePath, 'unicode filename test');
        const result = await inspector.analyzeFile(filePath);
        results.push(result);
        expect(result).toBeDefined();
        await fs.unlink(filePath).catch(() => {});
      } catch (error) {
        // Some filesystems may not support certain Unicode characters;
        // skip rather than fail so the suite stays portable.
        console.warn(`Unicode filename test skipped for ${fileName}: ${error.message}`);
      }
    }
    // At least some Unicode names should work (ASCII entry always should).
    expect(results.length).toBeGreaterThan(0);
  });
});
});