// trajectory-store.test.ts
/**
* Comprehensive tests for TrajectoryStore
* Tests trajectory storage, retrieval, filtering, archiving, and error handling
*
* NOTE: These tests are written BEFORE implementation (Test-First Development)
* All tests should FAIL initially until TrajectoryStore is implemented
*/
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
import { promises as fs } from 'fs';
import { resolve, join } from 'path';
import type {
TrajectoryFilter,
} from '../types/gepa';
import type {
TrajectoryStoreConfig,
SaveResult,
ArchiveResult,
IndexRebuildResult
} from './trajectory-store-types';
import {
sampleTrajectories,
TestDataGenerator
} from '../test/fixtures/sample-data';
import { MockFactory, createMockFn, createMockRejection } from '../test/helpers';
// Import the TrajectoryStore class that we need to implement
// This import will FAIL until the implementation exists
import { TrajectoryStore } from './trajectory-store';
describe('TrajectoryStore', () => {
let store: TrajectoryStore;
let tempDir: string;
// eslint-disable-next-line no-console
let originalConsoleError: typeof console.error;
beforeEach(async () => {
  // Capture the real console.error so error-logging assertions run against a
  // spy; afterEach puts the original back.
  // eslint-disable-next-line no-console
  originalConsoleError = console.error;
  // eslint-disable-next-line no-console
  console.error = vi.fn();
  // Every test runs against its own scratch directory under temp/.
  tempDir = resolve(process.cwd(), 'temp', 'test-trajectory-store');
  await fs.mkdir(tempDir, { recursive: true });
  // Small size cap and no compression keep on-disk fixtures easy to inspect.
  store = new TrajectoryStore({
    dataDir: tempDir,
    maxFileSize: 10 * 1024 * 1024, // 10MB is plenty for test fixtures
    indexingEnabled: true,
    compressionEnabled: false, // plain JSON on disk simplifies assertions
    archiveAfterDays: 7,
  });
});
afterEach(async () => {
  // Best-effort removal of the scratch directory; a failed cleanup must
  // never fail the suite.
  await fs.rm(tempDir, { recursive: true, force: true }).catch(() => {});
  // Hand the real console.error back and drop any fs spies.
  // eslint-disable-next-line no-console
  console.error = originalConsoleError;
  vi.restoreAllMocks();
});
describe('Constructor and Initialization', () => {
  it('should create store with default configuration', () => {
    const defaultStore = new TrajectoryStore();
    expect(defaultStore).toBeInstanceOf(TrajectoryStore);
    // Defaults documented by the store contract.
    const { config } = defaultStore;
    expect(config.dataDir).toContain('gepa-trajectories');
    expect(config.maxFileSize).toBe(50 * 1024 * 1024); // 50MB default
    expect(config.indexingEnabled).toBe(true);
    expect(config.compressionEnabled).toBe(true);
    expect(config.archiveAfterDays).toBe(30);
  });

  it('should create store with custom configuration', () => {
    // Every provided option must be taken verbatim.
    const customConfig = {
      dataDir: '/custom/path',
      maxFileSize: 100 * 1024 * 1024,
      indexingEnabled: false,
      compressionEnabled: false,
      archiveAfterDays: 14,
    };
    expect(new TrajectoryStore(customConfig).config).toEqual(customConfig);
  });

  it('should initialize directories on first use', async () => {
    // Saving the first trajectory should lazily create the data layout.
    await store.save(sampleTrajectories[0]);
    const pathExists = (p: string) => fs.access(p).then(() => true, () => false);
    expect(await pathExists(tempDir)).toBe(true);
    expect(await pathExists(join(tempDir, 'index'))).toBe(true);
  });
});
describe('Save Trajectory', () => {
// Happy path: save() reports success and writes a file whose path contains the id.
it('should save a single trajectory successfully', async () => {
const trajectory = sampleTrajectories[0];
const result = await store.save(trajectory);
expect(result.success).toBe(true);
expect(result.id).toBe(trajectory.id);
expect(result.filePath).toContain(trajectory.id);
// Verify file was created
const exists = await fs.access(result.filePath).then(() => true).catch(() => false);
expect(exists).toBe(true);
});
// The on-disk file must be valid JSON mirroring the trajectory's fields.
it('should save trajectory with proper file structure', async () => {
const trajectory = sampleTrajectories[0];
const result = await store.save(trajectory);
// Read the saved file
const savedContent = await fs.readFile(result.filePath, 'utf-8');
const savedTrajectory = JSON.parse(savedContent);
expect(savedTrajectory.id).toBe(trajectory.id);
expect(savedTrajectory.promptId).toBe(trajectory.promptId);
expect(savedTrajectory.taskId).toBe(trajectory.taskId);
expect(savedTrajectory.steps).toHaveLength(trajectory.steps.length);
expect(savedTrajectory.finalResult.success).toBe(trajectory.finalResult.success);
});
// Map values are not JSON-serializable by default; the store must convert them.
it('should handle trajectory with serializable Maps', async () => {
// Create trajectory with Map data that needs serialization
const trajectory = MockFactory.createExecutionTrajectory({
id: 'test-with-maps',
steps: [
{
stepNumber: 1,
action: 'test_action',
reasoning: 'Test reasoning',
timestamp: new Date(),
toolInput: { complexData: new Map([['key1', 'value1'], ['key2', 42]]) },
},
],
});
const result = await store.save(trajectory);
expect(result.success).toBe(true);
// Verify serialization handled Maps correctly
const savedContent = await fs.readFile(result.filePath, 'utf-8');
const savedTrajectory = JSON.parse(savedContent);
// Maps should be converted to objects or arrays for JSON storage
expect(savedTrajectory.steps[0].toolInput.complexData).toBeDefined();
});
// Each save should add a summary record to index/trajectories.json.
it('should create index entry when indexing is enabled', async () => {
const trajectory = sampleTrajectories[0];
await store.save(trajectory);
// Check that index was updated
const indexPath = join(tempDir, 'index', 'trajectories.json');
const indexExists = await fs.access(indexPath).then(() => true).catch(() => false);
expect(indexExists).toBe(true);
const indexContent = await fs.readFile(indexPath, 'utf-8');
const index = JSON.parse(indexContent);
expect(index).toHaveProperty(trajectory.id);
expect(index[trajectory.id].promptId).toBe(trajectory.promptId);
expect(index[trajectory.id].taskId).toBe(trajectory.taskId);
expect(index[trajectory.id].timestamp).toBeDefined();
expect(index[trajectory.id].success).toBe(trajectory.finalResult.success);
expect(index[trajectory.id].score).toBe(trajectory.finalResult.score);
});
// Write failures surface as { success: false } plus a logged error — never a throw.
it('should handle file system errors gracefully', async () => {
// Mock fs.writeFile to simulate failure
const mockWriteFile = vi.spyOn(fs, 'writeFile').mockRejectedValueOnce(new Error('Disk full'));
expect(mockWriteFile).toBeDefined(); // Ensure mock is created
const trajectory = sampleTrajectories[0];
const result = await store.save(trajectory);
expect(result.success).toBe(false);
expect(result.error).toContain('Disk full');
// eslint-disable-next-line no-console
expect(console.error).toHaveBeenCalledWith(
expect.stringContaining('Failed to save trajectory'),
expect.any(Error)
);
});
// IDs act as unique keys: a second save of the same id must be rejected.
it('should prevent saving duplicate trajectory IDs', async () => {
const trajectory = sampleTrajectories[0];
// Save first time
const result1 = await store.save(trajectory);
expect(result1.success).toBe(true);
// Try to save same ID again
const result2 = await store.save(trajectory);
expect(result2.success).toBe(false);
expect(result2.error).toContain('already exists');
});
// A 1000-step trajectory still fits under this suite's 10MB cap.
it('should handle large trajectories within size limits', async () => {
// Create a large trajectory with many steps
const largeTrajectory = TestDataGenerator.generateExecutionTrajectory({
id: 'large-trajectory-test',
steps: Array.from({ length: 1000 }, (_, i) => ({
stepNumber: i + 1,
action: `action_${i}`,
reasoning: `This is reasoning for step ${i}. `.repeat(10), // Add some bulk
timestamp: new Date(Date.now() + i * 1000),
})),
});
const result = await store.save(largeTrajectory);
expect(result.success).toBe(true);
expect(result.filePath).toBeDefined();
});
// Over-limit payloads must be refused with a descriptive error message.
it('should reject trajectories exceeding size limits', async () => {
// Create store with very small size limit
const smallStore = new TrajectoryStore({
dataDir: tempDir,
maxFileSize: 1024, // 1KB limit
});
// Create trajectory that will exceed limit
const hugeTraj = TestDataGenerator.generateExecutionTrajectory({
id: 'huge-trajectory',
steps: Array.from({ length: 100 }, (_, i) => ({
stepNumber: i + 1,
action: `action_${i}`,
reasoning: 'This is a very long reasoning section that will make the trajectory very large and exceed our 1KB limit for testing purposes. '.repeat(20),
timestamp: new Date(),
})),
});
const result = await smallStore.save(hugeTraj);
expect(result.success).toBe(false);
expect(result.error).toContain('exceeds maximum file size');
});
});
describe('Load Trajectory', () => {
  beforeEach(async () => {
    // Persist every fixture up front so each test has data to read back.
    for (const trajectory of sampleTrajectories) {
      await store.save(trajectory);
    }
  });

  it('should load existing trajectory by ID', async () => {
    const original = sampleTrajectories[0];
    const loaded = await store.load(original.id);
    expect(loaded).toBeDefined();
    expect(loaded!.id).toBe(original.id);
    expect(loaded!.promptId).toBe(original.promptId);
    expect(loaded!.taskId).toBe(original.taskId);
    expect(loaded!.steps).toHaveLength(original.steps.length);
    expect(loaded!.finalResult.success).toBe(original.finalResult.success);
  });

  it('should return null for non-existent trajectory', async () => {
    expect(await store.load('non-existent-id')).toBeNull();
  });

  it('should handle file system errors during load', async () => {
    // The next read blows up; load() must report null and log, not throw.
    const mockReadFile = vi.spyOn(fs, 'readFile').mockRejectedValueOnce(new Error('File corrupted'));
    expect(mockReadFile).toBeDefined(); // Ensure mock is created
    const loaded = await store.load(sampleTrajectories[0].id);
    expect(loaded).toBeNull();
    // eslint-disable-next-line no-console
    expect(console.error).toHaveBeenCalledWith(
      expect.stringContaining('Failed to load trajectory'),
      expect.any(Error)
    );
  });

  it('should handle corrupted JSON files', async () => {
    const { id } = sampleTrajectories[0];
    // Clobber the stored file with malformed JSON.
    // NOTE(review): assumes files are stored flat as `<id>.json` in dataDir — confirm against implementation.
    await fs.writeFile(join(tempDir, `${id}.json`), '{ invalid json content }', 'utf-8');
    expect(await store.load(id)).toBeNull();
    // eslint-disable-next-line no-console
    expect(console.error).toHaveBeenCalledWith(
      expect.stringContaining('Failed to parse trajectory JSON'),
      expect.any(Error)
    );
  });

  it('should properly deserialize complex data types', async () => {
    // Dates round-trip through JSON as strings; the store must revive them.
    const trajectory = MockFactory.createExecutionTrajectory({
      id: 'complex-data-test',
      timestamp: new Date('2024-01-01T12:00:00Z'),
      steps: [
        {
          stepNumber: 1,
          action: 'test_action',
          reasoning: 'Test reasoning',
          timestamp: new Date('2024-01-01T12:00:01Z'),
        },
      ],
    });
    await store.save(trajectory);
    const loaded = await store.load(trajectory.id);
    expect(loaded).toBeDefined();
    expect(loaded!.timestamp).toBeInstanceOf(Date);
    expect(loaded!.steps[0].timestamp).toBeInstanceOf(Date);
    expect(loaded!.timestamp.toISOString()).toBe('2024-01-01T12:00:00.000Z');
  });
});
describe('Query Trajectories', () => {
beforeEach(async () => {
// Save sample trajectories plus additional test data
for (const trajectory of sampleTrajectories) {
await store.save(trajectory);
}
// Add some additional trajectories for filtering tests
// (two recent 2024 entries with known scores, one old 2023 entry).
const additionalTrajectories = [
TestDataGenerator.generateExecutionTrajectory({
id: 'recent-success',
promptId: 'prompt-test-001',
taskId: 'task-filter-001',
timestamp: new Date('2024-02-01T00:00:00Z'),
finalResult: { success: true, score: 0.95, output: 'Excellent result' },
}),
TestDataGenerator.generateExecutionTrajectory({
id: 'recent-failure',
promptId: 'prompt-test-002',
taskId: 'task-filter-002',
timestamp: new Date('2024-02-01T00:00:00Z'),
finalResult: { success: false, score: 0.25, output: 'Failed result' },
}),
TestDataGenerator.generateExecutionTrajectory({
id: 'old-trajectory',
promptId: 'prompt-old',
taskId: 'task-old',
timestamp: new Date('2023-01-01T00:00:00Z'),
finalResult: { success: true, score: 0.60, output: 'Old but good' },
}),
];
for (const trajectory of additionalTrajectories) {
await store.save(trajectory);
}
});
// No filter means the full active set comes back.
it('should return all trajectories when no filter is provided', async () => {
const results = await store.query();
expect(results.length).toBeGreaterThanOrEqual(5); // Sample + additional trajectories
expect(results.every(t => t.id && t.promptId && t.taskId)).toBe(true);
});
it('should filter by promptId', async () => {
const filter: TrajectoryFilter = {
promptId: 'prompt-test-001',
};
const results = await store.query(filter);
expect(results.length).toBeGreaterThan(0);
expect(results.every(t => t.promptId === 'prompt-test-001')).toBe(true);
});
it('should filter by taskId', async () => {
const filter: TrajectoryFilter = {
taskId: 'task-filter-001',
};
const results = await store.query(filter);
// Exactly one fixture uses this taskId.
expect(results.length).toBe(1);
expect(results[0].taskId).toBe('task-filter-001');
});
it('should filter by success status', async () => {
const successFilter: TrajectoryFilter = {
successOnly: true,
};
const successResults = await store.query(successFilter);
expect(successResults.length).toBeGreaterThan(0);
expect(successResults.every(t => t.finalResult.success === true)).toBe(true);
});
// minScore/maxScore bounds are inclusive on both ends.
it('should filter by score range', async () => {
const scoreFilter: TrajectoryFilter = {
minScore: 0.8,
maxScore: 1.0,
};
const results = await store.query(scoreFilter);
expect(results.length).toBeGreaterThan(0);
expect(results.every(t => t.finalResult.score >= 0.8 && t.finalResult.score <= 1.0)).toBe(true);
});
it('should filter by date range', async () => {
const dateFilter: TrajectoryFilter = {
dateRange: {
start: new Date('2024-01-01T00:00:00Z'),
end: new Date('2024-12-31T23:59:59Z'),
},
};
const results = await store.query(dateFilter);
expect(results.length).toBeGreaterThan(0);
expect(results.every(t => {
const timestamp = new Date(t.timestamp);
return timestamp >= dateFilter.dateRange!.start && timestamp <= dateFilter.dateRange!.end;
})).toBe(true);
});
// Pages drawn with limit/offset must not hand back the same entry twice.
it('should support limit and offset for pagination', async () => {
const page1 = await store.query({ limit: 2, offset: 0 });
const page2 = await store.query({ limit: 2, offset: 2 });
expect(page1.length).toBeLessThanOrEqual(2);
expect(page2.length).toBeLessThanOrEqual(2);
// Ensure no overlap between pages
const page1Ids = page1.map(t => t.id);
const page2Ids = page2.map(t => t.id);
const intersection = page1Ids.filter(id => page2Ids.includes(id));
expect(intersection).toHaveLength(0);
});
// All filter clauses combine with AND semantics.
it('should combine multiple filters', async () => {
const combinedFilter: TrajectoryFilter = {
successOnly: true,
minScore: 0.8,
dateRange: {
start: new Date('2024-01-01T00:00:00Z'),
end: new Date('2024-12-31T23:59:59Z'),
},
limit: 10,
};
const results = await store.query(combinedFilter);
expect(results.every(t =>
t.finalResult.success === true &&
t.finalResult.score >= 0.8 &&
new Date(t.timestamp) >= combinedFilter.dateRange!.start &&
new Date(t.timestamp) <= combinedFilter.dateRange!.end
)).toBe(true);
expect(results.length).toBeLessThanOrEqual(10);
});
it('should return empty array when no trajectories match filter', async () => {
const filter: TrajectoryFilter = {
promptId: 'non-existent-prompt',
taskId: 'non-existent-task',
};
const results = await store.query(filter);
expect(results).toEqual([]);
});
// A broken index must degrade to an empty result plus a logged error.
it('should handle query errors gracefully', async () => {
// Mock index reading to fail
const indexPath = join(tempDir, 'index', 'trajectories.json');
const mockReadFile = vi.spyOn(fs, 'readFile').mockImplementation((path) => {
if (path === indexPath) {
return Promise.reject(new Error('Index corrupted'));
}
return Promise.resolve('{}');
});
expect(mockReadFile).toBeDefined(); // Ensure mock is created
const results = await store.query();
expect(results).toEqual([]);
// eslint-disable-next-line no-console
expect(console.error).toHaveBeenCalledWith(
expect.stringContaining('Failed to query trajectories'),
expect.any(Error)
);
});
// Default ordering: newest first.
it('should sort results by timestamp in descending order by default', async () => {
const results = await store.query();
expect(results.length).toBeGreaterThan(1);
for (let i = 1; i < results.length; i++) {
const prev = new Date(results[i - 1].timestamp);
const curr = new Date(results[i].timestamp);
expect(prev.getTime()).toBeGreaterThanOrEqual(curr.getTime());
}
});
});
describe('Archive Old Trajectories', () => {
beforeEach(async () => {
// Create trajectories with different ages
const oldTrajectory = TestDataGenerator.generateExecutionTrajectory({
id: 'old-trajectory-1',
timestamp: new Date(Date.now() - 10 * 24 * 60 * 60 * 1000), // 10 days old
});
const recentTrajectory = TestDataGenerator.generateExecutionTrajectory({
id: 'recent-trajectory-1',
timestamp: new Date(Date.now() - 2 * 24 * 60 * 60 * 1000), // 2 days old
});
await store.save(oldTrajectory);
await store.save(recentTrajectory);
});
it('should archive trajectories older than configured days', async () => {
const result = await store.archiveOld();
expect(result.success).toBe(true);
expect(result.archivedCount).toBeGreaterThan(0);
expect(result.archivedIds).toContain('old-trajectory-1');
expect(result.archivedIds).not.toContain('recent-trajectory-1');
});
it('should create archive directory structure', async () => {
await store.archiveOld();
const archiveDir = join(tempDir, 'archive');
const archiveExists = await fs.access(archiveDir).then(() => true).catch(() => false);
expect(archiveExists).toBe(true);
});
it('should move files to archive directory with date-based organization', async () => {
const result = await store.archiveOld();
if (result.archivedCount > 0) {
// Check that archived files exist in archive directory
const archiveDir = join(tempDir, 'archive');
const archiveContents = await fs.readdir(archiveDir);
expect(archiveContents.length).toBeGreaterThan(0);
}
});
it('should update index to reflect archived trajectories', async () => {
await store.archiveOld();
// Query should not return archived trajectories
const activeResults = await store.query();
const activeIds = activeResults.map(t => t.id);
expect(activeIds).not.toContain('old-trajectory-1');
expect(activeIds).toContain('recent-trajectory-1');
});
it('should handle archiving errors gracefully', async () => {
// Mock file operations to fail
const mockRename = vi.spyOn(fs, 'rename').mockRejectedValueOnce(new Error('Permission denied'));
expect(mockRename).toBeDefined(); // Ensure mock is created
const result = await store.archiveOld();
// Should continue with other files even if some fail
expect(result.success).toBe(true); // Partial success is still success
// eslint-disable-next-line no-console
expect(console.error).toHaveBeenCalled();
});
it('should not archive trajectories if none are old enough', async () => {
// Create store with very long archive period
const longArchiveStore = new TrajectoryStore({
dataDir: tempDir,
archiveAfterDays: 365, // 1 year
});
const result = await longArchiveStore.archiveOld();
expect(result.success).toBe(true);
expect(result.archivedCount).toBe(0);
expect(result.archivedIds).toEqual([]);
});
});
describe('Index Management', () => {
  it('should build index from existing trajectory files', async () => {
    for (const trajectory of sampleTrajectories) {
      await store.save(trajectory);
    }
    // Wipe the index, then ask the store to reconstruct it from disk.
    const indexPath = join(tempDir, 'index', 'trajectories.json');
    await fs.writeFile(indexPath, '{}', 'utf-8');
    const result = await store.rebuildIndex();
    expect(result.success).toBe(true);
    expect(result.indexedCount).toBe(sampleTrajectories.length);
    // Every fixture should be present in the rebuilt index.
    const index = JSON.parse(await fs.readFile(indexPath, 'utf-8'));
    for (const trajectory of sampleTrajectories) {
      expect(index).toHaveProperty(trajectory.id);
    }
  });

  it('should handle missing index gracefully', async () => {
    // With the index directory gone, query() should fall back to a file scan.
    await fs.rm(join(tempDir, 'index'), { recursive: true, force: true });
    const results = await store.query();
    expect(Array.isArray(results)).toBe(true);
  });

  it('should optimize index for fast querying', async () => {
    for (const trajectory of TestDataGenerator.generateExecutionTrajectories(100)) {
      await store.save(trajectory);
    }
    // Indexed lookups should stay fast regardless of store size.
    const startTime = process.hrtime.bigint();
    const results = await store.query({ limit: 10 });
    const elapsedMs = Number(process.hrtime.bigint() - startTime) / 1_000_000;
    expect(results.length).toBeLessThanOrEqual(10);
    expect(elapsedMs).toBeLessThan(100); // Should complete in under 100ms
  });
});
describe('Error Handling and Edge Cases', () => {
// Structurally incomplete trajectories must be rejected, not persisted.
it('should handle invalid trajectory data', async () => {
const invalidTrajectory = {
id: 'invalid-trajectory',
// Missing required fields
timestamp: new Date(),
} as any;
const result = await store.save(invalidTrajectory);
expect(result.success).toBe(false);
expect(result.error).toContain('invalid');
});
// Parallel saves with distinct ids must all land without clobbering each other.
it('should handle concurrent save operations', async () => {
const trajectories = TestDataGenerator.generateExecutionTrajectories(10);
// Save all trajectories concurrently
const savePromises = trajectories.map(t => store.save(t));
const results = await Promise.all(savePromises);
// All saves should succeed
expect(results.every(r => r.success)).toBe(true);
// All trajectories should be queryable
const queryResults = await store.query();
expect(queryResults.length).toBeGreaterThanOrEqual(10);
});
// ENOSPC mid-run: later saves fail individually while earlier ones stand.
it('should handle disk space issues', async () => {
// Mock fs.writeFile to simulate disk full error after a few writes
let writeCount = 0;
const mockWriteFile = vi.spyOn(fs, 'writeFile').mockImplementation(() => {
writeCount++;
if (writeCount > 2) {
return Promise.reject(new Error('ENOSPC: no space left on device'));
}
return Promise.resolve();
});
expect(mockWriteFile).toBeDefined(); // Ensure mock is created
const trajectories = TestDataGenerator.generateExecutionTrajectories(5);
const results: Array<{ success: boolean; error?: string }> = [];
for (const trajectory of trajectories) {
const result = await store.save(trajectory);
results.push(result);
}
// Some saves should fail due to disk space
const failedSaves = results.filter(r => !r.success);
expect(failedSaves.length).toBeGreaterThan(0);
expect(failedSaves.some(r => r.error?.includes('no space left'))).toBe(true);
});
// A corrupted index must be healed transparently on the next save/query.
it('should handle corrupted index files', async () => {
// Create corrupted index
const indexPath = join(tempDir, 'index', 'trajectories.json');
await fs.mkdir(join(tempDir, 'index'), { recursive: true });
await fs.writeFile(indexPath, '{ corrupted json }', 'utf-8');
// Store should recover and rebuild index
const trajectory = sampleTrajectories[0];
const result = await store.save(trajectory);
expect(result.success).toBe(true);
// Index should be fixed
const queryResults = await store.query();
expect(queryResults.some(t => t.id === trajectory.id)).toBe(true);
});
// EACCES during directory creation surfaces in the save result, not a throw.
it('should handle permission errors', async () => {
// Mock fs operations to simulate permission errors
const mockMkdir = vi.spyOn(fs, 'mkdir').mockRejectedValueOnce(new Error('EACCES: permission denied'));
expect(mockMkdir).toBeDefined(); // Ensure mock is created
const trajectory = sampleTrajectories[0];
const result = await store.save(trajectory);
expect(result.success).toBe(false);
expect(result.error).toContain('permission denied');
});
// Each malformed shape below should trip validation with a 'validation' error.
it('should validate trajectory data before saving', async () => {
const invalidTrajectories = [
{ id: '', promptId: 'test' }, // Empty ID
{ id: 'test', promptId: '' }, // Empty promptId
{ id: 'test', promptId: 'test', steps: 'not-array' }, // Invalid steps
{ id: 'test', promptId: 'test', steps: [], finalResult: null }, // Null result
];
for (const invalid of invalidTrajectories) {
const result = await store.save(invalid as any);
expect(result.success).toBe(false);
expect(result.error).toContain('validation');
}
});
});
describe('Performance and Memory Management', () => {
  it('should handle large batches of trajectories efficiently', async () => {
    const batchSize = 1000;
    const trajectories = TestDataGenerator.generateExecutionTrajectories(batchSize);
    const startTime = process.hrtime.bigint();
    for (const trajectory of trajectories) {
      await store.save(trajectory);
    }
    const elapsedMs = Number(process.hrtime.bigint() - startTime) / 1_000_000;
    // 1000 sequential saves should finish well inside 30 seconds.
    expect(elapsedMs).toBeLessThan(30000);
    // Everything we wrote must be queryable afterwards.
    const results = await store.query();
    expect(results.length).toBeGreaterThanOrEqual(batchSize);
  }, 35000); // 35 second timeout for this test

  it('should not leak memory during repeated operations', async () => {
    const initialMemory = process.memoryUsage().heapUsed;
    // Hammer save/load in a loop and watch heap growth.
    for (let i = 0; i < 100; i++) {
      const trajectory = TestDataGenerator.generateExecutionTrajectory({
        id: `memory-test-${i}`,
      });
      await store.save(trajectory);
      await store.load(trajectory.id);
    }
    if (global.gc) {
      global.gc(); // Only available when node runs with --expose-gc
    }
    const memoryIncrease = process.memoryUsage().heapUsed - initialMemory;
    // Anything under 50MB of growth is treated as leak-free here.
    expect(memoryIncrease).toBeLessThan(50 * 1024 * 1024);
  });

  it('should cleanup resources properly on store disposal', async () => {
    // Placeholder: a future dispose() should release file handles, streams, etc.
    await store.save(sampleTrajectories[0]);
    expect(() => {
      // Store cleanup logic would go here
      // For now, just test that we can still query after "cleanup"
    }).not.toThrow();
  });
});
describe('Configuration Edge Cases', () => {
  it('should handle zero archiveAfterDays', async () => {
    // archiveAfterDays of 0 makes every trajectory immediately eligible.
    const immediateArchiveStore = new TrajectoryStore({
      dataDir: tempDir,
      archiveAfterDays: 0,
    });
    await immediateArchiveStore.save(TestDataGenerator.generateExecutionTrajectory());
    const result = await immediateArchiveStore.archiveOld();
    expect(result.archivedCount).toBe(1);
  });

  it('should handle very large maxFileSize', async () => {
    // An effectively unlimited cap must not break the size check.
    const largeFileStore = new TrajectoryStore({
      dataDir: tempDir,
      maxFileSize: Number.MAX_SAFE_INTEGER,
    });
    const result = await largeFileStore.save(sampleTrajectories[0]);
    expect(result.success).toBe(true);
  });

  it('should handle disabled indexing', async () => {
    const noIndexStore = new TrajectoryStore({
      dataDir: tempDir,
      indexingEnabled: false,
    });
    const trajectory = sampleTrajectories[0];
    await noIndexStore.save(trajectory);
    // Without an index, query() must fall back to scanning the data directory.
    const results = await noIndexStore.query();
    expect(results.some(t => t.id === trajectory.id)).toBe(true);
  });
});
});