// integration-setup.ts
/**
* Integration test setup file
* Configures the testing environment for integration tests
*/
import { beforeAll, afterAll, beforeEach, afterEach } from 'vitest';
import { promises as fs } from 'fs';
import { resolve } from 'path';
// Integration test configuration
const INTEGRATION_DATA_DIR = resolve(process.cwd(), 'temp/integration');
const TEST_CONFIG_PATH = resolve(INTEGRATION_DATA_DIR, 'gepa.config.json');
/**
* Global setup for integration tests
*/
beforeAll(async (): Promise<void> => {
// Create integration test directory
await fs.mkdir(INTEGRATION_DATA_DIR, { recursive: true });
await fs.mkdir(resolve(INTEGRATION_DATA_DIR, 'prompts'), { recursive: true });
await fs.mkdir(resolve(INTEGRATION_DATA_DIR, 'trajectories'), { recursive: true });
await fs.mkdir(resolve(INTEGRATION_DATA_DIR, 'results'), { recursive: true });
// Create test configuration file
const testConfig = {
llmProvider: {
type: 'claude-code-direct',
config: {
executable: 'echo', // Mock executable for tests
workingDir: INTEGRATION_DATA_DIR,
env: {},
maxConcurrentProcesses: 2,
processTimeout: 10000, // Shorter timeout for tests
},
},
evolution: {
defaultPopulationSize: 3, // Smaller for tests
defaultMaxGenerations: 5, // Fewer generations for tests
defaultMutationRate: 0.5,
},
storage: {
dataDir: INTEGRATION_DATA_DIR,
archiveAfterDays: 1,
},
};
await fs.writeFile(TEST_CONFIG_PATH, JSON.stringify(testConfig, null, 2));
// Set environment variables
process.env.NODE_ENV = 'test';
process.env.GEPA_TEST_MODE = 'integration';
process.env.GEPA_CONFIG_PATH = TEST_CONFIG_PATH;
process.env.GEPA_DATA_DIR = INTEGRATION_DATA_DIR;
// eslint-disable-next-line no-console
console.log('🔧 Integration test environment initialized');
});
/**
* Global cleanup for integration tests
*/
afterAll(async (): Promise<void> => {
// Clean up integration test files
try {
await fs.rm(INTEGRATION_DATA_DIR, { recursive: true, force: true });
} catch (error) {
// eslint-disable-next-line no-console
console.warn('Failed to clean up integration test directory:', error);
}
// eslint-disable-next-line no-console
console.log('🧹 Integration test environment cleaned up');
});
/**
* Before each integration test
*/
beforeEach(async (): Promise<void> => {
// Clear any existing test data, including tasks created via integrationUtils
const dirs = ['prompts', 'tasks', 'trajectories', 'results'];
for (const dir of dirs) {
const dirPath = resolve(INTEGRATION_DATA_DIR, dir);
try {
const files = await fs.readdir(dirPath);
await Promise.all(files.map(file => fs.rm(resolve(dirPath, file), { force: true })));
} catch {
// Directory might not exist, ignore
}
}
});
/**
* After each integration test
*/
afterEach((): void => {
// Any per-test cleanup can go here
});
// Integration test utilities
declare global {
// eslint-disable-next-line no-var
var integrationUtils: {
createTestTask: (description: string) => Promise<string>;
executeFullEvolution: (taskDescription: string) => Promise<Record<string, unknown>>;
verifyEvolutionResults: (results: Record<string, unknown>) => boolean;
setupMockLLMResponses: (responses: string[]) => void;
};
}
globalThis.integrationUtils = {
/**
* Create a test task for evolution
*/
async createTestTask(description: string): Promise<string> {
const taskId = `task-${Date.now()}`;
const taskData = {
id: taskId,
description,
createdAt: new Date().toISOString(),
status: 'pending',
};
const taskPath = resolve(INTEGRATION_DATA_DIR, 'tasks', `${taskId}.json`);
await fs.mkdir(resolve(INTEGRATION_DATA_DIR, 'tasks'), { recursive: true });
await fs.writeFile(taskPath, JSON.stringify(taskData, null, 2));
return taskId;
},
/**
* Execute a full evolution process for integration testing
*/
async executeFullEvolution(taskDescription: string): Promise<Record<string, unknown>> {
// This would normally start an evolution process
// For testing, we return a mock result structure
return {
evolutionId: `evolution-${Date.now()}`,
taskDescription,
generations: 3,
bestPrompt: {
id: 'best-prompt-123',
content: 'Optimized prompt content',
score: 0.95,
},
convergenceAchieved: true,
totalRollouts: 15,
};
},
/**
* Verify evolution results meet expected criteria
*/
verifyEvolutionResults(results: Record<string, unknown>): boolean {
if (!results || typeof results !== 'object') {
return false;
}
const bestPrompt = results.bestPrompt as Record<string, unknown> | undefined;
return !!(
results.evolutionId &&
bestPrompt &&
typeof bestPrompt.score === 'number' &&
bestPrompt.score > 0.5 &&
typeof results.totalRollouts === 'number' &&
results.totalRollouts > 0
);
},
/**
* Setup mock LLM responses for testing
*/
setupMockLLMResponses(responses: string[]): void {
// In a real implementation, this would configure mock responses
// For now, we just store them for potential use
(globalThis as Record<string, unknown>).mockLLMResponses = responses;
},
};
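/*
 * Example usage (an illustrative sketch; the test names, task description, and
 * assertions below are assumptions, not part of this setup file):
 *
 *   import { describe, it, expect } from 'vitest';
 *
 *   describe('evolution pipeline (integration)', () => {
 *     it('produces verifiable results for a task', async () => {
 *       const taskId = await globalThis.integrationUtils.createTestTask('Summarize a changelog');
 *       expect(taskId).toMatch(/^task-/);
 *
 *       const results = await globalThis.integrationUtils.executeFullEvolution('Summarize a changelog');
 *       expect(globalThis.integrationUtils.verifyEvolutionResults(results)).toBe(true);
 *     });
 *   });
 */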
export {};