/**
* Component-Specific Cache Implementations for GEPA
* Provides specialized caching for Evolution Engine, Pareto Frontier, LLM Adapter,
* Trajectory Store, and Reflection Engine
*/
import { createHash } from 'node:crypto';
import { CacheManager, CacheConfig, CacheStatistics } from './cache-manager';
import type {
PromptCandidate,
ExecutionTrajectory,
ReflectionAnalysis,
FailurePattern
} from '../../types/gepa';
import type { ConvergenceMetrics, ParetoPoint } from '../pareto-frontier';
import type { ClaudeResponse } from '../../services/llm-adapter';
// Shape of a single recommendation produced by batch analysis
interface BatchAnalysisRecommendation {
  type: string;
  description: string;
  confidence: number;
  priority: 'low' | 'medium' | 'high';
  [key: string]: unknown;
}
// Contract shared by every component cache so CacheRegistry can manage them uniformly
interface CacheableComponent {
  getCacheStatistics(): Promise<CacheStatistics>;
  shutdown?(): Promise<void>;
}
/**
* Evolution Engine Cache - Optimizes candidate evaluation and generation
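 *
 * Illustrative usage (a sketch: the IDs and score are placeholders, and the real
 * evaluator is assumed to live elsewhere):
 * ```ts
 * const cache = new EvolutionEngineCache();
 * const cached = await cache.getEvaluation('candidate-1', 'task-1');
 * if (cached === null) {
 *   const score = 0.82; // produced by the real evaluator, not shown here
 *   await cache.cacheEvaluation('candidate-1', 'task-1', score);
 * }
 * await cache.shutdown();
 * ```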
*/
export class EvolutionEngineCache {
private readonly cacheManager: CacheManager;
constructor(config: Partial<CacheConfig> = {}) {
this.cacheManager = new CacheManager({
l1MaxSize: 32 * 1024 * 1024, // 32MB for candidate data
l1MaxEntries: 5000,
l1DefaultTtl: 1800000, // 30 minutes
l2DefaultTtl: 7200000, // 2 hours
enableCacheWarming: true,
...config
});
}
/**
* Cache candidate evaluation result
*/
async cacheEvaluation(
candidateId: string,
taskId: string,
score: number,
trajectory?: ExecutionTrajectory
): Promise<void> {
const key = this.getEvaluationKey(candidateId, taskId);
await this.cacheManager.set(key, {
score,
trajectory,
timestamp: Date.now()
}, {
ttl: 3600000, // 1 hour for evaluations
storeInL2: true
});
}
/**
* Get cached evaluation result
*/
async getEvaluation(candidateId: string, taskId: string): Promise<{
score: number;
trajectory?: ExecutionTrajectory;
timestamp: number;
} | null> {
const key = this.getEvaluationKey(candidateId, taskId);
return await this.cacheManager.get(key);
}
/**
* Cache generation metrics for convergence analysis
*/
async cacheGenerationMetrics(
evolutionId: string,
generation: number,
metrics: {
averageFitness: number;
bestFitness: number;
diversityScore: number;
mutationSuccessRate: number;
evaluationTime: number;
}
): Promise<void> {
const key = `generation:${evolutionId}:${generation}`;
await this.cacheManager.set(key, metrics, {
ttl: 86400000, // 24 hours
storeInL2: true
});
}
/**
* Get cached generation metrics
*/
  async getGenerationMetrics(evolutionId: string, generation: number): Promise<{
    averageFitness: number;
    bestFitness: number;
    diversityScore: number;
    mutationSuccessRate: number;
    evaluationTime: number;
  } | null> {
const key = `generation:${evolutionId}:${generation}`;
return await this.cacheManager.get(key);
}
/**
* Cache mutation results to avoid duplicate work
*/
async cacheMutation(
basePromptHash: string,
mutationType: string,
parameters: Record<string, unknown>,
result: PromptCandidate
): Promise<void> {
const key = this.getMutationKey(basePromptHash, mutationType, parameters);
await this.cacheManager.set(key, result, {
ttl: 1800000, // 30 minutes
storeInL2: false // Mutations are transient
});
}
/**
* Get cached mutation result
*/
async getMutation(
basePromptHash: string,
mutationType: string,
parameters: Record<string, unknown>
): Promise<PromptCandidate | null> {
const key = this.getMutationKey(basePromptHash, mutationType, parameters);
return await this.cacheManager.get(key);
}
/**
* Warm cache with recent successful candidates
*/
async warmWithSuccessfulCandidates(candidates: PromptCandidate[]): Promise<void> {
await this.cacheManager.warmCache({
enabled: true,
keys: candidates.map(c => `candidate:${c.id}`),
dataLoader: async (key) => {
const candidateId = key.split(':')[1];
return candidates.find(c => c.id === candidateId);
},
priority: 'high'
});
}
private getEvaluationKey(candidateId: string, taskId: string): string {
return `eval:${candidateId}:${taskId}`;
}
private getMutationKey(
basePromptHash: string,
mutationType: string,
parameters: Record<string, unknown>
): string {
const paramHash = this.hashObject(parameters);
return `mutation:${basePromptHash}:${mutationType}:${paramHash}`;
}
  private hashObject(obj: Record<string, unknown>): string {
    // Hash the full JSON form; a truncated base64 of the raw JSON would collide for
    // any two objects sharing their first few bytes (e.g. the same leading key)
    return createHash('sha1').update(JSON.stringify(obj)).digest('hex').substring(0, 16);
  }
  async getCacheStatistics(): Promise<CacheStatistics> {
    return this.cacheManager.getStatistics();
  }
async shutdown(): Promise<void> {
await this.cacheManager.shutdown();
}
}
/**
* Pareto Frontier Cache - Optimizes dominance calculations and frontier operations
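 *
 * Illustrative usage (a sketch; candidate IDs are placeholders). Dominance between
 * two fixed candidates is immutable, which is why this memo carries no TTL:
 * ```ts
 * const cache = new ParetoFrontierCache();
 * let dominates = cache.getDominance('cand-a', 'cand-b');
 * if (dominates === null) {
 *   dominates = true; // result of the real dominance comparison, not shown here
 *   cache.cacheDominance('cand-a', 'cand-b', dominates);
 * }
 * ```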
*/
export class ParetoFrontierCache {
private readonly cacheManager: CacheManager;
private readonly dominanceCache = new Map<string, boolean>();
private readonly hypervolumeCache = new Map<string, { value: number; timestamp: number }>();
constructor(config: Partial<CacheConfig> = {}) {
this.cacheManager = new CacheManager({
l1MaxSize: 16 * 1024 * 1024, // 16MB for frontier data
l1MaxEntries: 2000,
l1DefaultTtl: 3600000, // 1 hour
l2DefaultTtl: 14400000, // 4 hours
...config
});
}
/**
* Cache dominance relationship between two candidates
*/
cacheDominance(candidateA: string, candidateB: string, dominates: boolean): void {
const key = `${candidateA}:${candidateB}`;
this.dominanceCache.set(key, dominates);
    // Bound the cache: Maps iterate in insertion order, so deleting the first key evicts the oldest entry (FIFO)
if (this.dominanceCache.size > 10000) {
const firstKey = this.dominanceCache.keys().next().value;
if (firstKey) {
this.dominanceCache.delete(firstKey);
}
}
}
/**
* Get cached dominance relationship
*/
getDominance(candidateA: string, candidateB: string): boolean | null {
const key = `${candidateA}:${candidateB}`;
return this.dominanceCache.get(key) ?? null;
}
/**
* Cache hypervolume calculation result
*/
cacheHypervolume(frontierHash: string, referencePoint: Record<string, number>, value: number): void {
const key = this.getHypervolumeKey(frontierHash, referencePoint);
this.hypervolumeCache.set(key, {
value,
timestamp: Date.now()
});
}
/**
* Get cached hypervolume result
*/
getHypervolume(frontierHash: string, referencePoint: Record<string, number>): number | null {
const key = this.getHypervolumeKey(frontierHash, referencePoint);
const cached = this.hypervolumeCache.get(key);
if (cached && Date.now() - cached.timestamp < 1800000) { // 30 minutes
return cached.value;
}
if (cached) {
this.hypervolumeCache.delete(key);
}
return null;
}
/**
* Cache convergence metrics for frontier analysis
*/
async cacheConvergenceMetrics(
frontierHash: string,
metrics: ConvergenceMetrics
): Promise<void> {
const key = `convergence:${frontierHash}`;
await this.cacheManager.set(key, metrics, {
ttl: 1800000, // 30 minutes
storeInL2: true
});
}
/**
* Get cached convergence metrics
*/
async getConvergenceMetrics(frontierHash: string): Promise<ConvergenceMetrics | null> {
const key = `convergence:${frontierHash}`;
return await this.cacheManager.get(key);
}
/**
* Cache frontier snapshot for quick restoration
*/
async cacheFrontierSnapshot(
snapshotId: string,
frontier: ParetoPoint[],
metadata: Record<string, unknown>
): Promise<void> {
const key = `snapshot:${snapshotId}`;
await this.cacheManager.set(key, {
frontier,
metadata,
timestamp: Date.now()
}, {
ttl: 7200000, // 2 hours
storeInL2: true
});
}
/**
* Get cached frontier snapshot
*/
async getFrontierSnapshot(snapshotId: string): Promise<{
frontier: ParetoPoint[];
metadata: Record<string, unknown>;
timestamp: number;
} | null> {
const key = `snapshot:${snapshotId}`;
return await this.cacheManager.get(key);
}
private getHypervolumeKey(frontierHash: string, referencePoint: Record<string, number>): string {
const refHash = this.hashObject(referencePoint);
return `hypervolume:${frontierHash}:${refHash}`;
}
  private hashObject(obj: Record<string, unknown>): string {
    // Hash the full JSON form; a truncated base64 of the raw JSON would collide for
    // any two objects sharing their first few bytes (e.g. the same leading key)
    return createHash('sha1').update(JSON.stringify(obj)).digest('hex').substring(0, 16);
  }
  async getCacheStatistics(): Promise<CacheStatistics> {
    return this.cacheManager.getStatistics();
  }
async shutdown(): Promise<void> {
await this.cacheManager.shutdown();
}
}
/**
* LLM Adapter Cache - Optimizes response caching and process management
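 *
 * Illustrative usage (a sketch; the prompt text, options, and callClaude are
 * placeholders for the real adapter call):
 * ```ts
 * const cache = new LLMAdapterCache();
 * const prompt = 'Summarize the trajectory'; // placeholder prompt
 * const opts = { temperature: 0.2, maxTokens: 1024 };
 * let response = await cache.getResponse(prompt, undefined, opts);
 * if (response === null) {
 *   response = await callClaude(prompt, opts); // hypothetical adapter invocation
 *   await cache.cacheResponse(prompt, undefined, response, opts);
 * }
 * ```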
*/
export class LLMAdapterCache {
private readonly cacheManager: CacheManager;
constructor(config: Partial<CacheConfig> = {}) {
this.cacheManager = new CacheManager({
l1MaxSize: 64 * 1024 * 1024, // 64MB for LLM responses
l1MaxEntries: 1000,
l1DefaultTtl: 7200000, // 2 hours
l2DefaultTtl: 86400000, // 24 hours
l2CompressionEnabled: true, // Compress large responses
...config
});
}
/**
* Cache LLM response with content-based key
*/
async cacheResponse(
prompt: string,
systemPrompt: string | undefined,
response: ClaudeResponse,
options?: { temperature?: number; maxTokens?: number }
): Promise<void> {
const key = this.getResponseKey(prompt, systemPrompt, options);
await this.cacheManager.set(key, response, {
ttl: 7200000, // 2 hours
storeInL2: true
});
}
/**
* Get cached LLM response
*/
async getResponse(
prompt: string,
systemPrompt: string | undefined,
options?: { temperature?: number; maxTokens?: number }
): Promise<ClaudeResponse | null> {
const key = this.getResponseKey(prompt, systemPrompt, options);
return await this.cacheManager.get(key);
}
/**
* Cache trajectory analysis result
*/
async cacheTrajectoryAnalysis(
trajectoryId: string,
targetPrompt: string,
analysis: ReflectionAnalysis
): Promise<void> {
const key = `analysis:${trajectoryId}:${this.hashString(targetPrompt)}`;
await this.cacheManager.set(key, analysis, {
ttl: 14400000, // 4 hours
storeInL2: true
});
}
/**
* Get cached trajectory analysis
*/
async getTrajectoryAnalysis(
trajectoryId: string,
targetPrompt: string
): Promise<ReflectionAnalysis | null> {
const key = `analysis:${trajectoryId}:${this.hashString(targetPrompt)}`;
return await this.cacheManager.get(key);
}
/**
* Cache mutation generation result
*/
async cacheMutationGeneration(
basePrompt: string,
improvement: Record<string, unknown>,
result: string
): Promise<void> {
const key = this.getMutationGenerationKey(basePrompt, improvement);
await this.cacheManager.set(key, result, {
ttl: 3600000, // 1 hour
storeInL2: true
});
}
/**
* Get cached mutation generation result
*/
async getMutationGeneration(
basePrompt: string,
improvement: Record<string, unknown>
): Promise<string | null> {
const key = this.getMutationGenerationKey(basePrompt, improvement);
return await this.cacheManager.get(key);
}
private getResponseKey(
prompt: string,
systemPrompt: string | undefined,
options?: { temperature?: number; maxTokens?: number }
): string {
const fullPrompt = systemPrompt ? `${systemPrompt}\n\n${prompt}` : prompt;
const optionsStr = JSON.stringify(options || {});
return `response:${this.hashString(fullPrompt)}:${this.hashString(optionsStr)}`;
}
private getMutationGenerationKey(
basePrompt: string,
improvement: Record<string, unknown>
): string {
const improvementStr = JSON.stringify(improvement);
return `mutation:${this.hashString(basePrompt)}:${this.hashString(improvementStr)}`;
}
  private hashString(str: string): string {
    // SHA-1 truncated to 64 bits keeps collisions negligible; a 32-bit rolling hash
    // over full prompts risks serving a response cached for a different prompt
    return createHash('sha1').update(str).digest('hex').substring(0, 16);
  }
  async getCacheStatistics(): Promise<CacheStatistics> {
    return this.cacheManager.getStatistics();
  }
async shutdown(): Promise<void> {
await this.cacheManager.shutdown();
}
}
/**
* Trajectory Store Cache - Optimizes query results and index operations
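 *
 * Illustrative usage (a sketch; the filter and store.query are placeholders for the
 * real backing store). Query results stay fresh for five minutes:
 * ```ts
 * const cache = new TrajectoryStoreCache();
 * const filter = { taskId: 'task-1', success: true };
 * let results = cache.getQueryResult(filter);
 * if (results === null) {
 *   results = await store.query(filter); // hypothetical backing-store lookup
 *   cache.cacheQueryResult(filter, results);
 * }
 * ```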
*/
export class TrajectoryStoreCache {
private readonly cacheManager: CacheManager;
private readonly queryCache = new Map<string, {
result: ExecutionTrajectory[];
timestamp: number;
}>();
constructor(config: Partial<CacheConfig> = {}) {
this.cacheManager = new CacheManager({
l1MaxSize: 24 * 1024 * 1024, // 24MB for trajectory data
l1MaxEntries: 1500,
l1DefaultTtl: 1800000, // 30 minutes
l2DefaultTtl: 7200000, // 2 hours
...config
});
}
/**
* Cache individual trajectory
*/
async cacheTrajectory(trajectory: ExecutionTrajectory): Promise<void> {
const key = `trajectory:${trajectory.id}`;
await this.cacheManager.set(key, trajectory, {
ttl: 7200000, // 2 hours
storeInL2: true
});
}
/**
* Get cached trajectory
*/
async getTrajectory(id: string): Promise<ExecutionTrajectory | null> {
const key = `trajectory:${id}`;
return await this.cacheManager.get(key);
}
/**
* Cache query results with filter-based key
*/
cacheQueryResult(
filter: Record<string, unknown>,
result: ExecutionTrajectory[]
): void {
const key = this.getQueryKey(filter);
this.queryCache.set(key, {
result: [...result], // Clone to prevent mutation
timestamp: Date.now()
});
    // Bound the query cache: Map insertion order makes this FIFO eviction of the oldest entry
if (this.queryCache.size > 100) {
const firstKey = this.queryCache.keys().next().value;
if (firstKey) {
this.queryCache.delete(firstKey);
}
}
}
/**
* Get cached query result
*/
getQueryResult(filter: Record<string, unknown>): ExecutionTrajectory[] | null {
const key = this.getQueryKey(filter);
const cached = this.queryCache.get(key);
if (cached && Date.now() - cached.timestamp < 300000) { // 5 minutes
return [...cached.result]; // Clone to prevent mutation
}
if (cached) {
this.queryCache.delete(key);
}
return null;
}
/**
* Cache index entries for fast lookups
*/
async cacheIndexEntry(
trajectoryId: string,
indexData: {
promptId: string;
taskId: string;
timestamp: Date;
success: boolean;
score: number;
}
): Promise<void> {
const key = `index:${trajectoryId}`;
await this.cacheManager.set(key, indexData, {
ttl: 14400000, // 4 hours
storeInL2: true
});
}
/**
* Get cached index entry
*/
async getIndexEntry(trajectoryId: string): Promise<{
promptId: string;
taskId: string;
timestamp: Date;
success: boolean;
score: number;
} | null> {
const key = `index:${trajectoryId}`;
return await this.cacheManager.get(key);
}
/**
* Cache aggregated statistics
*/
async cacheStatistics(
type: 'prompt' | 'task' | 'global',
id: string,
stats: Record<string, unknown>
): Promise<void> {
const key = `stats:${type}:${id}`;
await this.cacheManager.set(key, stats, {
ttl: 1800000, // 30 minutes
storeInL2: false // Statistics are frequently updated
});
}
/**
* Get cached statistics
*/
async getStatistics(type: 'prompt' | 'task' | 'global', id: string): Promise<Record<string, unknown> | null> {
const key = `stats:${type}:${id}`;
return await this.cacheManager.get(key);
}
private getQueryKey(filter: Record<string, unknown>): string {
// Sort keys for consistent hashing
const sortedFilter = Object.keys(filter)
.sort()
.reduce((result, key) => {
result[key] = filter[key];
return result;
}, {} as Record<string, unknown>);
return `query:${this.hashObject(sortedFilter)}`;
}
  private hashObject(obj: Record<string, unknown>): string {
    // Hash the full JSON form; a truncated base64 of the raw JSON would collide for
    // any two objects sharing their first few bytes (e.g. the same leading key)
    return createHash('sha1').update(JSON.stringify(obj)).digest('hex').substring(0, 16);
  }
  async getCacheStatistics(): Promise<CacheStatistics> {
    return this.cacheManager.getStatistics();
  }
async shutdown(): Promise<void> {
await this.cacheManager.shutdown();
}
}
/**
* Reflection Engine Cache - Optimizes pattern analysis and recommendation caching
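 *
 * Illustrative usage (a sketch; trajectory IDs are placeholders). Batch keys are
 * order-insensitive because getBatchKey sorts the IDs before hashing:
 * ```ts
 * const cache = new ReflectionEngineCache();
 * const batch = await cache.getBatchAnalysis(['t-2', 't-1']);
 * // ...hits the same entry as a lookup with ['t-1', 't-2']
 * ```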
*/
export class ReflectionEngineCache {
private readonly cacheManager: CacheManager;
private readonly patternCache = new Map<string, {
patterns: FailurePattern[];
timestamp: number;
}>();
constructor(config: Partial<CacheConfig> = {}) {
this.cacheManager = new CacheManager({
l1MaxSize: 16 * 1024 * 1024, // 16MB for analysis data
l1MaxEntries: 1000,
l1DefaultTtl: 3600000, // 1 hour
l2DefaultTtl: 14400000, // 4 hours
...config
});
}
/**
* Cache reflection analysis result
*/
async cacheAnalysis(analysis: ReflectionAnalysis): Promise<void> {
const key = `analysis:${analysis.trajectoryId}`;
await this.cacheManager.set(key, analysis, {
ttl: 7200000, // 2 hours
storeInL2: true
});
}
/**
* Get cached reflection analysis
*/
async getAnalysis(trajectoryId: string): Promise<ReflectionAnalysis | null> {
const key = `analysis:${trajectoryId}`;
return await this.cacheManager.get(key);
}
/**
* Cache failure patterns for a prompt
*/
cachePatterns(promptId: string, patterns: FailurePattern[]): void {
const key = `patterns:${promptId}`;
this.patternCache.set(key, {
patterns: [...patterns], // Clone to prevent mutation
timestamp: Date.now()
});
    // Bound the pattern cache: Map insertion order makes this FIFO eviction of the oldest entry
if (this.patternCache.size > 200) {
const firstKey = this.patternCache.keys().next().value;
if (firstKey) {
this.patternCache.delete(firstKey);
}
}
}
/**
* Get cached failure patterns
*/
getPatterns(promptId: string): FailurePattern[] | null {
const key = `patterns:${promptId}`;
const cached = this.patternCache.get(key);
if (cached && Date.now() - cached.timestamp < 1800000) { // 30 minutes
return [...cached.patterns]; // Clone to prevent mutation
}
if (cached) {
this.patternCache.delete(key);
}
return null;
}
/**
* Cache batch analysis result
*/
async cacheBatchAnalysis(
trajectoryIds: string[],
result: {
commonPatterns: FailurePattern[];
recommendations: BatchAnalysisRecommendation[];
overallConfidence: number;
}
): Promise<void> {
const key = this.getBatchKey(trajectoryIds);
await this.cacheManager.set(key, result, {
ttl: 3600000, // 1 hour
storeInL2: true
});
}
/**
* Get cached batch analysis result
*/
async getBatchAnalysis(trajectoryIds: string[]): Promise<{
commonPatterns: FailurePattern[];
recommendations: BatchAnalysisRecommendation[];
overallConfidence: number;
} | null> {
const key = this.getBatchKey(trajectoryIds);
return await this.cacheManager.get(key);
}
/**
* Cache pattern aggregation results
*/
async cacheAggregatedPatterns(
contextKey: string,
patterns: FailurePattern[],
metadata: Record<string, unknown>
): Promise<void> {
const key = `aggregated:${contextKey}`;
await this.cacheManager.set(key, {
patterns,
metadata,
timestamp: Date.now()
}, {
ttl: 1800000, // 30 minutes
storeInL2: true
});
}
/**
* Get cached aggregated patterns
*/
async getAggregatedPatterns(contextKey: string): Promise<{
patterns: FailurePattern[];
metadata: Record<string, unknown>;
timestamp: number;
} | null> {
const key = `aggregated:${contextKey}`;
return await this.cacheManager.get(key);
}
private getBatchKey(trajectoryIds: string[]): string {
const sortedIds = [...trajectoryIds].sort();
const idsStr = sortedIds.join(',');
return `batch:${this.hashString(idsStr)}`;
}
  private hashString(str: string): string {
    // SHA-1 truncated to 64 bits keeps collisions negligible; a 32-bit rolling hash
    // over joined ID lists risks aliasing two different batches to one cache key
    return createHash('sha1').update(str).digest('hex').substring(0, 16);
  }
  async getCacheStatistics(): Promise<CacheStatistics> {
    return this.cacheManager.getStatistics();
  }
async shutdown(): Promise<void> {
await this.cacheManager.shutdown();
}
}
/**
* Unified Cache Registry - Manages all component caches
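 *
 * Illustrative wiring (a sketch of the intended lifecycle):
 * ```ts
 * const registry = new CacheRegistry();
 * registry.registerCache('evolution', new EvolutionEngineCache());
 * registry.registerCache('llm', new LLMAdapterCache());
 * const stats = await registry.getGlobalStatistics();
 * await registry.shutdownAll();
 * ```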
*/
export class CacheRegistry {
private readonly caches: Map<string, CacheableComponent> = new Map();
registerCache(name: string, cache: CacheableComponent): void {
this.caches.set(name, cache);
}
  getCache<T extends CacheableComponent>(name: string): T | null {
    return (this.caches.get(name) as T | undefined) ?? null;
  }
  async getGlobalStatistics(): Promise<Record<string, CacheStatistics>> {
    const stats: Record<string, CacheStatistics> = {};
for (const [name, cache] of this.caches) {
try {
stats[name] = await cache.getCacheStatistics();
      } catch {
        // A failing component must not block the snapshot; fall back to zeroed statistics
        stats[name] = {
          l1: { hits: 0, misses: 0, hitRate: 0, size: 0, entries: 0, maxSize: 0, maxEntries: 0 },
          l2: { hits: 0, misses: 0, hitRate: 0, size: 0, entries: 0, maxSize: 0, maxEntries: 0 },
          overall: { hits: 0, misses: 0, hitRate: 0, evictions: 0, compressionRatio: 1 }
        };
}
}
return stats;
}
async shutdownAll(): Promise<void> {
const shutdownPromises: Promise<void>[] = [];
for (const cache of this.caches.values()) {
if (cache.shutdown) {
shutdownPromises.push(cache.shutdown());
}
}
await Promise.allSettled(shutdownPromises);
this.caches.clear();
}
}