ProjectBrain.ts (12.2 kB)
import { RedisClient } from './RedisClient.js';
import { SummaryMerger } from './SummaryMerger.js';
import { SessionIdManager } from '../utils/sessionId.js';
import { filterExistingFiles } from '../utils/validation.js';
import { ProjectState, CheckpointHistory } from '../types/schema.js';
import { logger } from '../utils/logger.js';

/**
 * Main orchestrator for the infinite context system
 */
export class ProjectBrain {
  private redis: RedisClient;
  private merger: SummaryMerger;
  private sessionManager: SessionIdManager;
  private currentSessionId: string | null = null;

  // Failure tracking for history saving
  private historyFailureCount = 0;
  private readonly MAX_HISTORY_FAILURES = 3;

  // File existence cache
  private fileExistenceCache = new Map<string, { exists: boolean; timestamp: number }>();
  private readonly FILE_CACHE_TTL_MS = 30000; // 30 seconds

  constructor(
    redisUrl: string,
    geminiApiKey: string,
    anthropicApiKey: string,
    projectRoot: string
  ) {
    this.redis = new RedisClient(redisUrl);
    this.merger = new SummaryMerger(geminiApiKey, anthropicApiKey);
    this.sessionManager = new SessionIdManager(projectRoot);
  }

  /**
   * Initializes the project brain (connects to Redis, loads session)
   */
  async initialize(): Promise<void> {
    try {
      // Connect to Redis
      await this.redis.connect();

      // Get or create session ID
      this.currentSessionId = await this.sessionManager.getOrCreate();

      // Try to acquire session lock
      const lockAcquired = await this.redis.acquireSessionLock(this.currentSessionId);
      if (!lockAcquired) {
        logger.warn(
          'Another session may be active. Proceeding anyway, but concurrent usage may cause conflicts.'
        );
      }

      logger.info('ProjectBrain initialized', { sessionId: this.currentSessionId });
    } catch (error) {
      logger.error('Failed to initialize ProjectBrain', { error });
      throw new Error(`Initialization failed: ${error}`);
    }
  }

  /**
   * Shuts down the project brain (releases lock, disconnects)
   */
  async shutdown(): Promise<void> {
    try {
      if (this.currentSessionId) {
        await this.redis.releaseSessionLock(this.currentSessionId);
      }
      await this.redis.disconnect();
      logger.info('ProjectBrain shut down successfully');
    } catch (error) {
      logger.error('Error during shutdown', { error });
    }
  }

  /**
   * Ensures session is initialized
   */
  private ensureInitialized(): string {
    if (!this.currentSessionId) {
      throw new Error('ProjectBrain not initialized. Call initialize() first.');
    }
    return this.currentSessionId;
  }

  /**
   * Filters existing files using a cache to reduce file system calls
   */
  private async filterExistingFilesWithCache(filePaths: string[]): Promise<string[]> {
    const now = Date.now();
    const results: string[] = [];
    const toCheck: string[] = [];

    for (const path of filePaths) {
      const cached = this.fileExistenceCache.get(path);
      if (cached && (now - cached.timestamp) < this.FILE_CACHE_TTL_MS) {
        if (cached.exists) results.push(path);
      } else {
        toCheck.push(path);
      }
    }

    if (toCheck.length > 0) {
      // Check uncached files
      const checked = await filterExistingFiles(toCheck);
      for (const path of toCheck) {
        const exists = checked.includes(path);
        this.fileExistenceCache.set(path, { exists, timestamp: now });
        if (exists) results.push(path);
      }
    }

    return results;
  }

  /**
   * Checkpoints the current context to Redis
   */
  async checkpoint(context: string, tokenCount: number): Promise<string> {
    const sessionId = this.ensureInitialized();
    const startTime = Date.now();

    try {
      logger.info('Starting checkpoint', { sessionId, tokenCount });

      // Update state with locking
      const updatedState = await this.redis.updateStateWithLock(sessionId, async (oldState) => {
        // Merge old state with new context using LLM
        const merged = await this.merger.merge(oldState, context, tokenCount);

        // Update token count
        merged.meta.token_budget_used = tokenCount;

        // Validate and clean active files
        merged.active_context.active_files = await this.filterExistingFilesWithCache(
          merged.active_context.active_files
        );

        return merged;
      });

      // Save to checkpoint history (async, non-blocking)
      const duration = Date.now() - startTime;
      const historyEntry: CheckpointHistory = {
        version: updatedState.meta.version,
        timestamp: updatedState.meta.last_checkpoint,
        merge_duration_ms: duration,
        token_count: tokenCount,
        context_ratio: tokenCount / 200000, // Assuming 200k token limit
        state: updatedState,
      };

      // Fire and forget - don't await
      this.redis.saveCheckpointHistory(sessionId, historyEntry).catch((error) => {
        this.historyFailureCount++;
        logger.warn('Failed to save checkpoint history (non-critical)', {
          error,
          failureCount: this.historyFailureCount,
        });
        if (this.historyFailureCount >= this.MAX_HISTORY_FAILURES) {
          logger.error(
            'Multiple checkpoint history failures detected. Rollback may not work correctly.'
          );
        }
      });

      // Refresh session lock
      await this.redis.refreshSessionLock(sessionId);

      logger.info('Checkpoint completed successfully', {
        version: updatedState.meta.version,
        duration,
      });

      return `Checkpoint saved successfully (version ${updatedState.meta.version}, ${duration}ms)`;
    } catch (error) {
      logger.error('Checkpoint failed', { error, sessionId });
      throw new Error(`Checkpoint failed: ${error}`);
    }
  }

  /**
   * Resumes from the last checkpoint
   */
  async resume(): Promise<string> {
    const sessionId = this.ensureInitialized();

    try {
      logger.info('Resuming from last checkpoint', { sessionId });

      const state = await this.redis.getState(sessionId);

      // Validate active files still exist
      const validatedFiles = await this.filterExistingFilesWithCache(
        state.active_context.active_files
      );

      // Format context for Claude
      const formatted = this.formatStateForResume(state, validatedFiles);

      logger.info('Resume completed', {
        version: state.meta.version,
        activeFiles: validatedFiles.length,
      });

      return formatted;
    } catch (error) {
      logger.error('Resume failed', { error, sessionId });
      throw new Error(`Resume failed: ${error}`);
    }
  }

  /**
   * Formats project state into human-readable context for Claude
   */
  private formatStateForResume(state: ProjectState, validatedFiles: string[]): string {
    const sections: string[] = [];

    sections.push('# Project Context Resume');
    sections.push('');

    // Overview
    if (state.project_context.overview) {
      sections.push('## Overview');
      sections.push(state.project_context.overview);
      sections.push('');
    }

    // Architecture
    if (state.project_context.architecture) {
      sections.push('## Architecture');
      sections.push(state.project_context.architecture);
      sections.push('');
    }

    // Current task
    if (state.active_context.current_task) {
      sections.push('## Current Task');
      sections.push(state.active_context.current_task);
      sections.push('');
    }

    // Active files
    if (validatedFiles.length > 0) {
      sections.push('## Active Files');
      validatedFiles.forEach((file) => sections.push(`- ${file}`));
      sections.push('');
    }

    // Recent changes
    if (state.project_context.recent_changes.length > 0) {
      sections.push('## Recent Changes');
      state.project_context.recent_changes.slice(0, 5).forEach((change) => {
        sections.push(`### ${new Date(change.timestamp).toLocaleString()}`);
        sections.push(change.summary);
        if (change.files.length > 0) {
          sections.push(`Files: ${change.files.join(', ')}`);
        }
        sections.push('');
      });
    }

    // Active decisions
    if (state.active_context.active_decisions.length > 0) {
      sections.push('## Active Decisions');
      state.active_context.active_decisions.forEach((decision) => {
        const status = decision.status === 'decided' ? '✓' : '?';
        sections.push(`${status} ${decision.question}`);
        if (decision.decision) {
          sections.push(` → ${decision.decision}`);
        }
      });
      sections.push('');
    }

    // Metadata
    sections.push('---');
    sections.push(`Last checkpoint: ${new Date(state.meta.last_checkpoint).toLocaleString()}`);
    sections.push(`Version: ${state.meta.version}`);
    sections.push(`Token budget used: ${state.meta.token_budget_used.toLocaleString()}`);

    return sections.join('\n');
  }

  /**
   * Rolls back to a previous checkpoint version
   */
  async rollback(steps: number = 1): Promise<string> {
    const sessionId = this.ensureInitialized();

    try {
      logger.info('Rolling back checkpoint', { sessionId, steps });

      const history = await this.redis.getCheckpointHistory(sessionId);

      if (history.length === 0) {
        return 'No checkpoint history available to rollback.';
      }

      // history[0] is the current state, so at most history.length - 1 steps can be undone
      if (steps >= history.length) {
        return `Only ${history.length} checkpoints available. Cannot rollback ${steps} steps.`;
      }

      // Get the target checkpoint (index is steps because history[0] is current)
      const targetCheckpoint = history[steps];

      // Restore the state
      await this.redis.updateStateWithLock(sessionId, async () => {
        return targetCheckpoint.state;
      });

      logger.info('Rollback completed', {
        targetVersion: targetCheckpoint.version,
        targetTimestamp: targetCheckpoint.timestamp,
      });

      return `Rolled back to version ${targetCheckpoint.version} (${new Date(
        targetCheckpoint.timestamp
      ).toLocaleString()})`;
    } catch (error) {
      logger.error('Rollback failed', { error, sessionId });
      throw new Error(`Rollback failed: ${error}`);
    }
  }

  /**
   * Gets the current status/metadata
   */
  async status(): Promise<string> {
    const sessionId = this.ensureInitialized();

    try {
      const state = await this.redis.getState(sessionId);
      const history = await this.redis.getCheckpointHistory(sessionId);

      const sections: string[] = [];
      sections.push('# Project State Status');
      sections.push('');
      sections.push(`Session ID: ${sessionId}`);
      sections.push(`Version: ${state.meta.version}`);
      sections.push(`Last Checkpoint: ${new Date(state.meta.last_checkpoint).toLocaleString()}`);
      sections.push(`Last Access: ${new Date(state.meta.last_access).toLocaleString()}`);
      sections.push(
        `Token Budget Used: ${state.meta.token_budget_used.toLocaleString()} / 200,000 (${(
          (state.meta.token_budget_used / 200000) * 100
        ).toFixed(1)}%)`
      );
      sections.push('');
      sections.push(`Active Files: ${state.active_context.active_files.length}`);
      state.active_context.active_files.forEach((file) => {
        sections.push(` - ${file}`);
      });
      sections.push('');
      sections.push(`Active Decisions: ${state.active_context.active_decisions.length}`);
      state.active_context.active_decisions.forEach((decision) => {
        sections.push(` - [${decision.status}] ${decision.question}`);
      });
      sections.push('');
      sections.push(`Checkpoint History: ${history.length} versions available`);
      history.forEach((h, i) => {
        sections.push(
          ` ${i}. v${h.version} - ${new Date(h.timestamp).toLocaleString()} (${h.merge_duration_ms}ms, ${h.token_count} tokens)`
        );
      });

      return sections.join('\n');
    } catch (error) {
      logger.error('Status check failed', { error, sessionId });
      throw new Error(`Status check failed: ${error}`);
    }
  }
}
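The class is exported as a plain ES module, so a small driver script can exercise the full checkpoint/resume lifecycle. The sketch below is illustrative only: the environment variable names (REDIS_URL, GEMINI_API_KEY, ANTHROPIC_API_KEY), the sample context string, and the token estimate are assumptions, not part of the repository.

// usage-sketch.ts - hypothetical driver for ProjectBrain (not part of the repository)
import { ProjectBrain } from './ProjectBrain.js';

async function main(): Promise<void> {
  // Assumed environment variable names; substitute whatever your deployment uses.
  const brain = new ProjectBrain(
    process.env.REDIS_URL ?? 'redis://localhost:6379',
    process.env.GEMINI_API_KEY ?? '',
    process.env.ANTHROPIC_API_KEY ?? '',
    process.cwd()
  );

  await brain.initialize();
  try {
    // Persist the current working context (context string and token estimate are supplied by the caller).
    console.log(await brain.checkpoint('Implemented the rollback guard in ProjectBrain.', 12500));

    // Inspect stored metadata and restore the formatted context.
    console.log(await brain.status());
    console.log(await brain.resume());

    // Step back one checkpoint if needed.
    console.log(await brain.rollback(1));
  } finally {
    // Always release the session lock and close the Redis connection.
    await brain.shutdown();
  }
}

main().catch((error) => {
  console.error(error);
  process.exit(1);
});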

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/coderdeep11/claude-memory-mcp'
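The same lookup can also be made from TypeScript; a minimal sketch, assuming Node 18+ (built-in fetch) and that the endpoint returns JSON for public listings without authentication:

// query-glama.ts - hypothetical example, not part of the official documentation
const response = await fetch('https://glama.ai/api/mcp/v1/servers/coderdeep11/claude-memory-mcp');
if (!response.ok) {
  throw new Error(`Glama API request failed with status ${response.status}`);
}
const server: unknown = await response.json();
console.log(JSON.stringify(server, null, 2));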

If you have feedback or need assistance with the MCP directory API, please join our Discord server.