session_compact_ledger
Automatically summarize old session ledger entries to prevent indefinite growth and maintain fast context loading in Prism MCP. Use dry-run mode to preview compaction before execution.
Instructions
Auto-compact old session ledger entries by rolling them up into AI-generated summaries. This prevents the ledger from growing indefinitely and keeps deep context loading fast.
How it works:
1. Finds projects with more entries than the threshold
2. Summarizes old entries using Gemini (keeps recent entries intact)
3. Inserts a rollup entry and archives the originals (soft-delete)
Use dry_run=true to preview what would be compacted without executing.
Input Schema
| Name | Required | Description | Default |
|---|---|---|---|
| project | No | Optional: compact a specific project. If omitted, auto-detects all candidates. | |
| threshold | No | Minimum entries before compaction triggers. | 50 |
| keep_recent | No | Number of recent entries to keep intact. | 10 |
| dry_run | No | If true, only preview what would be compacted without executing. | false |
Implementation Reference
- src/tools/compactionHandler.ts:66-233 (handler) — Main handler function for session_compact_ledger, responsible for identifying compaction candidates, performing Gemini-based summarization of ledger entries, and saving a rollup entry.
export async function compactLedgerHandler(args: unknown) { if (!isCompactLedgerArgs(args)) { throw new Error("Invalid arguments for session_compact_ledger"); } const { project, threshold = 50, keep_recent = 10, dry_run = false, } = args; console.error( `[compact_ledger] ${dry_run ? "DRY RUN: " : ""}` + `project=${project || "auto-detect"}, threshold=${threshold}, keep_recent=${keep_recent}` ); const storage = await getStorage(); // Step 1: Find candidates let candidates: any[]; if (project) { // If specific project, check it directly const entries = await storage.getLedgerEntries({ project: `eq.${project}`, user_id: `eq.${PRISM_USER_ID}`, "archived_at": "is.null", "is_rollup": "eq.false", select: "id", }); const count = entries.length; if (count <= threshold) { return { content: [{ type: "text", text: `✅ Project "${project}" has ${count} active entries ` + `(threshold: ${threshold}). No compaction needed.`, }], isError: false, }; } candidates = [{ project, total_entries: count, to_compact: count - keep_recent }]; } else { // Auto-detect candidates using storage backend candidates = await storage.getCompactionCandidates(threshold, keep_recent, PRISM_USER_ID); } if (candidates.length === 0) { return { content: [{ type: "text", text: `✅ No projects exceed the compaction threshold (${threshold} entries). 
` + `All clear!`, }], isError: false, }; } // Dry run: just report candidates if (dry_run) { const summary = candidates.map(c => `• ${c.project}: ${c.total_entries} entries (${c.to_compact} would be compacted)` ).join("\n"); return { content: [{ type: "text", text: `🔍 Compaction preview (dry run):\n\n${summary}\n\n` + `Run without dry_run to execute compaction.`, }], isError: false, }; } // Step 2: Compact each candidate project const results: string[] = []; for (const candidate of candidates) { const proj = candidate.project; const toCompact = Math.min(candidate.to_compact, MAX_ENTRIES_PER_RUN); console.error(`[compact_ledger] Compacting ${toCompact} entries for "${proj}"`); // Fetch oldest entries (the ones to be rolled up) const oldEntries = await storage.getLedgerEntries({ project: `eq.${proj}`, user_id: `eq.${PRISM_USER_ID}`, "archived_at": "is.null", "is_rollup": "eq.false", order: "created_at.asc", limit: String(toCompact), select: "id,summary,decisions,files_changed,keywords,session_date", }); if (oldEntries.length === 0) { results.push(`• ${proj}: no entries to compact`); continue; } // Step 3: Chunked summarization const chunks: any[][] = []; for (let i = 0; i < oldEntries.length; i += COMPACTION_CHUNK_SIZE) { chunks.push(oldEntries.slice(i, i + COMPACTION_CHUNK_SIZE)); } let finalSummary: string; if (chunks.length === 1) { finalSummary = await summarizeEntries(chunks[0]); } else { const chunkSummaries = await Promise.all( chunks.map(chunk => summarizeEntries(chunk)) ); const metaEntries = chunkSummaries.map((s, i) => ({ session_date: `chunk ${i + 1}`, summary: s, decisions: [], files_changed: [], })); finalSummary = await summarizeEntries(metaEntries); } // Collect all unique keywords from rolled-up entries const allKeywords = [...new Set( oldEntries.flatMap((e: any) => e.keywords || []) )]; // Collect all unique files changed const allFiles = [...new Set( oldEntries.flatMap((e: any) => e.files_changed || []) )]; // Step 4: Insert rollup entry via 
storage backend await storage.saveLedger({ project: proj, user_id: PRISM_USER_ID, summary: `[ROLLUP of ${oldEntries.length} sessions] ${finalSummary}`, keywords: allKeywords, files_changed: allFiles, decisions: [`Rolled up ${oldEntries.length} sessions on ${new Date().toISOString()}`], is_rollup: true, rollup_count: oldEntries.length, conversation_id: `rollup-${Date.now()}`, }); // Step 5: Archive old entries (soft-delete) for (const entry of oldEntries) { await storage.patchLedger((entry as any).id, { archived_at: new Date().toISOString(), }); } results.push( `• ${proj}: ${oldEntries.length} entries → 1 rollup ` + `(${allKeywords.length} keywords preserved)` ); } return { content: [{ type: "text", text: `🧹 Ledger compaction complete:\n\n${results.join("\n")}\n\n` + `Original entries are archived (soft-deleted), not permanently removed.`, }], isError: false, }; } - src/tools/compactionHandler.ts:21-30 (schema)Type guard and argument definition for session_compact_ledger tool inputs.
export function isCompactLedgerArgs( args: unknown ): args is { project?: string; threshold?: number; keep_recent?: number; dry_run?: boolean; } { return typeof args === "object" && args !== null; } - src/tools/sessionMemoryDefinitions.ts:215-221 (registration)Registration and schema definition of the session_compact_ledger tool.
export const SESSION_COMPACT_LEDGER_TOOL: Tool = { name: "session_compact_ledger", description: "Auto-compact old session ledger entries by rolling them up into AI-generated summaries. " + "This prevents the ledger from growing indefinitely and keeps deep context loading fast.\n\n" + "How it works:\n" + "1. Finds projects with more entries than the threshold\n" +