m9k_info
Display memory index metrics including corpus size, search pipeline status, usage statistics, and embedding worker state for persistent conversation storage.
Instructions
Show memory index information: corpus size, search pipeline status, usage metrics, embedding worker state.
Input Schema
JSON Schema
| Name | Required | Description | Default |
|---|---|---|---|
| No arguments | — | — | — |
Implementation Reference
- src/tools/memory.ts:172-419 (handler) — The handler function for 'm9k_info' that aggregates statistics about the memory index, search pipeline, usage metrics, and current process status.
async () => { const sessions = ( ctx.db .prepare('SELECT COUNT(*) AS cnt FROM conv_sessions WHERE deleted_at IS NULL') .get() as { cnt: number; } ).cnt; const chunks = ( ctx.db .prepare('SELECT COUNT(*) AS cnt FROM conv_chunks WHERE deleted_at IS NULL') .get() as { cnt: number; } ).cnt; const projects = ( ctx.db .prepare( 'SELECT COUNT(DISTINCT project) AS cnt FROM conv_sessions WHERE deleted_at IS NULL', ) .get() as { cnt: number } ).cnt; const oldestSession = ( ctx.db .prepare('SELECT MIN(started_at) AS ts FROM conv_sessions WHERE deleted_at IS NULL') .get() as { ts: string | null } ).ts; const newestSession = ( ctx.db .prepare('SELECT MAX(started_at) AS ts FROM conv_sessions WHERE deleted_at IS NULL') .get() as { ts: string | null } ).ts; // Build span string from oldest/newest dates const formatDate = (iso: string | null): string | null => { if (!iso) return null; return iso.slice(0, 10); // "2026-02-28T..." → "2026-02-28" }; const oldest = formatDate(oldestSession); const newest = formatDate(newestSession); const span = oldest && newest ? `${oldest} → ${newest}` : null; const eligibleChunks = countEligibleChunks(ctx.db); let embeddedChunksText = 0; let embeddedChunksCode = 0; try { embeddedChunksText = countEmbeddedChunks(ctx.db, '_text'); } catch { // vec table may not exist yet } try { embeddedChunksCode = countEmbeddedChunks(ctx.db, '_code'); } catch { // vec table may not exist yet } const ignoredProjects = getIgnoredProjects(ctx.db).length; const orphanedSessions = parseInt(getStat(ctx.db, 'orphaned_sessions') ?? '0', 10); const searchCount = parseInt(getStat(ctx.db, 'search_count') ?? '0', 10); const hitCount = parseInt(getStat(ctx.db, 'hit_count') ?? '0', 10); const tokensServed = parseInt(getStat(ctx.db, 'tokens_served') ?? 
'0', 10); const lastSearchAt = getStat(ctx.db, 'last_search_at'); // --- Search lines: one readable string per embedder --- const embeddingModelIdText = getMeta(ctx.db, 'embedding_model_id_text'); const embeddingDimensionsText = parseInt( getMeta(ctx.db, 'embedding_dimensions_text') ?? '0', 10, ); const embeddingModelIdCode = getMeta(ctx.db, 'embedding_model_id_code'); const embeddingDimensionsCode = parseInt( getMeta(ctx.db, 'embedding_dimensions_code') ?? '0', 10, ); const buildSearchLine = ( modelId: string | null, dims: number, embedded: number, eligible: number, ): string => { if (!modelId) return 'disabled'; const pct = eligible > 0 ? Math.round((embedded / eligible) * 100) : 0; return `${modelId} (${dims}d) — ${embedded}/${eligible} (${pct}%)`; }; const textLine = buildSearchLine( embeddingModelIdText, embeddingDimensionsText, embeddedChunksText, eligibleChunks, ); const codeLine = buildSearchLine( embeddingModelIdCode, embeddingDimensionsCode, embeddedChunksCode, eligibleChunks, ); // Reranker line const rerankerLine = ctx.searchContext.reranker ? `${ctx.searchContext.reranker.backend()} (${ctx.searchContext.reranker.modelId()})` : 'none'; // --- Activity: merge worker + migration into one block --- const workerStatus: EmbedJobStatus = ctx.orchestrator?.getStatus() ?? { active: false, suffix: null, embedded: 0, total: 0, pid: null, rssMB: null, heapUsedMB: null, }; const hasMigrationText = !!getMeta(ctx.db, 'migration_target_model_text'); const hasMigrationCode = !!getMeta(ctx.db, 'migration_target_model_code'); let activity: { type: string; target: string | null; progress: string; pid: number | null; rssMB: number | null; heapUsedMB: number | null; stuckSince?: string | null; } | null = null; const buildProgressString = (embedded: number, total: number): string => { const pct = total > 0 ? 
Math.round((embedded / total) * 100) : 0; return `${embedded}/${total} (${pct}%)`; }; if (workerStatus.active) { // Check if current worker suffix matches a migration const suffix = workerStatus.suffix ?? '_text'; const isMigration = (suffix === '_text' && hasMigrationText) || (suffix === '_code' && hasMigrationCode); let embedded = workerStatus.embedded; let total = workerStatus.total; // Worker may not have reported total yet — fall back to migration meta / eligible chunks if (isMigration && total === 0) { embedded = parseInt(getMeta(ctx.db, `migration_progress${suffix}`) ?? '0', 10); total = eligibleChunks; } activity = { type: isMigration ? 'migration' : 'backfill', target: workerStatus.suffix, progress: buildProgressString(embedded, total), pid: workerStatus.pid, rssMB: workerStatus.rssMB, heapUsedMB: workerStatus.heapUsedMB, }; } else if (hasMigrationText || hasMigrationCode) { // Migration meta exists but no worker — stuck const suffix = hasMigrationText ? '_text' : '_code'; const stuckSince = getMeta(ctx.db, `migration_started_at${suffix}`); const migrationProgress = parseInt( getMeta(ctx.db, `migration_progress${suffix}`) ?? '0', 10, ); activity = { type: 'stuck-migration', target: suffix, progress: buildProgressString(migrationProgress, eligibleChunks), pid: null, rssMB: null, heapUsedMB: null, stuckSince, }; } // --- Status emoji --- let status: string; if (!ctx.cfg.embeddingsEnabled) { status = '\u{1F7E2} bm25-only'; } else if (workerStatus.active) { status = '\u{1F535} embedding'; } else if (activity?.type === 'stuck-migration') { status = '\u{1F7E0} degraded'; } else { status = '\u{1F7E2} healthy'; } // --- Uptime --- const uptimeSec = Math.round(process.uptime()); let uptime: string; if (uptimeSec < 60) { uptime = `${uptimeSec}s`; } else if (uptimeSec < 3600) { uptime = `${Math.floor(uptimeSec / 60)}m`; } else { const h = Math.floor(uptimeSec / 3600); const m = Math.floor((uptimeSec % 3600) / 60); uptime = m > 0 ? 
`${h}h${m}m` : `${h}h`; } // --- Process --- const mem = process.memoryUsage(); const processInfo = { pid: process.pid, rssMB: Math.round((mem.rss / 1024 / 1024) * 10) / 10, heapUsedMB: Math.round((mem.heapUsed / 1024 / 1024) * 10) / 10, uptimeSeconds: uptimeSec, }; return { content: [ { type: 'text' as const, text: JSON.stringify({ version: ctx.version, status, mode: ctx.mode, logLevel: ctx.cfg.logLevel, uptime, corpus: { sessions, chunks, projects, ignoredProjects, orphanedSessions, span, }, search: { bm25: 'active', text: textLine, code: codeLine, reranker: rerankerLine, }, activity, usage: { searches: searchCount, hits: hitCount, tokensServed, lastSearch: lastSearchAt, }, process: processInfo, }), }, ], }; }, - src/tools/memory.ts:159-420 (registration)Registration of the 'm9k_info' tool within the 'registerMemoryTools' function.
server.registerTool( 'm9k_info', { description: 'Show memory index information: corpus size, search pipeline status, usage metrics, embedding worker state.', inputSchema: {}, annotations: { readOnlyHint: true, destructiveHint: false, idempotentHint: true, openWorldHint: false, }, }, async () => { const sessions = ( ctx.db .prepare('SELECT COUNT(*) AS cnt FROM conv_sessions WHERE deleted_at IS NULL') .get() as { cnt: number; } ).cnt; const chunks = ( ctx.db .prepare('SELECT COUNT(*) AS cnt FROM conv_chunks WHERE deleted_at IS NULL') .get() as { cnt: number; } ).cnt; const projects = ( ctx.db .prepare( 'SELECT COUNT(DISTINCT project) AS cnt FROM conv_sessions WHERE deleted_at IS NULL', ) .get() as { cnt: number } ).cnt; const oldestSession = ( ctx.db .prepare('SELECT MIN(started_at) AS ts FROM conv_sessions WHERE deleted_at IS NULL') .get() as { ts: string | null } ).ts; const newestSession = ( ctx.db .prepare('SELECT MAX(started_at) AS ts FROM conv_sessions WHERE deleted_at IS NULL') .get() as { ts: string | null } ).ts; // Build span string from oldest/newest dates const formatDate = (iso: string | null): string | null => { if (!iso) return null; return iso.slice(0, 10); // "2026-02-28T..." → "2026-02-28" }; const oldest = formatDate(oldestSession); const newest = formatDate(newestSession); const span = oldest && newest ? `${oldest} → ${newest}` : null; const eligibleChunks = countEligibleChunks(ctx.db); let embeddedChunksText = 0; let embeddedChunksCode = 0; try { embeddedChunksText = countEmbeddedChunks(ctx.db, '_text'); } catch { // vec table may not exist yet } try { embeddedChunksCode = countEmbeddedChunks(ctx.db, '_code'); } catch { // vec table may not exist yet } const ignoredProjects = getIgnoredProjects(ctx.db).length; const orphanedSessions = parseInt(getStat(ctx.db, 'orphaned_sessions') ?? '0', 10); const searchCount = parseInt(getStat(ctx.db, 'search_count') ?? '0', 10); const hitCount = parseInt(getStat(ctx.db, 'hit_count') ?? 
'0', 10); const tokensServed = parseInt(getStat(ctx.db, 'tokens_served') ?? '0', 10); const lastSearchAt = getStat(ctx.db, 'last_search_at'); // --- Search lines: one readable string per embedder --- const embeddingModelIdText = getMeta(ctx.db, 'embedding_model_id_text'); const embeddingDimensionsText = parseInt( getMeta(ctx.db, 'embedding_dimensions_text') ?? '0', 10, ); const embeddingModelIdCode = getMeta(ctx.db, 'embedding_model_id_code'); const embeddingDimensionsCode = parseInt( getMeta(ctx.db, 'embedding_dimensions_code') ?? '0', 10, ); const buildSearchLine = ( modelId: string | null, dims: number, embedded: number, eligible: number, ): string => { if (!modelId) return 'disabled'; const pct = eligible > 0 ? Math.round((embedded / eligible) * 100) : 0; return `${modelId} (${dims}d) — ${embedded}/${eligible} (${pct}%)`; }; const textLine = buildSearchLine( embeddingModelIdText, embeddingDimensionsText, embeddedChunksText, eligibleChunks, ); const codeLine = buildSearchLine( embeddingModelIdCode, embeddingDimensionsCode, embeddedChunksCode, eligibleChunks, ); // Reranker line const rerankerLine = ctx.searchContext.reranker ? `${ctx.searchContext.reranker.backend()} (${ctx.searchContext.reranker.modelId()})` : 'none'; // --- Activity: merge worker + migration into one block --- const workerStatus: EmbedJobStatus = ctx.orchestrator?.getStatus() ?? { active: false, suffix: null, embedded: 0, total: 0, pid: null, rssMB: null, heapUsedMB: null, }; const hasMigrationText = !!getMeta(ctx.db, 'migration_target_model_text'); const hasMigrationCode = !!getMeta(ctx.db, 'migration_target_model_code'); let activity: { type: string; target: string | null; progress: string; pid: number | null; rssMB: number | null; heapUsedMB: number | null; stuckSince?: string | null; } | null = null; const buildProgressString = (embedded: number, total: number): string => { const pct = total > 0 ? 
Math.round((embedded / total) * 100) : 0; return `${embedded}/${total} (${pct}%)`; }; if (workerStatus.active) { // Check if current worker suffix matches a migration const suffix = workerStatus.suffix ?? '_text'; const isMigration = (suffix === '_text' && hasMigrationText) || (suffix === '_code' && hasMigrationCode); let embedded = workerStatus.embedded; let total = workerStatus.total; // Worker may not have reported total yet — fall back to migration meta / eligible chunks if (isMigration && total === 0) { embedded = parseInt(getMeta(ctx.db, `migration_progress${suffix}`) ?? '0', 10); total = eligibleChunks; } activity = { type: isMigration ? 'migration' : 'backfill', target: workerStatus.suffix, progress: buildProgressString(embedded, total), pid: workerStatus.pid, rssMB: workerStatus.rssMB, heapUsedMB: workerStatus.heapUsedMB, }; } else if (hasMigrationText || hasMigrationCode) { // Migration meta exists but no worker — stuck const suffix = hasMigrationText ? '_text' : '_code'; const stuckSince = getMeta(ctx.db, `migration_started_at${suffix}`); const migrationProgress = parseInt( getMeta(ctx.db, `migration_progress${suffix}`) ?? '0', 10, ); activity = { type: 'stuck-migration', target: suffix, progress: buildProgressString(migrationProgress, eligibleChunks), pid: null, rssMB: null, heapUsedMB: null, stuckSince, }; } // --- Status emoji --- let status: string; if (!ctx.cfg.embeddingsEnabled) { status = '\u{1F7E2} bm25-only'; } else if (workerStatus.active) { status = '\u{1F535} embedding'; } else if (activity?.type === 'stuck-migration') { status = '\u{1F7E0} degraded'; } else { status = '\u{1F7E2} healthy'; } // --- Uptime --- const uptimeSec = Math.round(process.uptime()); let uptime: string; if (uptimeSec < 60) { uptime = `${uptimeSec}s`; } else if (uptimeSec < 3600) { uptime = `${Math.floor(uptimeSec / 60)}m`; } else { const h = Math.floor(uptimeSec / 3600); const m = Math.floor((uptimeSec % 3600) / 60); uptime = m > 0 ? 
`${h}h${m}m` : `${h}h`; } // --- Process --- const mem = process.memoryUsage(); const processInfo = { pid: process.pid, rssMB: Math.round((mem.rss / 1024 / 1024) * 10) / 10, heapUsedMB: Math.round((mem.heapUsed / 1024 / 1024) * 10) / 10, uptimeSeconds: uptimeSec, }; return { content: [ { type: 'text' as const, text: JSON.stringify({ version: ctx.version, status, mode: ctx.mode, logLevel: ctx.cfg.logLevel, uptime, corpus: { sessions, chunks, projects, ignoredProjects, orphanedSessions, span, }, search: { bm25: 'active', text: textLine, code: codeLine, reranker: rerankerLine, }, activity, usage: { searches: searchCount, hits: hitCount, tokensServed, lastSearch: lastSearchAt, }, process: processInfo, }), }, ], }; }, );