/**
* Agent Synch MCP Server - Storage Layer
* Handles all persistent storage operations for the memory bank.
*/
import * as fs from 'fs/promises';
import * as fsSync from 'fs';
import * as path from 'path';
import Database from 'better-sqlite3';
/**
* AgentSynchStorage - Core storage engine using SQLite with MD dual-write.
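 *
 * @example
 * // Minimal usage sketch; the project id and summary below are illustrative.
 * const storage = new AgentSynchStorage();
 * await storage.initialize();
 * await storage.setActiveContext('demo-project', {
 *   summary: 'Bootstrapping the storage layer',
 *   lastUpdated: new Date().toISOString(),
 * });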
*/
export class AgentSynchStorage {
baseDir;
db;
constructor(baseDir) {
// Default to ~/.agent-synch if not specified
this.baseDir = baseDir || path.join(process.env.HOME || process.env.USERPROFILE || '', '.agent-synch');
// Ensure the base directory exists (synchronously, because the Database constructor below is synchronous)
try {
fsSync.mkdirSync(this.baseDir, { recursive: true });
}
catch (e) {
// Ignore failures here; opening the database below will surface any real problem
}
// Open the database handle on construction; the tables themselves are created in initialize()
this.db = new Database(path.join(this.baseDir, 'agent_synch.db'));
}
/**
* Initialize storage structure and SQLite tables.
*/
async initialize() {
await fs.mkdir(path.join(this.baseDir, 'global', 'filing_cabinet'), { recursive: true });
await fs.mkdir(path.join(this.baseDir, 'projects'), { recursive: true });
// Enable WAL mode for better concurrency
this.db.pragma('journal_mode = WAL');
// Create Tables
this.db.exec(`
CREATE TABLE IF NOT EXISTS global_config (
key TEXT PRIMARY KEY,
value TEXT
);
CREATE TABLE IF NOT EXISTS active_contexts (
project_id TEXT PRIMARY KEY,
summary TEXT,
last_updated TEXT,
focus TEXT,
task_graph TEXT -- JSON
);
CREATE TABLE IF NOT EXISTS filing_cabinet (
project_id TEXT,
file_path TEXT,
summary TEXT,
key_exports TEXT, -- JSON
dependencies TEXT, -- JSON
dependents TEXT, -- JSON
indexed_at TEXT,
content TEXT,
PRIMARY KEY (project_id, file_path)
);
CREATE TABLE IF NOT EXISTS spatial_rooms (
project_id TEXT,
path TEXT,
description TEXT,
depth INTEGER,
connected_rooms TEXT, -- JSON
key_items TEXT, -- JSON
PRIMARY KEY (project_id, path)
);
CREATE TABLE IF NOT EXISTS bugs (
id TEXT PRIMARY KEY,
project_id TEXT,
title TEXT,
description TEXT,
stack_trace TEXT,
file_path TEXT,
line_number INTEGER,
severity TEXT,
status TEXT,
created_at TEXT,
resolved_at TEXT,
resolution TEXT
);
CREATE TABLE IF NOT EXISTS events (
id TEXT PRIMARY KEY,
project_id TEXT,
agent_id TEXT,
event_type TEXT,
summary TEXT,
focus TEXT,
metadata TEXT, -- JSON
timestamp TEXT,
previous_event_id TEXT
);
CREATE INDEX IF NOT EXISTS idx_events_project ON events(project_id);
`);
}
// --- Project Helpers ---
projectDir(projectId) {
if (projectId === 'global') {
return path.join(this.baseDir, 'global');
}
return path.join(this.baseDir, 'projects', this.sanitizeId(projectId));
}
sanitizeId(id) {
// Replace characters that are unsafe in folder names
return id.replace(/[^a-zA-Z0-9-_]/g, '_');
}
async ensureProjectDir(projectId) {
const dir = this.projectDir(projectId);
await fs.mkdir(path.join(dir, 'filing_cabinet'), { recursive: true });
}
// --- Active Context (with MD fallback) ---
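/**
 * Read the active context for a project. Falls back to the Markdown file written
 * by earlier versions when no SQLite row exists.
 * @example
 * // Returns null when neither a DB row nor a Markdown file is found ('demo-project' is illustrative).
 * const ctx = await storage.getActiveContext('demo-project');
 */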
async getActiveContext(projectId) {
// Try DB first
const row = this.db.prepare('SELECT * FROM active_contexts WHERE project_id = ?').get(projectId);
if (row) {
return {
summary: row.summary,
lastUpdated: row.last_updated,
focus: row.focus || undefined,
taskGraph: row.task_graph ? JSON.parse(row.task_graph) : undefined,
};
}
// Fall back to the Markdown file (migration path for data written before the SQLite store)
try {
const dir = this.projectDir(projectId);
const mdPath = path.join(dir, 'active_context.md');
const mdData = await fs.readFile(mdPath, 'utf-8');
return this.parseContextFromMd(mdData);
}
catch {
return null;
}
}
parseContextFromMd(mdContent) {
// Parse basic MD format:
// # Active Context
// **Last Updated**: ...
// **Focus**: ...
// ## Summary
// ...
const lines = mdContent.split('\n');
const context = {
summary: '',
lastUpdated: new Date().toISOString(),
};
let inSummary = false;
for (const line of lines) {
if (line.startsWith('**Last Updated**:')) {
context.lastUpdated = line.split(':').slice(1).join(':').trim();
}
else if (line.startsWith('**Focus**:')) {
context.focus = line.split(':').slice(1).join(':').trim();
}
else if (line.startsWith('## Summary')) {
inSummary = true;
}
else if (inSummary) {
// Stop at the trailing rule or the next section so the auto-generated footer is not pulled into the summary
if (line.startsWith('---') || line.startsWith('## ')) {
inSummary = false;
}
else if (line.trim()) {
context.summary += (context.summary ? '\n' : '') + line;
}
}
}
return context.summary ? context : null;
}
async setActiveContext(projectId, context) {
// Write to DB
this.db.prepare(`
INSERT OR REPLACE INTO active_contexts (project_id, summary, last_updated, focus, task_graph)
VALUES (?, ?, ?, ?, ?)
`).run(projectId, context.summary, context.lastUpdated, context.focus, context.taskGraph ? JSON.stringify(context.taskGraph) : null);
// Also generate MD for human readability and agent fallback
await this.ensureProjectDir(projectId);
const mdPath = path.join(this.projectDir(projectId), 'active_context.md');
const mdContent = this.generateContextMd(context, projectId);
await fs.writeFile(mdPath, mdContent);
}
generateContextMd(context, projectId) {
return `# Active Context
**Project**: ${projectId}
**Last Updated**: ${context.lastUpdated}
${context.focus ? `**Focus**: ${context.focus}` : ''}
## Summary
${context.summary}
---
*Auto-generated by Agent Synch from SQLite.*
`;
}
// --- Filing Cabinet (SQLite) ---
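/**
 * Index a file summary into the filing cabinet, keyed by project and path.
 * @example
 * // Illustrative entry, shaped after the fields read below; array fields default to [].
 * await storage.indexFile('demo-project', {
 *   originalPath: 'src/storage.js',
 *   summary: 'SQLite-backed storage layer',
 *   keyExports: ['AgentSynchStorage'],
 *   dependencies: ['better-sqlite3'],
 * });
 */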
async indexFile(projectId, entry) {
if (!entry.originalPath)
throw new Error('File path required');
const indexedAt = new Date().toISOString();
this.db.prepare(`
INSERT OR REPLACE INTO filing_cabinet (project_id, file_path, summary, key_exports, dependencies, dependents, indexed_at)
VALUES (?, ?, ?, ?, ?, ?, ?)
`).run(projectId, entry.originalPath, entry.summary, JSON.stringify(entry.keyExports || []), JSON.stringify(entry.dependencies || []), JSON.stringify(entry.dependents || []), indexedAt);
}
async getFileFromCabinet(projectId, originalPath) {
const row = this.db.prepare(`
SELECT * FROM filing_cabinet WHERE project_id = ? AND file_path = ?
`).get(projectId, originalPath);
if (!row)
return null;
return {
originalPath: row.file_path,
summary: row.summary,
keyExports: JSON.parse(row.key_exports),
dependencies: JSON.parse(row.dependencies),
dependents: JSON.parse(row.dependents),
indexedAt: row.indexed_at
};
}
async listCabinet(projectId) {
const rows = this.db.prepare(`
SELECT file_path FROM filing_cabinet WHERE project_id = ?
`).all(projectId);
return rows.map(r => r.file_path);
}
// --- Project Profile (Legacy JSON Support) ---
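/**
 * Project profiles are stored as plain JSON files on disk rather than in SQLite.
 * @example
 * // Illustrative profile; the object is persisted verbatim as pretty-printed JSON.
 * await storage.setProjectProfile('demo-project', { name: 'Demo', language: 'TypeScript' });
 */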
async getProjectProfile(projectId) {
try {
const filePath = path.join(this.projectDir(projectId), 'project_profile.json');
const data = await fs.readFile(filePath, 'utf-8');
return JSON.parse(data);
}
catch {
return null;
}
}
async setProjectProfile(projectId, profile) {
await this.ensureProjectDir(projectId);
const filePath = path.join(this.projectDir(projectId), 'project_profile.json');
await fs.writeFile(filePath, JSON.stringify(profile, null, 2));
}
// --- Spatial Map (SQLite) ---
async getSpatialMap(projectId) {
const rows = this.db.prepare('SELECT * FROM spatial_rooms WHERE project_id = ?').all(projectId);
const rooms = {};
for (const row of rows) {
rooms[row.path] = {
path: row.path,
description: row.description,
depth: row.depth,
connectedRooms: JSON.parse(row.connected_rooms),
keyItems: JSON.parse(row.key_items),
};
}
return { rooms };
}
async saveSpatialMap(projectId, map) {
// Batch-upsert every room from the provided map in a single transaction.
// addRoom handles individual rooms; this method rewrites the full map.
const insert = this.db.prepare(`
INSERT OR REPLACE INTO spatial_rooms (project_id, path, description, depth, connected_rooms, key_items)
VALUES (?, ?, ?, ?, ?, ?)
`);
const transaction = this.db.transaction((rooms) => {
for (const room of rooms) {
insert.run(projectId, room.path, room.description, room.depth, JSON.stringify(room.connectedRooms || []), JSON.stringify(room.keyItems || []));
}
});
transaction(Object.values(map.rooms));
}
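/**
 * Insert a single room; existing rows for the same path are left untouched.
 * @example
 * // Illustrative room; connectedRooms and keyItems default to [].
 * await storage.addRoom('demo-project', {
 *   path: 'src/storage',
 *   description: 'Persistence layer',
 *   depth: 2,
 *   connectedRooms: ['src'],
 *   keyItems: ['storage.js'],
 * });
 */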
async addRoom(projectId, room) {
this.db.prepare(`
INSERT OR IGNORE INTO spatial_rooms (project_id, path, description, depth, connected_rooms, key_items)
VALUES (?, ?, ?, ?, ?, ?)
`).run(projectId, room.path, room.description, room.depth, JSON.stringify(room.connectedRooms || []), JSON.stringify(room.keyItems || []));
}
async linkRooms(projectId, roomA, roomB) {
// Read-modify-write: loading the map first is simpler than updating JSON arrays in SQL.
const map = await this.getSpatialMap(projectId);
if (map.rooms[roomA] && !map.rooms[roomA].connectedRooms.includes(roomB)) {
map.rooms[roomA].connectedRooms.push(roomB);
}
if (map.rooms[roomB] && !map.rooms[roomB].connectedRooms.includes(roomA)) {
map.rooms[roomB].connectedRooms.push(roomA);
}
await this.saveSpatialMap(projectId, map);
}
// --- Search (SQLite) ---
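/**
 * LIKE-based search over filing-cabinet summaries and paths (capped at 10 results).
 * @example
 * // Illustrative query; omit projectId to search across all projects.
 * const hits = await storage.searchMemory('storage', 'demo-project');
 */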
async searchMemory(query, projectId) {
let sql = `SELECT * FROM filing_cabinet WHERE (summary LIKE ? OR file_path LIKE ?)`;
const params = [`%${query}%`, `%${query}%`];
if (projectId) {
sql += ` AND project_id = ?`;
params.push(projectId);
}
sql += ` LIMIT 10`;
const rows = this.db.prepare(sql).all(...params);
return rows.map(row => ({
path: row.file_path,
projectId: row.project_id,
summary: row.summary,
matchScore: 1 // Simple match
}));
}
// --- Project Management (SQLite) ---
async listProjects() {
const rows = this.db.prepare('SELECT DISTINCT project_id FROM active_contexts').all();
return rows.map(r => r.project_id);
}
// --- Bug Logging (SQLite) ---
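/**
 * Log a new bug and return its generated id.
 * @example
 * // Illustrative payload; stackTrace, filePath and lineNumber are optional.
 * const bugId = await storage.logBug({
 *   projectId: 'demo-project',
 *   title: 'Example bug title',
 *   description: 'Steps to reproduce and expected behavior',
 *   severity: 'medium',
 * });
 */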
async logBug(entry) {
const id = `bug_${Date.now()}_${Math.random().toString(36).slice(2, 7)}`;
const now = new Date().toISOString();
this.db.prepare(`
INSERT INTO bugs (id, project_id, title, description, stack_trace, file_path, line_number, severity, status, created_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`).run(id, entry.projectId, entry.title, entry.description, entry.stackTrace || null, entry.filePath || null, entry.lineNumber || null, entry.severity, 'open', now);
return id;
}
async getBugs(projectId, status) {
let sql = `SELECT * FROM bugs WHERE project_id = ?`;
const params = [projectId];
if (status) {
sql += ` AND status = ?`;
params.push(status);
}
sql += ` ORDER BY created_at DESC`;
const rows = this.db.prepare(sql).all(...params);
return rows.map(row => ({
id: row.id,
projectId: row.project_id,
title: row.title,
description: row.description,
stackTrace: row.stack_trace,
filePath: row.file_path,
lineNumber: row.line_number,
severity: row.severity,
status: row.status,
createdAt: row.created_at,
resolvedAt: row.resolved_at,
resolution: row.resolution
}));
}
async resolveBug(bugId, projectId, resolution) {
const now = new Date().toISOString();
this.db.prepare(`
UPDATE bugs
SET status = 'resolved', resolution = ?, resolved_at = ?
WHERE id = ? AND project_id = ?
`).run(resolution, now, bugId, projectId);
}
// --- Server Configuration (SQLite) ---
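/**
 * The server configuration is stored as a single opaque JSON value under the
 * 'server_config' key; isFirstRun() reports whether it has ever been written.
 * @example
 * // Illustrative config shape; any JSON-serializable object can be stored.
 * if (await storage.isFirstRun()) {
 *   await storage.setServerConfig({ defaultProject: 'demo-project' });
 * }
 */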
async getServerConfig() {
const row = this.db.prepare('SELECT value FROM global_config WHERE key = ?').get('server_config');
return row ? JSON.parse(row.value) : null;
}
async setServerConfig(config) {
this.db.prepare('INSERT OR REPLACE INTO global_config (key, value) VALUES (?, ?)').run('server_config', JSON.stringify(config));
}
async isFirstRun() {
const config = await this.getServerConfig();
return config === null;
}
// --- Event-Based Context Caching (SQLite) ---
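/**
 * Record a context event, write its Markdown summary, and (for 'handoff' and
 * 'checkpoint' events) roll the summary into the active context.
 * @example
 * // Illustrative event; metadata and previousEventId are optional.
 * const evt = await storage.emitContextEvent({
 *   projectId: 'demo-project',
 *   agentId: 'agent-1',
 *   eventType: 'checkpoint',
 *   summary: 'Finished indexing the storage module',
 *   focus: 'storage layer',
 * });
 */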
async emitContextEvent(event) {
const id = `evt_${Date.now()}_${Math.random().toString(36).slice(2, 7)}`;
const fullEvent = {
...event,
id,
timestamp: new Date().toISOString(),
};
// Save to DB
this.db.prepare(`
INSERT INTO events (id, project_id, agent_id, event_type, summary, focus, metadata, timestamp, previous_event_id)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
`).run(fullEvent.id, fullEvent.projectId, fullEvent.agentId, fullEvent.eventType, fullEvent.summary, fullEvent.focus || null, fullEvent.metadata ? JSON.stringify(fullEvent.metadata) : null, fullEvent.timestamp, fullEvent.previousEventId || null);
// Generate MD summary for human readability and handoff
await this.generateEventMd(fullEvent);
// If handoff/checkpoint, update active context automatically
if (event.eventType === 'handoff' || event.eventType === 'checkpoint') {
const activeCtx = await this.getActiveContext(event.projectId) || { summary: '', lastUpdated: '' };
await this.setActiveContext(event.projectId, {
summary: event.summary,
focus: event.focus,
lastUpdated: fullEvent.timestamp,
taskGraph: activeCtx.taskGraph
});
}
return fullEvent;
}
async generateEventMd(event) {
await this.ensureProjectDir(event.projectId);
const eventsDir = path.join(this.projectDir(event.projectId), 'events');
await fs.mkdir(eventsDir, { recursive: true });
const mdPath = path.join(eventsDir, `${event.id}.md`);
const mdContent = `# Context Event: ${event.eventType.toUpperCase()}
**ID**: ${event.id}
**Agent**: ${event.agentId}
**Time**: ${event.timestamp}
${event.focus ? `**Focus**: ${event.focus}` : ''}
${event.previousEventId ? `**Previous**: ${event.previousEventId}` : ''}
## Summary
${event.summary}
${event.metadata ? `## Metadata
\`\`\`json
${JSON.stringify(event.metadata, null, 2)}
\`\`\`` : ''}
`;
await fs.writeFile(mdPath, mdContent);
}
async getContextEvents(projectId, limit = 10) {
const rows = this.db.prepare(`
SELECT * FROM events
WHERE project_id = ?
ORDER BY timestamp DESC
LIMIT ?
`).all(projectId, limit);
return rows.map(row => ({
id: row.id,
projectId: row.project_id,
agentId: row.agent_id,
eventType: row.event_type,
summary: row.summary,
focus: row.focus,
metadata: row.metadata ? JSON.parse(row.metadata) : undefined,
timestamp: row.timestamp,
previousEventId: row.previous_event_id
}));
}
async getLastEvent(projectId) {
const events = await this.getContextEvents(projectId, 1);
return events[0] || null;
}
}