Skip to main content
Glama
doc-control.js (17.4 kB)
// Documentation Control System - Manages artifact versioning and documentation
import { StorylineNotionSync } from '../tools/storyline-notion-sync.js';
import { promises as fs } from 'fs';
import path from 'path';
import crypto from 'crypto';

/**
 * Central documentation-control service for the production pipeline.
 *
 * Responsibilities (as exercised by the methods below):
 *  - record artifacts and per-stage approvals in Postgres,
 *  - keep a version history (doc_versions) per artifact,
 *  - optionally mirror selected artifact types to Notion (best-effort),
 *  - generate a JSON production report for a completed workflow.
 *
 * All Postgres access goes through short-lived pools built from
 * process.env.DATABASE_URL (see #withPool).
 */
export class DocControl {
  constructor() {
    this.notionSync = new StorylineNotionSync();
    this.artifactPath = './generated_assets';
    this.initialized = false;
  }

  /**
   * Idempotent setup: ensures the artifact directory exists and attempts to
   * prepare the Notion databases. Notion failures are logged and tolerated.
   */
  async initialize() {
    if (this.initialized) return;

    // Ensure artifact directory exists
    await this.ensureDirectoryExists(this.artifactPath);

    try {
      // Try to initialize Notion sync (graceful fallback if not configured)
      await this.notionSync.ensureStorylineDatabases();
    } catch (error) {
      console.log('⚠️ Notion sync not available:', error.message);
    }

    this.initialized = true;
    console.log('📚 Documentation Control initialized');
  }

  /**
   * Open a short-lived pg pool, run `fn(pool)`, and ALWAYS close the pool.
   * FIX: the original code only called pool.end() on the success path of each
   * method, so any query failure leaked database connections.
   * @param {(pool: import('pg').Pool) => Promise<any>} fn
   * @returns {Promise<any>} whatever `fn` resolves to
   */
  async #withPool(fn) {
    const { Pool } = await import('pg');
    const pool = new Pool({ connectionString: process.env.DATABASE_URL });
    try {
      return await fn(pool);
    } finally {
      await pool.end();
    }
  }

  /**
   * Create an artifact row for a task, hashing the backing file when it
   * exists on disk, and record version 1 of it. Rethrows on failure.
   * @param {string|number} taskId
   * @param {string} artifactType - e.g. 'storyline', 'scene'
   * @param {object} toolResult - raw tool output; stored verbatim as JSON metadata
   * @returns {Promise<object>} the inserted `artifacts` row
   */
  async createArtifact(taskId, artifactType, toolResult) {
    try {
      return await this.#withPool(async (pool) => {
        // Determine file information from tool result
        const fileInfo = this.extractFileInfo(toolResult);

        // Generate file hash if file exists
        let fileHash = null;
        if (fileInfo.filePath && await this.fileExists(fileInfo.filePath)) {
          fileHash = await this.generateFileHash(fileInfo.filePath);
        }

        const query = `
          INSERT INTO artifacts (
            task_id, artifact_type, file_path, file_url, file_hash,
            metadata, file_size, mime_type, status
          ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, 'active')
          RETURNING *
        `;

        const result = await pool.query(query, [
          taskId,
          artifactType,
          fileInfo.filePath,
          fileInfo.fileUrl,
          fileHash,
          JSON.stringify(toolResult),
          fileInfo.fileSize || 0,
          fileInfo.mimeType || 'application/octet-stream'
        ]);

        const artifact = result.rows[0];

        // Create initial version record (best-effort; logs its own failures)
        await this.createVersionRecord(artifact.id, 1, fileInfo.filePath, null, 'Initial creation');

        console.log(`📄 Created artifact record: ${artifactType} (ID: ${artifact.id})`);
        return artifact;
      });
    } catch (error) {
      console.error('❌ Failed to create artifact:', error);
      throw error;
    }
  }

  /**
   * Persist per-stage approvals for a task (upsert keyed on
   * task_id/stage/by_agent) and roll the task status up to 'approved' or
   * 'needs_revision'. Best-effort: errors are logged, not rethrown.
   * @param {object} task - must carry `id`
   * @param {Object<string, {reviewer: string, approved: boolean, issues?: any[], feedback?: any}>} approvals
   */
  async recordApproval(task, approvals) {
    try {
      await this.#withPool(async (pool) => {
        // Record each approval stage
        for (const [stage, approval] of Object.entries(approvals)) {
          if (approval && approval.reviewer) {
            // Resolve reviewer name to agent ID; unknown reviewers are skipped
            const agentResult = await pool.query(`SELECT id FROM agents WHERE name = $1`, [approval.reviewer]);

            if (agentResult.rows.length > 0) {
              const agentId = agentResult.rows[0].id;

              const approvalQuery = `
                INSERT INTO approvals (task_id, stage, by_agent, status, notes, feedback)
                VALUES ($1, $2, $3, $4, $5, $6)
                ON CONFLICT (task_id, stage, by_agent)
                DO UPDATE SET status = $4, notes = $5, feedback = $6, created_at = CURRENT_TIMESTAMP
              `;

              await pool.query(approvalQuery, [
                task.id,
                stage,
                agentId,
                approval.approved ? 'approved' : 'needs_revision',
                this.summarizeApproval(approval),
                JSON.stringify(approval.feedback)
              ]);
            }
          }
        }

        // Update task status based on approvals.
        // FIX: optional chaining so a null/undefined entry (tolerated by the
        // loop above) cannot throw a TypeError here; it simply counts as
        // not-approved, which matches the loop's skip behavior.
        const allApproved = Object.values(approvals).every(approval => approval?.approved);
        const newStatus = allApproved ? 'approved' : 'needs_revision';

        await pool.query(
          `UPDATE tasks SET status = $1, updated_at = CURRENT_TIMESTAMP WHERE id = $2`,
          [newStatus, task.id]
        );

        console.log(`✅ Recorded approvals for task ${task.id}: ${newStatus}`);
      });
    } catch (error) {
      console.error('❌ Failed to record approval:', error);
    }
  }

  /**
   * Insert a doc_versions row describing one revision of an artifact.
   * Best-effort: resolves to the new row id, or undefined on failure (logged).
   * @param {string|number} artifactId
   * @param {number} version - 1-based version counter
   * @param {string|null} filePath
   * @param {string|number|null} editorAgentId - null for system-created versions
   * @param {string} changes - human-readable change summary
   */
  async createVersionRecord(artifactId, version, filePath, editorAgentId, changes) {
    try {
      return await this.#withPool(async (pool) => {
        const query = `
          INSERT INTO doc_versions (artifact_id, version, file_path, editor_agent_id, changes)
          VALUES ($1, $2, $3, $4, $5)
          RETURNING id
        `;
        const result = await pool.query(query, [
          artifactId, version, filePath, editorAgentId, changes
        ]);
        return result.rows[0].id;
      });
    } catch (error) {
      console.error('❌ Failed to create version record:', error);
    }
  }

  /**
   * Mirror an artifact to Notion when its type is sync-eligible and the
   * metadata carries the required related page IDs. Best-effort: any failure
   * is logged and null is returned so the pipeline continues.
   * @returns {Promise<string|null>} the Notion page ID, or null if not synced
   */
  async syncToNotion(artifactId, artifactType, metadata) {
    try {
      // Only sync specific artifact types to Notion
      if (!this.shouldSyncToNotion(artifactType)) {
        return null;
      }

      let notionPageId = null;

      switch (artifactType) {
        case 'storyline':
          if (metadata.storyline) {
            const result = await this.notionSync.syncStorylineToNotion(metadata.storyline);
            notionPageId = result.notion_page_id;
          }
          break;

        case 'scene':
          // A scene page is nested under its storyline's Notion page
          if (metadata.scene && metadata.storylineNotionId) {
            const result = await this.notionSync.syncSceneToNotion(
              metadata.scene,
              metadata.storylineNotionId
            );
            notionPageId = result.notion_page_id;
          }
          break;

        case 'character_development':
          // Requires all three related Notion page IDs to link correctly
          if (metadata.development && metadata.characterNotionId &&
              metadata.storylineNotionId && metadata.sceneNotionId) {
            const result = await this.notionSync.syncCharacterDevelopmentToNotion(
              metadata.development,
              metadata.characterNotionId,
              metadata.storylineNotionId,
              metadata.sceneNotionId
            );
            notionPageId = result.notion_page_id;
          }
          break;
      }

      // Update artifact with Notion page ID
      if (notionPageId) {
        await this.updateArtifactNotionId(artifactId, notionPageId);
        console.log(`📝 Synced artifact ${artifactId} to Notion: ${notionPageId}`);
      }

      return notionPageId;
    } catch (error) {
      console.error('⚠️ Notion sync failed (continuing without it):', error.message);
      return null;
    }
  }

  /** Store a Notion page ID on an artifact row. Best-effort; errors logged. */
  async updateArtifactNotionId(artifactId, notionPageId) {
    try {
      await this.#withPool((pool) =>
        pool.query(
          `UPDATE artifacts SET notion_page_id = $1 WHERE id = $2`,
          [notionPageId, artifactId]
        )
      );
    } catch (error) {
      console.error('❌ Failed to update artifact Notion ID:', error);
    }
  }

  /**
   * Build a full production report (summary, tasks, quality metrics, artifact
   * inventory, approval trail) for one workflow and write it to
   * `<artifactPath>/workflow_<id>_documentation.json`.
   * @param {string|number} workflowId
   * @returns {Promise<{success: boolean, documentation?: object, documentPath?: string, error?: string}>}
   */
  async generateProductionDocumentation(workflowId) {
    try {
      // Get workflow and all related data in one aggregated row
      const workflowData = await this.#withPool(async (pool) => {
        const workflowQuery = `
          SELECT w.*,
            json_agg(
              json_build_object(
                'task', t.*,
                'artifacts', (
                  SELECT json_agg(a.*) FROM artifacts a WHERE a.task_id = t.id
                ),
                'approvals', (
                  SELECT json_agg(ap.*) FROM approvals ap WHERE ap.task_id = t.id
                )
              )
            ) as tasks
          FROM workflows w
          LEFT JOIN tasks t ON w.id = t.workflow_id
          WHERE w.id = $1
          GROUP BY w.id
        `;
        const result = await pool.query(workflowQuery, [workflowId]);
        if (result.rows.length === 0) {
          throw new Error(`Workflow ${workflowId} not found`);
        }
        return result.rows[0];
      });

      // Generate documentation
      const documentation = {
        workflow: {
          id: workflowData.id,
          name: workflowData.name,
          description: workflowData.description,
          status: workflowData.status,
          created_at: workflowData.created_at,
          completed_at: workflowData.completed_at
        },
        production_summary: this.generateProductionSummary(workflowData),
        // The LEFT JOIN emits one null-task entry for task-less workflows;
        // filter those placeholders out.
        tasks: workflowData.tasks.filter(task => task.task.id),
        quality_metrics: this.calculateQualityMetrics(workflowData),
        artifact_inventory: this.generateArtifactInventory(workflowData),
        approval_trail: this.generateApprovalTrail(workflowData),
        generated_at: new Date().toISOString()
      };

      // Save documentation to file
      const docPath = path.join(this.artifactPath, `workflow_${workflowId}_documentation.json`);
      await fs.writeFile(docPath, JSON.stringify(documentation, null, 2));

      console.log(`📋 Generated production documentation: ${docPath}`);
      return {
        success: true,
        documentation: documentation,
        documentPath: docPath
      };
    } catch (error) {
      console.error('❌ Failed to generate production documentation:', error);
      return { success: false, error: error.message };
    }
  }

  // Helper methods

  /**
   * Extract file path/MIME info from the heterogeneous tool-result shapes.
   * Precedence matches the original if/else chain: manifest_path,
   * breakdown_path, images_path, audio_metadata_path (all JSON manifests),
   * then character.puppet_image_path (PNG).
   * NOTE(review): fileUrl/fileSize are never populated here — presumably
   * reserved for future tool outputs; confirm before relying on them.
   */
  extractFileInfo(toolResult) {
    const fileInfo = {
      filePath: null,
      fileUrl: null,
      fileSize: 0,
      mimeType: null
    };

    // First matching known JSON-manifest field wins
    const jsonPathFields = ['manifest_path', 'breakdown_path', 'images_path', 'audio_metadata_path'];
    const jsonField = jsonPathFields.find((field) => toolResult[field]);

    if (jsonField) {
      fileInfo.filePath = toolResult[jsonField];
      fileInfo.mimeType = 'application/json';
    } else if (toolResult.character?.puppet_image_path) {
      fileInfo.filePath = toolResult.character.puppet_image_path;
      fileInfo.mimeType = 'image/png';
    }

    return fileInfo;
  }

  /** True if `filePath` is accessible on disk; never throws. */
  async fileExists(filePath) {
    try {
      await fs.access(filePath);
      return true;
    } catch {
      return false;
    }
  }

  /** SHA-256 hex digest of the file's contents, or null if unreadable. */
  async generateFileHash(filePath) {
    try {
      const fileBuffer = await fs.readFile(filePath);
      return crypto.createHash('sha256').update(fileBuffer).digest('hex');
    } catch {
      return null;
    }
  }

  /** Only these artifact types are mirrored to Notion. */
  shouldSyncToNotion(artifactType) {
    return ['storyline', 'scene', 'character_development'].includes(artifactType);
  }

  /** One-line human-readable note stored alongside an approval row. */
  summarizeApproval(approval) {
    if (approval.approved) {
      return `Approved by ${approval.reviewer}`;
    }
    const issueCount = approval.issues?.length || 0;
    return `Needs revision - ${issueCount} issues identified by ${approval.reviewer}`;
  }

  /**
   * Task-level counts for the production report.
   * NOTE(review): success_rate is a string ("87.5") when tasks exist but the
   * number 0 otherwise — kept as-is since report consumers may depend on it.
   */
  generateProductionSummary(workflowData) {
    const tasks = workflowData.tasks.filter(t => t.task.id);
    const totalTasks = tasks.length;
    const completedTasks = tasks.filter(t => t.task.status === 'approved').length;
    const failedTasks = tasks.filter(t => t.task.status === 'failed').length;

    return {
      total_tasks: totalTasks,
      completed_tasks: completedTasks,
      failed_tasks: failedTasks,
      success_rate: totalTasks > 0 ? (completedTasks / totalTasks * 100).toFixed(1) : 0,
      status: workflowData.status
    };
  }

  /**
   * Review-level counts for the production report.
   * FIX: recordApproval() writes 'needs_revision' (never 'rejected'), so the
   * original `status === 'rejected'` filter always yielded 0; count both
   * spellings so rejected_reviews reflects reality.
   */
  calculateQualityMetrics(workflowData) {
    const tasks = workflowData.tasks.filter(t => t.task.id);
    const approvals = tasks.flatMap(t => t.approvals || []);
    const totalApprovals = approvals.length;
    const passedApprovals = approvals.filter(a => a.status === 'approved').length;
    const rejectedApprovals = approvals.filter(
      a => a.status === 'rejected' || a.status === 'needs_revision'
    ).length;

    return {
      total_reviews: totalApprovals,
      passed_reviews: passedApprovals,
      rejected_reviews: rejectedApprovals,
      approval_rate: totalApprovals > 0 ? (passedApprovals / totalApprovals * 100).toFixed(1) : 0
    };
  }

  /** Flat inventory of all artifacts across the workflow's tasks, plus per-type counts. */
  generateArtifactInventory(workflowData) {
    const tasks = workflowData.tasks.filter(t => t.task.id);
    const artifacts = tasks.flatMap(t => t.artifacts || []);

    return {
      total_artifacts: artifacts.length,
      by_type: artifacts.reduce((acc, artifact) => {
        acc[artifact.artifact_type] = (acc[artifact.artifact_type] || 0) + 1;
        return acc;
      }, {}),
      artifacts: artifacts.map(artifact => ({
        id: artifact.id,
        type: artifact.artifact_type,
        file_path: artifact.file_path,
        status: artifact.status,
        created_at: artifact.created_at
      }))
    };
  }

  /** Chronological list of every approval, annotated with its task's step name. */
  generateApprovalTrail(workflowData) {
    const tasks = workflowData.tasks.filter(t => t.task.id);
    const approvals = tasks.flatMap(t =>
      (t.approvals || []).map(approval => ({
        ...approval,
        task_name: t.task.step
      }))
    );

    return approvals.sort((a, b) => new Date(a.created_at) - new Date(b.created_at));
  }

  /**
   * Create dirPath (and any missing parents). With { recursive: true },
   * fs.mkdir never throws EEXIST, so the original EEXIST guard was dead code.
   */
  async ensureDirectoryExists(dirPath) {
    await fs.mkdir(dirPath, { recursive: true });
  }
}

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/bermingham85/mcp-puppet-pipeline'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.