Obsidian Elite RAG MCP Server

rebuild.ts (4.31 kB)
#!/usr/bin/env node
/**
 * Copyright (c) 2024 AiAdvisors Romuald Czlonkowski
 * Licensed under the Sustainable Use License v1.0
 */
import { createDatabaseAdapter } from '../database/database-adapter';
import { N8nNodeLoader } from '../loaders/node-loader';
import { NodeParser } from '../parsers/node-parser';
import { DocsMapper } from '../mappers/docs-mapper';
import { NodeRepository } from '../database/node-repository';
import * as fs from 'fs';
import * as path from 'path';

async function rebuild() {
  console.log('🔄 Rebuilding n8n node database...\n');

  const db = await createDatabaseAdapter('./data/nodes.db');
  const loader = new N8nNodeLoader();
  const parser = new NodeParser();
  const mapper = new DocsMapper();
  const repository = new NodeRepository(db);

  // Initialize database
  const schema = fs.readFileSync(path.join(__dirname, '../../src/database/schema.sql'), 'utf8');
  db.exec(schema);

  // Clear existing data
  db.exec('DELETE FROM nodes');
  console.log('🗑️  Cleared existing data\n');

  // Load all nodes
  const nodes = await loader.loadAllNodes();
  console.log(`📦 Loaded ${nodes.length} nodes from packages\n`);

  // Statistics
  const stats = {
    successful: 0,
    failed: 0,
    aiTools: 0,
    triggers: 0,
    webhooks: 0,
    withProperties: 0,
    withOperations: 0,
    withDocs: 0
  };

  // Process each node
  for (const { packageName, nodeName, NodeClass } of nodes) {
    try {
      // Parse node
      const parsed = parser.parse(NodeClass, packageName);

      // Validate parsed data
      if (!parsed.nodeType || !parsed.displayName) {
        throw new Error('Missing required fields');
      }

      // Get documentation
      const docs = await mapper.fetchDocumentation(parsed.nodeType);
      parsed.documentation = docs || undefined;

      // Save to database
      repository.saveNode(parsed);

      // Update statistics
      stats.successful++;
      if (parsed.isAITool) stats.aiTools++;
      if (parsed.isTrigger) stats.triggers++;
      if (parsed.isWebhook) stats.webhooks++;
      if (parsed.properties.length > 0) stats.withProperties++;
      if (parsed.operations.length > 0) stats.withOperations++;
      if (docs) stats.withDocs++;

      console.log(`✅ ${parsed.nodeType} [Props: ${parsed.properties.length}, Ops: ${parsed.operations.length}]`);
    } catch (error) {
      stats.failed++;
      console.error(`❌ Failed to process ${nodeName}: ${(error as Error).message}`);
    }
  }

  // Validation check
  console.log('\n🔍 Running validation checks...');
  const validationResults = validateDatabase(repository);

  // Summary
  console.log('\n📊 Summary:');
  console.log(`  Total nodes: ${nodes.length}`);
  console.log(`  Successful: ${stats.successful}`);
  console.log(`  Failed: ${stats.failed}`);
  console.log(`  AI Tools: ${stats.aiTools}`);
  console.log(`  Triggers: ${stats.triggers}`);
  console.log(`  Webhooks: ${stats.webhooks}`);
  console.log(`  With Properties: ${stats.withProperties}`);
  console.log(`  With Operations: ${stats.withOperations}`);
  console.log(`  With Documentation: ${stats.withDocs}`);

  if (!validationResults.passed) {
    console.log('\n⚠️  Validation Issues:');
    validationResults.issues.forEach(issue => console.log(`  - ${issue}`));
  }

  console.log('\n✨ Rebuild complete!');
  db.close();
}

function validateDatabase(repository: NodeRepository): { passed: boolean; issues: string[] } {
  const issues: string[] = [];

  // Check critical nodes
  const criticalNodes = ['nodes-base.httpRequest', 'nodes-base.code', 'nodes-base.webhook', 'nodes-base.slack'];
  for (const nodeType of criticalNodes) {
    const node = repository.getNode(nodeType);
    if (!node) {
      issues.push(`Critical node ${nodeType} not found`);
      continue;
    }
    if (node.properties.length === 0) {
      issues.push(`Node ${nodeType} has no properties`);
    }
  }

  // Check AI tools
  const aiTools = repository.getAITools();
  if (aiTools.length === 0) {
    issues.push('No AI tools found - check detection logic');
  }

  return { passed: issues.length === 0, issues };
}

// Run if called directly
if (require.main === module) {
  rebuild().catch(console.error);
}
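
The script treats the database handle as an opaque adapter returned by createDatabaseAdapter. The sketch below is a minimal reconstruction of only the surface this file actually touches (exec, close, and an awaited factory); the real interface in database-adapter.ts almost certainly exposes more, since NodeRepository presumably needs prepared-statement access as well.

// Minimal sketch, inferred from the calls in rebuild.ts; not the actual database-adapter.ts API
interface DatabaseAdapter {
  // Run raw SQL: used above to load schema.sql and to clear the nodes table
  exec(sql: string): void;
  // Release the underlying connection once the rebuild finishes
  close(): void;
  // NodeRepository likely requires additional methods (e.g. prepared statements), omitted here
}

// Hypothetical factory signature matching how rebuild() awaits it
declare function createDatabaseAdapter(dbPath: string): Promise<DatabaseAdapter>;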

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/aegntic/aegntic-MCP'
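
The same endpoint can also be queried programmatically. The TypeScript sketch below assumes Node 18+ (global fetch) and keeps the response loosely typed, since the payload schema is not documented here.

// Fetch this server's directory entry from the Glama MCP API (Node 18+ global fetch)
(async () => {
  const response = await fetch('https://glama.ai/api/mcp/v1/servers/aegntic/aegntic-MCP');
  if (!response.ok) {
    throw new Error(`Request failed: ${response.status}`);
  }
  // The response shape is not documented in this listing, so keep the type loose
  const server: Record<string, unknown> = await response.json();
  console.log(server);
})();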

If you have feedback or need assistance with the MCP directory API, please join our Discord server.