Skip to main content
Glama
msdanyg

Smart Connections MCP Server

by msdanyg
search-engine.js (7.05 kB)
/**
 * Semantic search engine for Smart Connections.
 *
 * Wraps a loader (which exposes parsed notes and their precomputed
 * embeddings) and answers similarity, graph, and keyword queries over the
 * vault.
 */
import { findNearestNeighbors } from './embedding-utils.js';

export class SearchEngine {
  loader;
  embeddingModelKey;

  /**
   * @param {object} loader - Data source exposing getSources(), getSource(),
   *   readNoteContent(), and getEmbeddingModelKey(). Assumed to be the
   *   project's Smart Connections loader — TODO confirm exact contract.
   */
  constructor(loader) {
    this.loader = loader;
    this.embeddingModelKey = loader.getEmbeddingModelKey();
  }

  /**
   * Build the vector dataset from every loaded source, skipping entries
   * that have no usable embedding vector for the active model.
   *
   * Shared by getSimilarNotes() and getEmbeddingNeighbors(), which
   * previously duplicated this construction inline.
   *
   * @param {string|null} excludePath - Optional path to omit (e.g. the
   *   query note itself, so a note is never its own neighbor).
   * @returns {{id: string, vec: number[], metadata: {blocks: string[], lastModified: number}}[]}
   */
  #collectVectors(excludePath = null) {
    const vectors = [];
    for (const [path, src] of this.loader.getSources()) {
      if (path === excludePath) {
        continue;
      }
      const emb = src.embeddings[this.embeddingModelKey];
      const vec = emb?.vec || [];
      if (vec.length === 0) {
        continue; // no embedding for this model — cannot participate in search
      }
      vectors.push({
        id: path,
        vec,
        metadata: {
          blocks: Object.keys(src.blocks || {}),
          lastModified: src.last_import?.mtime || 0,
        },
      });
    }
    return vectors;
  }

  /**
   * Count non-overlapping occurrences of a literal substring.
   *
   * Replaces `new RegExp(query, 'gi')` built from raw user text, which
   * threw on regex metacharacters and was ReDoS-prone. Both approaches
   * advance past each match, so counts are identical for plain queries.
   *
   * @param {string} haystack - Text to scan (already lowercased by caller).
   * @param {string} needle - Literal substring (already lowercased, non-empty).
   * @returns {number} Number of non-overlapping occurrences.
   */
  #countOccurrences(haystack, needle) {
    let count = 0;
    let idx = haystack.indexOf(needle);
    while (idx !== -1) {
      count += 1;
      idx = haystack.indexOf(needle, idx + needle.length);
    }
    return count;
  }

  /**
   * Find notes similar to a given note path.
   *
   * @param {string} notePath - Path of the query note.
   * @param {number} [threshold=0.5] - Minimum cosine similarity to include.
   * @param {number} [limit=10] - Maximum number of results.
   * @returns {{path: string, similarity: number, blocks: string[]}[]}
   * @throws {Error} If the note is unknown or has no embedding for the
   *   active model.
   */
  getSimilarNotes(notePath, threshold = 0.5, limit = 10) {
    const source = this.loader.getSource(notePath);
    if (!source) {
      throw new Error(`Note not found: ${notePath}`);
    }
    const embeddings = source.embeddings[this.embeddingModelKey];
    if (!embeddings || !embeddings.vec) {
      throw new Error(`No embeddings found for note: ${notePath}`);
    }
    // Exclude the query note itself from the candidate set.
    const vectors = this.#collectVectors(notePath);
    const neighbors = findNearestNeighbors(embeddings.vec, vectors, limit, threshold);
    return neighbors.map((neighbor) => ({
      path: neighbor.id,
      similarity: neighbor.similarity,
      blocks: neighbor.metadata.blocks,
    }));
  }

  /**
   * Find the nearest notes to an arbitrary embedding vector.
   *
   * @param {number[]} embeddingVector - Query vector (same dimensionality
   *   as the active model's embeddings — not validated here).
   * @param {number} [k=10] - Maximum number of neighbors.
   * @param {number} [threshold=0.5] - Minimum similarity to include.
   * @returns {{path: string, similarity: number, blocks: string[]}[]}
   */
  getEmbeddingNeighbors(embeddingVector, k = 10, threshold = 0.5) {
    const vectors = this.#collectVectors();
    const neighbors = findNearestNeighbors(embeddingVector, vectors, k, threshold);
    return neighbors.map((neighbor) => ({
      path: neighbor.id,
      similarity: neighbor.similarity,
      blocks: neighbor.metadata.blocks,
    }));
  }

  /**
   * Build a connection graph starting from a note, breadth-limited per
   * level and depth-limited overall. Cycles are prevented with a visited
   * set, so each note appears at most once (at the shallowest depth it
   * was first reached).
   *
   * @param {string} notePath - Root note of the graph.
   * @param {number} [depth=2] - Maximum traversal depth.
   * @param {number} [threshold=0.6] - Minimum similarity to follow an edge.
   * @param {number} [maxPerLevel=5] - Maximum children expanded per node.
   * @returns {{root: string, connections: {path: string, depth: number, similarity: number}[]}}
   */
  getConnectionGraph(notePath, depth = 2, threshold = 0.6, maxPerLevel = 5) {
    const visited = new Set();
    const flatConnections = [];
    const buildGraph = (currentPath, currentDepth, parentSimilarity = 1.0) => {
      visited.add(currentPath);
      // The root (depth 0) is reported separately, not as a connection.
      if (currentDepth > 0) {
        flatConnections.push({
          path: currentPath,
          depth: currentDepth,
          similarity: parentSimilarity,
        });
      }
      if (currentDepth >= depth) {
        return;
      }
      try {
        const similar = this.getSimilarNotes(currentPath, threshold, maxPerLevel);
        for (const sim of similar) {
          // Skip already-visited nodes to prevent cycles.
          if (!visited.has(sim.path)) {
            buildGraph(sim.path, currentDepth + 1, sim.similarity);
          }
        }
      } catch (error) {
        // Best-effort: a node with missing embeddings should not abort
        // the whole traversal.
        console.error(`Error building graph for ${currentPath}:`, error);
      }
    };
    buildGraph(notePath, 0);
    return {
      root: notePath,
      connections: flatConnections,
    };
  }

  /**
   * Search notes by keyword relevance.
   *
   * NOTE: this is a literal, case-insensitive substring match — not true
   * semantic search — because there is no way to embed arbitrary text
   * without the model. A full implementation would call the embedding
   * model here and use getEmbeddingNeighbors().
   *
   * @param {string} queryText - Literal text to search for.
   * @param {number} [limit=10] - Maximum number of results.
   * @param {number} [threshold=0.5] - Minimum (crude) relevance score.
   * @returns {{path: string, similarity: number, blocks: string[]}[]}
   *   Sorted by score, descending.
   */
  searchByQuery(queryText, limit = 10, threshold = 0.5) {
    const results = [];
    const queryLower = queryText.toLowerCase();
    if (queryLower.length === 0) {
      // An empty query matches nothing (the old regex-based version
      // spuriously matched every position of every note).
      return results;
    }
    for (const [path, source] of this.loader.getSources()) {
      try {
        const content = this.loader.readNoteContent(path).toLowerCase();
        const matches = this.#countOccurrences(content, queryLower);
        if (matches > 0) {
          // Crude normalization: 10+ occurrences saturate at 1.0.
          const score = Math.min(matches / 10, 1.0);
          if (score >= threshold) {
            results.push({
              path,
              similarity: score,
              blocks: Object.keys(source.blocks || {}),
            });
          }
        }
      } catch (error) {
        // Best-effort: skip notes whose content can't be read.
        continue;
      }
    }
    return results
      .sort((a, b) => b.similarity - a.similarity)
      .slice(0, limit);
  }

  /**
   * Return a note's full content plus the names of its available blocks.
   *
   * @param {string} notePath - Path of the note to read.
   * @param {string[]} [includeBlocks=[]] - Currently unused; reserved for
   *   block-level filtering — TODO confirm intended semantics.
   * @returns {{path: string, content: string, blocks: string[]}}
   */
  getNoteWithContext(notePath, includeBlocks = []) {
    const content = this.loader.readNoteContent(notePath);
    const source = this.loader.getSource(notePath);
    const availableBlocks = source ? Object.keys(source.blocks || {}) : [];
    return {
      path: notePath,
      content,
      blocks: availableBlocks,
    };
  }

  /**
   * Summarize the loaded knowledge base.
   *
   * The embedding dimension is taken from the first source that has a
   * vector for the active model (0 if none do).
   *
   * @returns {{totalNotes: number, totalBlocks: number, embeddingDimension: number, modelKey: string}}
   */
  getStats() {
    const sources = this.loader.getSources();
    let totalBlocks = 0;
    let embeddingDim = 0;
    for (const source of sources.values()) {
      totalBlocks += Object.keys(source.blocks || {}).length;
      if (embeddingDim === 0) {
        const emb = source.embeddings[this.embeddingModelKey];
        if (emb?.vec) {
          embeddingDim = emb.vec.length;
        }
      }
    }
    return {
      totalNotes: sources.size,
      totalBlocks,
      embeddingDimension: embeddingDim,
      modelKey: this.embeddingModelKey,
    };
  }
}
//# sourceMappingURL=search-engine.js.map

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/msdanyg/smart-connections-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.