create_file_tree

Generate or load a file tree configuration by specifying a base directory and JSON filename. Use this tool to organize and understand your codebase structure within FileScopeMCP.

Instructions

Create or load a file tree configuration

Input Schema

Name            Required  Description                                    Default
baseDirectory   Yes       Base directory to scan for files
filename        Yes       Name of the JSON file to store the file tree
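
For reference, a create_file_tree call only needs the two arguments above. The values below are hypothetical placeholders, not defaults; the named JSON file is where the generated tree is stored.

    // Hypothetical arguments for a create_file_tree tool call (placeholder values)
    const createFileTreeArgs = {
      baseDirectory: "/home/user/projects/my-app", // directory to scan
      filename: "my-app-tree.json"                 // JSON file that will store the generated file tree
    };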

Implementation Reference

  • Primary handler function for the 'create_file_tree' tool. Normalizes the input base directory and orchestrates the scanning process to build and return the complete FileNode tree structure (a usage sketch follows this reference list).
    export async function createFileTree(baseDir: string): Promise<FileNode> {
      const normalizedBaseDir = path.normalize(baseDir);
      const nodes = await scanDirectory(normalizedBaseDir);

      // The first node should be the root directory
      if (nodes.isDirectory && nodes.path === normalizedBaseDir) {
        return nodes;
      }

      // If for some reason we didn't get a root node, create one
      const rootNode: FileNode = {
        path: normalizedBaseDir,
        name: path.basename(normalizedBaseDir),
        isDirectory: true,
        children: []
      };

      // Add all nodes that don't have a parent
      for (const node of nodes.children || []) {
        if (path.dirname(node.path) === normalizedBaseDir) {
          rootNode.children?.push(node);
        }
      }

      return rootNode;
    }
  • Core helper function that recursively scans directories, applies exclusion rules, extracts import dependencies, analyzes package dependencies, calculates file importance scores, and constructs the hierarchical FileNode tree (a hypothetical sketch of the exclusion check follows this list).
    export async function scanDirectory(baseDir: string, currentDir: string = baseDir): Promise<FileNode> {
      log(`\n📁 SCAN DIRECTORY: ${currentDir}`);
      log(` - Base dir: ${baseDir}`);

      // Handle special case for current directory
      const normalizedBaseDir = path.normalize(baseDir);
      const normalizedDirPath = path.normalize(currentDir);
      log(` - Normalized base dir: ${normalizedBaseDir}`);
      log(` - Normalized current dir: ${normalizedDirPath}`);

      // Create root node for this directory
      const rootNode: FileNode = {
        path: normalizedDirPath,
        name: path.basename(normalizedDirPath),
        isDirectory: true,
        children: []
      };

      // Read directory entries
      let entries: fs.Dirent[];
      try {
        entries = await fsPromises.readdir(normalizedDirPath, { withFileTypes: true });
        log(` - Read ${entries.length} entries in directory`);
      } catch (error) {
        log(` - ❌ Error reading directory ${normalizedDirPath}:`, error);
        return rootNode;
      }

      // Process each entry
      let excluded = 0;
      let included = 0;
      let dirProcessed = 0;
      let fileProcessed = 0;

      log(`\n Processing ${entries.length} entries in ${normalizedDirPath}...`);

      // ==================== CRITICAL CODE ====================
      // Log the global config status before processing entries
      log(`\n🔍 BEFORE PROCESSING: Is config loaded? ${getConfig() !== null ? 'YES ✅' : 'NO ❌'}`);
      if (getConfig()) {
        const excludePatternsLength = getConfig()?.excludePatterns?.length || 0;
        log(` - Exclude patterns count: ${excludePatternsLength}`);
        if (excludePatternsLength > 0) {
          log(` - First few patterns: ${getConfig()?.excludePatterns?.slice(0, 3).join(', ')}`);
        }
      }
      // ======================================================

      for (const entry of entries) {
        const fullPath = path.join(normalizedDirPath, entry.name);
        const normalizedFullPath = path.normalize(fullPath);

        log(`\n Entry: ${entry.name} (${entry.isDirectory() ? 'directory' : 'file'})`);
        log(` - Full path: ${normalizedFullPath}`);

        // Here's the critical exclusion check
        log(` 🔍 Checking if path should be excluded: ${normalizedFullPath}`);
        const shouldExclude = isExcluded(normalizedFullPath, normalizedBaseDir);
        log(` 🔍 Exclusion check result: ${shouldExclude ? 'EXCLUDE ✅' : 'INCLUDE ❌'}`);

        if (shouldExclude) {
          log(` - ✅ Skipping excluded path: ${normalizedFullPath}`);
          excluded++;
          continue;
        }

        log(` - ✅ Including path: ${normalizedFullPath}`);
        included++;

        if (entry.isDirectory()) {
          log(` - Processing directory: ${normalizedFullPath}`);
          const childNode = await scanDirectory(normalizedBaseDir, fullPath);
          rootNode.children?.push(childNode);
          dirProcessed++;
        } else {
          log(` - Processing file: ${normalizedFullPath}`);
          fileProcessed++;

          const ext = path.extname(entry.name);
          const importPattern = IMPORT_PATTERNS[ext];
          const dependencies: string[] = [];
          const packageDependencies: PackageDependency[] = [];

          if (importPattern) {
            try {
              const content = await fsPromises.readFile(fullPath, 'utf-8');
              const matches = content.match(importPattern);
              log(`Found ${matches?.length || 0} potential imports in ${normalizedFullPath}`);

              if (matches) {
                for (const match of matches) {
                  const importPath = extractImportPath(match);
                  if (importPath) {
                    // Skip if the importPath looks like an unresolved template literal
                    if (isUnresolvedTemplateLiteral(importPath)) {
                      log(`Skipping unresolved template literal: ${importPath}`);
                      continue;
                    }
                    try {
                      let resolvedPath;
                      if (['.js', '.jsx', '.ts', '.tsx'].includes(ext)) {
                        resolvedPath = resolveImportPath(importPath, normalizedFullPath, normalizedBaseDir);
                      } else {
                        resolvedPath = path.resolve(path.dirname(fullPath), importPath);
                      }
                      log(`Resolved path: ${resolvedPath}`);

                      // Handle package imports
                      if (resolvedPath.includes('node_modules') || importPath.startsWith('@') ||
                          (!importPath.startsWith('.') && !importPath.startsWith('/'))) {
                        // Create a package dependency object with more information
                        const pkgDep = PackageDependency.fromPath(resolvedPath);

                        // Set the package name directly from the import path if it's empty
                        if (!pkgDep.name) {
                          // Skip if the importPath looks like an unresolved template literal
                          if (isUnresolvedTemplateLiteral(importPath)) {
                            log(`Skipping package dependency with template literal name: ${importPath}`);
                            continue;
                          }
                          // For imports like '@scope/package'
                          if (importPath.startsWith('@')) {
                            const parts = importPath.split('/');
                            if (parts.length >= 2) {
                              pkgDep.scope = parts[0];
                              pkgDep.name = `${parts[0]}/${parts[1]}`;
                            }
                          }
                          // For imports like 'package'
                          else if (importPath.includes('/')) {
                            pkgDep.name = importPath.split('/')[0];
                          } else {
                            pkgDep.name = importPath;
                          }
                        }

                        // Skip if the resolved package name is a template literal
                        if (isUnresolvedTemplateLiteral(pkgDep.name)) {
                          log(`Skipping package with template literal name: ${pkgDep.name}`);
                          continue;
                        }

                        // Try to extract version information
                        if (pkgDep.name) {
                          const version = await extractPackageVersion(pkgDep.name, normalizedBaseDir);
                          if (version) {
                            pkgDep.version = version;
                          }
                          // Check if it's a dev dependency
                          try {
                            const packageJsonPath = path.join(normalizedBaseDir, 'package.json');
                            const content = await fsPromises.readFile(packageJsonPath, 'utf-8');
                            const packageData = JSON.parse(content);
                            if (packageData.devDependencies && packageData.devDependencies[pkgDep.name]) {
                              pkgDep.isDevDependency = true;
                            }
                          } catch (error) {
                            // Ignore package.json errors
                          }
                        }

                        packageDependencies.push(pkgDep);
                        continue;
                      }

                      // Try with different extensions for TypeScript/JavaScript files
                      const possibleExtensions = ['.ts', '.tsx', '.js', '.jsx', ''];
                      for (const extension of possibleExtensions) {
                        const pathToCheck = resolvedPath + extension;
                        try {
                          await fsPromises.access(pathToCheck);
                          log(`Found existing path: ${pathToCheck}`);
                          dependencies.push(pathToCheck);
                          break;
                        } catch {
                          // File doesn't exist with this extension, try next one
                        }
                      }
                    } catch (error) {
                      log(`Failed to resolve path for ${importPath}:`, error);
                    }
                  }
                }
              }
            } catch (error) {
              log(`Failed to read or process file ${fullPath}:`, error);
            }
          }

          const fileNode: FileNode = {
            path: normalizedFullPath,
            name: entry.name,
            isDirectory: false,
            importance: calculateInitialImportance(normalizedFullPath, normalizedBaseDir),
            dependencies: dependencies,
            packageDependencies: packageDependencies,
            dependents: [],
            summary: undefined
          };
          rootNode.children?.push(fileNode);
        }
      }

      // Log summary for this directory
      log(`\n 📊 DIRECTORY SCAN SUMMARY for ${normalizedDirPath}:`);
      log(` - Total entries: ${entries.length}`);
      log(` - Excluded: ${excluded}`);
      log(` - Included: ${included}`);
      log(` - Directories processed: ${dirProcessed}`);
      log(` - Files processed: ${fileProcessed}`);
      log(` 📁 END SCAN DIRECTORY: ${currentDir}\n`);

      return rootNode;
    }
  • The FileNode class defines the data structure used to represent files and directories in the file tree returned by the create_file_tree tool.
    export class FileNode {
      path: string = '';
      name: string = '';
      isDirectory: boolean = false;
      children?: FileNode[];
      dependencies?: string[];                    // Outgoing dependencies (local files this file imports)
      packageDependencies?: PackageDependency[];  // Outgoing dependencies (package files this file imports)
      dependents?: string[];                      // Incoming dependencies (files that import this file)
      importance?: number;                        // 0-10 scale
      summary?: string;                           // Human-readable summary of the file
      mermaidDiagram?: MermaidDiagram;            // Optional Mermaid diagram for this node
    }
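
The exported functions above can also be exercised directly. The following is a minimal usage sketch: build the tree for a project root, then recursively walk the returned FileNode structure. The import path and the project path are placeholder assumptions, and the walk relies only on the FileNode fields shown in the reference.

    // Minimal usage sketch. The module path below is a placeholder assumption;
    // createFileTree and FileNode are the exports shown in the reference above.
    import { createFileTree, FileNode } from "./file-utils.js"; // hypothetical path

    // Recursively print each node with its importance and local dependency count.
    function printTree(node: FileNode, depth = 0): void {
      const indent = "  ".repeat(depth);
      if (node.isDirectory) {
        console.log(`${indent}${node.name}/`);
        for (const child of node.children ?? []) {
          printTree(child, depth + 1);
        }
      } else {
        const localDeps = node.dependencies?.length ?? 0;
        console.log(`${indent}${node.name} (importance: ${node.importance ?? 0}, local deps: ${localDeps})`);
      }
    }

    const tree = await createFileTree("/home/user/projects/my-app"); // placeholder project path
    printTree(tree);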
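
isExcluded and getConfig are called in scanDirectory but are not reproduced on this page. The sketch below is a hypothetical stand-in, not FileScopeMCP's actual implementation: it only illustrates the kind of check being made, matching the path (relative to the base directory) against configured excludePatterns treated as simple glob-like strings. A real implementation would more likely rely on a dedicated glob matcher.

    import path from "path";

    // Hypothetical config shape; in FileScopeMCP the patterns come from getConfig().excludePatterns.
    interface ScanConfig {
      excludePatterns?: string[];
    }

    // Illustrative stand-in for isExcluded (not the project's real code):
    // normalize the path relative to the base directory, then test it against each
    // pattern, treating "*" as "anything within a segment" and "**" as "any depth".
    function isExcludedSketch(fullPath: string, baseDir: string, config: ScanConfig): boolean {
      const relative = path.relative(baseDir, fullPath).split(path.sep).join("/");
      return (config.excludePatterns ?? []).some(pattern => {
        const source = pattern
          .split("/")
          .map(segment =>
            segment === "**"
              ? ".*"
              : segment.replace(/[.+^${}()|[\]\\]/g, "\\$&").replace(/\*/g, "[^/]*")
          )
          .join("/");
        return new RegExp(`^${source}(/.*)?$`).test(relative);
      });
    }

    // Example: with patterns ["node_modules/**", "*.log"], an entry under
    // <baseDir>/node_modules/... or a top-level *.log file would be skipped.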

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/admica/FileScopeMCP'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.