import { MCPError, ErrorCode } from '../models/types.js';
import { ProjectManager } from '../services/project_manager.js';
import { readdir, stat } from 'fs/promises';
import { join } from 'path';
/** One entry in the flattened project file tree. */
interface FileTreeEntry {
  /** Path relative to the crawl root; directory paths carry a trailing '/'. */
  path: string;
  type: 'file' | 'directory';
  /** Size in bytes — files only. */
  size?: number;
  /** Last-modified time as an ISO 8601 string — files only. */
  modified?: string;
}

/**
 * Recursively crawl `rootPath` and return a single flattened list of entries
 * (directories listed before their contents).
 *
 * Dot-prefixed entries and `node_modules` are skipped. The crawl is
 * best-effort: a directory that cannot be read is omitted, and a file that
 * vanishes between `readdir` and `stat` is skipped — neither aborts the walk.
 *
 * @param rootPath absolute path the crawl is rooted at
 * @param relPath  path relative to `rootPath` currently being visited
 */
async function crawlDirectory(rootPath: string, relPath: string = ''): Promise<FileTreeEntry[]> {
  const items: FileTreeEntry[] = [];
  const fullPath = join(rootPath, relPath);

  // Unreadable directory (permissions, race with deletion): omit it silently
  // rather than failing the whole crawl.
  const entries = await readdir(fullPath, { withFileTypes: true }).catch(() => null);
  if (entries === null) return items;

  for (const entry of entries) {
    // Skip hidden entries and dependency trees — noise for consumers.
    if (entry.name.startsWith('.') || entry.name === 'node_modules') continue;
    const entryRelPath = join(relPath, entry.name);
    if (entry.isDirectory()) {
      items.push({
        path: entryRelPath + '/',
        type: 'directory'
      });
      // Flattened recursive listing: children appear after their directory.
      items.push(...await crawlDirectory(rootPath, entryRelPath));
    } else {
      try {
        const stats = await stat(join(rootPath, entryRelPath));
        items.push({
          path: entryRelPath,
          type: 'file',
          size: stats.size,
          modified: stats.mtime.toISOString()
        });
      } catch {
        // File disappeared between readdir and stat — skip just this entry;
        // the previous version aborted the rest of the directory here.
      }
    }
  }
  return items;
}
export async function getProjectFilesResource(projectId: string): Promise<{
project: string;
rootPath: string;
tree: any[];
}> {
const projectManager = new ProjectManager();
const project = await projectManager.getProject(projectId);
if (!project) {
throw new MCPError(ErrorCode.PROJECT_NOT_FOUND, `Project '${projectId}' not found`);
}
const tree = await crawlDirectory(project.rootPath);
return {
project: project.name,
rootPath: project.rootPath,
tree
};
}