#!/usr/bin/env node
/**
* Enhanced Directory Context MCP Server
* Provides directory context AND file editing capabilities
* Supports creating, updating, deleting, and managing files
*/
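//
// Example client registration (an illustrative sketch, not part of this file's
// behavior): an MCP client that launches stdio servers can typically point at
// this script with a config entry along these lines. The "directory-context"
// key and the script path below are placeholders, not defined by this project.
//
//   {
//     "mcpServers": {
//       "directory-context": {
//         "command": "node",
//         "args": ["/path/to/server.js"]
//       }
//     }
//   }
//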
import { Server } from '@modelcontextprotocol/sdk/server/index.js';
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
import {
CallToolRequestSchema,
ErrorCode,
ListResourcesRequestSchema,
ListToolsRequestSchema,
McpError,
ReadResourceRequestSchema,
} from '@modelcontextprotocol/sdk/types.js';
import fs from 'fs/promises';
import path from 'path';
import { fileURLToPath } from 'url';
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
class EnhancedDirectoryContextServer {
constructor() {
this.workingDirectory = process.cwd();
this.server = new Server(
{
name: 'enhanced-directory-context-server',
version: '2.0.0',
},
{
capabilities: {
resources: {},
tools: {},
},
}
);
this.setupHandlers();
}
setupHandlers() {
// List available tools
this.server.setRequestHandler(ListToolsRequestSchema, async () => {
return {
tools: [
// Existing tools
{
name: 'set_working_directory',
description: 'Set the working directory for context extraction',
inputSchema: {
type: 'object',
properties: {
directory: {
type: 'string',
description: 'Path to the directory to analyze',
},
},
required: ['directory'],
},
},
{
name: 'get_directory_structure',
description: 'Get the complete directory structure with file metadata',
inputSchema: {
type: 'object',
properties: {
max_depth: {
type: 'number',
description: 'Maximum depth to traverse (default: 10)',
default: 10,
},
include_hidden: {
type: 'boolean',
description: 'Include hidden files and directories',
default: false,
},
file_types: {
type: 'array',
description: 'Filter by file extensions (e.g., [".js", ".py", ".md"])',
items: { type: 'string' },
},
},
},
},
{
name: 'get_file_contents',
description: 'Read contents of specific files',
inputSchema: {
type: 'object',
properties: {
files: {
type: 'array',
description: 'Array of file paths to read',
items: { type: 'string' },
},
encoding: {
type: 'string',
description: 'File encoding (default: utf8)',
default: 'utf8',
},
},
required: ['files'],
},
},
{
name: 'search_files',
description: 'Search for content within files using regex or text matching',
inputSchema: {
type: 'object',
properties: {
query: {
type: 'string',
description: 'Search query (text or regex pattern)',
},
file_types: {
type: 'array',
description: 'File extensions to search in',
items: { type: 'string' },
},
is_regex: {
type: 'boolean',
description: 'Whether the query is a regex pattern',
default: false,
},
max_results: {
type: 'number',
description: 'Maximum number of results to return',
default: 50,
},
},
required: ['query'],
},
},
{
name: 'analyze_project_context',
description: 'Analyze the project structure and provide intelligent context summary',
inputSchema: {
type: 'object',
properties: {
include_code_analysis: {
type: 'boolean',
description: 'Include code complexity and dependency analysis',
default: true,
},
focus_files: {
type: 'array',
description: 'Specific files to focus analysis on',
items: { type: 'string' },
},
},
},
},
{
name: 'get_git_context',
description: 'Extract Git repository context (branch, recent commits, status)',
inputSchema: {
type: 'object',
properties: {
include_diff: {
type: 'boolean',
description: 'Include current working directory changes',
default: true,
},
commit_count: {
type: 'number',
description: 'Number of recent commits to include',
default: 10,
},
},
},
},
// New file editing tools
{
name: 'create_file',
description: 'Create a new file with specified content',
inputSchema: {
type: 'object',
properties: {
path: {
type: 'string',
description: 'File path relative to working directory',
},
content: {
type: 'string',
description: 'Content to write to the file',
},
encoding: {
type: 'string',
description: 'File encoding (default: utf8)',
default: 'utf8',
},
overwrite: {
type: 'boolean',
description: 'Overwrite if file already exists',
default: false,
},
},
required: ['path', 'content'],
},
},
{
name: 'update_file',
description: 'Update specific parts of a file using search and replace',
inputSchema: {
type: 'object',
properties: {
path: {
type: 'string',
description: 'File path relative to working directory',
},
updates: {
type: 'array',
description: 'Array of search/replace operations',
items: {
type: 'object',
properties: {
search: {
type: 'string',
description: 'Text to search for (exact match)',
},
replace: {
type: 'string',
description: 'Text to replace with',
},
regex: {
type: 'boolean',
description: 'Use regex for search',
default: false,
},
all: {
type: 'boolean',
description: 'Replace all occurrences',
default: true,
},
},
required: ['search', 'replace'],
},
},
backup: {
type: 'boolean',
description: 'Create backup before updating (.bak extension)',
default: true,
},
},
required: ['path', 'updates'],
},
},
{
name: 'append_to_file',
description: 'Append content to the end of a file',
inputSchema: {
type: 'object',
properties: {
path: {
type: 'string',
description: 'File path relative to working directory',
},
content: {
type: 'string',
description: 'Content to append',
},
newline_before: {
type: 'boolean',
description: 'Add newline before appending',
default: true,
},
},
required: ['path', 'content'],
},
},
{
name: 'delete_file',
description: 'Delete a file',
inputSchema: {
type: 'object',
properties: {
path: {
type: 'string',
description: 'File path relative to working directory',
},
backup: {
type: 'boolean',
description: 'Create backup before deleting (.deleted extension)',
default: true,
},
},
required: ['path'],
},
},
{
name: 'rename_file',
description: 'Rename or move a file',
inputSchema: {
type: 'object',
properties: {
old_path: {
type: 'string',
description: 'Current file path',
},
new_path: {
type: 'string',
description: 'New file path',
},
overwrite: {
type: 'boolean',
description: 'Overwrite if destination exists',
default: false,
},
},
required: ['old_path', 'new_path'],
},
},
{
name: 'create_directory',
description: 'Create a new directory',
inputSchema: {
type: 'object',
properties: {
path: {
type: 'string',
description: 'Directory path relative to working directory',
},
recursive: {
type: 'boolean',
description: 'Create parent directories if needed',
default: true,
},
},
required: ['path'],
},
},
{
name: 'batch_file_operations',
            description: 'Perform multiple file operations in sequence, with optional rollback on failure',
inputSchema: {
type: 'object',
properties: {
operations: {
type: 'array',
description: 'Array of file operations to perform',
items: {
type: 'object',
properties: {
operation: {
type: 'string',
enum: ['create', 'update', 'append', 'delete', 'rename'],
description: 'Type of operation',
},
params: {
type: 'object',
description: 'Parameters for the operation',
},
},
required: ['operation', 'params'],
},
},
rollback_on_error: {
type: 'boolean',
description: 'Rollback all operations if any fails',
default: true,
},
},
required: ['operations'],
},
},
],
};
});
// List available resources
this.server.setRequestHandler(ListResourcesRequestSchema, async () => {
const resources = [];
try {
const structure = await this.getDirectoryStructure(this.workingDirectory, 3);
// Add directory overview as a resource
resources.push({
uri: `context://directory-overview`,
mimeType: 'text/plain',
name: 'Directory Overview',
description: `Overview of ${this.workingDirectory}`,
});
// Add important files as resources
const importantFiles = this.identifyImportantFiles(structure);
for (const file of importantFiles) {
resources.push({
uri: `context://file/${file.path}`,
mimeType: this.getMimeType(file.path),
name: path.basename(file.path),
description: `Contents of ${file.path}`,
});
}
} catch (error) {
console.error('Error listing resources:', error);
}
return { resources };
});
// Read resource content
this.server.setRequestHandler(ReadResourceRequestSchema, async (request) => {
const { uri } = request.params;
if (uri === 'context://directory-overview') {
const overview = await this.generateDirectoryOverview();
return {
contents: [
{
uri,
mimeType: 'text/plain',
text: overview,
},
],
};
}
if (uri.startsWith('context://file/')) {
const filePath = uri.replace('context://file/', '');
const fullPath = path.resolve(this.workingDirectory, filePath);
try {
const content = await fs.readFile(fullPath, 'utf8');
return {
contents: [
{
uri,
mimeType: this.getMimeType(filePath),
text: content,
},
],
};
} catch (error) {
throw new McpError(ErrorCode.InternalError, `Failed to read file: ${error.message}`);
}
}
throw new McpError(ErrorCode.InvalidRequest, `Unknown resource: ${uri}`);
});
// Handle tool calls
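    // Illustrative request shape (an assumption about client usage, not emitted
    // by this server): a client invokes a tool such as create_file with a
    // JSON-RPC "tools/call" request, which this handler dispatches by name:
    //
    //   { "method": "tools/call",
    //     "params": { "name": "create_file",
    //                 "arguments": { "path": "notes/todo.md",
    //                                "content": "# TODO\n",
    //                                "overwrite": false } } }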
this.server.setRequestHandler(CallToolRequestSchema, async (request) => {
      // Default to an empty object so tools called without arguments still destructure cleanly
      const { name, arguments: args = {} } = request.params;
try {
switch (name) {
// Existing tools
case 'set_working_directory':
return await this.setWorkingDirectory(args.directory);
case 'get_directory_structure':
return await this.handleGetDirectoryStructure(args);
case 'get_file_contents':
return await this.handleGetFileContents(args);
case 'search_files':
return await this.handleSearchFiles(args);
case 'analyze_project_context':
return await this.handleAnalyzeProjectContext(args);
case 'get_git_context':
return await this.handleGetGitContext(args);
// New file editing tools
case 'create_file':
return await this.handleCreateFile(args);
case 'update_file':
return await this.handleUpdateFile(args);
case 'append_to_file':
return await this.handleAppendToFile(args);
case 'delete_file':
return await this.handleDeleteFile(args);
case 'rename_file':
return await this.handleRenameFile(args);
case 'create_directory':
return await this.handleCreateDirectory(args);
case 'batch_file_operations':
return await this.handleBatchFileOperations(args);
default:
throw new McpError(ErrorCode.MethodNotFound, `Unknown tool: ${name}`);
}
} catch (error) {
if (error instanceof McpError) {
throw error;
}
throw new McpError(ErrorCode.InternalError, `Tool execution failed: ${error.message}`);
}
});
}
// New file editing methods
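  /**
   * create_file: writes `content` to `path` (resolved against the working
   * directory), creating parent directories as needed. Refuses to overwrite
   * an existing file unless `overwrite` is true.
   */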
async handleCreateFile(args) {
const { path: filePath, content, encoding = 'utf8', overwrite = false } = args;
const fullPath = path.resolve(this.workingDirectory, filePath);
    try {
      // Check whether the file already exists; fs.access rejects when it does not
      let exists = true;
      try {
        await fs.access(fullPath);
      } catch {
        exists = false;
      }
      if (exists && !overwrite) {
        throw new Error('File already exists. Set overwrite=true to replace it.');
      }
// Ensure directory exists
const dir = path.dirname(fullPath);
await fs.mkdir(dir, { recursive: true });
// Write file
await fs.writeFile(fullPath, content, encoding);
return {
content: [
{
type: 'text',
text: `File created successfully: ${filePath}`,
},
],
};
} catch (error) {
throw new McpError(ErrorCode.InternalError, `Failed to create file: ${error.message}`);
}
}
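  /**
   * update_file: applies a list of search/replace operations (plain text or
   * regex) to an existing file and reports how many replacements were made.
   * When `backup` is true, the original content is saved to `<path>.bak`.
   */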
async handleUpdateFile(args) {
const { path: filePath, updates, backup = true } = args;
const fullPath = path.resolve(this.workingDirectory, filePath);
try {
// Read current content
let content = await fs.readFile(fullPath, 'utf8');
const originalContent = content;
// Create backup if requested
if (backup) {
await fs.writeFile(`${fullPath}.bak`, originalContent, 'utf8');
}
// Apply updates
let updateCount = 0;
for (const update of updates) {
const { search, replace, regex = false, all = true } = update;
if (regex) {
const flags = all ? 'g' : '';
const pattern = new RegExp(search, flags);
const matches = content.match(pattern);
if (matches) {
content = content.replace(pattern, replace);
updateCount += matches.length;
}
} else {
if (all) {
const parts = content.split(search);
if (parts.length > 1) {
content = parts.join(replace);
updateCount += parts.length - 1;
}
} else {
const index = content.indexOf(search);
if (index !== -1) {
content = content.substring(0, index) + replace + content.substring(index + search.length);
updateCount++;
}
}
}
}
// Write updated content
await fs.writeFile(fullPath, content, 'utf8');
return {
content: [
{
type: 'text',
text: `File updated successfully: ${filePath}\nReplacements made: ${updateCount}${backup ? '\nBackup created: ' + filePath + '.bak' : ''}`,
},
],
};
} catch (error) {
throw new McpError(ErrorCode.InternalError, `Failed to update file: ${error.message}`);
}
}
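  /**
   * append_to_file: appends `content` to the file, creating it if it does not
   * exist. With `newline_before`, a newline is inserted first unless the
   * existing content already ends with one.
   */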
async handleAppendToFile(args) {
const { path: filePath, content, newline_before = true } = args;
const fullPath = path.resolve(this.workingDirectory, filePath);
try {
// Check if file exists
let existingContent = '';
try {
existingContent = await fs.readFile(fullPath, 'utf8');
} catch (error) {
// File doesn't exist, will be created
}
// Prepare content to append
let finalContent = existingContent;
if (existingContent && newline_before && !existingContent.endsWith('\n')) {
finalContent += '\n';
}
finalContent += content;
// Write file
await fs.writeFile(fullPath, finalContent, 'utf8');
return {
content: [
{
type: 'text',
text: `Content appended to file: ${filePath}`,
},
],
};
} catch (error) {
throw new McpError(ErrorCode.InternalError, `Failed to append to file: ${error.message}`);
}
}
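  /**
   * delete_file: removes the file, optionally copying it to `<path>.deleted`
   * first so the deletion can be undone by hand.
   */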
async handleDeleteFile(args) {
const { path: filePath, backup = true } = args;
const fullPath = path.resolve(this.workingDirectory, filePath);
try {
// Check if file exists
await fs.access(fullPath);
// Create backup if requested
if (backup) {
const backupPath = `${fullPath}.deleted`;
await fs.copyFile(fullPath, backupPath);
}
// Delete file
await fs.unlink(fullPath);
return {
content: [
{
type: 'text',
text: `File deleted: ${filePath}${backup ? '\nBackup created: ' + filePath + '.deleted' : ''}`,
},
],
};
} catch (error) {
throw new McpError(ErrorCode.InternalError, `Failed to delete file: ${error.message}`);
}
}
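  /**
   * rename_file: moves `old_path` to `new_path`, creating the destination
   * directory if needed. Refuses to clobber an existing destination unless
   * `overwrite` is true.
   */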
async handleRenameFile(args) {
const { old_path, new_path, overwrite = false } = args;
const oldFullPath = path.resolve(this.workingDirectory, old_path);
const newFullPath = path.resolve(this.workingDirectory, new_path);
try {
// Check if source exists
await fs.access(oldFullPath);
      // Check whether the destination exists; fs.access rejects when it does not
      let destinationExists = true;
      try {
        await fs.access(newFullPath);
      } catch {
        destinationExists = false;
      }
      if (destinationExists && !overwrite) {
        throw new Error('Destination file already exists. Set overwrite=true to replace it.');
      }
// Ensure destination directory exists
const destDir = path.dirname(newFullPath);
await fs.mkdir(destDir, { recursive: true });
// Rename/move file
await fs.rename(oldFullPath, newFullPath);
return {
content: [
{
type: 'text',
text: `File renamed/moved: ${old_path} → ${new_path}`,
},
],
};
} catch (error) {
throw new McpError(ErrorCode.InternalError, `Failed to rename file: ${error.message}`);
}
}
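  /**
   * create_directory: creates a directory, including parent directories when
   * `recursive` is true (the default).
   */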
async handleCreateDirectory(args) {
const { path: dirPath, recursive = true } = args;
const fullPath = path.resolve(this.workingDirectory, dirPath);
try {
await fs.mkdir(fullPath, { recursive });
return {
content: [
{
type: 'text',
text: `Directory created: ${dirPath}`,
},
],
};
} catch (error) {
throw new McpError(ErrorCode.InternalError, `Failed to create directory: ${error.message}`);
}
}
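  /**
   * batch_file_operations: runs a sequence of create/update/append/delete/
   * rename operations. With `rollback_on_error`, previously completed steps
   * are undone via rollbackOperations() when a later step fails; appends
   * cannot be rolled back.
   */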
async handleBatchFileOperations(args) {
const { operations, rollback_on_error = true } = args;
const results = [];
const performedOperations = [];
try {
for (const op of operations) {
const { operation, params } = op;
try {
let result;
switch (operation) {
case 'create':
result = await this.handleCreateFile(params);
performedOperations.push({ type: 'create', path: params.path });
break;
              case 'update': {
                // Snapshot the file first so a failed batch can restore it during rollback
                const updateTarget = path.resolve(this.workingDirectory, params.path);
                await fs.copyFile(updateTarget, `${updateTarget}.batch-backup`);
                result = await this.handleUpdateFile({ ...params, backup: false });
                performedOperations.push({ type: 'update', path: params.path, backupPath: `${params.path}.batch-backup` });
                break;
              }
case 'append':
result = await this.handleAppendToFile(params);
performedOperations.push({ type: 'append', path: params.path });
break;
              case 'delete':
                // Keep the .deleted backup so the file can be restored during rollback
                result = await this.handleDeleteFile({ ...params, backup: true });
                performedOperations.push({ type: 'delete', path: params.path, backupPath: `${params.path}.deleted` });
                break;
case 'rename':
result = await this.handleRenameFile(params);
performedOperations.push({ type: 'rename', oldPath: params.old_path, newPath: params.new_path });
break;
default:
throw new Error(`Unknown operation: ${operation}`);
}
results.push({
operation,
status: 'success',
message: result.content[0].text,
});
} catch (error) {
results.push({
operation,
status: 'error',
message: error.message,
});
if (rollback_on_error) {
// Rollback performed operations
await this.rollbackOperations(performedOperations);
throw new Error(`Batch operation failed at step ${results.length}. All changes rolled back. Error: ${error.message}`);
}
}
}
return {
content: [
{
type: 'text',
text: JSON.stringify({
summary: `Batch operations completed: ${results.filter(r => r.status === 'success').length}/${operations.length} successful`,
results,
}, null, 2),
},
],
};
} catch (error) {
throw new McpError(ErrorCode.InternalError, `Batch operations failed: ${error.message}`);
}
}
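  /**
   * Best-effort rollback for batch operations, applied in reverse order:
   * created files are removed, updated and deleted files are restored from
   * their backups, and renames are reversed. Individual rollback failures are
   * logged and skipped.
   */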
async rollbackOperations(operations) {
// Rollback in reverse order
for (let i = operations.length - 1; i >= 0; i--) {
const op = operations[i];
try {
switch (op.type) {
case 'create':
await fs.unlink(path.resolve(this.workingDirectory, op.path));
break;
case 'update':
if (op.backupPath) {
const fullPath = path.resolve(this.workingDirectory, op.path);
const backupPath = path.resolve(this.workingDirectory, op.backupPath);
await fs.copyFile(backupPath, fullPath);
await fs.unlink(backupPath);
}
break;
          case 'rename':
            await fs.rename(
              path.resolve(this.workingDirectory, op.newPath),
              path.resolve(this.workingDirectory, op.oldPath)
            );
            break;
          case 'delete':
            // Restore from the .deleted backup created before the delete
            if (op.backupPath) {
              const backupPath = path.resolve(this.workingDirectory, op.backupPath);
              await fs.copyFile(backupPath, path.resolve(this.workingDirectory, op.path));
            }
            break;
          // append is harder to roll back without a prior content snapshot
}
} catch (error) {
console.error(`Failed to rollback operation:`, op, error);
}
}
}
// Existing methods (kept as-is)
async setWorkingDirectory(directory) {
try {
const resolvedPath = path.resolve(directory);
const stats = await fs.stat(resolvedPath);
if (!stats.isDirectory()) {
throw new Error('Path is not a directory');
}
this.workingDirectory = resolvedPath;
return {
content: [
{
type: 'text',
text: `Working directory set to: ${this.workingDirectory}`,
},
],
};
} catch (error) {
throw new McpError(ErrorCode.InvalidParams, `Invalid directory: ${error.message}`);
}
}
async handleGetDirectoryStructure(args) {
const { max_depth = 10, include_hidden = false, file_types } = args;
const structure = await this.getDirectoryStructure(
this.workingDirectory,
max_depth,
include_hidden,
file_types
);
return {
content: [
{
type: 'text',
text: JSON.stringify(structure, null, 2),
},
],
};
}
async handleGetFileContents(args) {
const { files, encoding = 'utf8' } = args;
const results = {};
for (const file of files) {
try {
const fullPath = path.resolve(this.workingDirectory, file);
const content = await fs.readFile(fullPath, encoding);
results[file] = content;
} catch (error) {
results[file] = { error: error.message };
}
}
return {
content: [
{
type: 'text',
text: JSON.stringify(results, null, 2),
},
],
};
}
async handleSearchFiles(args) {
const { query, file_types, is_regex = false, max_results = 50 } = args;
const results = await this.searchFiles(query, file_types, is_regex, max_results);
return {
content: [
{
type: 'text',
text: JSON.stringify(results, null, 2),
},
],
};
}
async handleAnalyzeProjectContext(args) {
const { include_code_analysis = true, focus_files } = args;
const analysis = await this.analyzeProjectContext(include_code_analysis, focus_files);
return {
content: [
{
type: 'text',
text: analysis,
},
],
};
}
async handleGetGitContext(args) {
const { include_diff = true, commit_count = 10 } = args;
const gitContext = await this.getGitContext(include_diff, commit_count);
return {
content: [
{
type: 'text',
text: gitContext,
},
],
};
}
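  /**
   * Recursively walks a directory up to `maxDepth`, returning a nested
   * { name, path, type, children } tree with size/mtime metadata for files.
   * Hidden entries are skipped unless `includeHidden` is set, and files can
   * be filtered by extension via `fileTypes`.
   */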
async getDirectoryStructure(dirPath, maxDepth = 10, includeHidden = false, fileTypes = null, currentDepth = 0) {
if (currentDepth >= maxDepth) {
return null;
}
try {
const entries = await fs.readdir(dirPath, { withFileTypes: true });
const structure = {
name: path.basename(dirPath),
path: path.relative(this.workingDirectory, dirPath) || '.',
type: 'directory',
children: [],
};
for (const entry of entries) {
if (!includeHidden && entry.name.startsWith('.')) {
continue;
}
const fullPath = path.join(dirPath, entry.name);
const relativePath = path.relative(this.workingDirectory, fullPath);
if (entry.isDirectory()) {
const subStructure = await this.getDirectoryStructure(
fullPath,
maxDepth,
includeHidden,
fileTypes,
currentDepth + 1
);
if (subStructure) {
structure.children.push(subStructure);
}
} else {
const ext = path.extname(entry.name);
if (!fileTypes || fileTypes.includes(ext)) {
const stats = await fs.stat(fullPath);
structure.children.push({
name: entry.name,
path: relativePath,
type: 'file',
size: stats.size,
modified: stats.mtime.toISOString(),
extension: ext,
});
}
}
}
return structure;
} catch (error) {
return {
name: path.basename(dirPath),
path: path.relative(this.workingDirectory, dirPath),
type: 'directory',
error: error.message,
};
}
}
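  /**
   * Searches file contents under the working directory for a plain-text or
   * regex query, returning up to `maxResults` files with at most 10 matching
   * lines each. Unreadable files and dot-directories are skipped.
   */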
async searchFiles(query, fileTypes, isRegex, maxResults) {
const results = [];
const searchPattern = isRegex ? new RegExp(query, 'gim') : query.toLowerCase();
    // Use an arrow function so `this.workingDirectory` stays bound to the server instance
    const searchInDirectory = async (dirPath) => {
try {
const entries = await fs.readdir(dirPath, { withFileTypes: true });
for (const entry of entries) {
if (results.length >= maxResults) break;
const fullPath = path.join(dirPath, entry.name);
if (entry.isDirectory() && !entry.name.startsWith('.')) {
await searchInDirectory(fullPath);
} else if (entry.isFile()) {
const ext = path.extname(entry.name);
if (!fileTypes || fileTypes.includes(ext)) {
try {
const content = await fs.readFile(fullPath, 'utf8');
const matches = [];
                if (isRegex) {
                  // Reset lastIndex so the shared global regex does not carry state across files
                  searchPattern.lastIndex = 0;
                  let match;
while ((match = searchPattern.exec(content)) !== null && matches.length < 10) {
const lineNumber = content.substring(0, match.index).split('\n').length;
const line = content.split('\n')[lineNumber - 1];
matches.push({
line: lineNumber,
content: line.trim(),
match: match[0],
});
}
} else {
const lines = content.split('\n');
lines.forEach((line, index) => {
if (line.toLowerCase().includes(searchPattern) && matches.length < 10) {
matches.push({
line: index + 1,
content: line.trim(),
match: query,
});
}
});
}
if (matches.length > 0) {
results.push({
file: path.relative(this.workingDirectory, fullPath),
matches,
});
}
} catch (error) {
// Skip files that can't be read as text
}
}
}
}
} catch (error) {
// Skip directories that can't be read
}
    };
await searchInDirectory(this.workingDirectory);
return results;
}
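  /**
   * Builds a Markdown-style project summary: detected project type,
   * package.json metadata, a README preview, file statistics, and notable
   * files. The includeCodeAnalysis and focusFiles arguments are accepted but
   * not currently used in the summary.
   */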
async analyzeProjectContext(includeCodeAnalysis, focusFiles) {
const structure = await this.getDirectoryStructure(this.workingDirectory, 5);
const packageJsonPath = path.join(this.workingDirectory, 'package.json');
const readmePath = path.join(this.workingDirectory, 'README.md');
let analysis = `# Project Context Analysis\n\n`;
analysis += `**Working Directory:** ${this.workingDirectory}\n\n`;
// Project type detection
const projectType = await this.detectProjectType();
analysis += `**Project Type:** ${projectType}\n\n`;
// Package.json analysis
try {
const packageJson = JSON.parse(await fs.readFile(packageJsonPath, 'utf8'));
analysis += `**Project Name:** ${packageJson.name || 'Unnamed'}\n`;
analysis += `**Version:** ${packageJson.version || 'No version'}\n`;
analysis += `**Description:** ${packageJson.description || 'No description'}\n\n`;
if (packageJson.dependencies) {
analysis += `**Dependencies:** ${Object.keys(packageJson.dependencies).length}\n`;
}
if (packageJson.devDependencies) {
analysis += `**Dev Dependencies:** ${Object.keys(packageJson.devDependencies).length}\n`;
}
analysis += '\n';
} catch (error) {
// No package.json found
}
// README analysis
try {
const readme = await fs.readFile(readmePath, 'utf8');
const lines = readme.split('\n').slice(0, 10);
analysis += `**README Preview:**\n${lines.join('\n')}\n\n`;
} catch (error) {
// No README found
}
// File statistics
const stats = this.calculateFileStats(structure);
analysis += `**File Statistics:**\n`;
analysis += `- Total Files: ${stats.totalFiles}\n`;
analysis += `- Total Directories: ${stats.totalDirectories}\n`;
analysis += `- File Types: ${Object.keys(stats.fileTypes).join(', ')}\n\n`;
// Important files
const importantFiles = this.identifyImportantFiles(structure);
if (importantFiles.length > 0) {
analysis += `**Important Files:**\n`;
importantFiles.slice(0, 10).forEach(file => {
analysis += `- ${file.path}\n`;
});
analysis += '\n';
}
return analysis;
}
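  /**
   * Shells out to git (via child_process) for the current branch, recent
   * commits, status, and optionally a truncated diff of uncommitted changes.
   * Returns a plain note when the directory is not a git repository.
   */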
async getGitContext(includeDiff, commitCount) {
const { exec } = await import('child_process');
const { promisify } = await import('util');
const execAsync = promisify(exec);
let gitContext = '# Git Context\n\n';
try {
// Check if it's a git repository
await execAsync('git rev-parse --git-dir', { cwd: this.workingDirectory });
// Get current branch
const { stdout: branch } = await execAsync('git branch --show-current', { cwd: this.workingDirectory });
gitContext += `**Current Branch:** ${branch.trim()}\n\n`;
// Get recent commits
const { stdout: commits } = await execAsync(
`git log --oneline -${commitCount}`,
{ cwd: this.workingDirectory }
);
gitContext += `**Recent Commits:**\n\`\`\`\n${commits}\`\`\`\n\n`;
// Get status
const { stdout: status } = await execAsync('git status --porcelain', { cwd: this.workingDirectory });
if (status.trim()) {
gitContext += `**Working Directory Status:**\n\`\`\`\n${status}\`\`\`\n\n`;
}
// Get diff if requested
if (includeDiff) {
try {
const { stdout: diff } = await execAsync('git diff HEAD', { cwd: this.workingDirectory });
if (diff.trim()) {
gitContext += `**Current Changes:**\n\`\`\`diff\n${diff.slice(0, 2000)}${diff.length > 2000 ? '\n... (truncated)' : ''}\n\`\`\`\n\n`;
}
} catch (error) {
// No diff available
}
}
} catch (error) {
gitContext += 'Not a Git repository or Git not available.\n';
}
return gitContext;
}
async detectProjectType() {
const indicators = {
'package.json': 'Node.js/JavaScript',
'requirements.txt': 'Python',
'Cargo.toml': 'Rust',
'go.mod': 'Go',
'pom.xml': 'Java (Maven)',
'build.gradle': 'Java (Gradle)',
'composer.json': 'PHP',
'Gemfile': 'Ruby',
'mix.exs': 'Elixir',
'pubspec.yaml': 'Dart/Flutter',
'Dockerfile': 'Docker',
'docker-compose.yml': 'Docker Compose',
};
for (const [file, type] of Object.entries(indicators)) {
try {
await fs.access(path.join(this.workingDirectory, file));
return type;
} catch (error) {
// File doesn't exist
}
}
return 'Unknown';
}
calculateFileStats(structure) {
const stats = {
totalFiles: 0,
totalDirectories: 0,
fileTypes: {},
};
function traverse(node) {
if (node.type === 'directory') {
stats.totalDirectories++;
if (node.children) {
node.children.forEach(traverse);
}
} else {
stats.totalFiles++;
const ext = node.extension || 'no extension';
stats.fileTypes[ext] = (stats.fileTypes[ext] || 0) + 1;
}
}
traverse(structure);
return stats;
}
identifyImportantFiles(structure) {
const important = [];
const importantPatterns = [
/^package\.json$/,
/^README\.md$/i,
/^index\.(js|ts|html)$/,
/^main\.(js|ts|py)$/,
/^app\.(js|ts|py)$/,
/^server\.(js|ts|py)$/,
/^config\.(js|ts|json|yaml|yml)$/,
/^docker-compose\.yml$/,
/^Dockerfile$/,
/^\.env$/,
];
function traverse(node) {
if (node.type === 'file') {
if (importantPatterns.some(pattern => pattern.test(node.name))) {
important.push(node);
}
} else if (node.children) {
node.children.forEach(traverse);
}
}
traverse(structure);
return important;
}
async generateDirectoryOverview() {
const analysis = await this.analyzeProjectContext(false);
const structure = await this.getDirectoryStructure(this.workingDirectory, 2);
let overview = analysis + '\n\n';
overview += '## Directory Structure\n\n';
overview += this.formatStructureTree(structure);
return overview;
}
formatStructureTree(node, indent = 0) {
let result = '';
const spaces = ' '.repeat(indent);
if (node.type === 'directory') {
result += `${spaces}📁 ${node.name}/\n`;
if (node.children) {
for (const child of node.children) {
result += this.formatStructureTree(child, indent + 1);
}
}
} else {
const icon = this.getFileIcon(node.extension);
result += `${spaces}${icon} ${node.name}\n`;
}
return result;
}
getFileIcon(extension) {
const icons = {
'.js': '📜',
'.ts': '📘',
'.py': '🐍',
'.html': '🌐',
'.css': '🎨',
'.json': '📋',
'.md': '📝',
'.yml': '⚙️',
'.yaml': '⚙️',
'.xml': '📄',
'.txt': '📄',
};
return icons[extension] || '📄';
}
getMimeType(filePath) {
const ext = path.extname(filePath).toLowerCase();
const mimeTypes = {
'.js': 'application/javascript',
'.ts': 'application/typescript',
'.json': 'application/json',
'.html': 'text/html',
'.css': 'text/css',
'.md': 'text/markdown',
'.py': 'text/x-python',
'.yml': 'text/yaml',
'.yaml': 'text/yaml',
'.xml': 'text/xml',
};
return mimeTypes[ext] || 'text/plain';
}
async run() {
const transport = new StdioServerTransport();
await this.server.connect(transport);
console.error('Enhanced Directory Context MCP Server running on stdio');
}
}
const server = new EnhancedDirectoryContextServer();
server.run().catch(console.error);