import fs from "fs-extra";
import path from "path";
import archiver from "archiver";
import StreamZip from "node-stream-zip";
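/**
 * Advanced filesystem operations (archiving, extraction, batch rename and
 * file organization). Every path is validated through the injected security
 * manager before any disk access.
 */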
export class AdvancedOperations {
constructor(securityManager) {
this.security = securityManager;
}
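  /**
   * Compress files and/or directories into a single archive.
   * @param {string[]} sourcePaths - files or directories to add
   * @param {string} destinationPath - path of the archive to create
   * @param {string} [format='zip'] - archiver format (e.g. 'zip' or 'tar')
   * @returns a result object with a human-readable summary
   */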
async compressFiles(sourcePaths, destinationPath, format = 'zip') {
try {
const validDestination = this.security.validatePath(destinationPath);
// Validate all source paths
const validSources = sourcePaths.map(sourcePath => {
const validPath = this.security.validatePath(sourcePath);
if (!fs.existsSync(validPath)) {
throw new Error(`Source path does not exist: ${validPath}`);
}
return validPath;
});
await fs.ensureDir(path.dirname(validDestination));
      // return await so a rejected promise is routed to the catch block below
      return await new Promise((resolve, reject) => {
const output = fs.createWriteStream(validDestination);
const archive = archiver(format, {
zlib: { level: 9 } // Maximum compression
});
output.on('close', () => {
const size = archive.pointer();
resolve({
content: [{
type: "text",
              text: `✅ Archive created successfully: ${path.basename(validDestination)}\n📦 Size: ${this.formatFileSize(size)}\n📁 Files: ${validSources.length}`
}]
});
});
        // Reject on write-stream or archiver errors so the promise always settles
        output.on('error', (err) => reject(err));
        archive.on('error', (err) => reject(err));
archive.pipe(output);
// Add files and directories to archive
validSources.forEach(sourcePath => {
const stats = fs.statSync(sourcePath);
const baseName = path.basename(sourcePath);
if (stats.isDirectory()) {
archive.directory(sourcePath, baseName);
} else {
archive.file(sourcePath, { name: baseName });
}
});
archive.finalize();
});
} catch (error) {
return {
content: [{
type: "text",
          text: `❌ Error creating archive: ${error.message}`
}]
};
}
}
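  /**
   * Extract a ZIP archive into a destination directory using node-stream-zip.
   */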
async extractArchive(archivePath, destinationPath) {
try {
const validArchive = this.security.validatePath(archivePath);
const validDestination = this.security.validatePath(destinationPath);
await this.security.checkPermissions(validArchive, 'read');
await this.security.validateFileSize(validArchive);
if (!fs.existsSync(validArchive)) {
throw new Error(`Archive file does not exist: ${validArchive}`);
}
await fs.ensureDir(validDestination);
      // node-stream-zip's async API: extract(null, dir) unpacks the entire
      // archive, and entries() resolves with a map of entry metadata keyed by
      // name (the original code counted keys of the unawaited promise, always 0)
      const zip = new StreamZip.async({ file: validArchive });
      try {
        await zip.extract(null, validDestination);
        const entries = await zip.entries();
        return {
          content: [{
            type: "text",
            text: `✅ Archive extracted successfully to: ${path.basename(validDestination)}\n📁 Extracted ${Object.keys(entries).length} items`
          }]
        };
      } finally {
        await zip.close();
      }
} catch (error) {
return {
content: [{
type: "text",
          text: `❌ Error extracting archive: ${error.message}`
}]
};
}
}
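  /**
   * Rename every file in a directory whose name matches a regular-expression
   * pattern, replacing all matches with the given replacement string.
   */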
async batchRename(directory, pattern, replacement) {
try {
const validDir = this.security.validatePath(directory);
await this.security.checkPermissions(validDir, 'read');
const files = await fs.readdir(validDir);
const renamedFiles = [];
      // Use a non-global regex for matching (test() on a /g regex is stateful
      // across calls) and a global one so replace() rewrites every occurrence
      const matcher = new RegExp(pattern);
      const replacer = new RegExp(pattern, 'g');
for (const file of files) {
const filePath = path.join(validDir, file);
const stats = await fs.stat(filePath);
        if (stats.isFile() && matcher.test(file)) {
          const newName = file.replace(replacer, replacement);
const newPath = path.join(validDir, newName);
if (newName !== file) {
await fs.move(filePath, newPath);
renamedFiles.push({
old: file,
new: newName
});
}
}
}
      let result = `✅ Batch rename completed in: ${path.basename(validDir)}\n`;
      result += `📝 Renamed ${renamedFiles.length} files\n\n`;
if (renamedFiles.length > 0) {
result += 'Renamed files:\n';
renamedFiles.forEach(file => {
        result += `  📄 ${file.old} → ${file.new}\n`;
});
}
return {
content: [{
type: "text",
text: result
}]
};
} catch (error) {
return {
content: [{
type: "text",
          text: `❌ Error in batch rename: ${error.message}`
}]
};
}
}
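  /**
   * Move files in a directory into category subfolders, grouped by
   * extension, size bucket, or modification date.
   */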
async organizeFiles(directory, organizationType = 'extension') {
try {
const validDir = this.security.validatePath(directory);
await this.security.checkPermissions(validDir, 'read');
const files = await fs.readdir(validDir);
const organized = {};
for (const file of files) {
const filePath = path.join(validDir, file);
const stats = await fs.stat(filePath);
if (stats.isFile()) {
let category;
switch (organizationType) {
case 'extension':
              // Strip the leading dot so category folders like "txt" are not hidden on Unix
              category = path.extname(file).toLowerCase().replace(/^\./, '') || 'no-extension';
break;
case 'size':
category = this.getSizeCategory(stats.size);
break;
case 'date':
category = this.getDateCategory(stats.mtime);
break;
default:
category = 'misc';
}
if (!organized[category]) {
organized[category] = [];
}
organized[category].push(file);
}
}
// Create directories and move files
const movedFiles = [];
      for (const [category, categoryFiles] of Object.entries(organized)) {
const categoryDir = path.join(validDir, category);
await fs.ensureDir(categoryDir);
        for (const file of categoryFiles) {
const oldPath = path.join(validDir, file);
const newPath = path.join(categoryDir, file);
if (oldPath !== newPath) {
await fs.move(oldPath, newPath);
movedFiles.push({
file,
category
});
}
}
}
      let result = `✅ Files organized in: ${path.basename(validDir)}\n`;
      result += `📊 Organized ${movedFiles.length} files into ${Object.keys(organized).length} categories\n\n`;
result += 'Organization summary:\n';
Object.entries(organized).forEach(([category, files]) => {
        result += `  📁 ${category}: ${files.length} files\n`;
});
return {
content: [{
type: "text",
text: result
}]
};
} catch (error) {
return {
content: [{
type: "text",
          text: `❌ Error organizing files: ${error.message}`
}]
};
}
}
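  // Bucket a byte count: <1 KB small, <1 MB medium, <10 MB large, else very-large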
getSizeCategory(size) {
if (size < 1024) return 'small';
if (size < 1024 * 1024) return 'medium';
if (size < 1024 * 1024 * 10) return 'large';
return 'very-large';
}
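  // Bucket a modification time by age relative to now (today / this-week / this-month / this-year / older)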
getDateCategory(date) {
const now = new Date();
const diffTime = now - date;
const diffDays = Math.floor(diffTime / (1000 * 60 * 60 * 24));
if (diffDays < 1) return 'today';
if (diffDays < 7) return 'this-week';
if (diffDays < 30) return 'this-month';
if (diffDays < 365) return 'this-year';
return 'older';
}
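  // Render a byte count as a human-readable string, e.g. 1536 -> "1.5 KB"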
formatFileSize(bytes) {
    const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
    if (bytes === 0) return '0 Bytes';
    const i = Math.floor(Math.log(bytes) / Math.log(1024));
return Math.round(bytes / Math.pow(1024, i) * 100) / 100 + ' ' + sizes[i];
}
}
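// Usage sketch (illustrative only). It assumes a security manager object that
// exposes validatePath(path), checkPermissions(path, mode) and
// validateFileSize(path), as the methods above require; the SecurityManager
// class and its import path are hypothetical and not defined in this module.
//
//   import { SecurityManager } from "./security.js"; // hypothetical module
//
//   const ops = new AdvancedOperations(new SecurityManager(["/allowed/root"]));
//   await ops.compressFiles(["/allowed/root/docs"], "/allowed/root/docs.zip");
//   await ops.extractArchive("/allowed/root/docs.zip", "/allowed/root/restored");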