Skip to main content
Glama
index.ts — 6.49 kB
/** * ExploitDB integration module * Handles downloading and parsing the ExploitDB CSV file */ import * as fs from 'fs-extra'; import * as path from 'path'; import axios from 'axios'; import { createReadStream, createWriteStream } from 'fs'; import csv from 'csv-parser'; import { simpleGit } from 'simple-git'; import config from '../config.js'; import db, { Exploit } from '../db/index.js'; // CSV column names (matching the actual CSV header) const CSV_COLUMNS = { ID: 'id', FILE: 'file', DESCRIPTION: 'description', DATE_PUBLISHED: 'date_published', AUTHOR: 'author', TYPE: 'type', PLATFORM: 'platform', PORT: 'port', DATE_ADDED: 'date_added', DATE_UPDATED: 'date_updated', VERIFIED: 'verified' }; /** * Download the ExploitDB CSV file */ export const downloadCsvFile = async (): Promise<string> => { const csvFilePath = path.join(config.dataDir, 'files_exploits.csv'); try { const response = await axios.get(config.csvUrl, { responseType: 'stream' }); const writer = createWriteStream(csvFilePath); response.data.pipe(writer); return new Promise((resolve, reject) => { writer.on('finish', () => resolve(csvFilePath)); writer.on('error', reject); }); } catch (error) { console.error('Error downloading CSV file:', error); throw new Error('Failed to download ExploitDB CSV file'); } }; /** * Clone the ExploitDB repository */ export const cloneRepository = async (): Promise<string> => { const repoPath = path.join(config.dataDir, 'exploitdb'); try { const git = simpleGit(); // Check if the repository already exists if (await fs.pathExists(repoPath)) { // Pull the latest changes await git.cwd(repoPath).pull(); console.log('ExploitDB repository updated'); } else { // Clone the repository await git.clone(config.repositoryUrl, repoPath); console.log('ExploitDB repository cloned'); } return repoPath; } catch (error) { console.error('Error cloning repository:', error); throw new Error('Failed to clone ExploitDB repository'); } }; /** * Parse the CSV file and update the database */ export 
const parseCsvFile = async (csvFilePath: string): Promise<number> => { let count = 0; try { // Create a read stream for the CSV file const results: any[] = []; await new Promise<void>((resolve, reject) => { createReadStream(csvFilePath) .pipe(csv()) .on('data', (data) => results.push(data)) .on('end', () => { resolve(); }) .on('error', (error) => { reject(error); }); }); // Process the results for (const row of results) { try { // Extract CVE from description if available const cveMatch = row.description.match(/CVE-\d{4}-\d+/i); const cve = cveMatch ? cveMatch[0] : null; // Extract tags from description const tags = extractTags(row.description); // Use date_published, fallback to date_added, then date_updated, or use a default const date = row.date_published || row.date_added || row.date_updated || '1970-01-01'; // Create exploit object const exploit: Exploit = { id: parseInt(row.id, 10), file: row.file, description: row.description, date: date, author: row.author || 'Unknown', type: row.type || 'unknown', platform: row.platform || 'unknown', port: row.port ? 
parseInt(row.port, 10) : undefined, cve, verified: row.verified === '1', tags }; // Insert or update the exploit in the database await db.upsertExploit(exploit); count++; } catch (error) { console.error(`Error processing row ${row.id}:`, error); } } console.log(`Processed ${count} exploits`); return count; } catch (error) { console.error('Error parsing CSV file:', error); throw new Error('Failed to parse ExploitDB CSV file'); } }; /** * Extract tags from the description */ const extractTags = (description: string): string[] => { const tags: string[] = []; // Extract common keywords const keywords = [ 'buffer overflow', 'sql injection', 'xss', 'cross site scripting', 'remote code execution', 'rce', 'privilege escalation', 'authentication bypass', 'denial of service', 'dos', 'command injection', 'file inclusion', 'information disclosure', 'csrf', 'cross site request forgery' ]; for (const keyword of keywords) { if (description.toLowerCase().includes(keyword)) { tags.push(keyword); } } return tags; }; /** * Get the exploit code from the repository */ export const getExploitCode = async (filePath: string): Promise<string | null> => { try { // If we're using the repository, read the file from there if (config.cloneRepository) { const fullPath = path.join(config.dataDir, 'exploitdb', filePath); if (await fs.pathExists(fullPath)) { return fs.readFile(fullPath, 'utf-8'); } } // Otherwise, try to download the file from the raw URL const rawUrl = `https://gitlab.com/exploit-database/exploitdb/-/raw/main/${filePath}`; try { const response = await axios.get(rawUrl); return response.data; } catch (error) { console.error(`Error downloading exploit code from ${rawUrl}:`, error); return null; } } catch (error) { console.error(`Error getting exploit code for ${filePath}:`, error); return null; } }; /** * Update the database with the latest exploits */ export const updateDatabase = async (): Promise<number> => { try { // Ensure the data directory exists await 
fs.ensureDir(config.dataDir); // Initialize the database await db.initDatabase(); let csvFilePath: string; // Get the CSV file if (config.cloneRepository) { // Clone or update the repository const repoPath = await cloneRepository(); csvFilePath = path.join(repoPath, 'files_exploits.csv'); } else { // Download the CSV file csvFilePath = await downloadCsvFile(); } // Parse the CSV file and update the database const count = await parseCsvFile(csvFilePath); return count; } catch (error) { console.error('Error updating database:', error); throw new Error('Failed to update database'); } }; export default { updateDatabase, getExploitCode };

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Cyreslab-AI/exploitdb-mcp-server'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.