recon_directory_bruteforce
Discover hidden directories and files on web servers by systematically testing common paths and extensions with parallel HTTP requests for security testing and reconnaissance.
Instructions
Directory brute-force using parallel curl requests. Returns results (path/status/length), found_count, and paths_tested. Read-only GET requests, sends one request per wordlist entry per extension.
Input Schema
Table / JSON Schema
| Name | Required | Description | Default |
|---|---|---|---|
| target | Yes | Base URL, e.g. https://example.com | |
| wordlist | No | Path to wordlist file. Uses built-in common paths if not provided. | |
| threads | No | Concurrent request count | 10 |
| extensions | No | Comma-separated extensions to append, e.g. 'php,html,txt' | |
Implementation Reference
- src/tools/recon.ts:274-371 (handler) — The handler for the 'recon_directory_bruteforce' tool, which performs directory brute-forcing using parallel curl requests.
server.tool( "recon_directory_bruteforce", "Directory brute-force using parallel curl requests. Returns results (path/status/length), found_count, and paths_tested. Read-only GET requests, sends one request per wordlist entry per extension.", { target: z.string().describe("Base URL, e.g. https://example.com"), wordlist: z .string() .optional() .describe("Path to wordlist file. Uses built-in common paths if not provided."), threads: z .number() .min(1) .max(50) .describe("Concurrent request count") .default(10), extensions: z .string() .optional() .describe("Comma-separated extensions to append, e.g. 'php,html,txt'"), }, async ({ target, wordlist, threads, extensions }) => { requireTool("curl"); const base = target.replace(/\/$/, ""); const defaultPaths = [ "admin", "login", "api", "dashboard", "config", "backup", "uploads", "images", "css", "js", "fonts", "media", ".git", ".env", ".htaccess", "wp-admin", "wp-login.php", "robots.txt", "sitemap.xml", "swagger", "api-docs", "graphql", "health", "status", "metrics", "debug", "phpinfo.php", "info.php", "test", "temp", "tmp", "old", "bak", "archive", "data", "db", "database", "console", "panel", "manager", "administrator", "user", "users", "account", "accounts", "profile", "settings", "docs", "documentation", "help", "search", "download", "file", "files", "upload", "static", "assets", "public", "private", "secret", "hidden", "internal", "server-status", "server-info", ]; let paths: string[]; if (wordlist && fs.existsSync(wordlist)) { const content = fs.readFileSync(wordlist, "utf-8"); paths = content .split("\n") .map((l) => l.trim()) .filter((l) => l.length > 0) .slice(0, 2000); } else { paths = defaultPaths; } // Expand with extensions const extList = extensions ? 
extensions.split(",") : []; const expandedPaths = [...paths]; for (const ext of extList) { const cleanExt = ext.trim().replace(/^\./, ""); for (const p of paths) { expandedPaths.push(`${p}.${cleanExt}`); } } async function checkPath(p: string): Promise<{ path: string; status: number; length: number } | null> { const res = await runCmd("curl", [ "-sk", "-o", "/dev/null", "-w", "%{http_code}:%{size_download}", "-m", "5", `${base}/${p}`, ]); const parts = res.stdout.split(":"); const status = parts.length > 0 ? parseInt(parts[0], 10) : 0; const length = parts.length > 1 ? parseInt(parts[1], 10) : 0; if (status !== 0 && status !== 404) { return { path: `/${p}`, status, length }; } return null; } // Run in batches const results: Array<{ path: string; status: number; length: number }> = []; for (let i = 0; i < expandedPaths.length; i += threads) { const batch = expandedPaths.slice(i, i + threads); const batchResults = await Promise.all(batch.map(checkPath)); for (const r of batchResults) { if (r !== null) results.push(r); } } const result = { results, found_count: results.length, paths_tested: expandedPaths.length, }; return { content: [{ type: "text", text: JSON.stringify(result, null, 2) }] }; } );