dirsearch_scan
Discover hidden directories and files on web servers using customizable wordlists and extensions to identify potential security vulnerabilities during penetration testing.
Instructions
Execute Dirsearch for advanced directory and file discovery with enhanced logging.
Input Schema
Parameters are shown in the table below; the authoritative JSON Schema is derived from the function signature in the implementation reference.
| Name | Required | Description | Default |
|---|---|---|---|
| additional_args | No | Extra raw arguments appended to the dirsearch command | `""` (empty) |
| extensions | No | Comma-separated file extensions to probe | `php,html,js,txt,xml,json` |
| recursive | No | Recurse into discovered directories | `false` |
| threads | No | Number of concurrent scan threads | `30` |
| url | Yes | Target base URL to scan | — |
| wordlist | No | Path to the wordlist file | `/usr/share/wordlists/dirb/common.txt` |
Implementation Reference
# - src/mcp_server/app.py:734-761 (handler)
#   Primary MCP handler for the 'dirsearch_scan' tool. Proxies parameters to
#   the REST API endpoint '/api/dirsearch' and handles logging and response.
@mcp.tool()
def dirsearch_scan(
    url: str,
    extensions: str = "php,html,js,txt,xml,json",
    wordlist: str = "/usr/share/wordlists/dirb/common.txt",
    threads: int = 30,
    recursive: bool = False,
    additional_args: str = "",
) -> dict[str, Any]:
    """Run Dirsearch for advanced directory and file discovery with logging.

    Args:
        url: Target base URL to scan.
        extensions: Comma-separated file extensions to probe.
        wordlist: Path to the wordlist used for discovery.
        threads: Number of concurrent scan threads.
        recursive: Recurse into discovered directories when True.
        additional_args: Extra raw arguments forwarded to dirsearch.

    Returns:
        The REST API response payload as a dict.
    """
    data = {
        "url": url,
        "extensions": extensions,
        "wordlist": wordlist,
        "threads": threads,
        "recursive": recursive,
        "additional_args": additional_args,
    }
    logger.info(f"📁 Starting Dirsearch directory discovery on {url}")
    result = api_client.safe_post("api/dirsearch", data)
    if result.get("success"):
        logger.info(f"✅ Dirsearch scan completed on {url}")
    else:
        # Include the target in the failure log so failed scans can be
        # correlated with their URL, matching the success-path message.
        logger.error(f"❌ Dirsearch scan failed on {url}")
    return result
- src/mcp_server/app.py:735-742 (schema) — The input schema is defined by the function parameters, type hints, and defaults of `dirsearch_scan`: `url: str` (required), `extensions: str = "php,html,js,txt,xml,json"`, `wordlist: str = "/usr/share/wordlists/dirb/common.txt"`, `threads: int = 30`, `recursive: bool = False`, `additional_args: str = ""`; the tool returns `dict[str, Any]`.
# - Backend handler in the REST API server that implements the dirsearch
#   logic: parameter extraction, command building, execution, and result
#   parsing.
# NOTE(review): required_fields=["target"] while the MCP client posts "url" —
#   confirm that extract_dirsearch_params / the @tool validator treat
#   "target" and "url" consistently, otherwise every MCP request is rejected.
@tool(required_fields=["target"])
def execute_dirsearch():
    """Execute Dirsearch for directory and file discovery."""
    # Parse the JSON request body and normalize it into dirsearch parameters.
    data = request.get_json()
    params = extract_dirsearch_params(data)
    logger.info(f"Executing Dirsearch on {params['url']}")
    # Build the command line, run it with a 10-minute cap, and parse findings.
    command = build_dirsearch_command(params)
    execution_result = execute_command(command, timeout=600)
    return parse_dirsearch_result(execution_result)
- Helper function to construct the dirsearch command line from input parameters.def build_dirsearch_command(params: dict) -> str: """Build dirsearch command from parameters.""" import shlex cmd_parts = ["dirsearch", "-u", params["url"]] if params["extensions"]: cmd_parts.extend(["-e", params["extensions"]]) cmd_parts.extend(["-w", params["wordlist"]]) cmd_parts.extend(["-t", str(params["threads"])]) cmd_parts.extend(["--timeout", str(params["timeout"])]) if params["recursive"]: cmd_parts.append("-r") if params["max_recursion_depth"] > 1: cmd_parts.append("--max-recursion-depth") cmd_parts.append(str(params["max_recursion_depth"])) if params["exclude_status"]: cmd_parts.extend(["--exclude-status", params["exclude_status"]]) if params["rate_limit"]: rate_limit_value = params["rate_limit"] if isinstance(rate_limit_value, int | float) and rate_limit_value > 0: delay_ms = int(1000 / rate_limit_value) cmd_parts.extend(["--delay", str(delay_ms)]) cmd_parts.extend(["--format", "json", "-o", "/tmp/dirsearch_out.json"]) if params["additional_args"]: cmd_parts.extend(params["additional_args"].split()) return " ".join(shlex.quote(part) for part in cmd_parts)
# - Helper function to parse the execution result, read JSON output, extract
#   unique findings, and compute statistics.
def parse_dirsearch_result(execution_result: dict) -> dict[str, Any]:
    """Parse dirsearch execution result and format response with findings.

    Returns a dict with deduplicated findings and summary stats. On a failed
    execution an empty result is returned (note: the failure shape carries a
    "version" key while the success shape does not — kept as-is for caller
    compatibility; TODO confirm which shape callers expect).
    """
    if not execution_result["success"]:
        return {"findings": [], "stats": create_stats(0, 0, 0), "version": None}

    # Persist raw stdout for post-mortem debugging.
    stdout = execution_result.get("stdout", "")
    with open("/tmp/dirsearch_raw_output.log", "w") as f:
        f.write(stdout)

    # dirsearch was invoked with `--format json -o /tmp/dirsearch_out.json`;
    # read and then delete that file. (The early-return guard above already
    # established success, so the original redundant re-check was removed.)
    json_file_path = "/tmp/dirsearch_out.json"
    findings = []
    try:
        with open(json_file_path) as f:
            file_content = f.read().strip()
        findings = parse_dirsearch_json_output(file_content)
        os.remove(json_file_path)
    except FileNotFoundError:
        logger.warning("Dirsearch JSON output file not found.")
    except json.JSONDecodeError:
        logger.warning("Failed to parse dirsearch JSON file")
    except Exception as e:
        logger.warning(f"Error reading dirsearch JSON file: {e}")

    # Deduplicate findings by URL, counting duplicates for the stats block.
    seen_urls = set()
    unique_findings = []
    dupes_count = 0
    for finding in findings:
        url = finding["evidence"]["url"]
        if url in seen_urls:
            dupes_count += 1
        else:
            seen_urls.add(url)
            unique_findings.append(finding)

    payload_bytes = len(stdout.encode("utf-8"))
    # NOTE(review): the flag is reported but findings are never actually
    # capped at 100 — confirm whether truncation should happen upstream.
    truncated = len(findings) > 100

    stats = create_stats(len(unique_findings), dupes_count, payload_bytes)
    stats["truncated"] = truncated

    return {
        "findings": unique_findings,
        "stats": stats,
    }