refactory_verify
Verify that a decomposed module loads without errors, exports match the original, has no circular dependencies, and passes its test suite.
Instructions
Verify a decomposed module: loads without errors, exports match plan, no circular deps, tests pass.
Input Schema
| Name | Required | Description | Default |
|---|---|---|---|
| moduleDir | Yes | Directory containing extracted modules | |
| original | No | Path to the original monolith (for export comparison) | |
| testCmd | No | Test command to run (e.g., 'npm test') | |
Implementation Reference
- src/tools/verify.js:6-54 (handler) — Main verify function: syntax-checks each .js file with `node --check`, loads each module via require() to capture exports, runs an optional test command, and returns results with pass/fail status.
/**
 * Verify a directory of extracted modules.
 *
 * For every `.js` file in `args.moduleDir`:
 *   1. syntax-check it with `node --check` (parses without executing),
 *   2. if the syntax is valid, load it with require() and record its exports.
 *
 * If `args.original` is given, the combined exports of all extracted modules
 * are compared against the original monolith's exports; any export the
 * original provides that no extracted module provides is reported in
 * `exportMismatch`. (Previously this argument was accepted by the schema but
 * silently ignored, leaving `exportMismatch` always empty.)
 *
 * If `args.testCmd` is given, it is executed with a 60s timeout; a non-zero
 * exit marks the run unclean and the last 500 chars of output are captured.
 *
 * @param {{moduleDir: string, original?: string, testCmd?: string}} args
 * @returns {Promise<{modules: Array<object>, allClean: boolean,
 *   circularDeps: Array, exportMismatch: string[],
 *   testsPassed?: boolean, testOutput?: string}>}
 *
 * NOTE(review): `circularDeps` is declared in the result shape but not
 * computed here — circular-dependency detection lives in the separate
 * refactory_depmap tool. Kept for backward compatibility of the result shape.
 */
async function verify(args) {
  const moduleDir = path.resolve(args.moduleDir);
  const results = { modules: [], allClean: true, circularDeps: [], exportMismatch: [] };
  const files = fs.readdirSync(moduleDir).filter((f) => f.endsWith(".js"));
  // Union of every extracted module's export names, for comparison below.
  const combinedExports = new Set();

  for (const file of files) {
    const filePath = path.join(moduleDir, file);
    const entry = { file, syntax: false, loads: false, exports: [] };

    // Syntax check: `node --check` parses the file without executing it.
    try {
      execSync(`node --check "${filePath}"`, { stdio: "pipe" });
      entry.syntax = true;
    } catch (e) {
      entry.syntaxError = e.stderr?.toString().split("\n")[0] || "syntax error";
      results.allClean = false;
    }

    // Load check: require() executes the module; a throw here means the
    // module parses but fails at load time (bad import, top-level error).
    if (entry.syntax) {
      try {
        const mod = require(filePath);
        entry.loads = true;
        entry.exports = Object.keys(mod);
        for (const name of entry.exports) combinedExports.add(name);
      } catch (e) {
        entry.loadError = e.message.split("\n")[0];
        results.allClean = false;
      }
    }
    results.modules.push(entry);
  }

  // Export comparison against the original monolith (schema's `original`).
  if (args.original) {
    try {
      const originalMod = require(path.resolve(args.original));
      for (const name of Object.keys(originalMod)) {
        if (!combinedExports.has(name)) results.exportMismatch.push(name);
      }
      if (results.exportMismatch.length > 0) results.allClean = false;
    } catch (e) {
      // The original failing to load is itself a verification failure.
      results.exportMismatch.push(`<original failed to load: ${e.message.split("\n")[0]}>`);
      results.allClean = false;
    }
  }

  // Optional test command; any non-zero exit marks the run unclean.
  if (args.testCmd) {
    try {
      const output = execSync(args.testCmd, { stdio: "pipe", cwd: path.resolve("."), timeout: 60000 });
      results.testOutput = output.toString().slice(-500);
      results.testsPassed = true;
    } catch (e) {
      results.testOutput = (e.stdout?.toString() || "").slice(-500);
      results.testsPassed = false;
      results.allClean = false;
    }
  }
  return results;
}
{ name: "refactory_verify", description: "Verify a decomposed module: loads without errors, exports match plan, no circular deps, tests pass.", inputSchema: { type: "object", properties: { moduleDir: { type: "string", description: "Directory containing extracted modules" }, original: { type: "string", description: "Path to the original monolith (for export comparison)" }, testCmd: { type: "string", description: "Test command to run (e.g., 'npm test')" }, }, required: ["moduleDir"], }, }, - src/server.js:36-180 (registration)Tool registration in TOOLS array alongside all other tools, with name 'refactory_verify' and input schema.
const TOOLS = [ { name: "refactory_analyze", description: "Analyze a source file for decomposition. Returns health score, function count, dependency graph, and recommended split points.", inputSchema: { type: "object", properties: { file: { type: "string", description: "Path to the monolith file to analyze" }, language: { type: "string", description: "Language (js, ts, py). Auto-detected if omitted." }, }, required: ["file"], }, }, { name: "refactory_plan", description: "Generate a decomposition plan — module boundaries, function assignments, dependency order. Uses AST analysis + LLM reasoning.", inputSchema: { type: "object", properties: { file: { type: "string", description: "Path to the monolith file" }, modules: { type: "number", description: "Target number of modules (auto if omitted)" }, maxLines: { type: "number", description: "Max lines per module (default: 500)" }, style: { type: "string", description: "Grouping style: 'functional' | 'domain' | 'layer'" }, }, required: ["file"], }, }, { name: "refactory_extract", description: "Extract one module from the monolith according to the plan. 
Routes to the cheapest capable free LLM API.", inputSchema: { type: "object", properties: { file: { type: "string", description: "Path to the monolith file" }, module: { type: "string", description: "Module name to extract (from the plan)" }, functions: { type: "array", items: { type: "string" }, description: "Function names to include" }, outputDir: { type: "string", description: "Output directory for extracted module" }, plan: { type: "string", description: "Path to the decomposition plan JSON" }, }, required: ["file", "module"], }, }, { name: "refactory_verify", description: "Verify a decomposed module: loads without errors, exports match plan, no circular deps, tests pass.", inputSchema: { type: "object", properties: { moduleDir: { type: "string", description: "Directory containing extracted modules" }, original: { type: "string", description: "Path to the original monolith (for export comparison)" }, testCmd: { type: "string", description: "Test command to run (e.g., 'npm test')" }, }, required: ["moduleDir"], }, }, { name: "refactory_metrics", description: "Calculate before/after metrics and the Refactory Score (0-1). Measures health improvement, module quality, test preservation.", inputSchema: { type: "object", properties: { original: { type: "string", description: "Path to the original monolith" }, moduleDir: { type: "string", description: "Directory containing extracted modules" }, testResults: { type: "string", description: "Path to test results JSON (before/after)" }, }, required: ["original", "moduleDir"], }, }, { name: "refactory_report", description: "Generate a decomposition report with metrics, dependency graphs, and Refactory Score. 
Outputs Markdown or HTML.", inputSchema: { type: "object", properties: { metricsFile: { type: "string", description: "Path to metrics JSON from refactory_metrics" }, format: { type: "string", description: "'markdown' (default) or 'html'" }, outputPath: { type: "string", description: "Where to write the report" }, }, required: ["metricsFile"], }, }, { name: "refactory_depmap", description: "Map dependencies for a file — who requires it (consumers), what it requires (dependencies), detect circular deps.", inputSchema: { type: "object", properties: { file: { type: "string", description: "Path to the file to map" }, projectDir: { type: "string", description: "Project root directory" }, }, required: ["file"], }, }, { name: "refactory_characterize", description: "Generate characterization tests and golden export snapshot BEFORE decomposition. Captures behavioral contract.", inputSchema: { type: "object", properties: { file: { type: "string", description: "Path to the module to characterize" }, outputDir: { type: "string", description: "Where to write test + golden files" }, }, required: ["file"], }, }, { name: "refactory_verify_exports", description: "Compare post-decomposition module against golden export snapshot. Reports missing, added, or type-changed exports.", inputSchema: { type: "object", properties: { goldenFile: { type: "string", description: "Path to .golden-exports.json from characterize" }, newFile: { type: "string", description: "Path to the new re-export module" }, }, required: ["goldenFile", "newFile"], }, }, { name: "refactory_fix_imports", description: "Mechanically fix broken require() paths after module extraction. 
No LLM needed — pure path resolution.", inputSchema: { type: "object", properties: { moduleDir: { type: "string", description: "Directory containing extracted modules" }, projectDir: { type: "string", description: "Project root to scan for consumers" }, dryRun: { type: "boolean", description: "Report changes without writing (default: false)" }, }, required: ["moduleDir"], }, }, { name: "refactory_decompose", description: "Full decomposition pipeline in one call: analyze, depmap, characterize, plan, extract ALL modules, fix-imports, verify, metrics, re-export, report. The 'just do it' tool.", inputSchema: { type: "object", properties: { file: { type: "string", description: "Path to the monolith file to decompose" }, outputDir: { type: "string", description: "Output directory (default: <dir>/lib/<basename>/ next to source)" }, maxLines: { type: "number", description: "Max lines per module (default: 500)" }, projectDir: { type: "string", description: "Project root for dependency mapping (optional)" }, }, required: ["file"], }, }, ]; - src/server.js:192-200 (registration)Switch-case dispatch that routes 'refactory_verify' calls to the verify() function via CallToolRequestSchema handler.
server.setRequestHandler(CallToolRequestSchema, async (request) => { const { name, arguments: args } = request.params; try { let result; switch (name) { case "refactory_analyze": result = await analyze(args); break; case "refactory_plan": result = await plan(args); break; case "refactory_extract": result = await extract(args); break; case "refactory_verify": result = await verify(args); break; - src/tools/characterize.js:67-93 (helper)verifyExports function: compares post-decomposition module exports against a golden snapshot. Reports missing, added, or type-changed exports. This is a separate tool (refactory_verify_exports), not the core refactory_verify.
/**
 * Compare a post-decomposition module's live exports against a golden
 * snapshot produced by characterize.
 *
 * @param {{goldenFile: string, newFile: string}} opts - path to the
 *   .golden-exports.json snapshot and path to the new re-export module.
 * @returns {{matches: boolean, missing: string[], added: string[],
 *   typeChanged: string[]}} missing = in golden but not current;
 *   added = in current but not golden; typeChanged = present in both
 *   with a different captured type.
 * @throws {Error} when either file does not exist on disk.
 */
function verifyExports({ goldenFile, newFile }) {
  const goldenPath = path.resolve(goldenFile);
  const modulePath = path.resolve(newFile);

  // Guard clauses: fail fast with the exact path that was checked.
  if (!fs.existsSync(goldenPath)) {
    throw new Error(`Golden file not found: ${goldenPath}`);
  }
  if (!fs.existsSync(modulePath)) {
    throw new Error(`New module not found: ${modulePath}`);
  }

  const golden = JSON.parse(fs.readFileSync(goldenPath, "utf8"));
  // freshRequire bypasses the require cache so we see the current exports.
  const current = captureExports(freshRequire(modulePath));

  // Single pass over the golden names classifies missing vs type-changed.
  const missing = [];
  const typeChanged = [];
  for (const name of Object.keys(golden.exports)) {
    if (!(name in current)) {
      missing.push(name);
    } else if (current[name] !== golden.exports[name]) {
      typeChanged.push(name);
    }
  }
  const added = Object.keys(current).filter((name) => !(name in golden.exports));

  return {
    matches: missing.length === 0 && added.length === 0 && typeChanged.length === 0,
    missing,
    added,
    typeChanged,
  };
}