refactory_plan
Generate a decomposition plan for a monolith file: defines module boundaries, function assignments, and dependency order using AST analysis and LLM reasoning.
Instructions
Generate a decomposition plan — module boundaries, function assignments, dependency order. Uses AST analysis + LLM reasoning.
Input Schema
| Name | Required | Description | Default |
|---|---|---|---|
| file | Yes | Path to the monolith file | |
| modules | No | Target number of modules (auto if omitted) | |
| maxLines | No | Max lines per module | 500 |
| style | No | Grouping style: 'functional' \| 'domain' \| 'layer' | 'functional' |
Implementation Reference
- src/tools/plan.js:171-319 (handler)The main handler function 'plan' - executes the tool logic for refactory_plan. Reads the source file, extracts function map, optionally preprocesses, then either uses mechanical prefix-grouping (for >150 functions) or calls an LLM via callWithFallback to generate a decomposition plan JSON.
async function plan(args) { const filePath = path.resolve(args.file); const source = fs.readFileSync(filePath, "utf8"); const maxLines = args.maxLines || 500; const style = args.style || "functional"; // Auto-unwrap IIFEs before extracting function map const { getPreprocessor } = require("../languages"); const preprocessor = getPreprocessor(filePath); let effectiveSource = source; if (preprocessor) { if (preprocessor.stripIgnoreRegions) { const { source: stripped, stripped: didStrip } = preprocessor.stripIgnoreRegions(source); if (didStrip) { effectiveSource = stripped; logger.debug("@refactory-ignore regions stripped"); } } if (preprocessor.unwrapIIFE) { const { source: unwrapped, unwrapped: didUnwrap } = preprocessor.unwrapIIFE(effectiveSource); if (didUnwrap) { effectiveSource = unwrapped; logger.debug("IIFE wrapper detected and unwrapped for planning"); } } } // Send function map instead of full source — fits in any provider's context const functionMap = extractFunctionMap(effectiveSource); const estimatedInputTokens = Math.ceil(JSON.stringify(functionMap).length / 4); // For very large function lists, use mechanical grouping by prefix — no LLM needed if (functionMap.functions.length > 150) { const groups = buildPrefixGroups(functionMap.functions, maxLines, args.maxModules || 25, args.maxFunctionsPerModule || 30); const modules = groups.map(g => ({ name: g.name + ".js", description: `${g.prefix}* functions (${g.functions.length} fns)`, functions: g.functions.map(f => f.name), estimatedLines: g.totalLines, dependencies: [], })); const planData = { modules, indexExports: [], sharedHelpers: [] }; planData._meta = { provider: "mechanical/prefix-grouping", sourceFile: filePath, sourceLines: functionMap.totalLines, functionCount: functionMap.functions.length, generatedAt: new Date().toISOString(), }; logger.step("PLAN", { file: filePath, modules: modules.length, provider: "mechanical/prefix-grouping", durationMs: 0, }); return planData; } const functionList = 
functionMap.functions.map((f) => ` ${f.line}-${f.endLine} (${f.estimatedLines}L): ${f.name}(${f.params})`).join("\n"); const conciseNote = functionMap.totalLines > 2000 ? "Note: large file — prefer fewer, coarser modules over many small ones." : ""; const prompt = `You are a senior software architect. Analyze this function map and produce a JSON decomposition plan. Target: split into modules of max ${maxLines} lines each. Grouping style: ${style} Total source lines: ${functionMap.totalLines} ${conciseNote} Function map (${functionMap.functions.length} functions): ${functionList} Dependencies (require): ${functionMap.requires.map((r) => ` ${r}`).join("\n")} Output ONLY valid JSON with this structure: { "modules": [ { "name": "module-name.js", "description": "what this module does", "functions": ["functionA", "functionB"], "estimatedLines": 300, "dependencies": ["other-module.js"] } ], "indexExports": ["list", "of", "original", "exports", "to", "preserve"], "sharedHelpers": ["functions", "needed", "by", "multiple", "modules"] }`; const startMs = Date.now(); const result = await callWithFallback(prompt, { minOutputTokens: 4000, estimatedInputTokens, }); const durationMs = Date.now() - startMs; // Parse JSON from response — with repair for common LLM output issues const jsonMatch = result.content.match(/\{[\s\S]*\}/); if (!jsonMatch) throw new Error("Failed to generate valid plan JSON"); let jsonStr = jsonMatch[0]; // Repair common LLM JSON issues // 1. Trailing commas before } or ] jsonStr = jsonStr.replace(/,\s*([\]}])/g, "$1"); // 2. Strip JS-style comments jsonStr = jsonStr.replace(/\/\/[^\n]*/g, ""); // 3. 
Fix truncated JSON — close unclosed brackets/braces let opens = 0, closes = 0; for (const ch of jsonStr) { if (ch === "{" || ch === "[") opens++; if (ch === "}" || ch === "]") closes++; } if (opens > closes) { // Find what needs closing by tracking the stack const stack = []; for (const ch of jsonStr) { if (ch === "{") stack.push("}"); if (ch === "[") stack.push("]"); if (ch === "}" || ch === "]") stack.pop(); } jsonStr += stack.reverse().join(""); } let planData; try { planData = JSON.parse(jsonStr); } catch (firstErr) { // Try replacing single quotes with double quotes try { planData = JSON.parse(jsonStr.replace(/'/g, '"')); } catch { // Log the problematic JSON for debugging logger.debug(`Plan JSON failed. First 200 chars: ${jsonStr.slice(0, 200)}`); logger.debug(`Last 200 chars: ${jsonStr.slice(-200)}`); throw new Error(`Plan JSON parse failed: ${firstErr.message}`); } } planData._meta = { provider: result.provider, sourceFile: filePath, sourceLines: functionMap.totalLines, functionCount: functionMap.functions.length, generatedAt: new Date().toISOString(), }; logger.step("PLAN", { file: filePath, modules: planData.modules.length, provider: result.provider, durationMs, }); return planData; } - src/server.js:49-61 (schema)Input schema for refactory_plan tool registration. Defines input parameters: file (required), modules, maxLines, and style.
{ name: "refactory_plan", description: "Generate a decomposition plan — module boundaries, function assignments, dependency order. Uses AST analysis + LLM reasoning.", inputSchema: { type: "object", properties: { file: { type: "string", description: "Path to the monolith file" }, modules: { type: "number", description: "Target number of modules (auto if omitted)" }, maxLines: { type: "number", description: "Max lines per module (default: 500)" }, style: { type: "string", description: "Grouping style: 'functional' | 'domain' | 'layer'" }, }, required: ["file"], }, - src/server.js:49-62 (registration)Registration of refactory_plan in the TOOLS array served via MCP ListToolsRequestSchema, and routing to plan() via switch statement on line 198.
{ name: "refactory_plan", description: "Generate a decomposition plan — module boundaries, function assignments, dependency order. Uses AST analysis + LLM reasoning.", inputSchema: { type: "object", properties: { file: { type: "string", description: "Path to the monolith file" }, modules: { type: "number", description: "Target number of modules (auto if omitted)" }, maxLines: { type: "number", description: "Max lines per module (default: 500)" }, style: { type: "string", description: "Grouping style: 'functional' | 'domain' | 'layer'" }, }, required: ["file"], }, }, - src/server.js:198-198 (registration)MCP tool dispatcher switch case routing 'refactory_plan' to the plan() handler function.
case "refactory_plan": result = await plan(args); break; - src/tools/plan.js:11-45 (helper)Helper function 'extractFunctionMap' - parses source code to extract function names, signatures, line ranges, and require() dependencies. Used by the plan handler to build a condensed function map for LLM or mechanical planning.
function extractFunctionMap(source) { const lines = source.split("\n"); const functions = []; const requires = []; for (let i = 0; i < lines.length; i++) { const line = lines[i]; const trimmed = line.trimStart(); // Named function declarations (any indent level — handles IIFEs) const fnMatch = trimmed.match(/^(?:export\s+)?(?:async\s+)?function\s+(\w+)\s*\(([^)]*)\)/); if (fnMatch) { functions.push({ name: fnMatch[1], params: fnMatch[2].trim(), line: i + 1 }); } // var/const/let name = function( or arrow if (!fnMatch) { const exprMatch = trimmed.match(/^(?:export\s+)?(?:const|let|var)\s+(\w+)\s*=\s*(?:async\s+)?(?:function\s*)?\(([^)]*)\)/); if (exprMatch) { functions.push({ name: exprMatch[1], params: exprMatch[2].trim(), line: i + 1 }); } } const reqMatch = line.match(/require\(["']([^"']+)["']\)/); if (reqMatch && !requires.includes(reqMatch[1])) { requires.push(reqMatch[1]); } } // Estimate function end lines (next function start or EOF) for (let i = 0; i < functions.length; i++) { const next = functions[i + 1]; functions[i].endLine = next ? next.line - 1 : lines.length; functions[i].estimatedLines = functions[i].endLine - functions[i].line + 1; } return { functions, requires, totalLines: lines.length }; }