import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
import { z } from 'zod';
/**
 * Escape a string for safe embedding inside a Scheme double-quoted string
 * literal. Backslashes must be doubled first, then embedded quotes escaped,
 * otherwise regex token patterns such as `[0-9]+\.[0-9]+` or patterns
 * containing `"` would produce invalid generated Scheme.
 */
function escapeSchemeString(raw: string): string {
  return raw.replace(/\\/g, '\\\\').replace(/"/g, '\\"');
}

/**
 * Registers the `gerbil_parser_grammar_scaffold` MCP tool on the given server.
 *
 * The tool takes a declarative grammar description (a grammar name, lexer
 * token definitions, and optional production rules) and generates a Gerbil
 * Scheme parser skeleton: `deflexer` rules, `defparser` productions, and a
 * `<name>-parse` entry point. The :std/parser subsystem is largely
 * undocumented, so the generated output embeds caveats and suggested
 * follow-up tool calls for the user to verify the actual API.
 *
 * @param server - MCP server instance to register the tool on.
 */
export function registerParserGrammarScaffoldTool(server: McpServer): void {
  server.registerTool(
    'gerbil_parser_grammar_scaffold',
    {
      title: 'Parser Grammar Scaffold',
      description:
        'Generate a parser skeleton using Gerbil\'s :std/parser subsystem. ' +
        'Given a grammar description (rules with names and alternatives), generates ' +
        'lexer rules (deflexer), grammar productions (defparser), and an evaluator.',
      annotations: {
        readOnlyHint: true,
        idempotentHint: true,
      },
      inputSchema: {
        grammar_name: z
          .string()
          .describe('Name for the parser/grammar (e.g. "expr", "config")'),
        tokens: z
          .array(
            z.object({
              name: z.string().describe('Token name (e.g. NUMBER, IDENT, PLUS)'),
              pattern: z.string().describe('Regex pattern for the token'),
            }),
          )
          .describe('Lexer token definitions'),
        rules: z
          .array(
            z.object({
              name: z.string().describe('Rule name (e.g. expr, term, factor)'),
              alternatives: z.array(z.string()).describe('Production alternatives'),
            }),
          )
          .optional()
          .describe('Grammar production rules'),
      },
    },
    async ({ grammar_name, tokens, rules }) => {
      const hasRules = rules !== undefined && rules.length > 0;
      const sections: string[] = [];

      // --- Header: warn that :std/parser is largely undocumented ---
      sections.push(`;;; ${grammar_name} parser — generated by gerbil_parser_grammar_scaffold`);
      sections.push(';;;');
      sections.push(';;; NOTE: The :std/parser subsystem is largely undocumented.');
      sections.push(';;; Verify imports with gerbil_module_exports before using.');
      sections.push(';;; This scaffold provides a starting template — adjust to actual API.');
      sections.push('');
      sections.push('(import :std/parser/deflexer');
      sections.push('        :std/parser/defparser');
      sections.push('        :std/parser/lexer');
      sections.push('        :std/parser/stream');
      sections.push('        :std/sugar)');
      sections.push('');
      sections.push(`(export ${grammar_name}-parse)`);
      sections.push('');

      // --- Lexer: built-in whitespace skipping plus one rule per token ---
      sections.push(`;;; Lexer rules for ${grammar_name}`);
      sections.push(`(deflexer ${grammar_name}-lexer`);
      sections.push('  ;; Whitespace (skip)');
      sections.push('  (#\\space (skip))');
      sections.push('  (#\\tab (skip))');
      sections.push('  (#\\newline (skip))');
      for (const token of tokens) {
        sections.push(`  ;; ${token.name}`);
        // Escape the raw regex so it remains a valid Scheme string literal.
        sections.push(`  ("${escapeSchemeString(token.pattern)}" '${token.name})`);
      }
      sections.push(')');
      sections.push('');

      // --- Grammar productions: one clause per alternative ---
      if (hasRules) {
        sections.push(`;;; Grammar for ${grammar_name}`);
        sections.push(`(defparser ${grammar_name}-grammar`);
        for (const rule of rules) {
          sections.push(`  ;; ${rule.name}`);
          for (const alt of rule.alternatives) {
            sections.push(`  (${rule.name} ${alt})`);
          }
        }
        sections.push(')');
        sections.push('');
      }

      // --- Parse entry point: tokenize, then parse if a grammar was given ---
      sections.push(`;;; Parse a string using the ${grammar_name} grammar`);
      sections.push(`(def (${grammar_name}-parse input-string)`);
      sections.push('  ;; Tokenize');
      sections.push(`  (let* ((tokens (lex ${grammar_name}-lexer input-string))`);
      if (hasRules) {
        sections.push(`         (result (parse ${grammar_name}-grammar tokens)))`);
      } else {
        // No grammar rules supplied: the "parse" is just the token stream.
        sections.push('         (result tokens))');
      }
      sections.push('    result))');
      sections.push('');
      sections.push(';;; Example usage:');
      sections.push(`;;; (${grammar_name}-parse "your input string here")`);

      const code = sections.join('\n');

      // Markdown response: summary, fenced code, caveats, and next steps.
      const output = [
        `## Parser Scaffold: ${grammar_name}`,
        '',
        `Tokens: ${tokens.length}`,
        `Rules: ${rules?.length ?? 0}`,
        '',
        '```scheme',
        code,
        '```',
        '',
        '### Important Notes',
        '- The :std/parser subsystem has 11 modules but very limited documentation',
        '- Verify all imports with `gerbil_module_exports` before using',
        '- Test incrementally with `gerbil_eval` to discover the actual API',
        '- deflexer/defparser syntax may differ from what is shown here',
        '',
        '### Suggested Next Steps',
        '1. `gerbil_module_exports :std/parser/deflexer` — check actual lexer API',
        '2. `gerbil_module_exports :std/parser/defparser` — check parser API',
        '3. `gerbil_check_syntax` — validate the generated code',
        '4. `gerbil_compile_check` — test compilation',
      ];

      return {
        content: [{ type: 'text' as const, text: output.join('\n') }],
      };
    },
  );
}