Skip to main content
Glama

Web-LLM MCP Server

by ragingwind
index.ts (12.3 kB)
import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js'; import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js'; import { chromium, Browser, Page } from 'playwright'; import { z } from 'zod'; import { fileURLToPath } from 'url'; import { dirname, join } from 'path'; const __filename = fileURLToPath(import.meta.url); const __dirname = dirname(__filename); let browser: Browser | null = null; let page: Page | null = null; let isInitialized = false; interface WebLLMInterface { isReady(): boolean; getCurrentModel(): string; getLastResponse(): string; generateMessage( prompt: string, options?: { systemPrompt?: string; maxTokens?: number; temperature?: number; } ): Promise<string>; setModel(model: string): Promise<boolean>; clearChat(): void; addMessage(role: 'user' | 'assistant', content: string): void; } declare global { interface Window { webllmInterface: WebLLMInterface; } } const server = new McpServer({ name: 'Web-LLM Playwright', version: '1.0.0', }); async function initializeBrowser(): Promise<void> { if (isInitialized) return; try { console.error('[info] Initializing Playwright browser...'); browser = await chromium.launch({ headless: true, args: [ '--disable-web-security', '--disable-features=VizDisplayCompositor', ], }); page = await browser.newPage(); page.on('console', (msg) => { console.error(`[browser] ${msg.type()}: ${msg.text()}`); }); // Load HTML file console.error('[info] Loading HTML file...'); const htmlPath = join(process.cwd(), 'index.html'); await page.goto(`file://${htmlPath}`); console.error('[info] Waiting for Web-LLM interface to be defined...'); try { await page.waitForFunction(() => window.webllmInterface !== undefined, { timeout: 60000, }); console.error('[info] Web-LLM interface found'); } catch (error) { console.error('[error] Timeout waiting for webllmInterface to be defined'); // Debug: Check what's available on window const windowKeys = await page.evaluate(() => Object.keys(window).filter(k => 
!k.startsWith('webkit'))); console.error('[debug] Available window keys:', windowKeys.slice(0, 20)); throw error; } console.error('[info] Waiting for Web-LLM to initialize...'); try { await page.waitForFunction(() => window.webllmInterface.isReady(), { timeout: 300000, // 5 minutes for model loading }); console.error('[info] Web-LLM initialization complete'); } catch (error) { console.error('[error] Timeout waiting for Web-LLM to initialize'); // Debug: Check interface status const interfaceStatus = await page.evaluate(() => { if (window.webllmInterface) { return { exists: true, isReady: window.webllmInterface.isReady(), currentModel: window.webllmInterface.getCurrentModel(), }; } return { exists: false }; }); console.error('[debug] Interface status:', interfaceStatus); throw error; } isInitialized = true; console.error('[info] Web-LLM Playwright interface ready'); } catch (error) { console.error(`[error] Failed to initialize: ${(error as Error).message}`); throw error; } } async function cleanup(): Promise<void> { console.error('[info] Cleaning up...'); try { if (browser) { await browser.close(); browser = null; page = null; isInitialized = false; } } catch (error) { console.error('[error] Cleanup failed:', error); } console.error('[info] Cleanup complete'); } function handleExit(signal: string) { console.error(`[info] Received ${signal}, shutting down gracefully...`); cleanup() .then(() => { console.error('[info] Server shutdown complete'); process.exit(0); }) .catch((error) => { console.error('[error] Error during cleanup:', error); process.exit(1); }); } process.on('SIGINT', () => handleExit('SIGINT')); process.on('SIGTERM', () => handleExit('SIGTERM')); process.on('uncaughtException', (error) => { console.error('[error] Uncaught exception:', error); handleExit('uncaughtException'); }); process.on('unhandledRejection', (reason) => { console.error('[error] Unhandled rejection:', reason); handleExit('unhandledRejection'); }); server.tool( 
'playwright_llm_generate', 'Generate text using Web-LLM through Playwright browser interface', { prompt: z.string().describe('The prompt to generate text from'), systemPrompt: z .string() .optional() .describe('System prompt to set context'), maxTokens: z.number().optional().describe('Maximum tokens to generate'), temperature: z .number() .optional() .describe('Temperature for generation (0-1)'), model: z .string() .optional() .describe('Model to use (will reinitialize if different)'), }, async ({ prompt, systemPrompt, maxTokens, temperature, model }) => { try { if (!isInitialized) { await initializeBrowser(); } if (!page) { throw new Error('Browser page not initialized'); } if (model) { const currentModel = await page.evaluate(() => window.webllmInterface.getCurrentModel() ); if (currentModel !== model) { console.error(`[info] Switching to model: ${model}`); await page.evaluate( (newModel: string) => window.webllmInterface.setModel(newModel), model ); await page.waitForFunction(() => window.webllmInterface.isReady(), { timeout: 120000 * 1000, }); } } console.error( `[info] Generating text with prompt: "${prompt.substring(0, 50)}..."` ); const response = await page.evaluate( async (options: { prompt: string; systemPrompt: string | undefined; maxTokens: number | undefined; temperature: number | undefined; }) => { return await window.webllmInterface.generateMessage(options.prompt, { ...(options.systemPrompt !== undefined && { systemPrompt: options.systemPrompt, }), ...(options.maxTokens !== undefined && { maxTokens: options.maxTokens, }), ...(options.temperature !== undefined && { temperature: options.temperature, }), }); }, { prompt, systemPrompt, maxTokens, temperature } ); console.error(`[info] Generated ${response.length} characters`); return { content: [ { type: 'text' as const, text: response, }, ], }; } catch (error) { console.error(`[error] Generation failed: ${(error as Error).message}`); return { content: [ { type: 'text' as const, text: `Error generating 
text: ${(error as Error).message}`, }, ], }; } } ); server.tool( 'playwright_llm_chat', 'Start an interactive chat session and return the response', { message: z.string().describe('Message to send in the chat'), clearHistory: z .boolean() .optional() .describe('Clear chat history before sending'), }, async ({ message, clearHistory }) => { try { if (!isInitialized) { await initializeBrowser(); } if (!page) { throw new Error('Browser page not initialized'); } if (clearHistory) { await page.evaluate(() => window.webllmInterface.clearChat()); } await page.evaluate( (msg: string) => window.webllmInterface.addMessage('user', msg), message ); const response = await page.evaluate(async (msg: string) => { return await window.webllmInterface.generateMessage(msg); }, message); await page.evaluate( (resp: string) => window.webllmInterface.addMessage('assistant', resp), response ); return { content: [ { type: 'text' as const, text: response, }, ], }; } catch (error) { console.error(`[error] Chat failed: ${(error as Error).message}`); return { content: [ { type: 'text' as const, text: `Error in chat: ${(error as Error).message}`, }, ], }; } } ); server.tool( 'playwright_llm_status', 'Get the current status of the Web-LLM Playwright interface', {}, async () => { try { if (!isInitialized) { return { content: [ { type: 'text' as const, text: 'Not initialized', }, ], }; } if (!page) { throw new Error('Browser page not initialized'); } const isReady = await page.evaluate(() => window.webllmInterface.isReady() ); const currentModel = await page.evaluate(() => window.webllmInterface.getCurrentModel() ); const lastResponse = await page.evaluate(() => window.webllmInterface.getLastResponse() ); const status = { initialized: isInitialized, ready: isReady, currentModel, lastResponse: lastResponse.substring(0, 100) + (lastResponse.length > 100 ? '...' 
: ''), }; return { content: [ { type: 'text' as const, text: JSON.stringify(status, null, 2), }, ], }; } catch (error) { return { content: [ { type: 'text' as const, text: `Error getting status: ${(error as Error).message}`, }, ], }; } } ); server.tool( 'playwright_llm_set_model', 'Change the current Web-LLM model', { model: z.string().describe('Model ID to switch to'), }, async ({ model }) => { try { if (!isInitialized) { await initializeBrowser(); } if (!page) { throw new Error('Browser page not initialized'); } console.error(`[info] Switching to model: ${model}`); await page.evaluate( (newModel: string) => window.webllmInterface.setModel(newModel), model ); await page.waitForFunction(() => window.webllmInterface.isReady(), { timeout: 120000 * 1000, }); return { content: [ { type: 'text' as const, text: `Successfully switched to model: ${model}`, }, ], }; } catch (error) { console.error(`[error] Model switch failed: ${(error as Error).message}`); return { content: [ { type: 'text' as const, text: `Error switching model: ${(error as Error).message}`, }, ], }; } } ); server.tool( 'playwright_llm_screenshot', 'Take a screenshot of the Web-LLM interface', { path: z.string().optional().describe('Path to save screenshot (optional)'), }, async ({ path }) => { try { if (!isInitialized) { await initializeBrowser(); } if (!page) { throw new Error('Browser page not initialized'); } const screenshotPath = path || join(__dirname, 'screenshot.png'); await page.screenshot({ path: screenshotPath, fullPage: true }); return { content: [ { type: 'text' as const, text: `Screenshot saved to: ${screenshotPath}`, }, ], }; } catch (error) { return { content: [ { type: 'text' as const, text: `Error taking screenshot: ${(error as Error).message}`, }, ], }; } } ); async function main(): Promise<void> { const transport = new StdioServerTransport(); await server.connect(transport); console.error('[info] Web-LLM Playwright MCP server started'); } main().catch((error) => { 
console.error(error); process.exit(1); });

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/ragingwind/web-llm-mcp-server'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.