Skip to main content
Glama

Web-LLM MCP Server

by ragingwind
index.html (8.62 kB)
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <title>Web-LLM with Playwright Interface</title>
  <script type="module">
    // Local LLM chat page driven by @mlc-ai/web-llm. Exposes a stable
    // window.webllmInterface API for Playwright automation, and also wires
    // the on-page input/button so the UI is usable by hand.
    import * as webllm from "https://esm.run/@mlc-ai/web-llm";

    let engine = null;
    let isReady = false;
    let currentModel = 'Llama-3.2-1B-Instruct-q4f32_1-MLC';

    // Module scripts are deferred, so the DOM is fully parsed by the time
    // these lookups run even though the script lives in <head>.
    const statusDiv = document.getElementById('status');
    const progressDiv = document.getElementById('progress');
    const chatContainer = document.getElementById('chatContainer');
    const messageInput = document.getElementById('messageInput');
    const sendButton = document.getElementById('sendButton');

    // Public automation surface. Property names and call signatures are a
    // contract with external Playwright scripts — do not rename.
    window.webllmInterface = {
      isReady: () => isReady,
      getCurrentModel: () => currentModel,

      // Switch models and reload the engine; resolves true on success.
      setModel: async (model) => {
        currentModel = model;
        return await initializeEngine();
      },

      /**
       * Run one chat completion for `prompt`.
       * options: { systemPrompt?, maxTokens?, temperature? }
       * Uses ?? (not ||) so explicit 0 values — e.g. temperature: 0 for
       * deterministic output — are honored instead of falling back.
       */
      generateMessage: async (prompt, options = {}) => {
        if (!isReady || !engine) {
          throw new Error('Engine not ready');
        }
        const messages = [{ role: 'user', content: prompt }];
        if (options.systemPrompt) {
          messages.unshift({ role: 'system', content: options.systemPrompt });
        }
        const completion = await engine.chat.completions.create({
          messages,
          max_tokens: options.maxTokens ?? 1000,
          temperature: options.temperature ?? 0.7,
        });
        return completion.choices[0]?.message?.content || '';
      },

      // Append a chat bubble. textContent (not innerHTML) keeps model
      // output from being interpreted as markup.
      addMessage: (role, content) => {
        if (chatContainer) {
          const messageDiv = document.createElement('div');
          messageDiv.className = `message ${role}`;
          messageDiv.textContent = content;
          chatContainer.appendChild(messageDiv);
          chatContainer.scrollTop = chatContainer.scrollHeight;
        }
      },

      clearChat: () => {
        if (chatContainer) {
          chatContainer.innerHTML = '<div class="message assistant">Chat cleared. Send me a message!</div>';
        }
      },

      // Text of the most recent assistant bubble, or '' if none.
      getLastResponse: () => {
        if (!chatContainer) return '';
        const messages = chatContainer.querySelectorAll('.message.assistant');
        return messages.length > 0 ? messages[messages.length - 1].textContent || '' : '';
      },
    };

    /**
     * (Re)create the MLCEngine and load `currentModel`, reflecting progress
     * in the status/progress elements and gating the input controls.
     * Resolves true when ready; rethrows the load error after updating the UI.
     */
    async function initializeEngine() {
      try {
        if (statusDiv) {
          statusDiv.className = 'status loading';
          statusDiv.textContent = `Loading model: ${currentModel}...`;
        }
        if (progressDiv) {
          progressDiv.style.display = 'block';
        }
        if (messageInput) messageInput.disabled = true;
        if (sendButton) sendButton.disabled = true;
        isReady = false;

        engine = new webllm.MLCEngine();
        await engine.reload(currentModel, {
          initProgressCallback: (progress) => {
            console.log(`Loading: ${progress.text} (${Math.round(progress.progress * 100)}%)`);
            if (progressDiv) {
              progressDiv.textContent = `Loading: ${progress.text} (${Math.round(progress.progress * 100)}%)`;
            }
          },
        });

        if (statusDiv) {
          statusDiv.className = 'status ready';
          statusDiv.textContent = `Ready! Model: ${currentModel}`;
        }
        if (progressDiv) {
          progressDiv.style.display = 'none';
        }
        if (messageInput) messageInput.disabled = false;
        if (sendButton) sendButton.disabled = false;
        isReady = true;
        return true;
      } catch (error) {
        console.error('Engine initialization error:', error);
        if (statusDiv) {
          statusDiv.className = 'status error';
          statusDiv.textContent = `Error: ${error.message}`;
        }
        if (progressDiv) {
          progressDiv.style.display = 'none';
        }
        isReady = false;
        throw error;
      }
    }

    // FIX: the original enabled the input/button once loading finished but
    // never attached a handler, so the visible Send button did nothing.
    // Route manual sends through the same webllmInterface the automation uses.
    async function handleSend() {
      if (!messageInput || !isReady) return;
      const text = messageInput.value.trim();
      if (!text) return;
      messageInput.value = '';
      window.webllmInterface.addMessage('user', text);
      // Gate the controls so a second send can't start mid-generation.
      messageInput.disabled = true;
      if (sendButton) sendButton.disabled = true;
      try {
        const reply = await window.webllmInterface.generateMessage(text);
        window.webllmInterface.addMessage('assistant', reply);
      } catch (error) {
        console.error('Generation error:', error);
        window.webllmInterface.addMessage('assistant', `Error: ${error.message}`);
      } finally {
        messageInput.disabled = false;
        if (sendButton) sendButton.disabled = false;
        messageInput.focus();
      }
    }

    sendButton?.addEventListener('click', handleSend);
    messageInput?.addEventListener('keydown', (event) => {
      if (event.key === 'Enter') handleSend();
    });

    // Kick off the initial model load as soon as the module runs.
    initializeEngine().catch((error) => {
      console.error('Failed to initialize:', error);
    });
  </script>
  <style>
    body { font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif; margin: 0; padding: 20px; background-color: #f5f5f5; }
    .container { max-width: 800px; margin: 0 auto; background: white; border-radius: 8px; padding: 20px; box-shadow: 0 2px 10px rgba(0,0,0,0.1); }
    .status { padding: 10px; border-radius: 4px; margin-bottom: 20px; font-weight: 500; }
    .status.loading { background-color: #fff3cd; border: 1px solid #ffeaa7; color: #856404; }
    .status.ready { background-color: #d4edda; border: 1px solid #c3e6cb; color: #155724; }
    .status.error { background-color: #f8d7da; border: 1px solid #f5c6cb; color: #721c24; }
    .chat-container { border: 1px solid #ddd; border-radius: 8px; height: 400px; overflow-y: auto; padding: 15px; margin-bottom: 20px; background-color: #fafafa; }
    .message { margin-bottom: 15px; padding: 10px 15px; border-radius: 8px; max-width: 70%; word-wrap: break-word; }
    .message.user { background-color: #007bff; color: white; margin-left: auto; text-align: right; }
    .message.assistant { background-color: #e9ecef; color: #333; }
    .input-container { display: flex; gap: 10px; align-items: center; }
    #messageInput { flex: 1; padding: 12px; border: 1px solid #ddd; border-radius: 6px; font-size: 14px; }
    #sendButton { padding: 12px 20px; background-color: #007bff; color: white; border: none; border-radius: 6px; cursor: pointer; font-size: 14px; transition: background-color 0.2s; }
    #sendButton:hover:not(:disabled) { background-color: #0056b3; }
    #sendButton:disabled { background-color: #6c757d; cursor: not-allowed; }
    #progress { margin-top: 10px; font-size: 14px; color: #666; display: none; }
    .model-info { font-size: 12px; color: #666; margin-bottom: 15px; text-align: center; }
  </style>
</head>
<body>
  <div class="container">
    <h1>Web-LLM with Playwright Interface</h1>
    <div class="model-info"> This interface provides local LLM inference using Web-LLM technology </div>
    <div id="status" class="status loading">Initializing...</div>
    <div id="progress"></div>
    <div id="chatContainer" class="chat-container">
      <div class="message assistant">Welcome! I'm loading the AI model. This may take a few moments...</div>
    </div>
    <div class="input-container">
      <input type="text" id="messageInput" placeholder="Type your message here..." disabled />
      <button id="sendButton" disabled>Send</button>
    </div>
  </div>
</body>
</html>

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/ragingwind/web-llm-mcp-server'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.