Skip to main content
Glama

OpenFeature MCP Server

build-prompts.js (3.31 kB)
#!/usr/bin/env node
/**
 * Script to bundle prompts into a TypeScript file for Cloudflare Workers.
 * This allows us to include prompt content at build time since Workers
 * don't have file system access.
 *
 * Usage:
 *   node scripts/build-prompts.js
 */
import fs from 'fs/promises';
import path from 'path';
import { fileURLToPath, pathToFileURL } from 'url';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// Declared before the functions that read them: the original placed
// PROMPTS_DIR after getAvailableTechnologies and relied on call order to
// dodge the const temporal dead zone.
const PROMPTS_DIR = path.join(__dirname, '..', 'prompts');
const OUTPUT_FILE = path.join(__dirname, '..', 'src', 'tools', 'promptsBundle.generated.ts');

/**
 * List technology names derived from the *.md files in the prompts directory.
 *
 * @returns {Promise<string[]>} Sorted technology names (extension stripped).
 *   Returns an empty list (with a warning) when the directory is unreadable.
 */
async function getAvailableTechnologies() {
  try {
    const files = await fs.readdir(PROMPTS_DIR);
    return files
      .filter((file) => file.endsWith('.md') && file !== 'README.md')
      // slice, not replace('.md', ''): replace removes the FIRST '.md'
      // occurrence, which corrupts names such as 'a.md.md'.
      .map((file) => file.slice(0, -'.md'.length))
      .sort();
  } catch (error) {
    console.warn('⚠️ Could not read prompts directory, using empty list');
    return [];
  }
}

/**
 * Read the prompt markdown for one technology.
 *
 * @param {string} technology - Technology name (maps to `<technology>.md`).
 * @returns {Promise<string|null>} File content, or null when missing.
 */
async function readPromptFile(technology) {
  const fileName = `${technology}.md`;
  const filePath = path.join(PROMPTS_DIR, fileName);
  try {
    const content = await fs.readFile(filePath, 'utf-8');
    return content;
  } catch (error) {
    console.warn(`⚠️ ${technology}: Local prompt file not found`);
    return null;
  }
}

/**
 * Bundle every available prompt into a generated TypeScript module
 * (OUTPUT_FILE) exporting the technology enum, a zod schema, and the
 * prompt contents keyed by technology.
 *
 * @returns {Promise<void>}
 * @throws Propagates fs errors from writing OUTPUT_FILE.
 */
async function buildPromptsBundle() {
  console.log('🔨 Building prompts bundle...');

  const availableTechnologies = await getAvailableTechnologies();
  const prompts = {};
  let loadedCount = 0;

  for (const technology of availableTechnologies) {
    const content = await readPromptFile(technology);
    if (content !== null) {
      prompts[technology] = content;
      loadedCount++;
      console.log(`✅ ${technology}: Bundled`);
    }
  }

  // Generate the TypeScript file. Keys and contents are emitted via
  // JSON.stringify so quotes/backslashes in a filename or prompt body
  // cannot produce syntactically broken TypeScript (the original
  // interpolated keys as '${key}' with no escaping).
  const technologyKeys = Object.keys(prompts);
  const tsContent = `// AUTO-GENERATED FILE - Do not edit manually
// Generated by scripts/build-prompts.js

import { z } from 'zod';

export const INSTALL_TECHNOLOGIES = [
${technologyKeys.map((key) => `  ${JSON.stringify(key)},`).join('\n')}
] as const;

export const InstallTechnologySchema = z.enum(INSTALL_TECHNOLOGIES);
export type InstallTechnology = z.infer<typeof InstallTechnologySchema>;

export const BUNDLED_PROMPTS: Record<InstallTechnology, string> = {
${Object.entries(prompts)
  .map(([technology, content]) => `  ${JSON.stringify(technology)}: ${JSON.stringify(content)},`)
  .join('\n')}
};
`;

  await fs.writeFile(OUTPUT_FILE, tsContent, 'utf-8');

  console.log('\n📊 Bundle Summary:');
  console.log(` ✅ Bundled: ${loadedCount}`);
  console.log(` 📁 Available technologies: ${availableTechnologies.length}`);
  console.log(` 📄 Output: ${path.relative(process.cwd(), OUTPUT_FILE)}`);

  if (loadedCount > 0) {
    console.log(`\n🎉 Prompts bundle created successfully!`);
  } else {
    console.log(`\n⚠️ No prompts were bundled. Make sure prompts exist in the prompts/ directory.`);
  }
}

/** Entry point: build the bundle, exiting 1 on any failure. */
async function main() {
  try {
    await buildPromptsBundle();
  } catch (error) {
    console.error('❌ Error building prompts bundle:', error);
    process.exit(1);
  }
}

// Direct-execution check (ES module equivalent of require.main === module).
// pathToFileURL is used instead of string-building `file://${argv[1]}`,
// which breaks on Windows drive letters and percent-encoded path characters.
if (process.argv[1] && import.meta.url === pathToFileURL(process.argv[1]).href) {
  main();
}

export { buildPromptsBundle };

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/DevCycleHQ-Sandbox/openfeature-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.