DollhouseMCP

by DollhouseMCP
build-readme.test.js • 5.95 kB
/**
 * Unit tests for the modular README build script
 * These tests validate helper functions and logic without executing the full script
 * For integration tests that run the actual script, see build-readme.integration.test.js
 *
 * @fileoverview Unit tests for scripts/build-readme.js
 */

import { describe, test, expect, beforeEach, afterEach } from '@jest/globals';
import fs from 'fs/promises';
import path from 'path';
import { fileURLToPath } from 'url';
import { exec } from 'child_process';
import { promisify } from 'util';

const execAsync = promisify(exec);

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// Paths
const PROJECT_ROOT = path.resolve(__dirname, '../../..');
const BUILD_SCRIPT = path.join(PROJECT_ROOT, 'scripts', 'build-readme.js');
const TEST_README_DIR = path.join(PROJECT_ROOT, 'test-temp', 'readme-test');
const TEST_CONFIG_PATH = path.join(TEST_README_DIR, 'config.json');
const TEST_CHUNKS_DIR = path.join(TEST_README_DIR, 'chunks');

describe('README Builder Unit Tests', () => {
  beforeEach(async () => {
    // Create test directory structure
    await fs.mkdir(TEST_README_DIR, { recursive: true });
    await fs.mkdir(TEST_CHUNKS_DIR, { recursive: true });
  });

  afterEach(async () => {
    // Clean up test files
    try {
      await fs.rm(TEST_README_DIR, { recursive: true, force: true });
    } catch (error) {
      // Ignore cleanup errors
    }
  });

  test('should successfully build NPM README with valid chunks', async () => {
    // Create test chunks
    await fs.writeFile(
      path.join(TEST_CHUNKS_DIR, 'header.md'),
      '# Test Project\n\nThis is a test project.'
    );
    await fs.writeFile(
      path.join(TEST_CHUNKS_DIR, 'installation.md'),
      '## Installation\n\n```bash\nnpm install test\n```'
    );

    // Create test config
    const testConfig = {
      versions: {
        npm: {
          description: 'Test NPM README',
          chunks: ['header', 'installation'],
          output: './test-output.md'
        }
      },
      chunkDirectory: 'chunks',
      separator: '\n\n'
    };
    await fs.writeFile(TEST_CONFIG_PATH, JSON.stringify(testConfig, null, 2));

    // Run build script (would need to modify script to accept custom config path)
    // For now, just verify the test setup works
    const outputPath = path.join(TEST_README_DIR, 'test-output.md');

    // Simulate what the build script would do
    const chunks = await Promise.all(
      testConfig.versions.npm.chunks.map(async (chunk) => {
        const content = await fs.readFile(
          path.join(TEST_CHUNKS_DIR, `${chunk}.md`),
          'utf-8'
        );
        return content.trim();
      })
    );

    const output = chunks.join(testConfig.separator);
    await fs.writeFile(outputPath, output);

    // Verify output
    const result = await fs.readFile(outputPath, 'utf-8');
    expect(result).toContain('# Test Project');
    expect(result).toContain('## Installation');
    expect(result).toContain('npm install test');
  });

  test('should handle missing chunks gracefully', async () => {
    // Create config with missing chunk
    const testConfig = {
      versions: {
        test: {
          description: 'Test with missing chunk',
          chunks: ['header', 'missing-chunk'],
          output: './test-output.md'
        }
      },
      chunkDirectory: 'chunks',
      separator: '\n\n'
    };
    await fs.writeFile(TEST_CONFIG_PATH, JSON.stringify(testConfig, null, 2));

    // Create only one chunk
    await fs.writeFile(
      path.join(TEST_CHUNKS_DIR, 'header.md'),
      '# Test Project'
    );

    // Simulate build with missing chunk
    const chunks = [];
    for (const chunkName of testConfig.versions.test.chunks) {
      try {
        const content = await fs.readFile(
          path.join(TEST_CHUNKS_DIR, `${chunkName}.md`),
          'utf-8'
        );
        chunks.push(content.trim());
      } catch (error) {
        // Missing chunk - should continue
        if (error.code !== 'ENOENT') {
          throw error;
        }
      }
    }

    expect(chunks.length).toBe(1);
    expect(chunks[0]).toBe('# Test Project');
  });

  test('should validate config structure', async () => {
    // Test invalid config
    const invalidConfigs = [
      {},                                      // Missing versions
      { versions: 'not-an-object' },           // Invalid versions type
      { versions: {}, chunkDirectory: 123 },   // Invalid chunkDirectory type
    ];

    for (const invalidConfig of invalidConfigs) {
      // Validation that would be in the build script
      let isValid = true;
      if (!invalidConfig.versions || typeof invalidConfig.versions !== 'object') {
        isValid = false;
      }
      if (invalidConfig.chunkDirectory && typeof invalidConfig.chunkDirectory !== 'string') {
        isValid = false;
      }
      expect(isValid).toBe(false);
    }
  });

  test('should detect unclosed markdown code blocks', async () => {
    const contentWithUnclosedBlock = '# Title\n\n```javascript\nconst test = true;\n';

    // Check for unclosed code blocks
    const codeBlockCount = (contentWithUnclosedBlock.match(/```/g) || []).length;
    const hasUnclosedBlock = codeBlockCount % 2 !== 0;

    expect(hasUnclosedBlock).toBe(true);
  });

  test('should handle empty chunks', async () => {
    // Create empty chunk
    await fs.writeFile(path.join(TEST_CHUNKS_DIR, 'empty.md'), '');

    // Read and validate
    const content = await fs.readFile(
      path.join(TEST_CHUNKS_DIR, 'empty.md'),
      'utf-8'
    );
    expect(content.length).toBe(0);
  });

  test('should resolve relative output paths correctly', () => {
    const baseDir = '/test/docs/readme';
    const relativePath = '../../output.md';
    const resolved = path.resolve(baseDir, relativePath);
    expect(resolved).toBe('/test/output.md');
  });
});
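
These unit tests only simulate the chunk-assembly flow rather than executing scripts/build-readme.js (the integration tests do that). For orientation, here is a minimal sketch of that flow under the same assumptions the tests encode: a config.json with a versions map, a chunkDirectory, and a separator, with missing chunk files skipped on ENOENT. The buildVersion function name and its signature are hypothetical, not taken from the actual script.

// Sketch only: mirrors the read-join-write flow the unit tests simulate.
// buildVersion is an illustrative name, not the real build-readme.js API.
import fs from 'fs/promises';
import path from 'path';

async function buildVersion(configDir, versionName) {
  const config = JSON.parse(
    await fs.readFile(path.join(configDir, 'config.json'), 'utf-8')
  );
  const version = config.versions[versionName];
  const chunksDir = path.join(configDir, config.chunkDirectory);

  // Load each chunk in order, skipping missing files (the ENOENT handling tested above)
  const chunks = [];
  for (const name of version.chunks) {
    try {
      const content = await fs.readFile(path.join(chunksDir, `${name}.md`), 'utf-8');
      chunks.push(content.trim());
    } catch (error) {
      if (error.code !== 'ENOENT') throw error;
    }
  }

  // Join with the configured separator and write to the version's output path
  const outputPath = path.resolve(configDir, version.output);
  await fs.writeFile(outputPath, chunks.join(config.separator));
  return outputPath;
}

The directory layout assumed here (a config.json next to a chunks/ folder) matches the structure the tests create under test-temp/readme-test.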
