
Reviewer MCP

by jaggederest
config.test.ts
import { describe, it, expect, beforeEach, vi } from 'vitest';
import { readFile } from 'fs/promises';
import { loadProjectConfig, resetConfigCache } from './config.js';

// Mock the filesystem so each test controls what readFile returns.
vi.mock('fs/promises');

describe('config utilities', () => {
  beforeEach(() => {
    vi.clearAllMocks();
    resetConfigCache();
    delete process.env.OPENAI_API_KEY;
    delete process.env.OPENAI_MODEL;
  });

  describe('loadProjectConfig', () => {
    it('should return default config when no .reviewer.json exists', async () => {
      vi.mocked(readFile).mockRejectedValueOnce(new Error('File not found'));

      const config = await loadProjectConfig();

      expect(config).toEqual({
        testCommand: 'npm test',
        lintCommand: 'npm run lint',
        openaiModel: 'o1-preview',
        aiProvider: 'openai',
        ollamaBaseUrl: 'http://localhost:11434',
        ollamaModel: 'llama2',
      });
    });

    it('should merge user config with defaults', async () => {
      const userConfig = {
        testCommand: 'yarn test',
        openaiModel: 'gpt-4',
      };
      vi.mocked(readFile).mockResolvedValueOnce(JSON.stringify(userConfig));

      const config = await loadProjectConfig();

      // User-supplied values win; unspecified keys keep their defaults.
      expect(config).toEqual({
        testCommand: 'yarn test',
        lintCommand: 'npm run lint',
        openaiModel: 'gpt-4',
        aiProvider: 'openai',
        ollamaBaseUrl: 'http://localhost:11434',
        ollamaModel: 'llama2',
      });
    });

    it('should use OPENAI_MODEL env var when set', async () => {
      process.env.OPENAI_MODEL = 'o3-mini';
      vi.mocked(readFile).mockRejectedValueOnce(new Error('File not found'));

      const config = await loadProjectConfig();

      expect(config.openaiModel).toBe('o3-mini');
    });
  });
});
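The config.js module under test is not shown on this page. As a rough sketch, an implementation consistent with these tests might look like the following; the .reviewer.json file path, the memoization cleared by resetConfigCache, and the OPENAI_MODEL override are assumptions inferred from the test cases rather than the published source.

// config.ts -- hypothetical sketch inferred from config.test.ts above.
import { readFile } from 'fs/promises';

export interface ReviewerConfig {
  testCommand: string;
  lintCommand: string;
  openaiModel: string;
  aiProvider: string;
  ollamaBaseUrl: string;
  ollamaModel: string;
}

const DEFAULT_CONFIG: ReviewerConfig = {
  testCommand: 'npm test',
  lintCommand: 'npm run lint',
  openaiModel: 'o1-preview',
  aiProvider: 'openai',
  ollamaBaseUrl: 'http://localhost:11434',
  ollamaModel: 'llama2',
};

let cachedConfig: ReviewerConfig | null = null;

// Clears the memoized config; the tests call this in beforeEach.
export function resetConfigCache(): void {
  cachedConfig = null;
}

// Reads .reviewer.json if present, merges it over the defaults,
// and lets the OPENAI_MODEL environment variable override the model.
export async function loadProjectConfig(): Promise<ReviewerConfig> {
  if (cachedConfig) return cachedConfig;

  let userConfig: Partial<ReviewerConfig> = {};
  try {
    const raw = await readFile('.reviewer.json', 'utf-8');
    userConfig = JSON.parse(raw);
  } catch {
    // No .reviewer.json (or unreadable): fall back to defaults.
  }

  cachedConfig = {
    ...DEFAULT_CONFIG,
    ...userConfig,
    ...(process.env.OPENAI_MODEL
      ? { openaiModel: process.env.OPENAI_MODEL }
      : {}),
  };
  return cachedConfig;
}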

MCP directory API

We provide information about all MCP servers via our MCP directory API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/jaggederest/mcp_reviewer'
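The same request in TypeScript, assuming Node 18+ where fetch is available globally; the response schema is not documented on this page, so the result is left untyped.

// Hypothetical TypeScript equivalent of the curl example above.
const response = await fetch(
  'https://glama.ai/api/mcp/v1/servers/jaggederest/mcp_reviewer',
);
const server: unknown = await response.json(); // response shape not shown here
console.log(server);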

If you have feedback or need assistance with the MCP directory API, please join our Discord server.