Skip to main content
Glama

MCP Hub

by sentilabs01
chat.test.js (1.06 kB)
// Integration tests for the /api/chat endpoint's provider routing.
// Verifies the endpoint honors the `provider` form field (gemini,
// anthropic) and falls back to the default provider (openai) when
// no provider field is supplied.
const request = require('supertest');
const app = require('./index.js').default;

describe('/api/chat provider routing', () => {
  // Helper: POST a chat message, optionally selecting a provider.
  // Uses .field() so the request goes out as multipart/form-data,
  // matching the wire format the endpoint expects.
  const postChat = (message, provider) => {
    const req = request(app).post('/api/chat').field('message', message);
    return provider ? req.field('provider', provider) : req;
  };

  it('should use the selected provider (gemini)', async () => {
    const res = await postChat('test gemini', 'gemini');
    expect(res.statusCode).toBe(200);
    // Optionally, check for a Gemini-specific response or log output
  });

  it('should use the selected provider (anthropic)', async () => {
    const res = await postChat('test anthropic', 'anthropic');
    expect(res.statusCode).toBe(200);
    // Optionally, check for an Anthropic-specific response or log output
  });

  it('should default to openai if no provider is given', async () => {
    const res = await postChat('test openai');
    expect(res.statusCode).toBe(200);
    // Optionally, check for an OpenAI-specific response or log output
  });
});

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/sentilabs01/mcpserver'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.