// We provide all the information about MCP servers via our MCP API:
// curl -X GET 'https://glama.ai/api/mcp/v1/servers/ricardobarreto-vitai/qi140-mcp-multi'
// If you have feedback or need assistance with the MCP directory API, please join our Discord server.
// Load variables from a local .env file into process.env so the
// config object below can read them. Must run before `config` is built.
import dotenv from 'dotenv';
dotenv.config();
/**
 * Application configuration, resolved once at module load from
 * environment variables (populated by `dotenv.config()` above).
 *
 * Values without a `|| fallback` are `undefined` when the corresponding
 * environment variable is unset — callers must handle that.
 */
export const config = {
  // Port the HTTP API listens on. NOTE: env values are strings, so this
  // is the string form of the port unless API_PORT is unset (then 8080).
  apiPort: process.env.API_PORT || 8080,
  // Which LLM backend to talk to; defaults to a generic provider.
  provider: process.env.PROVIDER || 'GENERIC',
  llmBaseUrl: process.env.LLM_BASE_URL,
  llmPort: process.env.LLM_PORT,
  llmModel: process.env.LLM_MODEL,
  openAiKey: process.env.OPENAI_API_KEY,
  openAiBaseUrl: process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1',
  // Path to the system-prompt file; default matches the container layout.
  promptFile: process.env.PROMPT_FILE || '/app/prompts/prompt.txt',
  // FIX: the original used `process.env.DATABASE_URL!` — the TypeScript
  // non-null assertion operator, which is a syntax error in plain
  // JavaScript. Dropping the `!` is runtime-identical (the assertion is
  // compile-time-only in TS). May still be `undefined` if unset.
  dbUrl: process.env.DATABASE_URL,
};