We provide all the information about MCP servers via our MCP API. For example:
curl -X GET 'https://glama.ai/api/mcp/v1/servers/mfuechec/SkyFiMCP'
If you have feedback or need assistance with the MCP directory API, please join our Discord server.
{
"models": {
"main": {
"provider": "openai",
"modelId": "gpt-4o",
"maxTokens": 16000,
"temperature": 0.2
},
"research": {
"provider": "openai",
"modelId": "gpt-4o",
"maxTokens": 16000,
"temperature": 0.1
},
"fallback": {
"provider": "openai",
"modelId": "gpt-4o-mini",
"maxTokens": 16000,
"temperature": 0.2
}
},
"global": {
"logLevel": "info",
"debug": false,
"defaultNumTasks": 10,
"defaultSubtasks": 5,
"defaultPriority": "medium",
"projectName": "Taskmaster",
"ollamaBaseURL": "http://localhost:11434/api",
"bedrockBaseURL": "https://bedrock.us-east-1.amazonaws.com",
"responseLanguage": "English",
"enableCodebaseAnalysis": true,
"enableProxy": false,
"defaultTag": "master",
"azureOpenaiBaseURL": "https://your-endpoint.openai.azure.com/",
"userId": "1234567890"
},
"claudeCode": {},
"codexCli": {},
"grokCli": {
"timeout": 120000,
"workingDirectory": null,
"defaultModel": "grok-4-latest"
}
}