Skip to main content
Glama

EX MCP Server

by Zazzles2908
auggie-config.json (1.98 kB)
{ "auggie": { "default_model": "auto", "templates": { "auto_use": true, "directory": "templates/auggie/" }, "selector": { "explanations": true, "auto_optimize": true }, "wrappers": { "show_progress": true, "compact_output": true, "error_detail": "detailed" }, "models": { "provider_order": ["KIMI", "GLM", "CUSTOM"], "capabilities": { "glm-4.5-air": {"reasoning": "medium", "speed": "high"}, "glm-4.5": {"reasoning": "medium", "speed": "medium"}, "glm-4.5-flash": {"reasoning": "low", "speed": "very_high"}, "kimi-k2-0711-preview": {"reasoning": "high", "speed": "medium"}, "kimi-k2-0905-preview": {"reasoning": "very_high", "speed": "medium"}, "kimi-k2-turbo-preview": {"reasoning": "medium", "speed": "high"}, "kimi-k2-thinking": {"reasoning": "high", "speed": "medium"} } }, "fallback": { "chat": ["glm-4.5-air", "glm-4.5-flash", "kimi-k2-turbo-preview"], "reasoning": ["kimi-k2-thinking", "kimi-k2-0711-preview", "glm-4.5"], "coding": ["glm-4.5", "glm-4.5-air"] } }, "mcpServers": { "exai": { "type": "stdio", "trust": true, "command": "C:/Project/EX-AI-MCP-Server/.venv/Scripts/python.exe", "args": ["-u", "C:/Project/EX-AI-MCP-Server/scripts/mcp_server_wrapper.py"], "cwd": "C:/Project/EX-AI-MCP-Server", "env": { "AUGGIE_CLI": "true", "ALLOW_AUGGIE": "true", "PYTHONUNBUFFERED": "1", "PYTHONPATH": "C:/Project/EX-AI-MCP-Server", "ENV_FILE": "C:/Project/EX-AI-MCP-Server/.env", "AUGGIE_CONFIG": "C:/Project/EX-AI-MCP-Server/auggie-config.json", "MCP_SERVER_NAME": "exai", "MCP_SERVER_ID": "exai-server", "LOG_LEVEL": "INFO", "LOG_FORMAT": "plain", "ACTIVITY_LOG": "true", "STREAM_PROGRESS": "true", "STDERR_BREADCRUMBS": "true" } } } }

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Zazzles2908/EX_AI-mcp-server'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.