Skip to main content
Glama
example_config.json (1.07 kB)
{ "ollama_url": "http://localhost:11434", "openai_api_key": "sk-proj-...", "gemini_api_key": "AIza...", "claude_api_key": "sk-ant-...", "default_model": "openai:gpt-4o-mini", "fallback_chain": ["openai:gpt-4o-mini", "gemini:gemini-1.5-flash", "ollama:llama3"], "mcp_host": "127.0.0.1", "mcp_port": 9000, "allow_remote": false, "log_level": "info", "log_dir": "logs", "cache_enabled": true, "cache_type": "json", "cache_path": "context_cache.json", "timeout_seconds": 30, "max_retries": 3, "retry_delay": 1.0, "models": { "ollama": { "available": ["llama3", "mistral", "codellama", "phi"], "default": "llama3" }, "openai": { "available": ["gpt-4o", "gpt-4o-mini", "gpt-4-turbo"], "default": "gpt-4o-mini" }, "gemini": { "available": ["gemini-1.5-pro-latest", "gemini-1.5-flash-latest"], "default": "gemini-1.5-flash-latest" }, "claude": { "available": ["claude-3-5-sonnet-20241022", "claude-3-opus-20240229"], "default": "claude-3-5-sonnet-20241022" } } }

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/jaskirat1616/fusion360-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.