Gemini MCP Server

dial_models.json • 6.88 kB
{ "_README": { "description": "Model metadata for the DIAL (Data & AI Layer) aggregation provider.", "documentation": "https://github.com/BeehiveInnovations/zen-mcp-server/blob/main/docs/configuration.md", "usage": "Models listed here are exposed through the DIAL provider. Aliases are case-insensitive.", "field_notes": "Matches providers/shared/model_capabilities.py.", "field_descriptions": { "model_name": "The model identifier as exposed by DIAL (typically deployment name)", "aliases": "Array of shorthand names users can type instead of the full model name", "context_window": "Total number of tokens the model can process (input + output combined)", "max_output_tokens": "Maximum number of tokens the model can generate in a single response", "supports_extended_thinking": "Whether the model supports extended reasoning tokens", "supports_json_mode": "Whether the model can guarantee valid JSON output", "supports_function_calling": "Whether the model supports function/tool calling", "supports_images": "Whether the model can process images/visual input", "max_image_size_mb": "Maximum total size in MB for all images combined", "supports_temperature": "Whether the model accepts the temperature parameter", "temperature_constraint": "Temperature constraint hint: 'fixed', 'range', or 'discrete'", "description": "Human-readable description of the model", "intelligence_score": "1-20 human rating used as the primary signal for auto-mode ordering" } }, "models": [ { "model_name": "o3-2025-04-16", "friendly_name": "DIAL (O3)", "aliases": ["o3"], "intelligence_score": 14, "description": "OpenAI O3 via DIAL - Strong reasoning model", "context_window": 200000, "max_output_tokens": 100000, "supports_extended_thinking": false, "supports_function_calling": false, "supports_json_mode": true, "supports_images": true, "max_image_size_mb": 20.0, "supports_temperature": false, "temperature_constraint": "fixed" }, { "model_name": "o4-mini-2025-04-16", "friendly_name": "DIAL (O4-mini)", "aliases": ["o4-mini"], "intelligence_score": 11, "description": "OpenAI O4-mini via DIAL - Fast reasoning model", "context_window": 200000, "max_output_tokens": 100000, "supports_extended_thinking": false, "supports_function_calling": false, "supports_json_mode": true, "supports_images": true, "max_image_size_mb": 20.0, "supports_temperature": false, "temperature_constraint": "fixed" }, { "model_name": "anthropic.claude-sonnet-4.1-20250805-v1:0", "friendly_name": "DIAL (Sonnet 4.1)", "aliases": ["sonnet-4.1", "sonnet-4"], "intelligence_score": 10, "description": "Claude Sonnet 4.1 via DIAL - Balanced performance", "context_window": 200000, "max_output_tokens": 64000, "supports_extended_thinking": false, "supports_function_calling": false, "supports_json_mode": true, "supports_images": true, "max_image_size_mb": 5.0, "supports_temperature": true, "temperature_constraint": "range" }, { "model_name": "anthropic.claude-sonnet-4.1-20250805-v1:0-with-thinking", "friendly_name": "DIAL (Sonnet 4.1 Thinking)", "aliases": ["sonnet-4.1-thinking", "sonnet-4-thinking"], "intelligence_score": 11, "description": "Claude Sonnet 4.1 with thinking mode via DIAL", "context_window": 200000, "max_output_tokens": 64000, "supports_extended_thinking": true, "supports_function_calling": false, "supports_json_mode": true, "supports_images": true, "max_image_size_mb": 5.0, "supports_temperature": true, "temperature_constraint": "range" }, { "model_name": "anthropic.claude-opus-4.1-20250805-v1:0", "friendly_name": "DIAL (Opus 4.1)", "aliases": ["opus-4.1", 
"opus-4"], "intelligence_score": 14, "description": "Claude Opus 4.1 via DIAL - Most capable Claude model", "context_window": 200000, "max_output_tokens": 64000, "supports_extended_thinking": false, "supports_function_calling": false, "supports_json_mode": true, "supports_images": true, "max_image_size_mb": 5.0, "supports_temperature": true, "temperature_constraint": "range" }, { "model_name": "anthropic.claude-opus-4.1-20250805-v1:0-with-thinking", "friendly_name": "DIAL (Opus 4.1 Thinking)", "aliases": ["opus-4.1-thinking", "opus-4-thinking"], "intelligence_score": 15, "description": "Claude Opus 4.1 with thinking mode via DIAL", "context_window": 200000, "max_output_tokens": 64000, "supports_extended_thinking": true, "supports_function_calling": false, "supports_json_mode": true, "supports_images": true, "max_image_size_mb": 5.0, "supports_temperature": true, "temperature_constraint": "range" }, { "model_name": "gemini-2.5-pro-preview-03-25-google-search", "friendly_name": "DIAL (Gemini 2.5 Pro Search)", "aliases": ["gemini-2.5-pro-search"], "intelligence_score": 17, "description": "Gemini 2.5 Pro with Google Search via DIAL", "context_window": 1000000, "max_output_tokens": 65536, "supports_extended_thinking": false, "supports_function_calling": false, "supports_json_mode": true, "supports_images": true, "max_image_size_mb": 20.0, "supports_temperature": true, "temperature_constraint": "range" }, { "model_name": "gemini-2.5-pro-preview-05-06", "friendly_name": "DIAL (Gemini 2.5 Pro)", "aliases": ["gemini-2.5-pro"], "intelligence_score": 18, "description": "Gemini 2.5 Pro via DIAL - Deep reasoning", "context_window": 1000000, "max_output_tokens": 65536, "supports_extended_thinking": false, "supports_function_calling": false, "supports_json_mode": true, "supports_images": true, "max_image_size_mb": 20.0, "supports_temperature": true, "temperature_constraint": "range" }, { "model_name": "gemini-2.5-flash-preview-05-20", "friendly_name": "DIAL (Gemini Flash 2.5)", "aliases": ["gemini-2.5-flash"], "intelligence_score": 10, "description": "Gemini 2.5 Flash via DIAL - Ultra-fast", "context_window": 1000000, "max_output_tokens": 65536, "supports_extended_thinking": false, "supports_function_calling": false, "supports_json_mode": true, "supports_images": true, "max_image_size_mb": 20.0, "supports_temperature": true, "temperature_constraint": "range" } ] }

MCP directory API

We provide all of the information about MCP servers via our MCP directory API. For example:

curl -X GET 'https://glama.ai/api/mcp/v1/servers/BeehiveInnovations/gemini-mcp-server'
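The same lookup from Python, assuming only that the endpoint returns JSON (the response schema is not documented here, so inspect the payload before relying on specific fields):

    import requests

    url = "https://glama.ai/api/mcp/v1/servers/BeehiveInnovations/gemini-mcp-server"
    resp = requests.get(url, timeout=10)
    resp.raise_for_status()
    server = resp.json()  # schema assumed; verify keys against a live response
    print(server)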

If you have feedback or need assistance with the MCP directory API, please join our Discord server.