Skip to main content
Glama

Gemini MCP Server

constants.py • 1.52 kB
"""Internal defaults and constants for clink."""

from __future__ import annotations

from dataclasses import dataclass, field
from pathlib import Path

# Process-level limits for spawned CLI clients.
DEFAULT_TIMEOUT_SECONDS = 1800
DEFAULT_STREAM_LIMIT = 10 * 1024 * 1024  # 10MB per stream

# Filesystem layout: repository root plus bundled and per-user config locations.
PROJECT_ROOT = Path(__file__).resolve().parent.parent
BUILTIN_PROMPTS_DIR = PROJECT_ROOT / "systemprompts" / "clink"
CONFIG_DIR = PROJECT_ROOT / "conf" / "cli_clients"
USER_CONFIG_DIR = Path.home() / ".zen" / "cli_clients"

# Role prompt shared by every built-in CLI entry below.
_DEFAULT_ROLE_PROMPT = "systemprompts/clink/default.txt"


@dataclass(frozen=True)
class CLIInternalDefaults:
    """Internal defaults applied to a CLI client during registry load."""

    # Identifier of the output parser for this CLI's responses.
    parser: str
    # Extra command-line arguments appended when invoking the CLI.
    additional_args: list[str] = field(default_factory=list)
    # Environment variables to set for the CLI process.
    env: dict[str, str] = field(default_factory=dict)
    # Relative path to the role prompt file, if any.
    default_role_prompt: str | None = None
    timeout_seconds: int = DEFAULT_TIMEOUT_SECONDS
    # Name of the runner used to execute this CLI, if any.
    runner: str | None = None


# Built-in defaults for each supported CLI, keyed by client name.
INTERNAL_DEFAULTS: dict[str, CLIInternalDefaults] = {
    "gemini": CLIInternalDefaults(
        parser="gemini_json",
        additional_args=["-o", "json"],
        default_role_prompt=_DEFAULT_ROLE_PROMPT,
        runner="gemini",
    ),
    "codex": CLIInternalDefaults(
        parser="codex_jsonl",
        additional_args=["exec"],
        default_role_prompt=_DEFAULT_ROLE_PROMPT,
        runner="codex",
    ),
    "claude": CLIInternalDefaults(
        parser="claude_json",
        additional_args=["--print", "--output-format", "json"],
        default_role_prompt=_DEFAULT_ROLE_PROMPT,
        runner="claude",
    ),
}

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/BeehiveInnovations/gemini-mcp-server'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.