Skip to main content
Glama

Gemini MCP Server

openrouter.py • 1.58 kB
"""OpenRouter model registry for managing model configurations and aliases."""

from __future__ import annotations

from ..shared import ModelCapabilities, ProviderType
from .base import CAPABILITY_FIELD_NAMES, CapabilityModelRegistry


class OpenRouterModelRegistry(CapabilityModelRegistry):
    """Capability registry backed by ``conf/openrouter_models.json``.

    Thin subclass of :class:`CapabilityModelRegistry` that wires in the
    OpenRouter-specific configuration file, environment override, and
    friendly-name conventions.
    """

    def __init__(self, config_path: str | None = None) -> None:
        """Initialise the registry.

        Args:
            config_path: Explicit path to the models JSON file. When ``None``,
                the base class resolves the path from the
                ``OPENROUTER_MODELS_CONFIG_PATH`` environment variable or falls
                back to the bundled ``openrouter_models.json``.
        """
        super().__init__(
            env_var_name="OPENROUTER_MODELS_CONFIG_PATH",
            default_filename="openrouter_models.json",
            provider=ProviderType.OPENROUTER,
            friendly_prefix="OpenRouter ({model})",
            config_path=config_path,
        )

    def _finalise_entry(self, entry: dict) -> tuple[ModelCapabilities, dict]:
        """Convert one raw config entry into a :class:`ModelCapabilities`.

        Normalises the optional ``provider`` override (string or enum) to a
        :class:`ProviderType`, derives a default ``friendly_name``, filters the
        entry down to known capability fields, and builds the capability object.

        Args:
            entry: Mutable dict parsed from the models JSON; must contain
                ``model_name``. Mutated in place (``friendly_name`` default).

        Returns:
            A ``(capability, extras)`` tuple; ``extras`` is always empty here.

        Raises:
            ValueError: If ``provider`` is a string that is not a valid
                :class:`ProviderType` value.
        """
        provider_override = entry.get("provider")
        if isinstance(provider_override, str):
            entry_provider = ProviderType(provider_override.lower())
        elif isinstance(provider_override, ProviderType):
            entry_provider = provider_override
        else:
            # Absent or unrecognised type: default to OpenRouter.
            entry_provider = ProviderType.OPENROUTER

        if entry_provider == ProviderType.CUSTOM:
            entry.setdefault("friendly_name", f"Custom ({entry['model_name']})")
        else:
            entry.setdefault("friendly_name", f"OpenRouter ({entry['model_name']})")

        filtered = {k: v for k, v in entry.items() if k in CAPABILITY_FIELD_NAMES}
        # Assign unconditionally (not setdefault): if "provider" is a capability
        # field, the raw string from the entry would survive the filter above and
        # setdefault would silently keep it, handing ModelCapabilities an
        # unnormalised string instead of the ProviderType computed above.
        filtered["provider"] = entry_provider
        capability = ModelCapabilities(**filtered)
        return capability, {}

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/BeehiveInnovations/gemini-mcp-server'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.