Skip to main content
Glama

MCP Waifu Queue

by waifuai
openrouter.py (3.51 kB)
""" OpenRouter AI Provider Implementation. This module provides a client for the OpenRouter API, which serves as the default AI provider for the MCP Waifu Queue system. It handles API key resolution, model selection, and text generation requests to the OpenRouter service. Key Features: - OpenRouter API integration for text generation - Flexible API key resolution (environment variables or files) - Model selection via files or defaults - Error handling for API failures and missing keys - Configurable request timeouts - JSON payload construction and response parsing API Configuration: - API URL: https://openrouter.ai/api/v1/chat/completions - Default Model: deepseek/deepseek-chat-v3-0324:free - Temperature: 0.2 (for consistent responses) - Timeout: 60 seconds (configurable) Authentication: The module supports multiple authentication methods in order of precedence: 1. OPENROUTER_API_KEY environment variable 2. ~/.api-openrouter file containing the API key Model Selection: Models can be configured via: 1. ~/.model-openrouter file containing the model name 2. Default model fallback: deepseek/deepseek-chat-v3-0324:free Usage: This module is typically used through the respond.py module, which provides the main interface for text generation with provider fallback logic. Dependencies: - requests: For HTTP API calls - os, pathlib: For file system and environment operations - typing: For type hints """ # mcp_waifu_queue/providers/openrouter.py # Minimal OpenRouter client kept separate for potential direct imports if needed. 
from __future__ import annotations import os from pathlib import Path from typing import Optional import requests OPENROUTER_API_URL = "https://openrouter.ai/api/v1/chat/completions" DEFAULT_OPENROUTER_MODEL = "deepseek/deepseek-chat-v3-0324:free" OPENROUTER_API_KEY_FILE_PATH = Path.home() / ".api-openrouter" MODEL_FILE_PATH = Path.home() / ".model-openrouter" def _read_single_line(path: Path) -> Optional[str]: try: if path.is_file(): val = path.read_text(encoding="utf-8").strip() return val or None except Exception: return None return None def resolve_model() -> str: return _read_single_line(MODEL_FILE_PATH) or DEFAULT_OPENROUTER_MODEL def resolve_api_key() -> Optional[str]: env_key = os.getenv("OPENROUTER_API_KEY") if env_key and env_key.strip(): return env_key.strip() return _read_single_line(OPENROUTER_API_KEY_FILE_PATH) def generate(prompt: str, model: Optional[str] = None, timeout: int = 60) -> str: api_key = resolve_api_key() if not api_key: raise RuntimeError("OpenRouter API key not available via env or ~/.api-openrouter") payload = { "model": model or resolve_model(), "messages": [{"role": "user", "content": prompt}], "temperature": 0.2, } headers = { "Authorization": f"Bearer {api_key}", "Content-Type": "application/json", } resp = requests.post(OPENROUTER_API_URL, headers=headers, json=payload, timeout=timeout) if resp.status_code != 200: raise RuntimeError(f"OpenRouter non-200: {resp.status_code} body: {resp.text[:500]}") data = resp.json() choices = data.get("choices", []) if not choices: raise RuntimeError("OpenRouter response missing choices") content = (choices[0].get("message", {}).get("content") or "").strip() if not content: raise RuntimeError("OpenRouter response empty content") return content

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/waifuai/mcp-waifu-queue'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.