Skip to main content
Glama
akiani

Epic Patient API MCP Server

by akiani
openrouter.py1.55 kB
"""OpenRouter LLM client implementation.""" import httpx from .base import LLMClient class OpenRouterClient(LLMClient): """OpenRouter API client.""" def __init__(self, api_key: str, model: str = "anthropic/claude-3.5-sonnet"): """Initialize OpenRouter client. Args: api_key: OpenRouter API key model: Model name to use (e.g., "anthropic/claude-3.5-sonnet") """ self.api_key = api_key self.model = model self.base_url = "https://openrouter.ai/api/v1/chat/completions" def generate(self, prompt: str) -> str: """Generate a response using OpenRouter. Args: prompt: The prompt to send to the LLM Returns: The generated text response Raises: Exception: If the OpenRouter API call fails """ headers = { "Authorization": f"Bearer {self.api_key}", "Content-Type": "application/json", } payload = { "model": self.model, "messages": [ { "role": "user", "content": prompt } ] } with httpx.Client() as client: response = client.post( self.base_url, json=payload, headers=headers, timeout=30.0 ) response.raise_for_status() data = response.json() return data["choices"][0]["message"]["content"]

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/akiani/mock-epic-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.