Epic Patient API MCP Server

by akiani
factory.py (1.54 kB)
"""Factory for creating LLM clients.""" import os from .base import LLMClient from .gemini import GeminiClient from .openrouter import OpenRouterClient def create_llm_client() -> LLMClient: """Create an LLM client based on environment variables. Checks for LLM_PROVIDER environment variable to determine which client to create. Falls back to Gemini if not specified. Environment variables: LLM_PROVIDER: "gemini" or "openrouter" (default: "gemini") GEMINI_API_KEY: Required if using Gemini GEMINI_MODEL: Optional model name for Gemini (default: "gemini-2.0-flash-exp") OPENROUTER_API_KEY: Required if using OpenRouter OPENROUTER_MODEL: Optional model name for OpenRouter (default: "anthropic/claude-3.5-sonnet") Returns: Configured LLM client Raises: ValueError: If required API key is not set """ provider = os.getenv("LLM_PROVIDER", "gemini").lower() if provider == "openrouter": api_key = os.getenv("OPENROUTER_API_KEY") if not api_key: raise ValueError("OPENROUTER_API_KEY not set") model = os.getenv("OPENROUTER_MODEL", "anthropic/claude-3.5-sonnet") return OpenRouterClient(api_key=api_key, model=model) else: # Default to Gemini api_key = os.getenv("GEMINI_API_KEY") if not api_key: raise ValueError("GEMINI_API_KEY not set") model = os.getenv("GEMINI_MODEL", "gemini-2.0-flash-exp") return GeminiClient(api_key=api_key, model=model)

MCP directory API

We provide all the information about MCP servers via our MCP directory API. For example, to fetch this server's entry:

curl -X GET 'https://glama.ai/api/mcp/v1/servers/akiani/mock-epic-mcp'
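The same endpoint can also be queried programmatically. A minimal Python sketch, assuming the endpoint returns JSON and requires no authentication:

```python
import json
import urllib.request

# This server's entry in the Glama MCP directory.
URL = "https://glama.ai/api/mcp/v1/servers/akiani/mock-epic-mcp"

with urllib.request.urlopen(URL) as response:
    server_info = json.load(response)

# Print whatever fields the API returns; no particular schema is assumed here.
print(json.dumps(server_info, indent=2))
```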

If you have feedback or need assistance with the MCP directory API, please join our Discord server.