"""OpenRouter LLM client implementation."""
import httpx
from .base import LLMClient
class OpenRouterClient(LLMClient):
"""OpenRouter API client."""
def __init__(self, api_key: str, model: str = "anthropic/claude-3.5-sonnet"):
"""Initialize OpenRouter client.
Args:
api_key: OpenRouter API key
model: Model name to use (e.g., "anthropic/claude-3.5-sonnet")
"""
self.api_key = api_key
self.model = model
self.base_url = "https://openrouter.ai/api/v1/chat/completions"

    def generate(self, prompt: str) -> str:
        """Generate a response using OpenRouter.

        Args:
            prompt: The prompt to send to the LLM

        Returns:
            The generated text response

        Raises:
            httpx.HTTPError: If the request fails or OpenRouter returns an
                error status
        """
        headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json",
        }
        # OpenRouter exposes an OpenAI-compatible chat completions API, so the
        # payload is a standard messages array with a single user turn.
        payload = {
            "model": self.model,
            "messages": [
                {
                    "role": "user",
                    "content": prompt,
                }
            ],
        }
        with httpx.Client() as client:
            response = client.post(
                self.base_url,
                json=payload,
                headers=headers,
                timeout=30.0,
            )
            # Surface non-2xx responses to the caller as httpx.HTTPStatusError.
            response.raise_for_status()
            data = response.json()
            return data["choices"][0]["message"]["content"]