"""
LLM Client for OpenAI-compatible APIs.
Supports LM Studio, OpenRouter, OpenAI, and any OpenAI-compatible endpoint.
"""
import re
from typing import Dict, List, Optional
import httpx
from .providers import ProviderConfig, ProviderType
class LLMClient:
    """Unified client for OpenAI-compatible APIs (LM Studio, OpenRouter, OpenAI)."""

    # Some models leak special tokens such as "<|eot_id|>" into their output;
    # compiled once here and stripped from every assistant reply.
    _SPECIAL_TOKEN_RE = re.compile(r"<\|[^|]+\|>")

    def __init__(
        self,
        provider_config: Optional[ProviderConfig] = None,
        base_url: Optional[str] = None,
        api_key: Optional[str] = None,
        model: Optional[str] = None,
    ):
        """
        Initialize LLM client.

        Args:
            provider_config: ProviderConfig from ProviderManager (preferred).
            base_url: Direct API URL (legacy mode, used if provider_config is None).
            api_key: API key for authentication.
            model: Model name to use.
        """
        if provider_config:
            # Preferred path: all connection details come from the config object.
            self.provider_type = provider_config.type
            self.base_url = (
                provider_config.api_url.rstrip("/")
                if provider_config.api_url
                else "http://localhost:1234/v1"
            )
            self.api_key = provider_config.api_key
            self.model = provider_config.model
            self.provider_name = provider_config.name
        else:
            # Legacy mode: caller supplies raw connection parameters directly.
            self.provider_type = ProviderType.LM_STUDIO
            self.base_url = (
                base_url.rstrip("/") if base_url else "http://localhost:1234/v1"
            )
            self.api_key = api_key
            self.model = model
            self.provider_name = "OpenAI-compatible API"
        # Running user/assistant history; the system prompt is kept separate
        # and re-inserted at the front of each request in send_message().
        self.conversation: List[Dict[str, str]] = []
        self.system_prompt: Optional[str] = None
        # Generous timeout: local models can take a long time to finish a reply.
        self._http_client = httpx.Client(timeout=120.0)

    def set_system_prompt(self, prompt: str) -> None:
        """Set the system prompt and reset the conversation history."""
        self.system_prompt = prompt
        self.conversation = []  # Reset conversation

    def _build_headers(self) -> Dict[str, str]:
        """Build request headers: content type, optional auth, provider extras."""
        headers = {"Content-Type": "application/json"}
        if self.api_key:
            headers["Authorization"] = f"Bearer {self.api_key}"
        # OpenRouter requires additional headers identifying the calling app
        if self.provider_type == ProviderType.OPENROUTER:
            headers["HTTP-Referer"] = "https://github.com/localvoicemode"
            headers["X-Title"] = "LocalVoiceMode"
        return headers

    def send_message(self, message: str) -> str:
        """
        Send a message and return the assistant's reply.

        The user message is appended to the history first; on failure it is
        rolled back and a human-readable error string is returned instead of
        raising, so callers can surface the problem verbatim.

        Args:
            message: The user's message text.

        Returns:
            The assistant's reply (special tokens stripped), or an
            "[Error communicating with ...]" string on failure.
        """
        self.conversation.append({"role": "user", "content": message})
        try:
            messages: List[Dict[str, str]] = []
            if self.system_prompt:
                messages.append({"role": "system", "content": self.system_prompt})
            messages.extend(self.conversation)
            payload = {
                "messages": messages,
                "model": self.model or "default",  # Always include model field
                "temperature": 0.8,
                "max_tokens": 1024,
                "stream": False,
            }
            response = self._http_client.post(
                f"{self.base_url}/chat/completions",
                headers=self._build_headers(),
                json=payload,
            )
            response.raise_for_status()
            data = response.json()
            assistant_message = data["choices"][0]["message"]["content"]
            # Filter out special tokens from some models
            assistant_message = self._SPECIAL_TOKEN_RE.sub(
                "", assistant_message
            ).strip()
            self.conversation.append(
                {"role": "assistant", "content": assistant_message}
            )
            return assistant_message
        except (httpx.HTTPError, KeyError, IndexError, TypeError, ValueError) as e:
            # Narrowed from bare Exception: httpx.HTTPError covers network and
            # HTTP-status failures, ValueError covers malformed JSON bodies,
            # and KeyError/IndexError/TypeError cover an unexpected response
            # shape (e.g. missing "choices" or a null "content"). Genuine
            # programming errors now propagate instead of being swallowed.
            error_msg = f"[Error communicating with {self.provider_name}: {e}]"
            # Roll back the failed user message so a retry starts clean
            if self.conversation and self.conversation[-1]["role"] == "user":
                self.conversation.pop()
            return error_msg

    def reset(self) -> None:
        """Reset conversation history (the system prompt is preserved)."""
        self.conversation = []

    def get_provider_info(self) -> str:
        """Return a short human-readable description of provider and model."""
        return f"{self.provider_name} ({self.model or 'default model'})"

    def close(self) -> None:
        """Close the underlying HTTP client."""
        self._http_client.close()

    def __enter__(self):
        """Context manager entry."""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit: close the client; never suppress exceptions."""
        self.close()
        return False