"""
LLM Provider detection and management.
Supports:
- LM Studio (local, scans common ports)
- OpenRouter (API key via OPENROUTER_API_KEY)
- OpenAI (API key via OPENAI_API_KEY)
"""
import os
from dataclasses import dataclass
from enum import Enum
from typing import List, Optional
import httpx
class ProviderType(Enum):
    """Identifies which LLM backend a configuration refers to."""

    # Local LM Studio server, discovered by scanning common ports.
    LM_STUDIO = "lm_studio"
    # OpenRouter hosted API, enabled via OPENROUTER_API_KEY.
    OPENROUTER = "openrouter"
    # OpenAI hosted API, enabled via OPENAI_API_KEY.
    OPENAI = "openai"
@dataclass
class ProviderConfig:
    """Configuration for a detected provider.

    Captures everything detection learns about one backend: where it
    lives, how to authenticate, and which model to use by default.
    """

    type: ProviderType  # which backend this config describes
    name: str  # human-readable label, e.g. "LM Studio (port 1234)"
    api_url: Optional[str] = None  # base URL of the OpenAI-compatible API
    api_key: Optional[str] = None  # credential, when the provider needs one
    model: Optional[str] = None  # default model identifier
    available: bool = False  # True once detection confirms the provider works
class ProviderManager:
    """Detects and manages LLM providers.

    Detection order (which is also the priority order):
    LM Studio (local port scan) > OpenRouter (OPENROUTER_API_KEY) >
    OpenAI (OPENAI_API_KEY).
    """

    # Ports LM Studio's local server commonly listens on.
    LM_STUDIO_PORTS = [1234, 1235, 1236, 8080, 5000]

    @classmethod
    def detect_all(cls, verbose: bool = False) -> List[ProviderConfig]:
        """Detect all available providers, in priority order.

        Args:
            verbose: when True, print detection progress to stdout.

        Returns:
            A list of ProviderConfig for every provider that responded /
            has credentials; empty when none are available.
        """
        providers: List[ProviderConfig] = []

        # Check LM Studio (local server).
        if verbose:
            print("Scanning for LM Studio...")
        lm_studio = cls._detect_lm_studio()
        if lm_studio:
            if verbose:
                # api_url looks like "http://localhost:<port>/v1";
                # peel the port back out for the progress message.
                print(
                    f" Found on port {lm_studio.api_url.split(':')[-1].split('/')[0]}"
                )
            providers.append(lm_studio)
        elif verbose:
            print(" Not found")

        # Check OpenRouter (environment API key).
        if verbose:
            print("Checking OpenRouter...")
        openrouter = cls._detect_openrouter()
        if openrouter:
            if verbose:
                print(" API key found")
            providers.append(openrouter)
        elif verbose:
            print(" No API key (OPENROUTER_API_KEY)")

        # Check OpenAI (environment API key).
        if verbose:
            print("Checking OpenAI...")
        openai = cls._detect_openai()
        if openai:
            if verbose:
                print(" API key found")
            providers.append(openai)
        elif verbose:
            print(" No API key (OPENAI_API_KEY)")

        return providers

    @classmethod
    def get_best_provider(
        cls, force_provider: Optional[str] = None, verbose: bool = False
    ) -> Optional[ProviderConfig]:
        """Get the best available provider.

        Args:
            force_provider: a ProviderType value (e.g. "openai") that must
                be used. If it is not among the detected providers, None is
                returned rather than silently substituting another one.
            verbose: forwarded to detect_all.

        Returns:
            The forced provider when requested and available; otherwise the
            first available provider in priority order (LM Studio >
            OpenRouter > OpenAI); None when nothing usable was found.
        """
        providers = cls.detect_all(verbose=verbose)
        if not providers:
            return None

        # If forcing a specific provider, honor the "force" contract.
        if force_provider:
            for p in providers:
                if p.type.value == force_provider:
                    return p
            # BUG FIX: previously this fell through to the generic loop and
            # returned a *different* provider than the one explicitly forced.
            return None

        # Return first available; detect_all appends in priority order.
        for p in providers:
            if p.available:
                return p
        return None

    @classmethod
    def _detect_lm_studio(cls) -> Optional[ProviderConfig]:
        """Probe common local ports for a running LM Studio server.

        Returns:
            A config for the first responding port (carrying the first
            loaded model's id, if any), or None when no port answers.
        """
        for port in cls.LM_STUDIO_PORTS:
            try:
                # Short timeout keeps the scan fast when nothing listens.
                resp = httpx.get(f"http://localhost:{port}/v1/models", timeout=0.2)
                if resp.status_code == 200:
                    data = resp.json()
                    model = None
                    if data.get("data"):
                        model = data["data"][0].get("id")
                    return ProviderConfig(
                        type=ProviderType.LM_STUDIO,
                        name=f"LM Studio (port {port})",
                        api_url=f"http://localhost:{port}/v1",
                        model=model,
                        available=True,
                    )
            except Exception:
                # Connection refused, timeout, or malformed JSON: this is a
                # best-effort scan, so just move on to the next port.
                continue
        return None

    @classmethod
    def _detect_openrouter(cls) -> Optional[ProviderConfig]:
        """Check if an OpenRouter API key is set in the environment."""
        api_key = os.environ.get("OPENROUTER_API_KEY")
        if api_key:
            return ProviderConfig(
                type=ProviderType.OPENROUTER,
                name="OpenRouter",
                api_url="https://openrouter.ai/api/v1",
                api_key=api_key,
                model="deepseek/deepseek-v3.2",  # Default model on OpenRouter
                available=True,
            )
        return None

    @classmethod
    def _detect_openai(cls) -> Optional[ProviderConfig]:
        """Check if an OpenAI API key is set in the environment."""
        api_key = os.environ.get("OPENAI_API_KEY")
        if api_key:
            return ProviderConfig(
                type=ProviderType.OPENAI,
                name="OpenAI",
                api_url="https://api.openai.com/v1",
                api_key=api_key,
                model="gpt-4o",
                available=True,
            )
        return None

    @classmethod
    def get_status_report(cls) -> str:
        """Return a human-readable report of detected providers.

        Returns:
            Either a numbered list of detected providers (with model/URL
            details) or a help message explaining how to enable one.
        """
        providers = cls.detect_all()
        if not providers:
            return (
                "No LLM providers available.\n\n"
                "Options:\n"
                "- Start LM Studio with local server enabled\n"
                "- Set OPENROUTER_API_KEY environment variable\n"
                "- Set OPENAI_API_KEY environment variable"
            )
        lines = ["Available providers:\n"]
        for i, p in enumerate(providers, 1):
            # Every config detect_all returns currently has available=True;
            # the [--] marker is kept for future unavailable entries.
            status = "[OK]" if p.available else "[--]"
            lines.append(f"{i}. {status} {p.name}")
            if p.model:
                lines.append(f" Model: {p.model}")
            if p.api_url:
                lines.append(f" URL: {p.api_url}")
        return "\n".join(lines)