import os
import json
import time
from typing import Any, Dict, Optional
def resolve_provider_config(
    provider_preset: Optional[str] = None,
    provider: Optional[Dict[str, Any]] = None,
    env: Optional[Dict[str, str]] = None
) -> Dict[str, Any]:
    """
    Resolve provider configuration from a named preset or an explicit config dict.

    At most one of ``provider_preset`` / ``provider`` may be given. Secrets are
    never accepted inline: configs carry an ``api_key_env`` variable *name*, and
    the actual key is read from the environment.

    Args:
        provider_preset: One of "openrouter", "openai", "ollama_local",
            "vllm_local", "litellm_proxy". ``None`` selects the OpenAI default.
        provider: Explicit config overriding the defaults. Must not contain
            an ``api_key`` entry (use ``api_key_env`` instead).
        env: Environment mapping to consult; defaults to ``os.environ``.
            Injectable for testing.

    Returns:
        Dict with keys ``mode``, ``base_url``, ``api_key_env``,
        ``default_headers``, ``request_api`` (and ``default_api_key`` for
        presets that tolerate a dummy key, e.g. Ollama).

    Raises:
        ValueError: If both ``provider_preset`` and ``provider`` are given,
            if the preset name is unknown, if ``provider`` embeds a secret,
            if ``openai_compatible`` mode lacks a ``base_url``, or if the
            required API-key environment variable is unset.
    """
    if env is None:
        env = os.environ
    if provider_preset and provider:
        raise ValueError("Cannot specify both provider_preset and provider.")
    # Reject typos early instead of silently falling back to the OpenAI default.
    known_presets = ("openrouter", "openai", "ollama_local", "vllm_local", "litellm_proxy")
    if provider_preset is not None and provider_preset not in known_presets:
        raise ValueError(
            f"Unknown provider_preset: {provider_preset!r}. "
            f"Known presets: {', '.join(known_presets)}"
        )
    # Baseline config; presets / explicit config override selected keys.
    resolved = {
        "mode": "openai_cloud",
        "base_url": None,
        "api_key_env": "OPENAI_API_KEY",
        "default_headers": {},
        "request_api": "chat_completions"
    }
    if provider_preset == "openrouter":
        # Optional attribution headers recommended by OpenRouter.
        headers = {}
        if env.get("OPENROUTER_HTTP_REFERER"):
            headers["HTTP-Referer"] = env["OPENROUTER_HTTP_REFERER"]
        if env.get("OPENROUTER_X_TITLE"):
            headers["X-Title"] = env["OPENROUTER_X_TITLE"]
        resolved.update({
            "mode": "openai_compatible",
            "base_url": "https://openrouter.ai/api/v1",
            "api_key_env": "OPENROUTER_API_KEY",
            "default_headers": headers
        })
    elif provider_preset == "openai":
        resolved.update({
            "mode": "openai_cloud",
            "api_key_env": "OPENAI_API_KEY"
        })
    elif provider_preset == "ollama_local":
        resolved.update({
            "mode": "openai_compatible",
            "base_url": "http://localhost:11434/v1",
            "api_key_env": "OLLAMA_API_KEY",
            "default_api_key": "ollama"  # Dummy key for Ollama since it doesn't require auth
        })
    elif provider_preset == "vllm_local":
        resolved.update({
            "mode": "openai_compatible",
            "base_url": "http://localhost:8000/v1",
            "api_key_env": "VLLM_API_KEY"
        })
    elif provider_preset == "litellm_proxy":
        resolved.update({
            "mode": "openai_compatible",
            "base_url": env.get("LITELLM_PROXY_API_BASE", "http://localhost:4000"),
            "api_key_env": "LITELLM_PROXY_API_KEY"
        })
    elif provider:
        # Enforce no secrets in config — keys must arrive via the environment.
        if "api_key" in provider:
            raise ValueError("Secrets (api_key) are not allowed in provider config. Use api_key_env.")
        resolved.update(provider)
    # openai_compatible endpoints cannot be reached without an explicit URL.
    if resolved["mode"] == "openai_compatible" and not resolved["base_url"]:
        raise ValueError("base_url is required for openai_compatible mode.")
    # Fail fast if the key env var is unset and no dummy default exists.
    api_key_env = resolved.get("api_key_env")
    if api_key_env and not env.get(api_key_env) and not resolved.get("default_api_key"):
        if provider_preset == "openrouter":
            # OpenRouter gets an extended, actionable message with a curl example.
            msg = (
                f"Missing {api_key_env} for OpenRouter. "
                "Set the environment variable or use a different provider (e.g., ollama_local).\n"
                "Example:\n"
                f"export {api_key_env}=\"sk-or-v1-...\"\n\n"
                "To test with curl:\n"
                "curl https://openrouter.ai/api/v1/chat/completions \\\n"
                f" -H \"Authorization: Bearer ${api_key_env}\" \\\n"
                " -H \"Content-Type: application/json\" \\\n"
                " -d '{\"model\": \"anthropic/claude-sonnet-4.5\", \"messages\": [{\"role\": \"user\", \"content\": \"hi\"}]}'"
            )
        else:
            msg = f"Missing required environment variable: {api_key_env}"
        raise ValueError(msg)
    return resolved
def _canonicalize_model_id(model_id: str, provider_preset: Optional[str]) -> str:
"""
Handle common model ID aliases and version suffixes for specific providers.
"""
if not model_id:
return model_id
if provider_preset == "openrouter":
# Gemini 2.0/2.5 Flash aliases
if "gemini-2.0-flash-lite" in model_id or "gemini-2.5-flash-lite" in model_id:
return "google/gemini-2.0-flash-lite-001"
if "gemini-2.0-flash" in model_id or "gemini-2.5-flash" in model_id:
return "google/gemini-2.0-flash-001"
# DeepSeek aliases
if "deepseek-r1" in model_id:
if "distill-qwen-32b" in model_id:
return "deepseek/deepseek-r1-distill-qwen-32b"
if ":free" in model_id:
return "deepseek/deepseek-r1:free" # Keep :free if explicit
return "deepseek/deepseek-r1"
return model_id
def prepare_backend_kwargs(
    resolved_provider: Dict[str, Any],
    model_name: str,
    temperature: float,
    env: Optional[Dict[str, str]] = None,
    extra_kwargs: Optional[Dict[str, Any]] = None
) -> Dict[str, Any]:
    """
    Build backend_kwargs for RLM based on a resolved provider config.

    Args:
        resolved_provider: Output of ``resolve_provider_config`` (or an
            equivalent dict with ``mode`` / ``base_url`` / ``api_key_env`` /
            ``default_headers`` keys).
        model_name: Model ID; canonicalized when the provider is OpenRouter.
        temperature: Sampling temperature passed through to the backend.
        env: Environment mapping to read the API key from; defaults to
            ``os.environ``. Injectable for testing.
        extra_kwargs: Additional backend kwargs; ``api_key`` and ``base_url``
            are stripped so callers cannot override resolved credentials.

    Returns:
        Dict of keyword arguments for the backend constructor.
    """
    if env is None:
        env = os.environ
    # Infer the preset from the base URL so OpenRouter model IDs get
    # canonicalized even when only the resolved config is available.
    preset = None
    if resolved_provider.get("base_url") == "https://openrouter.ai/api/v1":
        preset = "openrouter"
    backend_kwargs: Dict[str, Any] = {
        "model_name": _canonicalize_model_id(model_name, preset),
        "temperature": temperature
    }
    # Use .get throughout: a partially-specified config should degrade
    # gracefully rather than raise KeyError.
    if resolved_provider.get("mode") in ("openai_cloud", "openai_compatible"):
        api_key_env = resolved_provider.get("api_key_env")
        api_key = env.get(api_key_env) if api_key_env else None
        if api_key:
            backend_kwargs["api_key"] = api_key
        elif resolved_provider.get("default_api_key"):
            # e.g. Ollama accepts any dummy key.
            backend_kwargs["api_key"] = resolved_provider["default_api_key"]
        if resolved_provider.get("base_url"):
            backend_kwargs["base_url"] = resolved_provider["base_url"]
        if resolved_provider.get("default_headers"):
            # Copy so later mutation of the resolved config can't leak into
            # the backend kwargs (and vice versa).
            backend_kwargs["default_headers"] = dict(resolved_provider["default_headers"])
    if extra_kwargs:
        # Filter sensitive keys from extra_kwargs just in case.
        safe_extras = {k: v for k, v in extra_kwargs.items() if k not in ("api_key", "base_url")}
        backend_kwargs.update(safe_extras)
    return backend_kwargs