"""
Automatic AI Configuration Detection
This module automatically detects available AI models and configures them
based on what's actually available, eliminating the need for --ai-env parameters.
"""
import os
import logging
import urllib.request
import json
from typing import List, Tuple, Optional
from .models import ModelConfig, ModelProvider, AIModelManager
from .data_privacy import PRIVACY_CONFIGS
logger = logging.getLogger(__name__)
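
# Typical usage (illustrative sketch; how this module is imported depends on
# the surrounding package layout):
#
#     manager = auto_configure_ai()   # returns None when nothing is available
#     summary = get_ai_summary()      # dict describing detected providers
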
def _fetch_ollama_tags() -> Optional[dict]:
    """Fetch the Ollama /api/tags payload, or None if Ollama is unreachable."""
    base_url = os.getenv('OLLAMA_BASE_URL', 'http://localhost:11434')
    try:
        with urllib.request.urlopen(f'{base_url}/api/tags', timeout=2) as response:
            if response.status == 200:
                return json.loads(response.read().decode())
    except Exception:
        pass
    return None


def check_ollama_available() -> bool:
    """Check if Ollama is reachable and has at least one model installed."""
    data = _fetch_ollama_tags()
    return bool(data and data.get('models'))


def get_ollama_models() -> List[str]:
    """Get the names of available Ollama models (':tag' suffixes stripped)."""
    data = _fetch_ollama_tags()
    if not data:
        return []
    return [model['name'].split(':')[0] for model in data.get('models', [])]
def has_valid_api_key(key_name: str) -> bool:
    """Check if an API key is configured and is not an obvious placeholder."""
    key = os.getenv(key_name, '')
    return bool(key) and 'XXXX' not in key and len(key) > 20
def auto_configure_ai() -> Optional[AIModelManager]:
"""
Automatically configure AI based on available resources.
Returns:
Configured AIModelManager or None if no AI available
"""
logger.info("🔍 Auto-detecting AI configuration...")
    # Load environment from .env.ai if it exists
    if os.path.exists('.env.ai'):
        try:
            from dotenv import load_dotenv
            load_dotenv('.env.ai')
            logger.info("📄 Loaded configuration from .env.ai")
        except ImportError:
            logger.warning("⚠️ python-dotenv not installed; skipping .env.ai")
# Detect available resources
has_claude = has_valid_api_key('CLAUDE_API_KEY')
has_openai = has_valid_api_key('OPENAI_API_KEY')
has_ollama = check_ollama_available()
ollama_models = get_ollama_models() if has_ollama else []
# Determine privacy level
privacy_level = os.getenv('AI_PRIVACY_LEVEL', 'moderate').lower()
if privacy_level not in ['none', 'basic', 'moderate', 'strict', 'paranoid']:
privacy_level = 'moderate'
# Map privacy levels to configs (using available configs)
privacy_map = {
'none': PRIVACY_CONFIGS.get("development", PRIVACY_CONFIGS["demo"]),
'basic': PRIVACY_CONFIGS.get("development", PRIVACY_CONFIGS["demo"]),
'moderate': PRIVACY_CONFIGS.get("production", PRIVACY_CONFIGS["demo"]),
'strict': PRIVACY_CONFIGS.get("production", PRIVACY_CONFIGS["demo"]),
'paranoid': PRIVACY_CONFIGS.get("enterprise", PRIVACY_CONFIGS["demo"])
}
privacy_config = privacy_map.get(privacy_level, PRIVACY_CONFIGS.get("production", PRIVACY_CONFIGS["demo"]))
# Create manager
manager = AIModelManager(privacy_config)
models_added = 0
# Add Ollama models (preferred - local and secure)
if has_ollama and ollama_models:
default_model = os.getenv('OLLAMA_DEFAULT_MODEL', ollama_models[0])
if default_model not in ollama_models:
default_model = ollama_models[0]
manager.register_model(
f"{default_model}-local",
ModelConfig(
provider=ModelProvider.LOCAL_OLLAMA,
model_name=default_model,
base_url=os.getenv('OLLAMA_BASE_URL', 'http://localhost:11434'),
privacy_config=privacy_config
),
set_as_default=True
)
models_added += 1
logger.info(f"✅ Registered Ollama model: {default_model}")
# Add other Ollama models
for model in ollama_models:
if model != default_model:
manager.register_model(
f"{model}-local",
ModelConfig(
provider=ModelProvider.LOCAL_OLLAMA,
model_name=model,
base_url=os.getenv('OLLAMA_BASE_URL', 'http://localhost:11434'),
privacy_config=privacy_config
)
)
models_added += 1
# Add Claude if available
if has_claude:
claude_model = os.getenv('CLAUDE_MODEL', 'claude-3-haiku-20240307')
manager.register_model(
"claude",
ModelConfig(
provider=ModelProvider.ANTHROPIC_CLAUDE,
model_name=claude_model,
api_key=os.getenv('CLAUDE_API_KEY'),
base_url=os.getenv('CLAUDE_BASE_URL', 'https://api.anthropic.com'),
privacy_config=privacy_config
),
set_as_default=(models_added == 0) # Default if no Ollama
)
models_added += 1
logger.info(f"✅ Registered Claude model: {claude_model}")
# Add OpenAI if available
if has_openai:
openai_model = os.getenv('OPENAI_MODEL', 'gpt-3.5-turbo')
manager.register_model(
"openai",
ModelConfig(
provider=ModelProvider.OPENAI,
model_name=openai_model,
api_key=os.getenv('OPENAI_API_KEY'),
base_url=os.getenv('OPENAI_BASE_URL', 'https://api.openai.com/v1'),
privacy_config=privacy_config
),
set_as_default=(models_added == 0) # Default if no others
)
models_added += 1
logger.info(f"✅ Registered OpenAI model: {openai_model}")
# Summary
if models_added > 0:
logger.info(f"🤖 AI configured with {models_added} model(s), privacy: {privacy_level}")
return manager
else:
logger.warning("⚠️ No AI models available - check Ollama installation or API keys")
return None
def get_ai_summary() -> dict:
"""Get summary of detected AI configuration."""
has_claude = has_valid_api_key('CLAUDE_API_KEY')
has_openai = has_valid_api_key('OPENAI_API_KEY')
has_ollama = check_ollama_available()
ollama_models = get_ollama_models() if has_ollama else []
return {
"ollama_available": has_ollama,
"ollama_models": ollama_models,
"claude_configured": has_claude,
"openai_configured": has_openai,
"privacy_level": os.getenv('AI_PRIVACY_LEVEL', 'moderate'),
"total_models": len(ollama_models) + (1 if has_claude else 0) + (1 if has_openai else 0)
}
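

# A minimal manual check (illustrative sketch, not part of the public API).
# Because of the relative imports above, run it as a module, e.g.
# `python -m <your_package>.<this_module>`, with the package on PYTHONPATH.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    print(json.dumps(get_ai_summary(), indent=2))
    manager = auto_configure_ai()
    print("AI manager configured" if manager else "No AI models available")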