"""Configuration management for Percepta MCP."""
import os
from pathlib import Path
from typing import Any, Dict, List, Optional

import yaml
from pydantic import BaseModel, Field
from pydantic_settings import BaseSettings
class AIProviderConfig(BaseModel):
    """Configuration for a single AI provider endpoint.

    Instances are collected in ``Settings.ai_providers``; the ``priority``
    field orders providers (1 = most preferred, 10 = least preferred).
    """
    name: str  # unique provider identifier, e.g. "ollama-local"
    type: str  # openai, anthropic, google, ollama
    api_key: Optional[str] = None  # None for keyless/local providers (e.g. Ollama)
    base_url: Optional[str] = None  # endpoint override, e.g. a local Ollama URL
    model: str  # provider-specific model identifier
    priority: int = 1  # 1=highest, 10=lowest
    max_tokens: int = 4000  # per-request response token cap
    temperature: float = 0.7  # sampling temperature
    timeout: int = 30  # presumably seconds — confirm against the client code
    retry_attempts: int = 3  # retries on transient failures
    cost_per_token: float = 0.0  # for cost tracking
    rate_limit: int = 60  # requests per minute
    enabled: bool = True  # set False to exclude the provider from selection
class BrowserConfig(BaseModel):
    """Browser automation configuration."""
    browser_type: str = "chromium"  # chromium, firefox, webkit
    headless: bool = True  # run without a visible window
    timeout: int = 30000  # presumably milliseconds (default suggests ms) — confirm
    viewport_width: int = 1920  # viewport size in pixels
    viewport_height: int = 1080
    user_agent: Optional[str] = None  # None -> browser's default user agent
    proxy: Optional[str] = None  # proxy server URL, if any
class MCPServerConfig(BaseModel):
    """MCP server configuration."""
    host: str = "localhost"  # bind address
    port: int = 8080  # bind port
    transport: str = "stdio"  # stdio, sse, websocket
    max_connections: int = 10  # concurrent client connection cap
    request_timeout: int = 30  # presumably seconds — confirm against the server code
class MonitoringConfig(BaseModel):
    """Monitoring and logging configuration."""
    log_level: str = "INFO"  # standard logging level name
    log_file: Optional[str] = "logs/percepta_mcp.log"  # None disables file logging, presumably
    enable_metrics: bool = True  # toggle the metrics endpoint
    metrics_port: int = 9090  # port for the metrics endpoint
    enable_devtools_capture: bool = True  # capture browser DevTools data
class Settings(BaseSettings):
    """Main application settings.

    Values may come from environment variables (nested keys joined with
    ``__``, per ``env_nested_delimiter``), a ``.env`` file, or a YAML file
    via :meth:`load_from_yaml`.
    """
    # AI Providers
    ai_providers: List[AIProviderConfig] = Field(default_factory=list)
    default_provider: str = "ollama"  # NOTE(review): providers below use name "ollama-local" — confirm intended default

    # Browser settings
    browser: BrowserConfig = Field(default_factory=BrowserConfig)

    # MCP Server settings
    mcp_server: MCPServerConfig = Field(default_factory=MCPServerConfig)

    # Monitoring settings
    monitoring: MonitoringConfig = Field(default_factory=MonitoringConfig)

    # Feature flags
    enable_ocr: bool = True
    enable_visual_analysis: bool = True
    enable_cost_tracking: bool = True

    class Config:
        env_file = ".env"
        env_nested_delimiter = "__"

    @classmethod
    def load_from_yaml(cls, config_path: str) -> "Settings":
        """Load settings from a YAML file.

        Args:
            config_path: Path to the YAML configuration file.

        Returns:
            A validated ``Settings`` instance.

        Raises:
            FileNotFoundError: If ``config_path`` does not exist.
            ValueError: If the YAML document is not a mapping.
        """
        config_file = Path(config_path)
        if not config_file.exists():
            raise FileNotFoundError(f"Config file not found: {config_path}")
        with open(config_file, 'r', encoding='utf-8') as f:
            # safe_load returns None for an empty file; treat that as
            # "all defaults" instead of crashing on cls(**None).
            config_data = yaml.safe_load(f) or {}
        if not isinstance(config_data, dict):
            # A top-level list/scalar would otherwise raise a confusing TypeError.
            raise ValueError(
                f"Config file must contain a YAML mapping, "
                f"got {type(config_data).__name__}: {config_path}"
            )
        return cls(**config_data)

    def save_to_yaml(self, config_path: str) -> None:
        """Save the current settings to a YAML file, creating parent dirs.

        Args:
            config_path: Destination path for the YAML file.
        """
        config_file = Path(config_path)
        config_file.parent.mkdir(parents=True, exist_ok=True)
        with open(config_file, 'w', encoding='utf-8') as f:
            yaml.dump(self.model_dump(), f, default_flow_style=False)
# Default configuration
def _env_bool(name: str, default: bool = True) -> bool:
    """Read a boolean env var ("true"/"false", case-insensitive); default when unset."""
    return os.getenv(name, "true" if default else "false").lower() == "true"


def _parse_priority_overrides(spec: str) -> Dict[str, int]:
    """Parse ``name:priority,name:priority`` pairs; malformed entries are skipped.

    A single bad entry in AI_PROVIDER_PRIORITY must not break startup, so
    entries without a colon or with a non-integer priority are ignored.
    """
    overrides: Dict[str, int] = {}
    for item in spec.split(","):
        name, sep, value = item.strip().partition(":")
        if not sep:
            continue
        try:
            overrides[name.strip()] = int(value)
        except ValueError:
            continue  # non-integer priority — ignore rather than crash
    return overrides


def get_default_settings() -> Settings:
    """Get default settings with standard AI providers.

    Always registers the local Ollama provider; Google, Anthropic and OpenAI
    providers are added only when their API keys are present in the
    environment. Priorities may be overridden via AI_PROVIDER_PRIORITY
    (format: ``name:priority,name:priority``).

    Returns:
        A ``Settings`` instance populated from environment variables.
    """
    # Create AI providers based on available API keys
    providers = []

    # Ollama (local - always available)
    providers.append(AIProviderConfig(
        name="ollama-local",
        type="ollama",
        base_url=os.getenv("OLLAMA_BASE_URL", "http://localhost:11434"),
        model=os.getenv("OLLAMA_DEFAULT_MODEL", "llama2:7b"),
        priority=1,
        cost_per_token=0.0,
        rate_limit=1000,
        enabled=True
    ))

    # Google (if API key available)
    google_key = os.getenv("GOOGLE_API_KEY")
    if google_key:
        providers.append(AIProviderConfig(
            name="gemini-free",
            type="google",
            api_key=google_key,
            model="gemini-pro",
            priority=2,
            cost_per_token=0.0005,
            rate_limit=60,
            enabled=True
        ))

    # Anthropic (if API key available)
    anthropic_key = os.getenv("ANTHROPIC_API_KEY")
    if anthropic_key:
        providers.append(AIProviderConfig(
            name="claude-sonnet",
            type="anthropic",
            api_key=anthropic_key,
            model="claude-3-sonnet-20240229",
            priority=3,
            cost_per_token=0.003,
            rate_limit=40,
            enabled=True
        ))

    # OpenAI (if API key available)
    openai_key = os.getenv("OPENAI_API_KEY")
    if openai_key:
        providers.append(AIProviderConfig(
            name="gpt-4",
            type="openai",
            api_key=openai_key,
            model="gpt-4-turbo-preview",
            priority=4,
            cost_per_token=0.01,
            rate_limit=40,
            enabled=True
        ))

    # Override priorities if specified in environment
    priority_config = os.getenv("AI_PROVIDER_PRIORITY")
    if priority_config:
        priority_map = _parse_priority_overrides(priority_config)
        for provider in providers:
            if provider.name in priority_map:
                provider.priority = priority_map[provider.name]

    return Settings(
        ai_providers=providers,
        default_provider=os.getenv("DEFAULT_AI_PROVIDER", "ollama-local"),
        monitoring=MonitoringConfig(
            log_level=os.getenv("PERCEPTA_LOG_LEVEL", "INFO")
        ),
        browser=BrowserConfig(
            browser_type=os.getenv("DEFAULT_BROWSER", "chromium"),
            headless=_env_bool("HEADLESS_MODE"),
            timeout=int(os.getenv("BROWSER_TIMEOUT", "30000"))
        ),
        enable_ocr=_env_bool("ENABLE_OCR"),
        enable_visual_analysis=_env_bool("ENABLE_VISUAL_ANALYSIS"),
        enable_cost_tracking=_env_bool("ENABLE_COST_TRACKING")
    )