Skip to main content
Glama

Katamari MCP Server

by ciphernaut
config.py — 7.16 kB
""" Configuration management utilities with environment-specific support. """ import os import json import logging from pathlib import Path from typing import Any, Dict, Optional, Union logger = logging.getLogger(__name__) class Config: """Configuration manager with environment-specific support.""" def __init__(self, env: Optional[str] = None): self._config: Dict[str, Any] = {} self._env = env or os.getenv('KATAMARI_ENV', 'development') self._workspace_root = Path(os.getenv('KATAMARI_WORKSPACE', os.getcwd())) self._load_defaults() self._load_environment_config() self._load_user_config() def _load_defaults(self): """Load default configuration.""" self._config = { # Core settings 'acp_enabled': True, 'llm_model': 'Qwen/Qwen2-0.5B-Instruct', 'code_generation_model': 'Qwen/Qwen2-1.5B-Instruct', 'max_search_results': 5, 'request_timeout': 30, 'debug': False, 'workspace_root': str(self._workspace_root), # LLM backend settings 'lm_studio_url': 'http://localhost:1234', # Default LM Studio port 'openai_api_key': None, 'openai_model': 'gpt-3.5-turbo', 'openai_api_base': None, # Performance settings 'performance_tracking': True, 'adaptive_learning': True, 'heuristic_optimization': True, # Security settings 'secret_scanning': True, 'capability_validation': True, 'sandbox_execution': False, # Development settings 'log_level': 'INFO', 'enable_profiling': False, 'auto_reload': False, # Production settings 'metrics_collection': True, 'health_checks': True, 'graceful_shutdown': True, # File paths 'data_dir': str(self._workspace_root / '.katamari'), 'logs_dir': str(self._workspace_root / '.katamari' / 'logs'), 'cache_dir': str(self._workspace_root / '.katamari' / 'cache'), } def _load_environment_config(self): """Load environment-specific configuration.""" config_file = self._workspace_root / 'config' / f'{self._env}.json' if config_file.exists(): try: with open(config_file, 'r') as f: env_config = json.load(f) self._config.update(env_config) logger.info(f"Loaded {self._env} 
configuration from {config_file}") except Exception as e: logger.warning(f"Failed to load {self._env} config: {e}") def _load_user_config(self): """Load user-specific configuration overrides.""" user_config_file = self._workspace_root / '.katamari' / 'user_config.json' if user_config_file.exists(): try: with open(user_config_file, 'r') as f: user_config = json.load(f) self._config.update(user_config) logger.info("Loaded user configuration overrides") except Exception as e: logger.warning(f"Failed to load user config: {e}") # Override with environment variables env_overrides = { 'KATAMARI_DEBUG': ('debug', lambda v: v.lower() == 'true'), 'KATAMARI_LOG_LEVEL': ('log_level', str), 'KATAMARI_ACP_ENABLED': ('acp_enabled', lambda v: v.lower() == 'true'), 'KATAMARI_LLM_MODEL': ('llm_model', str), 'KATAMARI_REQUEST_TIMEOUT': ('request_timeout', int), 'KATAMARI_MAX_SEARCH_RESULTS': ('max_search_results', int), 'KATAMARI_WORKSPACE': ('workspace_root', str), 'KATAMARI_CODE_GENERATION_MODEL': ('code_generation_model', str), 'KATAMARI_LM_STUDIO_URL': ('lm_studio_url', str), 'OPENAI_API_KEY': ('openai_api_key', str), 'OPENAI_MODEL': ('openai_model', str), 'OPENAI_API_BASE': ('openai_api_base', str), } for env_var, (config_key, converter) in env_overrides.items(): if env_var in os.environ: try: self._config[config_key] = converter(os.environ[env_var]) logger.debug(f"Environment override: {config_key} = {self._config[config_key]}") except (ValueError, TypeError) as e: logger.warning(f"Invalid environment variable {env_var}: {e}") def get(self, key: str, default: Optional[Any] = None) -> Any: """Get configuration value.""" return self._config.get(key, default) def set(self, key: str, value: Any): """Set configuration value.""" self._config[key] = value def update(self, config_dict: Dict[str, Any]): """Update multiple configuration values.""" self._config.update(config_dict) def get_environment(self) -> str: """Get current environment.""" return self._env def is_production(self) -> 
bool: """Check if running in production.""" return self._env == 'production' def is_development(self) -> bool: """Check if running in development.""" return self._env == 'development' def get_data_dir(self) -> Path: """Get data directory path.""" return Path(self.get('data_dir')) def get_logs_dir(self) -> Path: """Get logs directory path.""" return Path(self.get('logs_dir')) def get_cache_dir(self) -> Path: """Get cache directory path.""" return Path(self.get('cache_dir')) def ensure_directories(self): """Ensure all required directories exist.""" for dir_key in ['data_dir', 'logs_dir', 'cache_dir']: dir_path = Path(self.get(dir_key)) dir_path.mkdir(parents=True, exist_ok=True) def save_user_config(self): """Save current configuration as user config.""" user_config_file = self._workspace_root / '.katamari' / 'user_config.json' user_config_file.parent.mkdir(parents=True, exist_ok=True) try: with open(user_config_file, 'w') as f: json.dump(self._config, f, indent=2) logger.info(f"Saved user configuration to {user_config_file}") except Exception as e: logger.error(f"Failed to save user config: {e}") def to_dict(self) -> Dict[str, Any]: """Return configuration as dictionary.""" return self._config.copy() def get_section(self, section: str) -> Dict[str, Any]: """Get a configuration section.""" section_config = {} prefix = f"{section}_" for key, value in self._config.items(): if key.startswith(prefix): section_config[key[len(prefix):]] = value return section_config

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/ciphernaut/katamari-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.