Skip to main content
Glama

AnyDocs MCP Server

by funky1688
logging.py • 14.3 kB
#!/usr/bin/env python3
"""
Logging Utilities

Provides structured logging configuration and utilities.
"""

import json
import logging
import logging.handlers
import os
import sys
import time
from datetime import datetime
from pathlib import Path
from typing import Any, Dict, Optional, Union

import structlog
from structlog.types import FilteringBoundLogger

# Default log format
DEFAULT_LOG_FORMAT = "%(asctime)s [%(levelname)8s] %(name)s: %(message)s"
DEFAULT_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"

# Structlog processors shared by every configuration; a renderer is appended
# per-configuration in LoggingConfig._configure_structlog().
STRUCTLOG_PROCESSORS = [
    structlog.contextvars.merge_contextvars,
    structlog.processors.add_log_level,
    structlog.processors.StackInfoRenderer(),
    structlog.dev.set_exc_info,
    structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S", utc=False),
]


class LoggingConfig:
    """Logging configuration manager.

    Configures both the standard :mod:`logging` hierarchy and (optionally)
    :mod:`structlog`. ``configure()`` is idempotent.
    """

    def __init__(
        self,
        level: Union[str, int] = logging.INFO,
        format_string: str = DEFAULT_LOG_FORMAT,
        date_format: str = DEFAULT_DATE_FORMAT,
        log_file: Optional[str] = None,
        max_file_size: int = 10 * 1024 * 1024,  # 10MB
        backup_count: int = 5,
        enable_console: bool = True,
        enable_structlog: bool = True,
        json_logs: bool = False,
    ):
        """
        Initialize logging configuration.

        Args:
            level: Logging level (name like ``"INFO"`` or numeric constant)
            format_string: Log message format
            date_format: Date format for timestamps
            log_file: Path to log file (optional)
            max_file_size: Maximum log file size in bytes
            backup_count: Number of backup files to keep
            enable_console: Enable console logging
            enable_structlog: Enable structured logging
            json_logs: Output logs in JSON format

        Raises:
            AttributeError: If ``level`` is a string that is not a valid
                logging level name.
        """
        # Accept either a numeric level or a level name ("info" -> logging.INFO).
        self.level = level if isinstance(level, int) else getattr(logging, level.upper())
        self.format_string = format_string
        self.date_format = date_format
        self.log_file = log_file
        self.max_file_size = max_file_size
        self.backup_count = backup_count
        self.enable_console = enable_console
        self.enable_structlog = enable_structlog
        self.json_logs = json_logs
        self._configured = False

    def configure(self) -> None:
        """Configure logging system (no-op on repeat calls)."""
        if self._configured:
            return

        # Configure standard logging
        self._configure_standard_logging()

        # Configure structured logging if enabled
        if self.enable_structlog:
            self._configure_structlog()

        self._configured = True

    def _configure_standard_logging(self) -> None:
        """Configure standard Python logging on the root logger."""
        # Create root logger
        root_logger = logging.getLogger()
        root_logger.setLevel(self.level)

        # Clear existing handlers so repeated setup does not duplicate output.
        root_logger.handlers.clear()

        # Create formatter
        if self.json_logs:
            formatter = JsonFormatter()
        else:
            formatter = logging.Formatter(
                fmt=self.format_string,
                datefmt=self.date_format
            )

        # Console handler
        if self.enable_console:
            console_handler = logging.StreamHandler(sys.stdout)
            console_handler.setLevel(self.level)
            console_handler.setFormatter(formatter)
            root_logger.addHandler(console_handler)

        # File handler
        if self.log_file:
            self._setup_file_handler(root_logger, formatter)

        # Set levels for noisy libraries
        self._configure_library_loggers()

    def _setup_file_handler(self, logger: logging.Logger, formatter: logging.Formatter) -> None:
        """Setup rotating file handler.

        Best-effort: failures (e.g. unwritable directory) are reported to
        stderr instead of raising, so logging setup never crashes the app.
        """
        try:
            # Ensure log directory exists
            log_path = Path(self.log_file)
            log_path.parent.mkdir(parents=True, exist_ok=True)

            # Create rotating file handler
            file_handler = logging.handlers.RotatingFileHandler(
                filename=self.log_file,
                maxBytes=self.max_file_size,
                backupCount=self.backup_count,
                encoding='utf-8'
            )
            file_handler.setLevel(self.level)
            file_handler.setFormatter(formatter)
            logger.addHandler(file_handler)
        except Exception as e:
            print(f"Failed to setup file logging: {e}", file=sys.stderr)

    def _configure_library_loggers(self) -> None:
        """Configure logging levels for third-party libraries."""
        library_levels = {
            'urllib3': logging.WARNING,
            'requests': logging.WARNING,
            'httpx': logging.WARNING,
            'sqlalchemy.engine': logging.WARNING,
            'sqlalchemy.pool': logging.WARNING,
            'asyncio': logging.WARNING,
        }

        for library, level in library_levels.items():
            logging.getLogger(library).setLevel(level)

    def _configure_structlog(self) -> None:
        """Configure structured logging."""
        # Copy so the shared module-level list is never mutated.
        processors = STRUCTLOG_PROCESSORS.copy()

        if self.json_logs:
            processors.append(structlog.processors.JSONRenderer())
        else:
            processors.append(structlog.dev.ConsoleRenderer(colors=True))

        structlog.configure(
            processors=processors,
            wrapper_class=structlog.make_filtering_bound_logger(self.level),
            logger_factory=structlog.PrintLoggerFactory(),
            cache_logger_on_first_use=True,
        )


class JsonFormatter(logging.Formatter):
    """JSON log formatter."""

    # LogRecord attributes that are part of the standard record and must not
    # be copied into the JSON payload as "extra" fields.  'taskName' was added
    # to LogRecord in Python 3.12; without it every record on 3.12+ would leak
    # a spurious "taskName" key.
    _RESERVED_ATTRS = {
        'name', 'msg', 'args', 'levelname', 'levelno', 'pathname',
        'filename', 'module', 'lineno', 'funcName', 'created',
        'msecs', 'relativeCreated', 'thread', 'threadName',
        'processName', 'process', 'message', 'taskName',
        'exc_info', 'exc_text', 'stack_info'
    }

    def format(self, record: logging.LogRecord) -> str:
        """Format log record as a single-line JSON string."""
        log_data = {
            # NOTE(review): naive local-time timestamp; no timezone offset.
            'timestamp': datetime.fromtimestamp(record.created).isoformat(),
            'level': record.levelname,
            'logger': record.name,
            'message': record.getMessage(),
            'module': record.module,
            'function': record.funcName,
            'line': record.lineno,
        }

        # Add exception info if present
        if record.exc_info:
            log_data['exception'] = self.formatException(record.exc_info)

        # Add extra fields (anything passed via logging's `extra=` kwarg).
        for key, value in record.__dict__.items():
            if key not in self._RESERVED_ATTRS:
                log_data[key] = value

        # default=str: stringify non-JSON-serializable extras instead of raising.
        return json.dumps(log_data, default=str)


class ContextualLogger:
    """Contextual logger wrapper around a structlog bound logger."""

    def __init__(self, logger: FilteringBoundLogger, **context):
        """Initialize with logger and context.

        Args:
            logger: A structlog (filtering) bound logger.
            **context: Key/value pairs bound to every message.
        """
        self.logger = logger.bind(**context)
        self.context = context

    def bind(self, **kwargs) -> 'ContextualLogger':
        """Bind additional context, returning a new ContextualLogger.

        The existing keys are re-bound onto the already-bound logger; this is
        an idempotent overwrite with the same values, so output is unaffected.
        """
        new_context = {**self.context, **kwargs}
        return ContextualLogger(self.logger, **new_context)

    def debug(self, message: str, **kwargs) -> None:
        """Log debug message."""
        self.logger.debug(message, **kwargs)

    def info(self, message: str, **kwargs) -> None:
        """Log info message."""
        self.logger.info(message, **kwargs)

    def warning(self, message: str, **kwargs) -> None:
        """Log warning message."""
        self.logger.warning(message, **kwargs)

    def error(self, message: str, **kwargs) -> None:
        """Log error message."""
        self.logger.error(message, **kwargs)

    def critical(self, message: str, **kwargs) -> None:
        """Log critical message."""
        self.logger.critical(message, **kwargs)

    def exception(self, message: str, **kwargs) -> None:
        """Log exception with traceback."""
        self.logger.exception(message, **kwargs)


def setup_logging(
    level: Union[str, int] = "INFO",
    log_file: Optional[str] = None,
    json_logs: bool = False,
    enable_console: bool = True,
    **kwargs
) -> LoggingConfig:
    """Setup logging configuration.

    Args:
        level: Logging level
        log_file: Path to log file
        json_logs: Enable JSON log format
        enable_console: Enable console logging
        **kwargs: Additional LoggingConfig options

    Returns:
        Configured LoggingConfig instance
    """
    config = LoggingConfig(
        level=level,
        log_file=log_file,
        json_logs=json_logs,
        enable_console=enable_console,
        **kwargs
    )
    config.configure()
    return config


def get_logger(name: str, **context) -> ContextualLogger:
    """Get a contextual logger.

    Args:
        name: Logger name
        **context: Initial context to bind

    Returns:
        ContextualLogger instance
    """
    structlog_logger = structlog.get_logger(name)
    return ContextualLogger(structlog_logger, **context)


def setup_default_logging() -> LoggingConfig:
    """Setup default logging configuration from environment.

    Reads ``LOG_LEVEL`` (default ``INFO``), ``LOG_FILE`` (optional) and
    ``JSON_LOGS`` ("true"/"false", default false).
    """
    # Get configuration from environment
    log_level = os.getenv('LOG_LEVEL', 'INFO').upper()
    log_file = os.getenv('LOG_FILE')
    json_logs = os.getenv('JSON_LOGS', 'false').lower() == 'true'

    # Setup logging
    return setup_logging(
        level=log_level,
        log_file=log_file,
        json_logs=json_logs,
    )


# Performance logging utilities
class PerformanceLogger:
    """Performance logging utilities."""

    def __init__(self, logger: ContextualLogger):
        """Initialize with logger."""
        self.logger = logger

    def log_duration(self, operation: str, duration: float, **context) -> None:
        """Log operation duration (seconds, reported in ms)."""
        self.logger.info(
            f"Operation completed: {operation}",
            operation=operation,
            duration_ms=round(duration * 1000, 2),
            **context
        )

    def log_slow_operation(self, operation: str, duration: float, threshold: float = 1.0, **context) -> None:
        """Log slow operation if duration exceeds threshold (both in seconds)."""
        if duration > threshold:
            self.logger.warning(
                f"Slow operation detected: {operation}",
                operation=operation,
                duration_ms=round(duration * 1000, 2),
                threshold_ms=round(threshold * 1000, 2),
                **context
            )


# Context managers for logging
class LoggingContext:
    """Context manager that logs an operation's start, outcome and duration."""

    def __init__(self, logger: ContextualLogger, operation: str, **context):
        """Initialize logging context."""
        self.logger = logger
        self.operation = operation
        self.context = context
        self.start_time = None

    def __enter__(self) -> 'LoggingContext':
        """Enter context: record start time and log at debug level."""
        self.start_time = time.time()
        self.logger.debug(f"Starting operation: {self.operation}", **self.context)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb) -> None:
        """Exit context: log success (info) or failure (error) with duration.

        Returns None, so exceptions are never suppressed.
        """
        duration = time.time() - self.start_time

        if exc_type is None:
            self.logger.info(
                f"Operation completed: {self.operation}",
                duration_ms=round(duration * 1000, 2),
                **self.context
            )
        else:
            self.logger.error(
                f"Operation failed: {self.operation}",
                duration_ms=round(duration * 1000, 2),
                error=str(exc_val),
                **self.context
            )


def log_operation(logger: ContextualLogger, operation: str, **context) -> LoggingContext:
    """Create logging context for an operation.

    Args:
        logger: Logger instance
        operation: Operation name
        **context: Additional context

    Returns:
        LoggingContext instance
    """
    return LoggingContext(logger, operation, **context)


# Audit logging
class AuditLogger:
    """Audit logging for security and compliance.

    Every message carries ``audit=True`` so audit events can be filtered
    downstream.
    """

    def __init__(self, logger: ContextualLogger):
        """Initialize audit logger."""
        self.logger = logger.bind(audit=True)

    def log_authentication(self, user_id: str, method: str, success: bool, **context) -> None:
        """Log authentication attempt."""
        self.logger.info(
            "Authentication attempt",
            user_id=user_id,
            auth_method=method,
            success=success,
            **context
        )

    def log_authorization(self, user_id: str, resource: str, action: str, granted: bool, **context) -> None:
        """Log authorization check."""
        self.logger.info(
            "Authorization check",
            user_id=user_id,
            resource=resource,
            action=action,
            granted=granted,
            **context
        )

    def log_data_access(self, user_id: str, resource_type: str, resource_id: str, action: str, **context) -> None:
        """Log data access."""
        self.logger.info(
            "Data access",
            user_id=user_id,
            resource_type=resource_type,
            resource_id=resource_id,
            action=action,
            **context
        )

    def log_configuration_change(self, user_id: str, setting: str, old_value: Any, new_value: Any, **context) -> None:
        """Log configuration change (values stringified for safe rendering)."""
        self.logger.info(
            "Configuration change",
            user_id=user_id,
            setting=setting,
            old_value=str(old_value) if not isinstance(old_value, str) else old_value,
            new_value=str(new_value) if not isinstance(new_value, str) else new_value,
            **context
        )


def get_audit_logger() -> AuditLogger:
    """Get audit logger instance."""
    logger = get_logger('audit')
    return AuditLogger(logger)

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/funky1688/AnyDocs-MCP'

If you have feedback or need assistance with the MCP directory API, please join our Discord server