# ==============================================================================
# Crawl4AI RAG MCP Server Configuration Template
# ==============================================================================
# Copy this file to .env and configure the settings below for your deployment
#
# Usage:
# 1. Copy this file: cp .env_template.txt .env
# 2. Edit .env with your configuration
# 3. Start the server: docker compose up -d
#
# ==============================================================================
# ------------------------------------------------------------------------------
# SERVER CONFIGURATION
# ------------------------------------------------------------------------------
# IS_SERVER: Set to true for server deployment mode
# This runs both the REST API and MCP server in the same container
# Options: true | false
IS_SERVER=true
# SERVER_HOST: IP address the server binds to
# Use 0.0.0.0 to accept connections from any network interface (recommended for Docker)
# Use 127.0.0.1 to only accept localhost connections
# Default: 0.0.0.0
SERVER_HOST=0.0.0.0
# SERVER_PORT: Port for the REST API server
# The REST API will be available at http://localhost:<SERVER_PORT> (8080 by default)
# Default: 8080
SERVER_PORT=8080
# ------------------------------------------------------------------------------
# REMOTE SERVER CONFIGURATION (for client mode fallback)
# ------------------------------------------------------------------------------
# REMOTE_API_URL: URL of the remote REST API server
# Only used when this instance runs in client mode and falls back to a remote server
# Format: http://HOST:PORT or https://HOST:PORT (IP address or domain name)
# Example: https://192.168.10.50:8080
REMOTE_API_URL=https://192.168.10.50:8080
# REMOTE_API_KEY: API key for authenticating with the remote server
# Must match the LOCAL_API_KEY on the remote server
# Generate a secure random string (recommended: base64 encoded, 32+ characters)
# Example: openssl rand -base64 32
REMOTE_API_KEY=<YOURAPIKEY>
# ------------------------------------------------------------------------------
# LOCAL API AUTHENTICATION
# ------------------------------------------------------------------------------
# LOCAL_API_KEY: API key required for all REST API requests to this server
# All clients must include this in the Authorization header: "Authorization: Bearer YOUR_KEY"
# SECURITY: Use a strong, unique key! Generate with: openssl rand -base64 32
# This key protects your server from unauthorized access
LOCAL_API_KEY=<YOURAPIKEY>
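# Example request with the key (illustrative only; /health is a placeholder path,
# substitute an endpoint this REST API actually exposes):
#   curl -H "Authorization: Bearer YOUR_KEY" http://localhost:8080/health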
# ------------------------------------------------------------------------------
# BLOCKED DOMAIN PROTECTION
# ------------------------------------------------------------------------------
# BLOCKED_DOMAIN_KEYWORD: Secret keyword required to remove blocked domain patterns
# This prevents accidental or unauthorized removal of domain blocks
# IMPORTANT: Keep this secret! Only share with trusted administrators
# NOTE: Changing this requires recreating the container: docker compose down && docker compose up -d
# Generate a secure keyword: openssl rand -base64 16
BLOCKED_DOMAIN_KEYWORD=<YourKeyword>
# ------------------------------------------------------------------------------
# DATABASE CONFIGURATION
# ------------------------------------------------------------------------------
# DB_PATH: Path to the SQLite database file inside the container
# The database stores crawled content, embeddings, and metadata
# This path is inside the container; the actual file is in the mounted volume
# Default: /app/data/crawl4ai_rag.db
# NOTE: The data directory is mounted from ../../data (see docker-compose.yml)
DB_PATH=/app/data/crawl4ai_rag.db
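# Example backup from the host, assuming the default ../../data volume mount:
#   sqlite3 ../../data/crawl4ai_rag.db ".backup ./crawl4ai_rag_backup.db"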
# ------------------------------------------------------------------------------
# CRAWL4AI SERVICE
# ------------------------------------------------------------------------------
# CRAWL4AI_URL: URL of the Crawl4AI service for web content extraction
# Uses the internal Docker network name 'crawl4ai' (defined in docker-compose)
# Default: http://crawl4ai:11235
# NOTE: This is the Docker service name, not a public URL
CRAWL4AI_URL=http://crawl4ai:11235
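# Quick connectivity check from the Docker network (base URL only; the exact
# health endpoint depends on your Crawl4AI version):
#   docker run --rm --network crawler_default curlimages/curl -s -o /dev/null -w "%{http_code}\n" http://crawl4ai:11235/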
# ------------------------------------------------------------------------------
# SECURITY SETTINGS
# ------------------------------------------------------------------------------
# ENABLE_CORS: Enable Cross-Origin Resource Sharing for API requests
# Set to true to allow web applications from different domains to access the API
# Set to false for stricter security if only server-to-server communication is needed
# Options: true | false
# Default: true
ENABLE_CORS=true
# MAX_REQUEST_SIZE: Maximum size of incoming requests in bytes
# Protects against large malicious payloads
# Default: 10485760 (10 MB)
# Format: bytes (1 MB = 1048576 bytes)
MAX_REQUEST_SIZE=10485760
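# Example: to allow 25 MB requests, set 25 * 1048576 = 26214400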
# RATE_LIMIT_PER_MINUTE: Maximum API requests per minute per API key
# Protects against abuse and denial-of-service attacks
# Set to 0 to disable rate limiting (not recommended for production)
# Default: 60 requests/minute
RATE_LIMIT_PER_MINUTE=60
# ------------------------------------------------------------------------------
# LOGGING CONFIGURATION
# ------------------------------------------------------------------------------
# LOG_LEVEL: Verbosity of application logs
# Options: DEBUG | INFO | WARNING | ERROR | CRITICAL
# DEBUG: Detailed information for debugging (verbose)
# INFO: General informational messages (recommended for production)
# WARNING: Warning messages only
# ERROR: Error messages only
# Default: INFO
LOG_LEVEL=INFO
# LOG_FILE: Path to the application log file inside the container
# Logs are written to this file for persistence and debugging
# The data directory is mounted, so logs persist across container restarts
# Default: /app/data/crawl4ai_api.log
LOG_FILE=/app/data/crawl4ai_api.log
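# Example: follow the log from the host, assuming the default ../../data volume mount:
#   tail -f ../../data/crawl4ai_api.log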
# ==============================================================================
# ADDITIONAL NOTES
# ==============================================================================
#
# 1. DOCKER VOLUMES:
# The ../../data directory is mounted to /app/data in the container
# This ensures database and logs persist across container restarts
#
# 2. DOCKER NETWORKS:
# The server connects to the 'crawler_default' external network
# This allows communication with the Crawl4AI service
# Ensure the network exists: docker network create crawler_default
#
# 3. PORTS:
# - 8080: REST API (mapped to host)
# - 3000: MCP Server (mapped to host)
# - 11235: Crawl4AI service (internal Docker network only)
#
# 4. SECURITY BEST PRACTICES:
# - Always use strong, unique API keys (32+ characters)
# - Keep BLOCKED_DOMAIN_KEYWORD secret
# - Enable rate limiting in production
# - Use HTTPS in production (configure reverse proxy)
# - Regularly backup the database (../../data/crawl4ai_rag.db)
#
# 5. CHANGING ENVIRONMENT VARIABLES:
# Most variables can be changed by editing .env and recreating the container:
# docker compose down && docker compose up -d
#
# ==============================================================================