# Brummer Example Configuration File
# Copy this to .brum.toml in your project or home directory
# Package manager preferences
# preferred_package_manager = "pnpm"
# MCP (Model Context Protocol) Settings
# mcp_port = 7777
# no_mcp = false
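# With the default port above, the MCP endpoint is http://localhost:7777/mcp
# (this is the value injected as ${BRUMMER_MCP_URL}; see the AI Coder section)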
# Network robustness (experimental)
# use_robust_networking = false
# Proxy Server Settings
# proxy_port = 19888
# proxy_mode = "reverse"
# proxy_url = ""
# standard_proxy = false
# no_proxy = false
# AI Coder Configuration
#
# Variable Replacement in CLI Arguments:
# You can use ${VAR_NAME} syntax in base_args to inject dynamic values.
# Brummer automatically provides these variables:
# - ${BRUMMER_MCP_URL}: The full MCP server URL (e.g., http://localhost:7777/mcp)
# - ${BRUMMER_MCP_PORT}: Just the port number (e.g., 7777)
# - Any environment variable from your system
#
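# Example (hypothetical provider name; any cli_tool's base_args work the same way):
# [ai_coders.providers.my-tool.cli_tool]
# command = "my-tool"
# base_args = ["--server", "${BRUMMER_MCP_URL}", "--user", "${USER}"]
#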
[ai_coders]
enabled = true
max_concurrent = 3
workspace_base_dir = "~/.brummer/ai-coders"
default_provider = "claude" # Available: claude, opencode, gemini, terminal, local, mock (plus any CLI providers defined below)
timeout_minutes = 30
auto_cleanup = true
cleanup_after_hours = 24
# Claude Provider Configuration
[ai_coders.providers.claude]
api_key_env = "ANTHROPIC_API_KEY"
model = "claude-3-5-sonnet-20241022"
max_tokens = 4096
temperature = 0.7
request_timeout_seconds = 30
[ai_coders.providers.claude.rate_limit]
requests_per_minute = 50
tokens_per_minute = 150000
# OpenCode CLI Tool Configuration
[ai_coders.providers.opencode]
[ai_coders.providers.opencode.cli_tool]
command = "opencode"
base_args = ["run"] # Use 'run' subcommand
[ai_coders.providers.opencode.cli_tool.flag_mapping]
model = "--model"
prompt = "--prompt"
debug = "--log-level"
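# With this mapping, a request is presumably expanded into a call such as
# (values illustrative): opencode run --model <model> --prompt "<prompt>" --log-level <level>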
[ai_coders.providers.opencode.cli_tool.environment]
# OpenCode uses API keys from its own config
# Gemini Provider Configuration
[ai_coders.providers.gemini]
api_key_env = "GEMINI_API_KEY"
model = "gemini-1.5-pro"
max_tokens = 8192
temperature = 0.7
request_timeout_seconds = 30
[ai_coders.providers.gemini.rate_limit]
requests_per_minute = 15
tokens_per_minute = 1000000
# Terminal Provider Configuration (Local Shell Execution)
[ai_coders.providers.terminal]
model = "bash" # Available: bash, sh, zsh, fish, python, node, ruby
max_tokens = 1000000
temperature = 0.0
request_timeout_seconds = 300
[ai_coders.providers.terminal.rate_limit]
requests_per_minute = 30
tokens_per_minute = 1000000
# Local Model Provider (e.g., Ollama)
[ai_coders.providers.local]
base_url = "http://localhost:11434"
model = "codellama"
max_tokens = 2048
temperature = 0.7
request_timeout_seconds = 60
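# A local Ollama install is assumed; to serve the model referenced above:
#   ollama serve            # start the API server on port 11434 (Ollama's default)
#   ollama pull codellama   # download the model locally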
# Aider CLI Tool Configuration
[ai_coders.providers.aider]
[ai_coders.providers.aider.cli_tool]
command = "aider"
base_args = ["--yes"] # Auto-confirm changes
working_dir = "."
[ai_coders.providers.aider.cli_tool.flag_mapping]
model = "--model"
message = "--message"
[ai_coders.providers.aider.cli_tool.environment]
# Add any environment variables Aider needs; keys left unset here fall back to
# your existing environment
# OPENAI_API_KEY = "your-key-here"
# Claude CLI Tool Configuration (multiple named configs are possible; see
# claude-secure, opus, and sonnet below)
# Variable Replacement: use ${VAR_NAME} in base_args for dynamic values
# BRUMMER_MCP_URL and BRUMMER_MCP_PORT are automatically set by Brummer
# Note: [ai_coders.providers.claude] is already defined above and TOML forbids
# redefining a table, so only the cli_tool subtable is declared here
[ai_coders.providers.claude.cli_tool]
command = "claude"
# The ${BRUMMER_MCP_URL} will be replaced with the actual MCP server URL at runtime
base_args = ["--mcp-config", "{\"mcpServers\":{\"brummer\":{\"type\":\"http\",\"url\":\"${BRUMMER_MCP_URL}\"}}}", "--dangerously-skip-permissions"] # Skips all permission prompts; for rapid development only
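# After substitution (assuming the default port 7777), the --mcp-config JSON is:
#   {"mcpServers":{"brummer":{"type":"http","url":"http://localhost:7777/mcp"}}}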
model = "sonnet" # Default model
[ai_coders.providers.claude.cli_tool.flag_mapping]
model = "--model"
output_format = "--output-format"
debug = "--debug"
[ai_coders.providers.claude.cli_tool.environment]
# BRUMMER_MCP_URL and BRUMMER_MCP_PORT are automatically set by Brummer
# Claude Secure (without dangerous permissions)
[ai_coders.providers.claude-secure]
[ai_coders.providers.claude-secure.cli_tool]
command = "claude"
base_args = ["--mcp-config", "{\"mcpServers\":{\"brummer\":{\"type\":\"http\",\"url\":\"${BRUMMER_MCP_URL}\"}}}"] # Omits --dangerously-skip-permissions, so Claude prompts before each action
model = "sonnet" # Default model
[ai_coders.providers.claude-secure.cli_tool.flag_mapping]
model = "--model"
output_format = "--output-format"
debug = "--debug"
# Claude with Opus model
[ai_coders.providers.opus]
[ai_coders.providers.opus.cli_tool]
command = "claude"
base_args = ["--mcp-config", "{\"mcpServers\":{\"brummer\":{\"type\":\"http\",\"url\":\"${BRUMMER_MCP_URL}\"}}}", "--model", "opus", "--dangerously-skip-permissions"]
model = "opus"
[ai_coders.providers.opus.cli_tool.flag_mapping]
model = "--model"
output_format = "--output-format"
debug = "--debug"
# Claude with Sonnet model
[ai_coders.providers.sonnet]
[ai_coders.providers.sonnet.cli_tool]
command = "claude"
base_args = ["--mcp-config", "{\"mcpServers\":{\"brummer\":{\"type\":\"http\",\"url\":\"${BRUMMER_MCP_URL}\"}}}", "--model", "sonnet", "--dangerously-skip-permissions"]
model = "sonnet"
[ai_coders.providers.sonnet.cli_tool.flag_mapping]
model = "--model"
output_format = "--output-format"
debug = "--debug"
# Gemini CLI Tool Configuration (if available)
[ai_coders.providers.gemini-cli]
[ai_coders.providers.gemini-cli.cli_tool]
command = "gemini"
base_args = []
[ai_coders.providers.gemini-cli.cli_tool.flag_mapping]
model = "--model"
prompt = "--prompt"
debug = "--debug"
[ai_coders.providers.gemini-cli.cli_tool.environment]
# Gemini uses API keys from its own config
# Mock Provider for Testing
[ai_coders.providers.mock]
model = "mock-model"
max_tokens = 1000
temperature = 0.5
request_timeout_seconds = 5
# Resource Limits
[ai_coders.resource_limits]
max_memory_mb = 512
max_disk_space_mb = 1024
max_cpu_percent = 50
max_processes = 5
max_files_per_coder = 100
# Workspace Settings
[ai_coders.workspace]
template = "basic"
gitignore_rules = ["node_modules/", ".env", "*.log", "dist/", "build/"]
allowed_extensions = [".go", ".js", ".ts", ".py", ".md", ".json", ".yaml", ".toml", ".txt", ".sh"]
forbidden_paths = ["/etc", "/var", "/sys", "/proc"]
max_file_size_mb = 10
backup_enabled = true
# AI Coder Logging
[ai_coders.logging]
level = "info"
output_file = "ai-coders.log"
rotate_size_mb = 50
keep_rotations = 5
include_ai_output = false