Skip to main content
Glama

CodeGraph CLI MCP Server

by Jakedismo
setup-build-optimization.sh (4.36 kB)
#!/bin/bash
# ABOUTME: Sets up build optimization tools and configuration for CodeGraph
# ABOUTME: Installs sccache, configures Cargo, and verifies the setup
set -euo pipefail

# Colored log helpers. Diagnostics (warn/fail) go to stderr so they are
# not mixed into captured stdout.
info()    { printf '\033[1;36m[INFO]\033[0m %s\n' "$1"; }
success() { printf '\033[1;32m[SUCCESS]\033[0m %s\n' "$1"; }
warn()    { printf '\033[1;33m[WARN]\033[0m %s\n' "$1" >&2; }
fail()    { printf '\033[1;31m[ERROR]\033[0m %s\n' "$1" >&2; exit 1; }

info "Setting up build optimization for CodeGraph..."

# Check if running on macOS (sysctl/split-debuginfo tuning below is macOS-specific)
if [[ "${OSTYPE:-}" != darwin* ]]; then
  warn "This script is optimized for macOS. Adjust paths for other platforms."
fi

# 1. Install sccache if not already installed
info "Checking for sccache..."
if command -v sccache >/dev/null 2>&1; then
  success "sccache is already installed: $(sccache --version)"
else
  info "Installing sccache via cargo..."
  cargo install sccache || fail "cargo install sccache failed"
  success "sccache installed successfully"
fi

# 2. Get the sccache path.
# 'command -v' is the POSIX replacement for 'which'; fail loudly if the
# binary still is not on PATH (e.g. ~/.cargo/bin not in PATH yet).
SCCACHE_PATH=$(command -v sccache) || fail "sccache not found on PATH after install"
info "sccache path: $SCCACHE_PATH"

# 3. Create or update ~/.cargo/config.toml
CARGO_CONFIG="$HOME/.cargo/config.toml"
info "Configuring Cargo build settings..."

# Backup existing config if it exists
if [[ -f "$CARGO_CONFIG" ]]; then
  cp "$CARGO_CONFIG" "$CARGO_CONFIG.backup"
  info "Backed up existing config to $CARGO_CONFIG.backup"
fi

# Ensure ~/.cargo exists before writing; 'cat >' does not create parent dirs.
mkdir -p "$HOME/.cargo"

# Detect CPU cores (macOS); fall back to 8 where sysctl is unavailable.
CPU_CORES=$(sysctl -n hw.ncpu 2>/dev/null || echo "8")
JOBS=$((CPU_CORES - 2)) # Leave 2 cores for system
if [[ $JOBS -lt 4 ]]; then
  JOBS=4
fi
info "Detected $CPU_CORES CPU cores, using $JOBS parallel jobs"

# Create optimized config (variables below are intentionally expanded now)
cat > "$CARGO_CONFIG" <<EOF
# CodeGraph Build Optimization Configuration
# Generated by setup-build-optimization.sh

[build]
rustc-wrapper = "$SCCACHE_PATH"
jobs = $JOBS
pipelining = true

[net]
git-fetch-with-cli = true

[profile.dev]
split-debuginfo = "unpacked" # Faster linking on macOS

[term]
color = "always"
verbose = false
EOF

success "Created optimized Cargo config at $CARGO_CONFIG"
# 4. Set sccache environment variables
info "Configuring sccache cache size..."
export SCCACHE_CACHE_SIZE="10G"
export SCCACHE_DIR="$HOME/.cache/sccache"
# Create the cache dir up front so sccache's first invocation cannot fail
# on a missing directory.
mkdir -p "$SCCACHE_DIR"

# Add to shell profile (zsh preferred, then bash)
SHELL_PROFILE=""
if [[ -f "$HOME/.zshrc" ]]; then
  SHELL_PROFILE="$HOME/.zshrc"
elif [[ -f "$HOME/.bashrc" ]]; then
  SHELL_PROFILE="$HOME/.bashrc"
fi

if [[ -n "$SHELL_PROFILE" ]]; then
  # Idempotent append: only add once, keyed on SCCACHE_CACHE_SIZE.
  if ! grep -q "SCCACHE_CACHE_SIZE" "$SHELL_PROFILE" 2>/dev/null; then
    # \$HOME is escaped so the profile re-expands it at login time.
    cat >> "$SHELL_PROFILE" <<EOF

# sccache configuration (added by setup-build-optimization.sh)
export SCCACHE_CACHE_SIZE="10G"
export SCCACHE_DIR="\$HOME/.cache/sccache"
EOF
    success "Added sccache config to $SHELL_PROFILE"
  else
    info "sccache config already present in $SHELL_PROFILE"
  fi
else
  # Previously a silent no-op; tell the user what to do manually.
  warn "No ~/.zshrc or ~/.bashrc found; add the SCCACHE_* exports to your shell profile manually"
fi

# 5. Show sccache stats (best-effort: the server may not be running yet)
info "Current sccache statistics:"
sccache --show-stats || warn "sccache stats not available (first run)"

# 6. Summary. ${SHELL_PROFILE:-…} avoids printing a bare 'source ' when no
# profile file was found above.
cat <<EOF

━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
  ✅ Build Optimization Setup Complete!
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━

📊 Configuration:
  • sccache: $SCCACHE_PATH
  • Cache size: 10GB
  • Cache dir: $HOME/.cache/sccache
  • Parallel jobs: $JOBS
  • Cargo config: $CARGO_CONFIG

🚀 Next Steps:
  1. Restart your shell or run: source ${SHELL_PROFILE:-your shell profile}
  2. Run a build: cargo build
  3. Check cache stats: sccache --show-stats

📈 Expected Performance Improvements:
  • Clean builds: 40-50% faster (after first build caches)
  • Incremental builds: 50-70% faster
  • Rebuild unchanged: 90% faster (fully cached)

💡 Tips:
  • Use 'cargo build --profile fast-dev' for fastest iteration
  • Clear cache if needed: sccache --stop-server && rm -rf ~/.cache/sccache
  • Monitor cache: sccache --show-stats

━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
EOF

success "Setup complete! Enjoy faster builds! 🎉"

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Jakedismo/codegraph-rust'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.