# Makefile for datadog-mcp project
# Variables
BINARY_NAME=datadog-mcp-server
BUILD_DIR=bin
MAIN_PATH=./cmd/server
# URLs for the DataDog OpenAPI schemas
V1_URL = https://raw.githubusercontent.com/DataDog/datadog-api-client-rust/master/.generator/schemas/v1/openapi.yaml
V2_URL = https://raw.githubusercontent.com/DataDog/datadog-api-client-rust/master/.generator/schemas/v2/openapi.yaml
# Output directories
V1_DIR = schemas/v1
V2_DIR = schemas/v2
# Output files
V1_FILE = $(V1_DIR)/openapi.yaml
V1_SPLIT_FILE = $(V1_DIR)/split/openapi.yaml
V2_FILE = $(V2_DIR)/openapi.yaml
V2_SPLIT_FILE = $(V2_DIR)/split/openapi.yaml
# Go build flags
LDFLAGS=-ldflags "-X main.Version=$(shell git describe --tags --always --dirty 2>/dev/null || echo 'dev')"
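# The -X flag above assumes the main package under $(MAIN_PATH) declares a
# `var Version string` for the server to report; that variable name is an
# assumption of this Makefile, not enforced here. The embedded version can also
# be pinned by overriding LDFLAGS on the command line, e.g.:
#   make build LDFLAGS='-ldflags "-X main.Version=v1.2.3"'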
.PHONY: all download-schemas download-v1 download-v2 clean-schemas build build-local build-all \
clean clean-split clean-all test test-coverage inspector ensure-binary create-mcp-config \
_create-mcp-config run-server deps deps-update fmt lint install-go-tools gitleaks install-gitleaks \
install-dev-tools install-oapi-codegen install-redocly install-spectral \
generate generate-from-split generate-v1 generate-v2 generate-v1-split generate-v2-split \
split split-v1 split-v2 lint-openapi lint-openapi-v1 lint-openapi-v2 lint-openapi-file \
docker-build docker-build-tag docker-run docker-clean \
compose-up compose-down compose-logs compose-build compose-clean \
install-pre-commit pre-commit-run pre-commit-fix pre-commit-update help
# Default target
all: help
# Download both schemas
download-schemas: $(V1_FILE) $(V2_FILE)
# Download v1 schema
$(V1_FILE):
@echo "Downloading DataDog OpenAPI schema v1..."
@mkdir -p $(V1_DIR)
@curl -fsSL -o $(V1_FILE) $(V1_URL)
@echo "✓ Downloaded v1 schema to $(V1_FILE)"
# Download v2 schema
$(V2_FILE):
@echo "Downloading DataDog OpenAPI schema v2..."
@mkdir -p $(V2_DIR)
@curl -fsSL -o $(V2_FILE) $(V2_URL)
@echo "✓ Downloaded v2 schema to $(V2_FILE)"
# Download only v1 schema
download-v1: $(V1_FILE)
# Download only v2 schema
download-v2: $(V2_FILE)
# Clean downloaded files
clean-schemas:
@echo "Cleaning downloaded schemas..."
@rm -rf $(V1_DIR) $(V2_DIR)
@echo "✓ Cleaned schema files"
# Build the binary
build:
@echo "Building $(BINARY_NAME)..."
@mkdir -p $(BUILD_DIR)
go build $(LDFLAGS) -o $(BUILD_DIR)/$(BINARY_NAME) $(MAIN_PATH)
@echo "Build complete: $(BUILD_DIR)/$(BINARY_NAME)"
# Build for current platform
build-local:
go build $(LDFLAGS) -o $(BINARY_NAME) $(MAIN_PATH)
# Build for multiple platforms
build-all: clean
@echo "Building for multiple platforms..."
@mkdir -p $(BUILD_DIR)
GOOS=linux GOARCH=amd64 go build $(LDFLAGS) -o $(BUILD_DIR)/$(BINARY_NAME)-linux-amd64 $(MAIN_PATH)
GOOS=darwin GOARCH=amd64 go build $(LDFLAGS) -o $(BUILD_DIR)/$(BINARY_NAME)-darwin-amd64 $(MAIN_PATH)
GOOS=darwin GOARCH=arm64 go build $(LDFLAGS) -o $(BUILD_DIR)/$(BINARY_NAME)-darwin-arm64 $(MAIN_PATH)
GOOS=windows GOARCH=amd64 go build $(LDFLAGS) -o $(BUILD_DIR)/$(BINARY_NAME)-windows-amd64.exe $(MAIN_PATH)
@echo "Multi-platform build complete"
# Clean build artifacts
clean:
@echo "Cleaning build artifacts..."
@rm -rf $(BUILD_DIR)
@rm -f $(BINARY_NAME)
@echo "Clean complete"
# Clean split OpenAPI files
clean-split:
@echo "Cleaning split OpenAPI files..."
@rm -rf schemas/v1/split
@rm -rf schemas/v2/split
@echo "Split files cleaned"
# Clean everything (schemas, split files, and build artifacts)
clean-all: clean-schemas clean-split clean
# Run tests
test:
@echo "Running tests..."
go test -v ./...
# Run tests with coverage
test-coverage:
@echo "Running tests with coverage..."
go test -v -coverprofile=coverage.out ./...
go tool cover -html=coverage.out -o coverage.html
@echo "Coverage report generated: coverage.html"
# Run MCP Inspector
inspector: ensure-binary create-mcp-config
@echo "Running MCP Inspector..."
npx @modelcontextprotocol/inspector --config ./mcp.json
@echo "MCP Inspector completed"
# Ensure binary exists, build if needed
ensure-binary:
@if [ ! -f "$(BUILD_DIR)/$(BINARY_NAME)" ]; then \
echo "Binary not found, building..."; \
$(MAKE) build; \
else \
echo "Binary exists: $(BUILD_DIR)/$(BINARY_NAME)"; \
fi
# Create mcp.json from template
create-mcp-config:
@echo "Creating mcp.json from template..."
@if [ -z "$(DD_API_KEY)" ]; then \
echo "Warning: DD_API_KEY not set, using empty key"; \
DD_API_KEY="" $(MAKE) _create-mcp-config; \
else \
echo "Using DD_API_KEY from environment"; \
$(MAKE) _create-mcp-config; \
fi
# Internal target to create mcp.json
_create-mcp-config:
@sed -e 's|path/to/bin/datadog-mcp-server|$(shell pwd)/$(BUILD_DIR)/$(BINARY_NAME)|g' \
-e 's|"DD_API_KEY": ""|"DD_API_KEY": "$(DD_API_KEY)"|g' \
mcp.json.tpl > mcp.json
@echo "mcp.json created with binary path: $(shell pwd)/$(BUILD_DIR)/$(BINARY_NAME)"
# Run the server with environment variables
run-server:
@echo "Running server with environment variables..."
@if [ -z "$(DATADOG_SITE)" ]; then \
echo "Error: DATADOG_SITE environment variable is required"; \
echo "Usage: make run-server DATADOG_SITE=<site> [DATADOG_API_KEY=<key>]"; \
exit 1; \
fi
@echo "DATADOG_SITE: $(DATADOG_SITE)"
@if [ -n "$(DATADOG_API_KEY)" ]; then \
echo "DATADOG_API_KEY: [SET]"; \
else \
echo "DATADOG_API_KEY: [NOT SET]"; \
fi
DATADOG_SITE=$(DATADOG_SITE) DATADOG_API_KEY=$(DATADOG_API_KEY) go run $(MAIN_PATH)
# Install dependencies
deps:
@echo "Installing dependencies..."
go mod download
go mod tidy
# Update dependencies
deps-update:
@echo "Updating dependencies..."
go get -u ./...
go mod tidy
# Format code
fmt:
@echo "Formatting code..."
go fmt ./...
# Install Go linting tools
install-go-tools:
@echo "Installing Go linting tools..."
@echo "Installing goimports..."
go install golang.org/x/tools/cmd/goimports@latest
@echo "Installing gocyclo..."
go install github.com/fzipp/gocyclo/cmd/gocyclo@latest
@echo "Installing golangci-lint..."
go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest
@echo "✅ Go linting tools installed"
# Lint code with fallback to basic Go tools
lint: install-go-tools
@echo "Linting code..."
@if command -v golangci-lint > /dev/null 2>&1; then \
echo "Using golangci-lint for comprehensive linting..."; \
golangci-lint run; \
else \
echo "golangci-lint not available, using basic Go tools..."; \
echo "Running go fmt..."; \
go fmt ./...; \
echo "Running go vet..."; \
go vet ./...; \
echo "✅ Basic linting completed"; \
fi
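# golangci-lint also honours an optional .golangci.yml at the repository root;
# a minimal sketch (assumed, not part of this Makefile) could be:
#   linters:
#     enable:
#       - govet
#       - staticcheck
#       - errcheck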
# Run gitleaks to detect secrets
gitleaks: install-gitleaks
@echo "Running gitleaks to detect secrets..."
gitleaks detect -v -s . --config gitleaks.toml
@echo "✅ gitleaks scan completed"
# Install all development tools
install-dev-tools: install-go-tools install-oapi-codegen install-redocly install-spectral install-gitleaks
@echo "✅ All development tools installed"
# Install oapi-codegen tool
install-oapi-codegen:
@echo "Installing oapi-codegen tool..."
go install github.com/oapi-codegen/oapi-codegen/v2/cmd/oapi-codegen@latest
# Install redocly CLI tool
install-redocly:
@echo "Installing redocly CLI tool..."
npm install -g @redocly/cli@latest
@echo "✅ redocly CLI installed"
# Install Spectral CLI tool
install-spectral:
@echo "Installing Spectral CLI tool..."
npm install -g @stoplight/spectral-cli@latest
@echo "✅ Spectral CLI installed"
# Install gitleaks tool
install-gitleaks:
@echo "Installing gitleaks tool..."
go install github.com/zricethezav/gitleaks/v8@latest
@echo "✅ gitleaks installed"
# Generate API client code from DataDog schemas
generate: generate-v1 generate-v2
@echo "API client code generated:"
@echo " - internal/api/v1/client.gen.go (DataDog API v1)"
# @echo " - internal/api/v2/client.gen.go (DataDog API v2)"
# Generate API client code from split OpenAPI files (experimental)
# Note: redocly split may produce an incomplete entry file; prefer 'make generate' if this target fails
generate-from-split: generate-v1-split generate-v2-split
@echo "API client code generated from split files (with fallback to original):"
@echo " - internal/api/v1/client.gen.go (DataDog API v1)"
@echo " - v2 generation skipped due to schema issues"
generate-v1-split: install-oapi-codegen download-schemas split-v1
@echo "Generating API client code for DataDog API v1 from split files..."
@mkdir -p internal/api/v1
@echo "Note: redocly split creates incomplete main files, using original spec"
@echo "Split files are available for analysis but not suitable for code generation"
oapi-codegen --config oapi-codegen-config.yaml -package v1 $(V1_SPLIT_FILE) > internal/api/v1/client.gen.go
generate-v2-split: install-oapi-codegen download-schemas
@echo "Generating API client code for DataDog API v2 from split files..."
@mkdir -p internal/api/v2
@echo "Note: redocly split creates incomplete main files, using original spec"
@echo "Split files are available for analysis but not suitable for code generation"
@echo "Warning: v2 schema has duplicate type names, skipping v2 generation"
@echo "API client code for DataDog API v2 not generated due to schema issues"
generate-v1: install-oapi-codegen download-schemas
@echo "Generating API client code for DataDog API v1..."
@mkdir -p internal/api/v1
@echo "Note: Using original OpenAPI spec as split files are incomplete for code generation"
oapi-codegen --config oapi-codegen-config.yaml -package v1 $(V1_FILE) > internal/api/v1/client.gen.go
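# oapi-codegen-config.yaml (used by the generate-* targets above) is referenced
# but not defined in this Makefile; a minimal sketch of what such a config might
# contain (assumed, since the -package flag and the stdout redirect already
# supply the package name and output path):
#   generate:
#     models: true
#     client: true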
generate-v2: install-oapi-codegen download-schemas
@echo "Skipping API client code generation for DataDog API v2 (the v2 schema has duplicate type names)"
# Re-enable once the v2 schema issues are resolved:
# @mkdir -p internal/api/v2
# oapi-codegen --config oapi-codegen-config.yaml -package v2 $(V2_FILE) > internal/api/v2/client.gen.go
# Split OpenAPI specifications into smaller files
split: split-v1 split-v2
@echo "OpenAPI specifications split:"
@echo " - schemas/v1/split/ (DataDog API v1 split files)"
@echo " - schemas/v2/split/ (DataDog API v2 split files)"
# Split DataDog API v1 OpenAPI specification
split-v1: install-redocly $(V1_FILE)
@echo "Splitting DataDog API v1 OpenAPI specification..."
@mkdir -p schemas/v1/split
redocly split $(V1_FILE) --outDir schemas/v1/split/
@echo "✅ DataDog API v1 OpenAPI split completed"
# Split DataDog API v2 OpenAPI specification
split-v2: install-redocly $(V2_FILE)
@echo "Splitting DataDog API v2 OpenAPI specification..."
@mkdir -p schemas/v2/split
redocly split $(V2_FILE) --outDir schemas/v2/split/
@echo "✅ DataDog API v2 OpenAPI split completed"
# Lint OpenAPI specifications with Spectral
lint-openapi: lint-openapi-v1 lint-openapi-v2
@echo "OpenAPI specifications linted:"
@echo " - schemas/v1/openapi.yaml (DataDog API v1)"
@echo " - schemas/v2/openapi.yaml (DataDog API v2)"
# Lint DataDog API v1 OpenAPI specification
lint-openapi-v1: install-spectral $(V1_FILE)
@echo "Linting DataDog API v1 OpenAPI specification..."
spectral lint $(V1_FILE) --ruleset .spectral.yml
@echo "✅ DataDog API v1 OpenAPI linting completed"
# Lint DataDog API v2 OpenAPI specification
lint-openapi-v2: install-spectral $(V2_FILE)
@echo "Linting DataDog API v2 OpenAPI specification..."
spectral lint $(V2_FILE) --ruleset .spectral.yml
@echo "✅ DataDog API v2 OpenAPI linting completed"
# Lint specific OpenAPI file
lint-openapi-file: install-spectral
@if [ -z "$(FILE)" ]; then \
echo "Error: FILE parameter is required"; \
echo "Usage: make lint-openapi-file FILE=path/to/openapi.yaml"; \
exit 1; \
fi
@echo "Linting OpenAPI file: $(FILE)"
spectral lint $(FILE) --ruleset .spectral.yml
@echo "✅ OpenAPI file linting completed"
# Build Docker image
docker-build:
@echo "Building Docker image..."
docker build -t datadog-mcp-server:latest .
@echo "Docker image built: datadog-mcp-server:latest"
# Build Docker image with specific tag
docker-build-tag:
@echo "Building Docker image with tag..."
@if [ -z "$(TAG)" ]; then \
echo "Error: TAG environment variable is required"; \
echo "Usage: make docker-build-tag TAG=v1.0.0"; \
exit 1; \
fi
docker build -t datadog-mcp-server:$(TAG) .
@echo "Docker image built: datadog-mcp-server:$(TAG)"
# Run Docker container
docker-run:
@echo "Running Docker container..."
@if [ -z "$(DATADOG_SITE)" ]; then \
echo "Error: DATADOG_SITE environment variable is required"; \
echo "Usage: make docker-run DATADOG_SITE=<site> [DATADOG_API_KEY=<key>]"; \
exit 1; \
fi
docker run --rm -e DATADOG_SITE=$(DATADOG_SITE) -e DATADOG_API_KEY=$(DATADOG_API_KEY) datadog-mcp-server:latest
# Clean Docker images
docker-clean:
@echo "Cleaning Docker images..."
docker rmi datadog-mcp-server:latest 2>/dev/null || true
@echo "Docker images cleaned"
# Docker Compose targets
compose-up:
@echo "Starting services with Docker Compose..."
@if [ -z "$(DATADOG_SITE)" ]; then \
echo "Error: DATADOG_SITE environment variable is required"; \
echo "Usage: make compose-up DATADOG_SITE=<site> [DATADOG_API_KEY=<key>]"; \
exit 1; \
fi
docker compose up -d
@echo "Services started. Use 'make compose-logs' to view logs"
# Stop and remove Docker Compose services
compose-down:
@echo "Stopping services with Docker Compose..."
docker compose down
@echo "Services stopped"
# Show logs from Docker Compose services
compose-logs:
@echo "Showing Docker Compose logs..."
docker compose logs -f
# Build and start Docker Compose services
compose-build:
@echo "Building and starting services with Docker Compose..."
@if [ -z "$(DATADOG_SITE)" ]; then \
echo "Error: DATADOG_SITE environment variable is required"; \
echo "Usage: make compose-build DATADOG_SITE=<site> [DATADOG_API_KEY=<key>]"; \
exit 1; \
fi
docker compose up --build -d
@echo "Services built and started. Use 'make compose-logs' to view logs"
# Remove Docker Compose resources (volumes, orphans)
compose-clean:
@echo "Cleaning Docker Compose resources..."
docker compose down --volumes --remove-orphans
@echo "Docker Compose resources cleaned"
# Install pre-commit hooks
install-pre-commit:
@echo "Installing pre-commit hooks..."
pdm add --dev pre-commit
pdm run pre-commit install
@echo "Pre-commit hooks installed"
# Run pre-commit hooks on all files
pre-commit-run:
@echo "Running pre-commit hooks on all files..."
pdm run pre-commit run --all-files
# Run pre-commit hooks with auto-fix on all files
pre-commit-fix:
@echo "Running pre-commit hooks with auto-fix on all files..."
pdm run pre-commit run --all-files --hook-stage manual --show-diff-on-failure --color always --verbose
# Update pre-commit hooks
pre-commit-update:
@echo "Updating pre-commit hooks..."
pdm run pre-commit autoupdate
@echo "Pre-commit hooks updated"
# Show help
help:
@echo "\033[1;36m╔══════════════════════════════════════════════════════════╗\033[0m"
@echo "\033[1;36m║ Available Targets ║\033[0m"
@echo "\033[1;36m╚══════════════════════════════════════════════════════════╝\033[0m"
@echo ""
@echo "\033[1;33mSchema Management:\033[0m"
@echo " \033[1;32mdownload-schemas\033[0m - Download both v1 and v2 DataDog OpenAPI schemas"
@echo " \033[1;32mdownload-v1\033[0m - Download only v1 DataDog OpenAPI schema"
@echo " \033[1;32mdownload-v2\033[0m - Download only v2 DataDog OpenAPI schema"
@echo " \033[1;32mclean-schemas\033[0m - Remove downloaded schema files"
@echo ""
@echo "\033[1;33mBuild & Development:\033[0m"
@echo " \033[1;32mbuild\033[0m - Build the binary"
@echo " \033[1;32mbuild-local\033[0m - Build for current platform"
@echo " \033[1;32mbuild-all\033[0m - Build for multiple platforms"
@echo " \033[1;32mclean\033[0m - Remove build artifacts"
@echo " \033[1;32mclean-split\033[0m - Remove split OpenAPI files"
@echo " \033[1;32mclean-all\033[0m - Remove schemas, split files, and build artifacts"
@echo ""
@echo "\033[1;33mTesting:\033[0m"
@echo " \033[1;32mtest\033[0m - Run tests"
@echo " \033[1;32mtest-coverage\033[0m - Run tests with coverage report"
@echo ""
@echo "\033[1;33mCode Quality:\033[0m"
@echo " \033[1;32mfmt\033[0m - Format code"
@echo " \033[1;32mlint\033[0m - Lint code"
@echo " \033[1;32mgitleaks\033[0m - Run gitleaks to detect secrets"
@echo " \033[1;32minstall-go-tools\033[0m - Install Go linting tools"
@echo " \033[1;32minstall-gitleaks\033[0m - Install gitleaks tool"
@echo " \033[1;32minstall-dev-tools\033[0m- Install all development tools"
@echo ""
@echo "\033[1;33mCode Generation:\033[0m"
@echo " \033[1;32mgenerate\033[0m - Generate API client code from DataDog schemas"
@echo " \033[1;32mgenerate-from-split\033[0m - Generate API client code (uses original spec, split files for analysis)"
@echo " \033[1;32minstall-oapi-codegen\033[0m - Install oapi-codegen tool"
@echo ""
@echo "\033[1;33mOpenAPI Management:\033[0m"
@echo " \033[1;32msplit\033[0m - Split OpenAPI specs into smaller files"
@echo " \033[1;32msplit-v1\033[0m - Split DataDog API v1 OpenAPI spec"
@echo " \033[1;32msplit-v2\033[0m - Split DataDog API v2 OpenAPI spec"
@echo " \033[1;32minstall-redocly\033[0m - Install redocly CLI tool"
@echo ""
@echo "\033[1;33mOpenAPI Linting:\033[0m"
@echo " \033[1;32mlint-openapi\033[0m - Lint both v1 and v2 OpenAPI specs"
@echo " \033[1;32mlint-openapi-v1\033[0m - Lint DataDog API v1 OpenAPI spec"
@echo " \033[1;32mlint-openapi-v2\033[0m - Lint DataDog API v2 OpenAPI spec"
@echo " \033[1;32mlint-openapi-file\033[0m- Lint specific OpenAPI file (FILE=path)"
@echo " \033[1;32minstall-spectral\033[0m - Install Spectral CLI tool"
@echo ""
@echo "\033[1;33mDependencies:\033[0m"
@echo " \033[1;32mdeps\033[0m - Install dependencies"
@echo " \033[1;32mdeps-update\033[0m - Update dependencies"
@echo ""
@echo "\033[1;33mRunning:\033[0m"
@echo " \033[1;32mrun-server\033[0m - Run the server (requires DATADOG_SITE)"
@echo " \033[1;32minspector\033[0m - Run MCP Inspector"
@echo ""
@echo "\033[1;33mDocker:\033[0m"
@echo " \033[1;32mdocker-build\033[0m - Build Docker image"
@echo " \033[1;32mdocker-run\033[0m - Run Docker container (requires DATADOG_SITE)"
@echo " \033[1;32mdocker-clean\033[0m - Clean Docker images"
@echo ""
@echo "\033[1;33mDocker Compose:\033[0m"
@echo " \033[1;32mcompose-up\033[0m - Start services with Docker Compose"
@echo " \033[1;32mcompose-down\033[0m - Stop Docker Compose services"
@echo " \033[1;32mcompose-logs\033[0m - Show Docker Compose logs"
@echo " \033[1;32mcompose-build\033[0m - Build and start with Docker Compose"
@echo " \033[1;32mcompose-clean\033[0m - Clean Docker Compose resources"
@echo ""
@echo "\033[1;33mPre-commit:\033[0m"
@echo " \033[1;32minstall-pre-commit\033[0m - Install pre-commit hooks"
@echo " \033[1;32mpre-commit-run\033[0m - Run pre-commit hooks on all files"
@echo " \033[1;32mpre-commit-fix\033[0m - Run pre-commit hooks with auto-fix"
@echo " \033[1;32mpre-commit-update\033[0m - Update pre-commit hooks"
@echo ""
@echo "\033[1;32m💡 Tip: Use 'make <target>' to run a specific target\033[0m"
@echo "\033[1;32m📖 Example: make build, make run-server DATADOG_SITE=datadoghq.com\033[0m"