name: CI/CD Pipeline

# Trigger on pushes/PRs that touch source, tests, scripts, packaging metadata,
# or this workflow itself; manual dispatch supports an optional debug mode.
on:
  push:
    branches: [main, develop]
    paths:
      - 'simplenote_mcp/**'
      - 'tests/**'
      - 'scripts/**'
      - 'pyproject.toml'
      - 'requirements*.txt'
      - '.github/workflows/ci.yml'
  pull_request:
    branches: [main, develop]
    paths:
      - 'simplenote_mcp/**'
      - 'tests/**'
      - 'scripts/**'
      - 'pyproject.toml'
      - 'requirements*.txt'
      - '.github/workflows/ci.yml'
  workflow_dispatch:
    inputs:
      debug_mode:
        description: 'Enable debug mode with verbose output'
        required: false
        default: false
        type: boolean

# Cancel superseded runs on the same ref to save CI minutes.
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

env:
  PYTHON_VERSION: "3.12"
  FORCE_COLOR: "1"
  PYTHONUNBUFFERED: "1"
  PIP_TIMEOUT: "60"
  PIP_RETRIES: "3"
  PIP_DEFAULT_TIMEOUT: "60"
jobs:
  # Fast pre-flight job: collects environment diagnostics before the heavier
  # matrix jobs run, and always uploads its report for troubleshooting.
  diagnostics:
    name: Environment Diagnostics
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - name: Checkout code
        uses: actions/checkout@v5
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ env.PYTHON_VERSION }}
      - name: Run comprehensive diagnostics
        run: |
          if [ -f .github/scripts/ci-diagnostics.py ]; then
            timeout 60 python .github/scripts/ci-diagnostics.py || echo "⚠️ Diagnostics completed with issues"
          else
            echo "🔍 Running offline validation as fallback:"
            python scripts/validate-ci-offline.py
          fi
      - name: Upload diagnostics report
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: ci-diagnostics-report
          path: ci-diagnostics-report.json
          retention-days: 7
build-and-test:
name: Build and Test
runs-on: ubuntu-latest
timeout-minutes: 15
needs: diagnostics
strategy:
fail-fast: false
matrix:
python-version: ["3.10", "3.11", "3.12"]
steps:
- name: Checkout code
uses: actions/checkout@v5
with:
fetch-depth: 0
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: pip
cache-dependency-path: |
pyproject.toml
requirements*.txt
- name: Cache pre-commit
uses: actions/cache@v4
with:
path: ~/.cache/pre-commit
key: ${{ runner.os }}-precommit-${{ matrix.python-version }}-${{ hashFiles('.pre-commit-config.yaml') }}
restore-keys: |
${{ runner.os }}-precommit-${{ matrix.python-version }}-
${{ runner.os }}-precommit-
- name: Cache build artifacts
uses: actions/cache@v4
with:
path: |
build/
dist/
*.egg-info/
.pytest_cache/
htmlcov/
.coverage
.mypy_cache/
.ruff_cache/
key: ${{ runner.os }}-build-${{ matrix.python-version }}-${{ hashFiles('pyproject.toml', 'setup.py', 'setup.cfg') }}
restore-keys: |
${{ runner.os }}-build-${{ matrix.python-version }}-
${{ runner.os }}-build-
- name: Cache installed packages
uses: actions/cache@v4
with:
path: |
~/.local/lib/python${{ matrix.python-version }}/site-packages/
~/.local/bin/
key: ${{ runner.os }}-packages-${{ matrix.python-version }}-${{ hashFiles('pyproject.toml', 'requirements*.txt') }}
restore-keys: |
${{ runner.os }}-packages-${{ matrix.python-version }}-
${{ runner.os }}-packages-
- name: Cache test results and coverage
uses: actions/cache@v4
with:
path: |
.pytest_cache/
.coverage*
htmlcov/
test-results.xml
coverage.xml
key: ${{ runner.os }}-tests-${{ matrix.python-version }}-${{ github.sha }}
restore-keys: |
${{ runner.os }}-tests-${{ matrix.python-version }}-
${{ runner.os }}-tests-
- name: Upgrade pip and install build tools
run: |
python -m pip install --upgrade pip
pip install --upgrade setuptools wheel build
echo "โ
Build tools installed"
- name: Debug Python environment
if: inputs.debug_mode == true || failure()
run: |
echo "๐ Python Environment Debug:"
python --version
pip --version
python -c "import sys; print(f'Python path: {sys.executable}')"
python -c "import sys; print(f'Python version: {sys.version}')"
python -c "import sys; print(f'Platform: {sys.platform}')"
which python
which pip
ls -la
- name: Install package (method 1 - full with retries)
id: install-full
run: |
echo "๐ง Installing package with all dependencies..."
set +e
for attempt in 1 2 3; do
echo "Attempt $attempt of 3..."
if timeout 300 pip install --timeout=60 --retries=3 -e .[dev,test]; then
echo "success=true" >> $GITHUB_OUTPUT
echo "โ
Full installation successful on attempt $attempt"
exit 0
else
echo "โ Attempt $attempt failed"
if [ $attempt -eq 3 ]; then
echo "success=false" >> $GITHUB_OUTPUT
else
sleep 10
fi
fi
done
exit 1
- name: Install package (method 2 - minimal fallback)
if: steps.install-full.outputs.success != 'true'
run: |
echo "๐ง Fallback: Installing minimal package..."
timeout 180 pip install --timeout=60 --retries=3 -e .
timeout 180 pip install --timeout=60 --retries=3 ruff==0.12.9 mypy==1.17.1 pytest==8.4.1 pytest-asyncio==1.1.0
echo "โ
Minimal installation successful"
- name: Verify installation
run: |
echo "๐ Verifying installation..."
python -c "import simplenote_mcp; print(f'โ
Package imported: {simplenote_mcp.__version__}')"
python -c "import simplenote_mcp.server.server; print('โ
Server module imported')"
python -c "from simplenote_mcp.server.server import run_main; print('โ
run_main imported')"
ruff --version
mypy --version
echo "โ
All verifications passed"
- name: Run linting
run: |
echo "๐ Running linting checks..."
ruff check . --output-format=github
echo "โ
Linting passed"
- name: Run formatting check
run: |
echo "๐ Checking code formatting..."
ruff format --check --diff .
echo "โ
Formatting check passed"
- name: Run type checking
run: |
echo "๐ Running type checking..."
mypy simplenote_mcp --config-file=mypy.ini --show-error-codes
echo "โ
Type checking passed"
- name: Run tests
run: |
echo "๐งช Running test suite with coverage (excluding integration tests)..."
start_time=$(date +%s)
# Explicit test execution with failure detection
set +e # Don't exit on failure, capture exit code
python -m pytest -v --tb=short \
--cov=simplenote_mcp \
--cov-report=xml \
--cov-report=term \
--durations=10 \
--ignore=tests/test_title_search_integration.py \
--ignore=tests/test_search_integration.py \
--ignore=tests/test_title_search.py \
-k "not (integration or real_api or network)"
test_exit_code=$?
set -e # Re-enable exit on error
end_time=$(date +%s)
duration=$((end_time - start_time))
echo "โฑ๏ธ Test suite completed in ${duration}s"
echo "test_duration=${duration}" >> $GITHUB_ENV
# Explicit failure handling for pipeline reliability
if [ $test_exit_code -eq 0 ]; then
echo "โ
All tests passed successfully"
exit 0
else
echo "โ Tests failed with exit code: $test_exit_code"
echo "Pipeline will fail to ensure issues are addressed"
exit $test_exit_code
fi
env:
SIMPLENOTE_EMAIL: "test@example.com"
SIMPLENOTE_PASSWORD: "test_password"
# Use offline mode to prevent network calls
SIMPLENOTE_OFFLINE_MODE: "true"
- name: Upload coverage report
if: always()
uses: actions/upload-artifact@v4
with:
name: coverage-${{ matrix.python-version }}
path: coverage.xml
retention-days: 7
- name: Build package
run: |
echo "๐ฆ Building package..."
python -m build
echo "โ
Package built successfully"
ls -la dist/
- name: Upload build artifacts
uses: actions/upload-artifact@v4
with:
name: build-artifacts-${{ matrix.python-version }}
path: |
dist/
build/
*.egg-info/
retention-days: 7
compression-level: 6
- name: Test package installation
run: |
echo "๐ฆ Testing built package..."
pip install dist/*.whl
python -c "import simplenote_mcp; print(f'โ
Built package works: {simplenote_mcp.__version__}')"
echo "โ
Built package test passed"
security-and-quality:
name: Security and Quality Checks
runs-on: ubuntu-latest
timeout-minutes: 10
needs: diagnostics # Run in parallel with build-and-test
steps:
- name: Checkout code
uses: actions/checkout@v5
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
timeout 300 pip install --timeout=60 --retries=3 -e .[dev,test] || \
timeout 180 pip install --timeout=60 --retries=3 -e . ruff==0.12.9 mypy==1.17.1
- name: Run security checks (package only)
run: |
echo "๐ Running security checks on main package..."
timeout 60 ruff check --select=S simplenote_mcp/ --output-format=github || echo "โ ๏ธ Security scan completed with warnings"
echo "โ
Security scan completed"
- name: Dependency vulnerability scan (pip-audit)
run: |
echo "๐ Running pip-audit with HIGH/CRITICAL severity blocking..."
pip install pip-audit==2.7.3
# Run pip-audit and capture results
set +e
pip-audit -r requirements-lock.txt -f json -o pip-audit-report.json
AUDIT_EXIT_CODE=$?
set -e
# Parse results and determine severity
python - <<'EOF'
import json
import sys
import os
try:
with open('pip-audit-report.json', 'r') as f:
audit_data = json.load(f)
vulnerabilities = audit_data.get('vulnerabilities', [])
critical_count = 0
high_count = 0
medium_count = 0
low_count = 0
print(f"Found {len(vulnerabilities)} vulnerabilities")
for vuln in vulnerabilities:
aliases = vuln.get('aliases', [])
description = vuln.get('description', 'No description')
package = vuln.get('package', 'unknown')
# Determine severity (pip-audit doesn't always provide severity)
# Use heuristics based on CVE data or description
severity = 'UNKNOWN'
if any('critical' in alias.lower() for alias in aliases):
severity = 'CRITICAL'
critical_count += 1
elif any('high' in alias.lower() for alias in aliases):
severity = 'HIGH'
high_count += 1
elif any('medium' in alias.lower() for alias in aliases):
severity = 'MEDIUM'
medium_count += 1
else:
# Default to HIGH for unknown severity to be safe
severity = 'HIGH'
high_count += 1
print(f"- {severity}: {package} - {description[:100]}...")
print(f"\nSeverity Summary:")
print(f" CRITICAL: {critical_count}")
print(f" HIGH: {high_count}")
print(f" MEDIUM: {medium_count}")
print(f" LOW: {low_count}")
# Fail on CRITICAL or HIGH severity vulnerabilities
if critical_count > 0:
print(f"\nโ BLOCKING: {critical_count} CRITICAL vulnerabilities found")
sys.exit(2)
elif high_count > 0:
print(f"\nโ BLOCKING: {high_count} HIGH severity vulnerabilities found")
sys.exit(1)
else:
print(f"\nโ
No CRITICAL/HIGH severity vulnerabilities found")
except FileNotFoundError:
print("โ pip-audit report not found")
sys.exit(1)
except json.JSONDecodeError as e:
print(f"โ Failed to parse pip-audit report: {e}")
sys.exit(1)
EOF
echo "โ
Vulnerability scan completed"
- name: Generate SBOM (Software Bill of Materials)
continue-on-error: true
run: |
echo "๐ Generating SBOM..."
# Install SBOM generation tools
pip install cyclonedx-bom==7.0.0
# Generate SBOM in multiple formats
echo "Generating CycloneDX SBOM..."
cyclonedx-py -o sbom-cyclonedx.json -F json --install-all-packages
cyclonedx-py -o sbom-cyclonedx.xml -F xml --install-all-packages
# Generate pip-audit SBOM format
echo "Generating pip-audit requirements SBOM..."
pip-audit -r requirements-lock.txt -f cyclonedx-json -o sbom-pip-audit.json || true
# Generate simple requirements-based SBOM
echo "Generating requirements-based SBOM..."
python - <<'EOF'
import json
import subprocess
import sys
from datetime import datetime
def get_package_info():
"""Get installed package information."""
try:
result = subprocess.run(
[sys.executable, '-m', 'pip', 'list', '--format=json'],
capture_output=True, text=True, check=True
)
return json.loads(result.stdout)
except Exception as e:
print(f"Error getting package info: {e}")
return []
# Generate simple SBOM
packages = get_package_info()
sbom = {
"bomFormat": "CycloneDX",
"specVersion": "1.5",
"serialNumber": "urn:uuid:simplenote-mcp-server-sbom",
"version": 1,
"metadata": {
"timestamp": datetime.now().isoformat(),
"tools": [
{
"vendor": "simplenote-mcp-server",
"name": "pip-list",
"version": "1.0.0"
}
],
"component": {
"type": "application",
"name": "simplenote-mcp-server",
"version": "1.6.0",
"description": "A simple MCP Server that connects to Simplenote"
}
},
"components": []
}
for pkg in packages:
component = {
"type": "library",
"name": pkg["name"],
"version": pkg["version"],
"purl": f"pkg:pypi/{pkg['name']}@{pkg['version']}"
}
sbom["components"].append(component)
# Save SBOM
with open('sbom-simple.json', 'w') as f:
json.dump(sbom, f, indent=2)
print(f"Generated SBOM with {len(packages)} components")
EOF
# Display SBOM summary
echo "SBOM Files Generated:"
ls -la sbom-*.json sbom-*.xml 2>/dev/null || true
if [ -f "sbom-simple.json" ]; then
echo "SBOM Component Count:"
python -c "import json; data=json.load(open('sbom-simple.json')); print(f'Components: {len(data[\"components\"])}')"
fi
echo "โ
SBOM generation completed"
- name: Upload SBOM artifacts
if: always()
uses: actions/upload-artifact@v4
with:
name: sbom-reports
path: |
sbom-*.json
sbom-*.xml
retention-days: 30
- name: Upload pip-audit report
if: always()
uses: actions/upload-artifact@v4
with:
name: pip-audit-report
path: pip-audit-report.json
retention-days: 7
- name: Validate badges
run: |
echo "๐ Validating README badges..."
timeout 120 python scripts/validate-badges.py || echo "โ ๏ธ Badge validation completed with timeout/error"
echo "โ
Badge validation finished"
integration-tests:
name: Integration Tests (Optional)
runs-on: ubuntu-latest
timeout-minutes: 15
needs: build-and-test
# Disabled by default - to enable:
# 1. Change 'if: false' to 'if: true' or remove the line entirely
# 2. Add SIMPLENOTE_EMAIL and SIMPLENOTE_PASSWORD secrets to the repository
if: false
steps:
- name: Checkout code
uses: actions/checkout@v5
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -e .[test] || pip install -e .
- name: Run integration tests
run: |
echo "๐ Running integration tests with real API..."
python -m pytest -v --tb=short \
tests/test_title_search_integration.py \
tests/test_search_integration.py \
tests/test_title_search.py \
-k "integration or real_api or network"
env:
SIMPLENOTE_EMAIL: ${{ secrets.SIMPLENOTE_EMAIL }}
SIMPLENOTE_PASSWORD: ${{ secrets.SIMPLENOTE_PASSWORD }}
docker-test:
name: Docker Build Test
runs-on: ubuntu-latest
timeout-minutes: 20
needs: build-and-test
steps:
- name: Checkout code
uses: actions/checkout@v5
- name: Build Docker image
run: |
echo "๐ณ Building Docker image..."
docker build -t simplenote-mcp-test .
echo "โ
Docker build successful"
- name: Test Docker image
run: |
echo "๐งช Testing Docker image..."
docker run --rm simplenote-mcp-test --help
docker run --rm --entrypoint python simplenote-mcp-test -c "import simplenote_mcp; print('โ
Docker image works')"
echo "โ
Docker image test passed"
summary:
name: CI Summary
runs-on: ubuntu-latest
needs: [diagnostics, build-and-test, security-and-quality, docker-test]
if: always()
steps:
- name: Summary
run: |
echo "๐ฏ CI/CD Pipeline Summary"
echo "========================"
echo "Diagnostics: ${{ needs.diagnostics.result }}"
echo "Build and Test: ${{ needs.build-and-test.result }}"
echo "Security and Quality: ${{ needs.security-and-quality.result }}"
echo "Docker Test: ${{ needs.docker-test.result }}"
echo "========================"
if [[ "${{ needs.build-and-test.result }}" == "success" ]]; then
echo "๐ Core functionality: PASSED"
else
echo "โ Core functionality: FAILED"
exit 1
fi