# NOTE(review): the following directory-listing text is not YAML and broke the
# workflow parse; it is preserved here as comments.
# We provide all the information about MCP servers via our MCP API:
#   curl -X GET 'https://glama.ai/api/mcp/v1/servers/sweeden-ttu/canvas-lms-mcp'
# If you have feedback or need assistance with the MCP directory API, please join our Discord server.
name: AutoGen CI/CD Pipeline

on:
  push:
    branches: [main, develop]
  pull_request:
    branches: [main]
  schedule:
    # Weekly embeddings refresh (Sun 00:00 UTC)
    - cron: "0 0 * * 0"
  workflow_dispatch:
    inputs:
      run_agents:
        description: 'Run AutoGen agents'
        required: false
        # type: boolean inputs take a boolean default, not the string 'false'
        default: false
        type: boolean
      run_embeddings:
        description: 'Run embeddings pipeline'
        required: false
        default: false
        type: boolean
      run_agent_review:
        description: 'Run agent review (review-changes)'
        required: false
        default: false
        type: boolean

env:
  # Quoted so 3.11 is not parsed as the float 3.11 -> "3.11" stays exact
  PYTHON_VERSION: "3.11"
  UV_CACHE_DIR: /tmp/.uv-cache
  GIT_BRANCH_NAME_4SYNC: cs5374-cid70713-f11855957-m_item_id3998930
jobs:
  # ============================================
  # Strip env_save / .env.save from repo (never commit credentials)
  # ============================================
  strip-env-save:
    name: Strip env_save / .env.save
    runs-on: ubuntu-latest
    permissions:
      contents: write  # required to push the removal commit
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          token: ${{ secrets.GITHUB_TOKEN }}

      - name: Check for env_save / .env.save
        id: check
        run: |
          FOUND=0
          [ -f env_save ] && FOUND=1
          [ -f .env.save ] && FOUND=1
          echo "found=$FOUND" >> "$GITHUB_OUTPUT"

      # Guard against re-triggering ourselves: our removal commit carries
      # [skip ci] in its message.
      - name: Skip if commit is our removal
        id: skip
        run: |
          if git log -1 --pretty=%B | grep -q '\[skip ci\]'; then
            echo "skip=1" >> "$GITHUB_OUTPUT"
          else
            echo "skip=0" >> "$GITHUB_OUTPUT"
          fi

      - name: Remove env_save and .env.save and push
        if: steps.check.outputs.found == '1' && steps.skip.outputs.skip == '0'
        run: |
          git config user.name "github-actions[bot]"
          git config user.email "github-actions[bot]@users.noreply.github.com"
          # Remove each file independently: a single `git rm a b` fails (and
          # stages nothing) when only one of the two pathspecs exists.
          git rm -f env_save 2>/dev/null || true
          git rm -f .env.save 2>/dev/null || true
          # Nothing staged means nothing to commit; succeed quietly.
          git diff --staged --quiet && exit 0
          git commit -m "chore: remove env_save and .env.save [skip ci]"
          git push
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# ============================================
# Build and Test Stage
# ============================================
build-test:
name: Build & Test
runs-on: ubuntu-latest
needs: strip-env-save
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Pull latest (in case strip-env-save pushed)
run: git pull origin ${{ github.ref_name }} || true
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: Install uv
uses: astral-sh/setup-uv@v4
with:
enable-cache: true
cache-dependency-glob: "uv.lock"
- name: Install dependencies
run: uv sync --all-extras --dev
- name: Run linting
run: uv run ruff check .
- name: Run type checking
run: uv run mypy server.py --ignore-missing-imports
- name: Run tests
run: uv run pytest tests/ -v --tb=short
env:
CANVAS_API_TOKEN: ${{ secrets.CANVAS_API_TOKEN }}
CANVAS_BASE_URL: ${{ secrets.CANVAS_BASE_URL }}
- name: Run validation pipelines
run: uv run python pipelines/run_pipelines.py
# ============================================
# Log review + Podman reproduce (one pod per job, self-hosted runner)
# See docs/PODMAN_RUNNER.md for setting up a runner with label "podman".
# ============================================
reproduce-failure:
name: Review logs & reproduce in Podman
runs-on: [self-hosted, podman]
needs: build-test
if: failure() && needs.build-test.result == 'failure'
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Review errors and run reproduction (one Podman pod)
run: |
chmod +x .github/reproduce/run-repro.sh
.github/reproduce/run-repro.sh .
env:
CANVAS_API_TOKEN: ${{ secrets.CANVAS_API_TOKEN }}
CANVAS_BASE_URL: ${{ secrets.CANVAS_BASE_URL }}
- name: Upload reproduction log
uses: actions/upload-artifact@v4
with:
name: podman-repro-log-${{ github.run_id }}
path: |
repro.log
repro-errors.txt
retention-days: 14
if-no-files-found: ignore
# ============================================
# GitLab Sync Stage
# ============================================
gitlab-sync:
name: Sync to GitLab
runs-on: ubuntu-latest
needs: build-test
if: github.ref == 'refs/heads/main' && github.event_name == 'push'
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Configure Git
run: |
git config user.name "github-actions[bot]"
git config user.email "github-actions[bot]@users.noreply.github.com"
- name: Add GitLab remote
run: |
git remote add gitlab https://oauth2:${{ secrets.GITLAB_TOKEN }}@gitlab.com/sweeden3/canvas-lms-mcp.git || true
- name: Push to GitLab
run: |
git push gitlab main --force
git push gitlab --tags --force
continue-on-error: true
- name: Sync status
run: echo "::notice::GitLab sync completed"
# ============================================
# AutoGen Agents Stage
# ============================================
autogen-agents:
name: Run AutoGen Agents
runs-on: ubuntu-latest
needs: build-test
if: github.event.inputs.run_agents == 'true'
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: Install uv
uses: astral-sh/setup-uv@v4
- name: Install AutoGen dependencies
run: |
uv sync
uv add autogen-agentchat "autogen-ext[openai]" openai
- name: Run AutoGen status check
run: |
uv run python -c "
print('AutoGen agents ready')
print('Canvas MCP Server status: OK')
"
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
CANVAS_API_TOKEN: ${{ secrets.CANVAS_API_TOKEN }}
# ============================================
# Embeddings Pipeline (docs/EMBEDDINGS_AND_PROMPTS_PLAN.md)
# ============================================
embeddings:
name: Embeddings Pipeline
runs-on: ubuntu-latest
needs: build-test
if: github.event_name == 'schedule' || github.event.inputs.run_embeddings == 'true'
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: Install uv
uses: astral-sh/setup-uv@v4
- name: Install dependencies (with embed extra)
run: |
uv sync --all-extras --dev
uv pip install -e ".[embed]" 2>/dev/null || true
- name: Run embed_docs
run: uv run python scripts/embed_docs.py
- name: Run embed_mcp_server for each submodule
run: |
for dir in mcp/*/; do
[ -d "$dir" ] || continue
name=$(basename "$dir")
uv run python scripts/embed_mcp_server.py "$name" 2>/dev/null || true
done
- name: Upload embeddings artifacts
uses: actions/upload-artifact@v4
with:
name: embeddings-and-prompts
path: |
.cursor/embeddings/
.cursor/prompts/
retention-days: 7
# ============================================
# Agent Review (review-changes; docs/AGENT_REVIEWS.md)
# ============================================
agent-review:
name: Agent Review
runs-on: ubuntu-latest
needs: build-test
if: github.event_name == 'pull_request' || github.event.inputs.run_agent_review == 'true'
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: Install uv
uses: astral-sh/setup-uv@v4
- name: Install dependencies
run: uv sync --all-extras --dev
- name: Run tests (reproduction)
run: uv run pytest tests/ -v --tb=short
env:
CANVAS_API_TOKEN: ${{ secrets.CANVAS_API_TOKEN }}
CANVAS_BASE_URL: ${{ secrets.CANVAS_BASE_URL }}
- name: Agent review checklist
run: |
echo "::notice::Agent review checklist (docs/AGENT_REVIEWS.md)"
echo " 1. Evaluate step-by-step instructions"
echo " 2. Peer review (cs-peer-reviewer-trustworthy-ai, evidence_evaluator)"
echo " 3. Reproduce (re-run tests, re-fetch data, re-build)"
echo " 4. Accept or reject premise (no synthetic/mock/dummy data)"
# ============================================
# Badge Generation Stage
# ============================================
generate-badges:
name: Generate Status Badges
runs-on: ubuntu-latest
needs: [build-test, gitlab-sync]
if: always() && github.ref == 'refs/heads/main'
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Generate badge JSON
run: |
mkdir -p .github/badges
# Build status badge
if [ "${{ needs.build-test.result }}" == "success" ]; then
echo '{"schemaVersion":1,"label":"build","message":"passing","color":"brightgreen"}' > .github/badges/build.json
else
echo '{"schemaVersion":1,"label":"build","message":"failing","color":"red"}' > .github/badges/build.json
fi
# AutoGen badge
echo '{"schemaVersion":1,"label":"AutoGen","message":"enabled","color":"purple"}' > .github/badges/autogen.json
# Sync status badge
if [ "${{ needs.gitlab-sync.result }}" == "success" ]; then
echo '{"schemaVersion":1,"label":"GitLab","message":"synced","color":"fc6d26"}' > .github/badges/gitlab-sync.json
else
echo '{"schemaVersion":1,"label":"GitLab","message":"pending","color":"yellow"}' > .github/badges/gitlab-sync.json
fi
- name: Commit badge updates
run: |
git config user.name "github-actions[bot]"
git config user.email "github-actions[bot]@users.noreply.github.com"
git add .github/badges/
git diff --staged --quiet || git commit -m "chore: update status badges [skip ci]"
git push || true