Skip to main content
Glama

mcp-run-python

Official
by pydantic
ci.yml (12.2 kB)
# CI workflow for pydantic-ai: lint, typecheck, docs, tests (matrix + lowest
# versions + live), coverage aggregation, docs deploy, PyPI release, tweet.
# NOTE(review): reconstructed from a whitespace-flattened scrape — step/env
# attachment points were inferred from YAML nesting conventions; verify
# against the canonical file in the repository.
name: CI

on:
  push:
    branches:
      - main
    tags:
      - "**"
  pull_request: {}

env:
  COLUMNS: 150
  # quoted to avoid YAML float typing (e.g. an unquoted 3.10 would parse as 3.1)
  UV_PYTHON: "3.12"
  UV_FROZEN: "1"

permissions:
  contents: read

jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
      - name: Install dependencies
        run: uv sync --all-extras --all-packages --group lint
      - uses: pre-commit/action@v3.0.0
        with:
          extra_args: --all-files --verbose
        env:
          SKIP: no-commit-to-branch
      - run: uv build --all-packages
      - run: ls -lh dist/

  # mypy and lint are a bit slower than other jobs, so we run them separately
  mypy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
      - name: Install dependencies
        run: uv sync --no-dev --group lint
      - run: make typecheck-mypy

  docs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
      - run: uv sync --group docs
      # always build docs to check it works without insiders packages
      - run: make docs
      - run: make docs-insiders
        if: github.event.pull_request.head.repo.full_name == github.repository || github.ref == 'refs/heads/main'
        env:
          PPPR_TOKEN: ${{ secrets.PPPR_TOKEN }}
      - run: tree -sh site
      - uses: actions/setup-node@v4
      - run: npm install
        working-directory: docs-site
      - run: npm run typecheck
        working-directory: docs-site
      - name: Store docs
        uses: actions/upload-artifact@v4
        with:
          name: site
          path: site
      # check all docs images are tinified, You'll need an API key from https://tinify.com/ to fix this if it fails
      - run: uvx tinicly docs --check

  test-live:
    runs-on: ubuntu-latest
    timeout-minutes: 5
    # only run on same-repo PRs or pushes — forks don't have the API-key secrets
    if: github.event.pull_request.head.repo.full_name == github.repository || github.event_name == 'push'
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
      - uses: pydantic/ollama-action@v3
        with:
          model: qwen2:0.5b
      - run: uv sync --only-dev
      - run: >
          uv run --package pydantic-ai-slim
          --extra openai --extra vertexai --extra google --extra groq
          --extra anthropic --extra mistral --extra cohere
          pytest tests/test_live.py -v --durations=100
        env:
          PYDANTIC_AI_LIVE_TEST_DANGEROUS: "CHARGE-ME!"
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
          GEMINI_API_KEY: ${{ secrets.GEMINI_API_KEY }}
          GOOGLE_SERVICE_ACCOUNT_CONTENT: ${{ secrets.GOOGLE_SERVICE_ACCOUNT_CONTENT }}
          GROQ_API_KEY: ${{ secrets.GROQ_API_KEY }}
          ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
          MISTRAL_API_KEY: ${{ secrets.MISTRAL_API_KEY }}
          CO_API_KEY: ${{ secrets.COHERE_API_KEY }}

  test:
    name: test on ${{ matrix.python-version }} (${{ matrix.install.name }})
    runs-on: ubuntu-latest
    timeout-minutes: 10
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.10", "3.11", "3.12", "3.13"]
        install:
          - name: pydantic-ai-slim
            command: "--package pydantic-ai-slim"
          - name: standard
            command: ""
          - name: all-extras
            command: "--all-extras"
    env:
      UV_PYTHON: ${{ matrix.python-version }}
      CI: true
      COVERAGE_PROCESS_START: ./pyproject.toml
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
      - uses: denoland/setup-deno@v2
        with:
          deno-version: v2.x
      - run: mkdir .coverage
      - run: uv run mcp-run-python example --deps=numpy
      - run: uv sync --only-dev
      - run: uv run ${{ matrix.install.command }} coverage run -m pytest --durations=100 -n auto --dist=loadgroup
        env:
          COVERAGE_FILE: .coverage/.coverage.${{ matrix.python-version }}-${{ matrix.install.name }}
      - name: store coverage files
        uses: actions/upload-artifact@v4
        with:
          name: coverage-${{ matrix.python-version }}-${{ matrix.install.name }}
          path: .coverage
          include-hidden-files: true

  test-lowest-versions:
    name: test on ${{ matrix.python-version }} (lowest-versions)
    runs-on: ubuntu-latest
    timeout-minutes: 10
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.10", "3.11", "3.12", "3.13"]
    env:
      UV_PYTHON: ${{ matrix.python-version }}
      CI: true
      COVERAGE_PROCESS_START: ./pyproject.toml
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
      - uses: denoland/setup-deno@v2
        with:
          deno-version: v2.x
      - run: mkdir .coverage
      - run: uv sync --group dev
      - run: uv run mcp-run-python example --deps=numpy
      # UV_FROZEN must be unset in the SAME step as the pytest invocation:
      # a standalone `run: unset UV_FROZEN` step has no effect because shell
      # environment changes do not persist across GitHub Actions steps, so
      # `--resolution lowest-direct` would still see the frozen lockfile.
      - run: |
          unset UV_FROZEN
          uv run --all-extras --resolution lowest-direct coverage run -m pytest --durations=100 -n auto --dist=loadgroup
        env:
          COVERAGE_FILE: .coverage/.coverage.${{ matrix.python-version }}-lowest-versions
      - name: store coverage files
        uses: actions/upload-artifact@v4
        with:
          name: coverage-${{ matrix.python-version }}-lowest-versions
          path: .coverage
          include-hidden-files: true

  test-examples:
    name: test examples on ${{ matrix.python-version }}
    runs-on: ubuntu-latest
    timeout-minutes: 10
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.11", "3.12", "3.13"]
    env:
      UV_PYTHON: ${{ matrix.python-version }}
      CI: true
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
      - run: uv run --all-extras python tests/import_examples.py

  coverage:
    runs-on: ubuntu-latest
    needs: [test, test-lowest-versions]
    steps:
      - uses: actions/checkout@v4
        with:
          # needed for diff-cover
          fetch-depth: 0
      - name: get coverage files
        uses: actions/download-artifact@v4
        with:
          merge-multiple: true
          path: .coverage
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
      - run: uv sync --group dev
      - run: uv run coverage combine
      - run: uv run coverage report
      - run: uv run strict-no-cover
        env:
          COVERAGE_FILE: .coverage/.coverage
      - run: uv run coverage html --show-contexts --title "Pydantic AI coverage for ${{ github.sha }}"
      - uses: actions/upload-artifact@v4
        with:
          name: coverage-html
          path: htmlcov
          include-hidden-files: true

  # https://github.com/marketplace/actions/alls-green#why used for branch protection checks
  check:
    if: always()
    needs:
      - lint
      - mypy
      - docs
      - test-live
      - test
      - test-lowest-versions
      - test-examples
      - coverage
    runs-on: ubuntu-latest
    steps:
      - name: Decide whether the needed jobs succeeded or failed
        uses: re-actors/alls-green@release/v1
        with:
          jobs: ${{ toJSON(needs) }}
          allowed-skips: test-live

  deploy-docs:
    needs: [check]
    if: success() && startsWith(github.ref, 'refs/tags/')
    runs-on: ubuntu-latest
    environment:
      name: deploy-docs
      url: https://ai.pydantic.dev
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
      - run: npm install
        working-directory: docs-site
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
      - uses: actions/download-artifact@v4
        with:
          name: site
          path: site
      - uses: cloudflare/wrangler-action@v3
        with:
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          workingDirectory: docs-site
          command: >
            deploy
            --var GIT_COMMIT_SHA:${{ github.sha }}
            --var GIT_BRANCH:main
      - run: uv sync --group docs-upload
      - run: uv run python docs/.hooks/algolia.py upload
        env:
          ALGOLIA_WRITE_API_KEY: ${{ secrets.ALGOLIA_WRITE_API_KEY }}

  deploy-docs-preview:
    needs: [check]
    if: success() && github.ref == 'refs/heads/main'
    runs-on: ubuntu-latest
    environment:
      name: deploy-docs-preview
    permissions:
      deployments: write
      statuses: write
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
      - run: npm install
        working-directory: docs-site
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
      - uses: actions/download-artifact@v4
        with:
          name: site
          path: site
      - uses: cloudflare/wrangler-action@v3
        id: deploy
        with:
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          environment: previews
          workingDirectory: docs-site
          command: >
            deploy
            --var GIT_COMMIT_SHA:${{ github.sha }}
            --var GIT_BRANCH:main
      - name: Set preview URL
        run: uv run --no-project --with httpx .github/set_docs_main_preview_url.py
        env:
          DEPLOY_OUTPUT: ${{ steps.deploy.outputs.command-output }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          REPOSITORY: ${{ github.repository }}
          REF: ${{ github.sha }}

  # TODO(Marcelo): We need to split this into two jobs: `build` and `release`.
  release:
    needs: [check]
    if: success() && startsWith(github.ref, 'refs/tags/')
    runs-on: ubuntu-latest
    environment:
      name: release
      url: https://pypi.org/project/pydantic-ai/${{ steps.inspect_package.outputs.version }}
    permissions:
      # required for PyPI trusted publishing
      id-token: write
    outputs:
      package-version: ${{ steps.inspect_package.outputs.version }}
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
      - run: uv build --all-packages
      - name: Inspect package version
        id: inspect_package
        run: |
          uv tool install --with uv-dynamic-versioning hatchling
          version=$(uvx hatchling version)
          echo "version=$version" >> "$GITHUB_OUTPUT"
      - name: Publish to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          skip-existing: true

  send-tweet:
    name: Send tweet
    needs: [release]
    if: needs.release.result == 'success'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/setup-python@v5
        with:
          python-version: "3.12"
      - name: Install dependencies
        run: pip install tweepy==4.14.0
      - name: Send tweet
        shell: python
        run: |
          import os
          import tweepy

          client = tweepy.Client(
              access_token=os.getenv("TWITTER_ACCESS_TOKEN"),
              access_token_secret=os.getenv("TWITTER_ACCESS_TOKEN_SECRET"),
              consumer_key=os.getenv("TWITTER_CONSUMER_KEY"),
              consumer_secret=os.getenv("TWITTER_CONSUMER_SECRET"),
          )
          version = os.getenv("VERSION").strip('"')
          tweet = os.getenv("TWEET").format(version=version)
          client.create_tweet(text=tweet)
        env:
          VERSION: ${{ needs.release.outputs.package-version }}
          TWEET: |
            Pydantic AI version {version} is out! 🎉
            https://github.com/pydantic/pydantic-ai/releases/tag/v{version}
          TWITTER_CONSUMER_KEY: ${{ secrets.TWITTER_CONSUMER_KEY }}
          TWITTER_CONSUMER_SECRET: ${{ secrets.TWITTER_CONSUMER_SECRET }}
          TWITTER_ACCESS_TOKEN: ${{ secrets.TWITTER_ACCESS_TOKEN }}
          TWITTER_ACCESS_TOKEN_SECRET: ${{ secrets.TWITTER_ACCESS_TOKEN_SECRET }}

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/pydantic/pydantic-ai'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.