# We provide all the information about MCP servers via our MCP API:
#   curl -X GET 'https://glama.ai/api/mcp/v1/servers/eat-pray-ai/yutu'
# If you have feedback or need assistance with the MCP directory API, please join our Discord server.
name: Release to GitHub Packages
description: Release to GitHub Packages

inputs:
  # Artifact-relative paths to the pre-built linux binaries consumed by the
  # Docker build (see the build-args wired into docker/build-push-action below).
  arm64_binary:
    description: Directory for linux arm64 binary
    required: true
    default: yutu_linux_arm64_v8.0/yutu-linux-arm64
  amd64_binary:
    description: Directory for linux amd64 binary
    required: true
    default: yutu_linux_amd64_v1/yutu-linux-amd64
  # Forwarded to docker/metadata-action's `flavor: latest=...`; accepts
  # "auto" / "true" / "false" per that action's flavor syntax.
  add_latest:
    description: Whether to add the latest tag
    required: false
    default: auto
runs:
  using: composite
  steps:
    # Sparse checkout: only the files the image build needs.
    - uses: actions/checkout@v5
      with:
        sparse-checkout: |
          Dockerfile
          server.json
        sparse-checkout-cone-mode: false
    # Fetch the pre-built binaries produced by an earlier build job.
    - uses: actions/download-artifact@v5
      with:
        name: binary
        path: ${{ github.workspace }}/dist
    # QEMU + Buildx enable the multi-platform (amd64 + arm64) build below.
    - uses: docker/setup-qemu-action@v3
    - uses: docker/setup-buildx-action@v3
    - uses: docker/login-action@v3
      with:
        registry: ghcr.io
        username: ${{ github.actor }}
        password: ${{ github.token }}
    # Derive tags/labels/annotations; `latest` tagging is controlled by the
    # add_latest input (auto/true/false).
    - id: meta
      uses: docker/metadata-action@v5
      with:
        images: ghcr.io/${{ github.repository }}
        flavor: |
          latest=${{ inputs.add_latest }}
        labels: |
          io.modelcontextprotocol.server.name=io.github.eat-pray-ai/yutu
    - id: push
      uses: docker/build-push-action@v6
      with:
        context: .
        target: yutu
        push: true
        tags: ${{ steps.meta.outputs.tags }}
        annotations: ${{ steps.meta.outputs.annotations }}
        labels: ${{ steps.meta.outputs.labels }}
        platforms: linux/amd64,linux/arm64
        provenance: true
        sbom: true
        # BUGFIX: arm64_binary previously received inputs.amd64_binary
        # (copy-paste error), so the linux/arm64 image was built with the
        # amd64 binary. Each arch now gets its own binary path.
        build-args: |
          arm64_binary=${{ inputs.arm64_binary }}
          amd64_binary=${{ inputs.amd64_binary }}
    # Attest provenance for the pushed image digest.
    - uses: actions/attest-build-provenance@v3
      with:
        subject-name: ghcr.io/${{ github.repository }}
        subject-digest: ${{ steps.push.outputs.digest }}
        push-to-registry: true