Skip to main content
Glama
by PaddleHQ
publish.yml (2.44 kB)
---
# Publishes the package to npm whenever package.json changes on main,
# skipping the publish step if the version already exists on the registry.
name: Publish package

on:
  push:
    branches:
      - main
    # Only run when the package manifest (and therefore, potentially,
    # the version field) changes.
    paths:
      - "package.json"

# Default to read-only; the job below escalates only what it needs.
permissions:
  contents: read

jobs:
  publish:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      actions: write
    steps:
      # Audit (but do not block) all outbound network calls from the runner.
      - name: Harden the runner (Audit all outbound calls)
        uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
        with:
          egress-policy: audit

      - name: Checkout code
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Setup Node.js
        uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        with:
          node-version: "22"
          registry-url: "https://registry.npmjs.org"

      - name: Install pnpm
        uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda # v4.1.0
        with:
          version: latest
          run_install: false

      # Resolve the pnpm store path so the cache step below can target it.
      - name: Get pnpm store directory
        id: pnpm-cache
        shell: bash
        run: |
          echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT

      - name: Configure NPM authentication
        run: pnpm config set '//registry.npmjs.org/:_authToken' "${NODE_AUTH_TOKEN}"
        env:
          NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN_PADDLE_MCP_SERVER}}

      - name: Setup pnpm cache
        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
        with:
          path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
          restore-keys: |
            ${{ runner.os }}-pnpm-store-

      - name: Install dependencies
        run: pnpm install

      - name: Build
        run: pnpm build

      # Query the registry for the current package name@version; expose the
      # result as `exists` so the publish step can be skipped for re-runs.
      - name: Check if version exists
        id: version-check
        run: |
          PACKAGE_NAME=$(node -p "require('./package.json').name")
          PKG_VERSION=$(node -p "require('./package.json').version")
          if npm view $PACKAGE_NAME@$PKG_VERSION version &> /dev/null; then
            echo "Version $PKG_VERSION already exists, skipping publish"
            echo "exists=true" >> $GITHUB_OUTPUT
          else
            echo "Version $PKG_VERSION does not exist, will publish"
            echo "exists=false" >> $GITHUB_OUTPUT
          fi

      - name: Publish to npm
        if: steps.version-check.outputs.exists == 'false'
        run: pnpm publish --access public

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/PaddleHQ/paddle-mcp-server'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.