Skip to main content
Glama

mcp-server-circleci

Official
Dockerfile1.65 kB
# Base image with Node.js and pnpm setup FROM node:lts-alpine AS base ENV PNPM_HOME="/pnpm" ENV PATH="$PNPM_HOME:$PATH" RUN corepack enable WORKDIR /app # Production dependencies stage FROM base AS prod-deps COPY package.json pnpm-lock.yaml ./ RUN --mount=type=cache,id=pnpm,target=/pnpm/store \ pnpm install --prod --frozen-lockfile --ignore-scripts # Build stage FROM base AS build # Install build dependencies RUN apk add --no-cache git python3 make g++ # Copy package files first for better caching COPY package.json pnpm-lock.yaml ./ # Install all dependencies including devDependencies for building RUN --mount=type=cache,id=pnpm,target=/pnpm/store \ pnpm install --frozen-lockfile --ignore-scripts=false # Install express types and ensure all dependencies are available RUN pnpm add -D @types/express # Copy source files COPY . . # Build the application RUN pnpm run build # Install production dependencies for the final image RUN pnpm install --prod --frozen-lockfile # Final stage - clean minimal image FROM node:lts-alpine ENV NODE_ENV=production WORKDIR /app #Debug Build Info ARG BUILD_ID="" ENV BUILD_ID=$BUILD_ID # Install pnpm RUN corepack enable && corepack prepare pnpm@latest --activate # Copy package files and install only production dependencies COPY package.json pnpm-lock.yaml ./ # Install production dependencies RUN pnpm install --prod --frozen-lockfile # Copy built files and node_modules COPY --from=build /app/dist /app/dist COPY --from=build /app/node_modules /app/node_modules # Docker container to listen on port 8000 EXPOSE 8000 # Command to run the MCP server ENTRYPOINT ["node", "dist/index.js"]

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/CircleCI-Public/mcp-server-circleci'

If you have feedback or need assistance with the MCP directory API, please join our Discord server