We provide all the information about MCP servers via our MCP API.
curl -X GET 'https://glama.ai/api/mcp/v1/servers/ivo-toby/mcp-picnic'
If you have feedback or need assistance with the MCP directory API, please join our Discord server.
Dockerfile•1.22 kB
# syntax=docker/dockerfile:1
# Generated by https://smithery.ai. See: https://smithery.ai/docs/config#dockerfile

# ---- Build stage: install all deps (incl. dev) and compile ----
FROM node:22-alpine AS builder

WORKDIR /app

# Copy dependency manifests first so the npm ci layer is cached
# until package*.json changes.
COPY package*.json ./

# Install ALL dependencies (dev deps are needed for the build).
# --ignore-scripts skips lifecycle hooks (e.g. the prepare hook) that
# would fail before the source is copied. The cache mount keeps npm's
# download cache on the build host without bloating any image layer.
RUN --mount=type=cache,target=/root/.npm npm ci --ignore-scripts

# Copy the remaining source and build. Keep a .dockerignore that
# excludes node_modules, .git, dist, .env, etc. to avoid cache busts
# and accidental secret leaks.
COPY . .
RUN npm run build

# ---- Runtime stage: minimal image with prod deps + built artifacts ----
FROM node:22-alpine AS runtime

ENV NODE_ENV=production

WORKDIR /app

COPY package*.json ./

# Production dependencies only; --ignore-scripts avoids the prepare
# hook, which would try to rebuild without dev deps present.
RUN --mount=type=cache,target=/root/.npm npm ci --omit=dev --ignore-scripts

# Pull only the built output from the builder stage; compilers and dev
# dependencies never reach this image.
COPY --from=builder /app/dist ./dist
COPY --from=builder /app/bin ./bin

# SECURITY: these are placeholders only. Never bake real credentials
# into the image — supply them at runtime instead, e.g.:
#   docker run -e PICNIC_USERNAME=... -e PICNIC_PASSWORD=... <image>
ENV PICNIC_USERNAME=Some_username
ENV PICNIC_PASSWORD=Some_password

# Drop root: the official node images ship an unprivileged "node" user.
USER node

# Expose any required ports (if needed by the application)
# EXPOSE 3000

# Exec-form entrypoint so node runs as PID 1 and receives SIGTERM
# directly from `docker stop`.
ENTRYPOINT ["node", "bin/mcp-server.js"]