# Multi-stage build for development - builds from local source
# Build stage - creates the same package that would be published to npm
FROM node:22 AS builder
WORKDIR /app
# Copy package files
COPY package*.json ./
# Install all dependencies (including dev dependencies for building)
RUN npm ci
# Copy source code
COPY . .
# Build the project (same as npm publish preparation)
RUN npm run build
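# Optional (commented out) sanity check, assuming standard npm CLI behavior:
# `npm pack --dry-run` lists the files that would go into the published tarball,
# which helps confirm the build output matches what npm would actually publish.
# RUN npm pack --dry-run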
# Production stage - mirrors the production Dockerfile but installs the locally built package
FROM node:22 AS production
# Install kubectl (same as production needs)
RUN apt-get update && \
    apt-get install -y curl && \
    ARCH=$(dpkg --print-architecture) && \
    curl -LO "https://dl.k8s.io/release/$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/${ARCH}/kubectl" && \
    chmod +x kubectl && \
    mv kubectl /usr/local/bin/ && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*
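# Optional (commented out) verification step; `kubectl version --client` only
# inspects the local binary and does not require a cluster connection.
# RUN kubectl version --client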
# Copy the complete built project from the builder stage (everything npm would publish, plus installed dependencies)
COPY --from=builder /app ./package
# Install the local package globally (same as the production Dockerfile, but from the local build instead of the npm registry)
RUN npm install -g ./package
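# Optional (commented out) check that the global install put the CLI on PATH;
# `command -v` is a POSIX shell builtin, so this works with the default /bin/sh.
# RUN command -v dot-ai-mcp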
# Set working directory
WORKDIR /app
# Create sessions directory
RUN mkdir -p /app/sessions
# Set default environment variables
ENV DOT_AI_SESSION_DIR=/app/sessions
ENV NODE_ENV=production
# Default command to run dot-ai-mcp (same as production)
CMD ["dot-ai-mcp"]
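# Example local usage (illustrative; the image tag and host sessions path are
# assumptions, not taken from the project's docs):
#   docker build -t dot-ai:dev .
#   docker run --rm -v "$(pwd)/sessions:/app/sessions" dot-ai:dev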