
DevOps AI Toolkit

by vfarcic
Dockerfile.local (1.07 kB)
FROM node:22

# Install kubectl and curl
RUN apt-get update && \
    apt-get install -y curl && \
    ARCH=$(dpkg --print-architecture) && \
    curl -LO "https://dl.k8s.io/release/$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/${ARCH}/kubectl" && \
    chmod +x kubectl && \
    mv kubectl /usr/local/bin/ && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*

# Set working directory
WORKDIR /app

# Copy package files
COPY package*.json ./

# Install dependencies
RUN npm ci --only=production

# Copy built application
COPY dist/ ./dist/
COPY prompts/ ./prompts/
COPY shared-prompts/ ./shared-prompts/

# Create sessions directory
RUN mkdir -p /app/sessions

# Create symlink for the binary
RUN ln -s /app/dist/mcp/server.js /usr/local/bin/dot-ai-mcp && chmod +x /usr/local/bin/dot-ai-mcp

# Set default environment variables
ENV DOT_AI_SESSION_DIR=/app/sessions
ENV NODE_ENV=production
ENV TRANSPORT_TYPE=stdio
ENV PORT=3456
ENV HOST=0.0.0.0

# Expose port for HTTP transport mode
EXPOSE 3456

# Default command to run dot-ai-mcp
CMD ["dot-ai-mcp"]
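
The Dockerfile packages the compiled dist/ output into an image that defaults to the stdio transport and can also serve HTTP on port 3456. A minimal build-and-run sketch follows; the image tag dot-ai-mcp:local, the kubeconfig mount, and the TRANSPORT_TYPE=http value are assumptions for illustration, not names taken from the project.

# Build the image from the repository root (assumes dist/ has already been compiled)
docker build -f Dockerfile.local -t dot-ai-mcp:local .

# Run with the default stdio transport; -i keeps stdin open so an MCP client
# can spawn the container as a subprocess. Mounting a kubeconfig is an assumption
# about how kubectl inside the container reaches a cluster.
docker run -i --rm -v "$HOME/.kube/config:/root/.kube/config:ro" dot-ai-mcp:local

# Run in HTTP transport mode on the exposed port (the exact TRANSPORT_TYPE value
# accepted by the server is an assumption; check the project's documentation).
docker run --rm -p 3456:3456 -e TRANSPORT_TYPE=http \
  -v "$HOME/.kube/config:/root/.kube/config:ro" dot-ai-mcp:local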

MCP directory API

We provide all the information about the MCP servers in our directory via the MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/vfarcic/dot-ai'
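
The response schema is not documented here, so the sketch below simply pretty-prints whatever JSON the endpoint returns (jq is assumed to be installed):

curl -s 'https://glama.ai/api/mcp/v1/servers/vfarcic/dot-ai' | jq .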

If you have feedback or need assistance with the MCP directory API, please join our Discord server.