# Generated by https://smithery.ai. See: https://smithery.ai/docs/config#dockerfile
# Use an official Node.js image as the base image
FROM node:22-alpine AS builder
# Set the working directory
WORKDIR /app
# Copy package files and source code
COPY . .
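# A .dockerignore excluding node_modules and dist is assumed here, so local build artifacts
# are not copied into the builder stage.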
# Install dependencies
RUN --mount=type=cache,target=/root/.npm npm install
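# Note: the --mount cache requires BuildKit (DOCKER_BUILDKIT=1 or `docker buildx build`).
# If a package-lock.json is committed, `npm ci` would give more reproducible installs.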
# Build the application
RUN npm run build
# Use a smaller Node.js image for the runtime
FROM node:22-alpine AS runtime
# Set the working directory
WORKDIR /app
# Copy built files from the builder stage
COPY --from=builder /app/dist /app/dist
COPY --from=builder /app/bin /app/bin
COPY --from=builder /app/node_modules /app/node_modules
COPY --from=builder /app/package.json /app/package.json
COPY --from=builder /app/example-spec.json /app/example-spec.json
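# Note: node_modules is copied as-is from the builder, so devDependencies are included;
# running `npm prune --omit=dev` in the builder stage would shrink the runtime image.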
# Set environment variables for the MCP server
ENV API_BASE_URL="https://api.example.com"
ENV OPENAPI_SPEC_FROM_STDIN="true"
ENV TRANSPORT_TYPE="stdio"
ENV SERVER_NAME="mcp-openapi-server"
ENV SERVER_VERSION="1.0.0"
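# These defaults are placeholders; they can be overridden at runtime, e.g.
#   docker run -i --rm -e API_BASE_URL="https://your-api.example.com" <image>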
# Create a startup script that feeds the example spec to the server on stdin;
# exec keeps the Node process as PID 1 so it receives container signals directly
RUN echo '#!/bin/sh' > /app/start-server.sh && \
    echo 'exec node /app/bin/mcp-server.js < /app/example-spec.json' >> /app/start-server.sh && \
    chmod +x /app/start-server.sh
# Expose any required ports (if needed by the application)
# EXPOSE 3000
# Start the server with stdin spec
ENTRYPOINT ["/app/start-server.sh"]
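# Example build and run (illustrative; the image tag is arbitrary):
#   docker build -t mcp-openapi-server .
#   docker run -i --rm mcp-openapi-server
# MCP clients using the stdio transport typically spawn this run command themselves.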