# syntax=docker/dockerfile:1
# Generated by https://smithery.ai. See: https://smithery.ai/docs/config#dockerfile
FROM node:lts-alpine

# Default environment variables so the server can initialize without
# external configuration (override at `docker run` time as needed).
ENV LLM_MODEL_NAME=dummy-model \
    LLM_MODEL_PROVIDER=ollama \
    LLM_BASE_URL=http://localhost:11434

# App directory; created by WORKDIR, then handed to the unprivileged
# `node` user that the official node image provides.
WORKDIR /usr/src/app
RUN chown node:node /usr/src/app
USER node

# Copy dependency manifests first so the install layer stays cached
# until package.json / package-lock.json actually change.
COPY --chown=node:node package.json package-lock.json ./

# `npm ci` installs exactly what the lockfile pins (reproducible builds);
# --ignore-scripts skips lifecycle scripts — the build step below is run
# explicitly instead.
RUN npm ci --ignore-scripts

# Copy the rest of the source and build the project.
COPY --chown=node:node . .
RUN npm run build

# Start the MCP server (exec form so signals reach the process correctly).
CMD [ "npm", "start" ]
# MCP directory API:
# All information about MCP servers is available via the MCP API, e.g.:
#   curl -X GET 'https://glama.ai/api/mcp/v1/servers/sammcj/mcp-llm'
# If you have feedback or need assistance with the MCP directory API, please join our Discord server.