MCP Memory Service

Dockerfile.slim (3.46 kB)
# Lightweight Docker image optimized for sqlite-vec + ONNX (CPU-only)
# Eliminates PyTorch and CUDA dependencies for ~90% size reduction
FROM python:3.10-slim

# Build arguments for conditional features
ARG SKIP_MODEL_DOWNLOAD=false
ARG PLATFORM=linux/amd64

# Set environment variables for optimized configuration
ENV PYTHONUNBUFFERED=1 \
    MCP_MEMORY_STORAGE_BACKEND=sqlite_vec \
    MCP_MEMORY_USE_ONNX=1 \
    MCP_MEMORY_SQLITE_PATH=/app/sqlite_db \
    MCP_MEMORY_BACKUPS_PATH=/app/backups \
    PYTHONPATH=/app/src \
    DOCKER_CONTAINER=1 \
    CHROMA_TELEMETRY_IMPL=none \
    ANONYMIZED_TELEMETRY=false \
    HF_HUB_DISABLE_TELEMETRY=1

# Set the working directory
WORKDIR /app

# Install minimal system dependencies (no build tools needed for ONNX)
RUN apt-get update && \
    apt-get install -y --no-install-recommends \
        curl \
    && rm -rf /var/lib/apt/lists/*

# Copy essential files
COPY pyproject.toml .
COPY uv.lock .
COPY README.md .
COPY scripts/installation/install_uv.py .

# Install UV
RUN python install_uv.py

# Create directories for data persistence
RUN mkdir -p /app/sqlite_db /app/backups

# Copy source code
COPY src/ /app/src/
COPY run_server.py ./

# Copy utility scripts if they exist
COPY scripts/utils/uv_wrapper.py ./uv_wrapper.py
COPY scripts/utils/memory_wrapper_uv.py ./memory_wrapper_uv.py

# Copy Docker entrypoint scripts
COPY tools/docker/docker-entrypoint.sh /usr/local/bin/
COPY tools/docker/docker-entrypoint-persistent.sh /usr/local/bin/
COPY tools/docker/docker-entrypoint-unified.sh /usr/local/bin/

# Install minimal dependencies (CPU-only, no PyTorch)
RUN python -m uv pip install \
    "mcp>=1.0.0,<2.0.0" \
    "sqlite-vec>=0.1.0" \
    "onnxruntime>=1.15.0" \
    "tokenizers==0.20.3" \
    "build>=0.10.0" \
    "aiohttp>=3.8.0" \
    "fastapi>=0.115.0" \
    "uvicorn>=0.30.0" \
    "python-multipart>=0.0.9" \
    "sse-starlette>=2.1.0" \
    "aiofiles>=23.2.1" \
    "psutil>=5.9.0" \
    "zeroconf>=0.130.0" \
    "PyPDF2>=3.0.0" \
    "chardet>=5.0.0" \
    "click>=8.0.0" \
    && python -m uv pip install -e . --no-deps

# Conditionally pre-download ONNX embedding models
# The Python snippet is a single-line statement sequence so it stays valid
# after shell line continuations; failures fall through to the shell fallback.
RUN if [ "$SKIP_MODEL_DOWNLOAD" != "true" ]; then \
        echo "Pre-downloading ONNX embedding models..." && \
        python -c "from mcp_memory_service.embeddings.onnx_embeddings import ONNXEmbeddingModel; \
            print('Initializing ONNX embedding model...'); \
            model = ONNXEmbeddingModel(); \
            print('ONNX models cached successfully')" \
        || { echo "Warning: Could not pre-download ONNX models"; \
             echo "Models will be downloaded at runtime"; }; \
    else \
        echo "Skipping ONNX model download (SKIP_MODEL_DOWNLOAD=true)"; \
    fi

# Configure stdio for MCP communication and make entrypoints executable
RUN chmod a+rw /dev/stdin /dev/stdout /dev/stderr && \
    chmod +x /usr/local/bin/docker-entrypoint.sh && \
    chmod +x /usr/local/bin/docker-entrypoint-persistent.sh && \
    chmod +x /usr/local/bin/docker-entrypoint-unified.sh

# Add volume mount points for data persistence
VOLUME ["/app/sqlite_db", "/app/backups"]

# Expose the port (if needed)
EXPOSE 8000

# Use the unified entrypoint script by default
# Can be overridden with docker-entrypoint.sh for backward compatibility
ENTRYPOINT ["/usr/local/bin/docker-entrypoint-unified.sh"]
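A minimal build-and-run sketch based on the build argument, volumes, port, and entrypoints declared above. The image tag, volume names, and build context are illustrative assumptions, not values taken from the repository:

# Build the slim image (optionally skip the ONNX model pre-download)
docker build -f Dockerfile.slim -t mcp-memory-service:slim .
docker build -f Dockerfile.slim --build-arg SKIP_MODEL_DOWNLOAD=true -t mcp-memory-service:slim .

# Run with named volumes for the SQLite database and backups, exposing port 8000
docker run -p 8000:8000 -v mcp_sqlite:/app/sqlite_db -v mcp_backups:/app/backups mcp-memory-service:slim

# Override the unified entrypoint with the legacy one, as noted in the Dockerfile comments
docker run --entrypoint /usr/local/bin/docker-entrypoint.sh mcp-memory-service:slim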

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/doobidoo/mcp-memory-service'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.