Weaviate MCP Server

by sndani
The container image is built from the Smithery-generated Dockerfile:

```dockerfile
# Generated by https://smithery.ai. See: https://smithery.ai/docs/config#dockerfile

# Use a Python image with uv pre-installed
FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS uv

# Set the working directory
WORKDIR /app

# Enable bytecode compilation
ENV UV_COMPILE_BYTECODE=1

# Copy the project files
COPY . /app

# Install the project dependencies using uv
RUN --mount=type=cache,target=/root/.cache/uv \
    --mount=type=bind,source=uv.lock,target=uv.lock \
    --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
    uv sync --frozen --no-install-project --no-dev --no-editable

# Then, add the rest of the project source code and install it
# Installing separately from its dependencies allows optimal layer caching
RUN --mount=type=cache,target=/root/.cache/uv \
    uv sync --frozen --no-dev --no-editable

# Use the Python base image
FROM python:3.12-slim-bookworm

# Set the working directory
WORKDIR /app

# Copy the installed dependencies from the uv stage
COPY --from=uv /root/.local /root/.local
COPY --from=uv --chown=app:app /app/.venv /app/.venv

# Add the project files
COPY --from=uv /app /app

# Place executables in the environment at the front of the path
ENV PATH="/app/.venv/bin:$PATH"

# Set the entrypoint command
ENTRYPOINT ["python", "-m", "src.server", "--weaviate-url", "http://localhost:8080/v1/graphql", "--weaviate-api-key", "", "--search-collection-name", "Reader", "--store-collection-name", "Reader", "--openai-api-key", ""]
```
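For reference, below is a minimal sketch of how an MCP client could launch this server over stdio with the same flags the Dockerfile's ENTRYPOINT uses. It assumes the `mcp` Python SDK is installed and that `src.server` is importable from the working directory; the flag values (the empty API keys and the `Reader` collection name) simply mirror the ENTRYPOINT and are placeholders, not recommendations.

```python
# Minimal sketch: spawn the server as a subprocess over stdio and list its tools.
# Assumes the `mcp` Python SDK is installed and the project dependencies are available.
import asyncio

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

# Mirrors the Dockerfile ENTRYPOINT; values here are placeholders.
server_params = StdioServerParameters(
    command="python",
    args=[
        "-m", "src.server",
        "--weaviate-url", "http://localhost:8080/v1/graphql",
        "--weaviate-api-key", "",
        "--search-collection-name", "Reader",
        "--store-collection-name", "Reader",
        "--openai-api-key", "",
    ],
)

async def main() -> None:
    # Launch the server process and communicate over its stdin/stdout.
    async with stdio_client(server_params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            # List whatever tools the server exposes (tool names are server-defined).
            tools = await session.list_tools()
            print([tool.name for tool in tools.tools])

if __name__ == "__main__":
    asyncio.run(main())
```

Inside the container, the same server command runs automatically via the ENTRYPOINT; the sketch above is only for experimenting with or debugging the server outside Docker.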