Skip to main content
Glama

FrankfurterMCP

smithery.dockerfile (1.46 kB)
# --- Build stage: resolve and install dependencies with uv ---
# Use a Python image with uv pre-installed
FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS uv

# Install the project into `/app`
WORKDIR /app

# Enable bytecode compilation
ENV UV_COMPILE_BYTECODE=1

# Copy from the cache instead of linking since it's a mounted volume
ENV UV_LINK_MODE=copy

# Install the project's dependencies using the lockfile and settings.
# The lockfile and pyproject.toml are bind-mounted (not copied) so this
# layer only rebuilds when they change.
RUN --mount=type=cache,target=/root/.cache/uv \
    --mount=type=bind,source=uv.lock,target=uv.lock \
    --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
    uv sync --frozen --no-install-project --no-dev --no-editable

# Then, add the rest of the project source code and install it
# Installing separately from its dependencies allows optimal layer caching
COPY . /app
RUN --mount=type=cache,target=/root/.cache/uv \
    uv sync --frozen --no-dev --no-editable

# --- Runtime stage: minimal Python image, non-root user ---
FROM python:3.12.5-slim-bookworm

RUN useradd app
WORKDIR /app

# Bring in only the built virtual environment from the uv stage
COPY --from=uv --chown=app:app /app/.venv /app/.venv
RUN chown -R app:app /app

USER app

# Place executables in the environment at the front of the path
ENV PATH="/app/.venv/bin:$PATH"

# Listen on all interfaces; transport defaults for the MCP server
ENV FASTMCP_HOST="0.0.0.0"
ENV MCP_SERVER_TRANSPORT="streamable-http"

# Honour a platform-provided $PORT (default 8081), materialise runtime
# settings into /app/.env, then exec the server as PID 1.
ENTRYPOINT ["sh", "-c", "PORT=${PORT:-8081} && export FASTMCP_PORT=$PORT && echo \"MCP_SERVER_TRANSPORT=${MCP_SERVER_TRANSPORT}\" > /app/.env && echo \"FASTMCP_HOST=${FASTMCP_HOST}\" >> /app/.env && echo \"FASTMCP_PORT=${FASTMCP_PORT}\" >> /app/.env && exec python3 -m frankfurtermcp.server"]

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/anirbanbasu/frankfurtermcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.