docker-compose.yml
services:
  neo4j:
    image: neo4j:5.15-community
    container_name: graphiti-neo4j
    ports:
      - "7474:7474"  # HTTP
      - "7687:7687"  # Bolt
    environment:
      - NEO4J_AUTH=neo4j/demodemo
      - NEO4J_PLUGINS=["apoc", "graph-data-science"]
      - NEO4J_dbms_security_procedures_unrestricted=apoc.*,gds.*
      - NEO4J_dbms_security_procedures_allowlist=apoc.*,gds.*
    volumes:
      - neo4j_data:/data
      - neo4j_logs:/logs
    healthcheck:
      test: ["CMD", "cypher-shell", "-u", "neo4j", "-p", "demodemo", "RETURN 1"]
      interval: 10s
      timeout: 5s
      retries: 5

  mcp-server:
    build:
      context: .
      dockerfile: Dockerfile
    container_name: graphiti-mcp-server
    ports:
      - "8000:8000"
    environment:
      - NEO4J_URI=bolt://neo4j:7687
      - NEO4J_USER=neo4j
      - NEO4J_PASSWORD=demodemo
      - OPENROUTER_API_KEY=${OPENROUTER_API_KEY:-}
      - OPENAI_API_KEY=${OPENAI_API_KEY:-}
      - MODEL_NAME=${MODEL_NAME:-openai/gpt-4o-mini}
    depends_on:
      neo4j:
        condition: service_healthy
    command: ["python", "graphiti_mcp_server.py", "--transport", "sse", "--host", "0.0.0.0", "--port", "8000"]
    restart: unless-stopped

volumes:
  neo4j_data:
  neo4j_logs:
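
The compose file substitutes the LLM credentials and model name from the host environment or from a .env file in the same directory. A minimal sketch of bringing the stack up follows; the /sse endpoint path is an assumption about how graphiti_mcp_server.py exposes its SSE transport, so verify it against the server code.

# Create a .env file next to docker-compose.yml; Docker Compose reads it
# automatically and fills in ${OPENROUTER_API_KEY:-}, ${OPENAI_API_KEY:-}
# and ${MODEL_NAME:-openai/gpt-4o-mini} in the compose file above.
cat > .env <<'EOF'
OPENROUTER_API_KEY=your-openrouter-key
OPENAI_API_KEY=
MODEL_NAME=openai/gpt-4o-mini
EOF

# Build the MCP server image and start both containers; mcp-server waits
# for Neo4j to pass its healthcheck before it starts.
docker compose up -d --build

# Neo4j browser: http://localhost:7474 (login neo4j / demodemo)
# MCP server (SSE transport) listens on port 8000; the exact path below
# is assumed, not taken from the compose file.
curl -i http://localhost:8000/sse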


MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/apexneural-hansika/graphiti_mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.