pyproject.toml (694 B)
[project]
name = "graphiti-mcp-server"
version = "0.1.0"
description = "Graphiti MCP Server for persistent memory and context continuity"
readme = "README.md"
requires-python = ">=3.10"
dependencies = [
    "mcp>=0.9.0",
    "neo4j>=5.15.0",
    "openai>=1.12.0",
    "python-dotenv>=1.0.0",
    "fastapi>=0.104.0",
    "uvicorn>=0.24.0",
    "sse-starlette>=1.6.0",
]

[project.optional-dependencies]
dev = [
    "pytest>=7.4.0",
    "pytest-asyncio>=0.21.0",
    "black>=23.0.0",
    "ruff>=0.1.0",
]

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[tool.black]
line-length = 100
target-version = ['py310']

[tool.ruff]
line-length = 100
target-version = "py310"
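As a minimal sketch of working with this configuration, the snippet below parses the pyproject.toml and lists the declared runtime dependencies. It assumes the file is saved as pyproject.toml in the current directory; tomllib is standard library only from Python 3.11, so on Python 3.10 (the minimum this project declares) the third-party tomli package is used as a fallback.

# read_deps.py -- sketch: list the runtime dependencies declared above.
try:
    import tomllib  # standard library on Python 3.11+
except ModuleNotFoundError:
    import tomli as tomllib  # Python 3.10 fallback (pip install tomli)

with open("pyproject.toml", "rb") as f:
    config = tomllib.load(f)

project = config["project"]
print("Project:", project["name"], project["version"])
for dep in project["dependencies"]:
    print("  requires:", dep)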


MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/apexneural-hansika/graphiti_mcp'
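The same request can be made from Python with only the standard library. This is a sketch that assumes the endpoint returns a JSON payload; the response schema is not documented here, so the result is simply printed as received.

# fetch_server_info.py -- Python equivalent of the curl example above.
import json
import urllib.request

URL = "https://glama.ai/api/mcp/v1/servers/apexneural-hansika/graphiti_mcp"

with urllib.request.urlopen(URL) as resp:
    data = json.load(resp)  # assumes a JSON response body

print(json.dumps(data, indent=2))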

If you have feedback or need assistance with the MCP directory API, please join our Discord server.