Lila MCP Server

by lila-graph
pyproject.toml (1.61 kB)
[project]
name = "lila-mcp-standalone"
version = "1.0.0"
description = "Standalone Lila MCP Server - Minimal psychological relationship intelligence"
authors = [
    {name = "Lila Team", email = "team@lila.dev"}
]
readme = "README.md"
requires-python = ">=3.12"
dependencies = [
    # Core MCP Framework
    "fastmcp>=2.12.3",
    # Database connectivity
    "neo4j>=5.15.0",
    # LLM integration
    "openai>=1.30.0",
    "anthropic>=0.25.0",
    # Core utilities
    "pydantic>=2.6.0",
    "pydantic-settings>=2.2.0",
    "httpx>=0.27.0",
    "aiohttp>=3.9.0",
    # Observability (optional)
    "logfire>=0.28.0",
    # Configuration
    "python-dotenv>=1.0.0",
    # CLI
    "click>=8.1.0",
    # Async utilities
    "asyncio-mqtt>=0.16.0",
]

[project.optional-dependencies]
dev = [
    "pytest>=8.0.0",
    "pytest-asyncio>=0.23.0",
    "black>=24.0.0",
    "ruff>=0.3.0",
]

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[tool.black]
line-length = 120
target-version = ['py312']

[tool.ruff]
line-length = 120
target-version = "py312"

[tool.pytest.ini_options]
asyncio_mode = "auto"
testpaths = ["tests"]
python_files = ["test_*.py"]
python_classes = ["Test*"]
python_functions = ["test_*"]

[tool.hatch.metadata]
allow-direct-references = true

[tool.hatch.build.targets.wheel]
# Include all Python files in the current directory as a flat package
include = [
    "*.py",
    "agents/**/*.py",
    "llm/**/*.py",
    "graph/**/*.py",
    ".env"
]

[tool.hatch.build.targets.wheel.sources]
# Map the current directory to the package root
"." = "lila_mcp_standalone"
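The dependency list above implies the basic shape of the server: FastMCP hosts the MCP endpoint, the neo4j driver supplies graph access, and python-dotenv loads credentials from the bundled .env file. Below is a minimal sketch of how those pieces fit together. The tool name, Cypher schema, and environment variable names (NEO4J_URI, NEO4J_USER, NEO4J_PASSWORD) are assumptions for illustration; the real server's tools are not shown on this page.

import os

from dotenv import load_dotenv
from fastmcp import FastMCP
from neo4j import GraphDatabase

load_dotenv()  # read connection settings from the bundled .env file

# Assumed environment variable names; adjust to the project's actual config.
driver = GraphDatabase.driver(
    os.environ.get("NEO4J_URI", "bolt://localhost:7687"),
    auth=(
        os.environ.get("NEO4J_USER", "neo4j"),
        os.environ.get("NEO4J_PASSWORD", ""),
    ),
)

mcp = FastMCP("lila-mcp-standalone")

@mcp.tool()
def relationship_summary(person_a: str, person_b: str) -> str:
    """Hypothetical tool: count direct relationships between two people."""
    query = (
        "MATCH (a:Person {name: $a})-[r]-(b:Person {name: $b}) "
        "RETURN count(r) AS n"
    )
    with driver.session() as session:
        n = session.run(query, a=person_a, b=person_b).single()["n"]
    return f"{person_a} and {person_b} share {n} direct relationship(s)."

if __name__ == "__main__":
    mcp.run()  # defaults to the stdio transport

With asyncio_mode = "auto" in the pytest config, tools written as async def functions can be tested without per-test event-loop boilerplate.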

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/lila-graph/lila-mcp'
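The same lookup from Python, using httpx (already in the project's dependency list). The response schema is not documented here, so this sketch just prints the raw JSON:

import httpx

resp = httpx.get("https://glama.ai/api/mcp/v1/servers/lila-graph/lila-mcp")
resp.raise_for_status()  # surface HTTP errors instead of parsing an error page
print(resp.json())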

If you have feedback or need assistance with the MCP directory API, please join our Discord server.