Elrond MCP

pyproject.toml (1.28 kB)
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "elrond-mcp"
version = "0.1.0"
description = "A thinking augmentation MCP server using hierarchical LLM critique and synthesis"
readme = "README.md"
requires-python = ">=3.13"
dependencies = [
    "mcp[cli]>=1.0.0",
    "instructor>=1.6.0",
    "pydantic>=2.10.0",
    "google-genai>=0.1.0",
    "jsonref",
]

[project.optional-dependencies]
dev = ["pytest>=8.0.0", "pytest-asyncio>=0.24.0", "ruff>=0.7.0"]

[project.scripts]
elrond-mcp = "elrond_mcp.server:main"

[tool.ruff]
target-version = "py313"
line-length = 88

[tool.ruff.lint]
select = [
    "E",   # pycodestyle errors
    "W",   # pycodestyle warnings
    "F",   # pyflakes
    "I",   # isort
    "B",   # flake8-bugbear
    "C4",  # flake8-comprehensions
    "UP",  # pyupgrade
]
ignore = [
    "B008",  # do not perform function calls in argument defaults
]

[tool.ruff.format]
quote-style = "double"
indent-style = "space"
skip-magic-trailing-comma = false
line-ending = "auto"

[tool.pytest.ini_options]
asyncio_mode = "auto"
testpaths = ["tests"]
python_files = ["test_*.py", "*_test.py"]
python_classes = ["Test*"]
python_functions = ["test_*"]

[dependency-groups]
dev = ["pytest>=8.4.1", "pytest-asyncio>=1.1.0", "ruff>=0.12.11"]
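The [project.scripts] table maps the `elrond-mcp` console command to `elrond_mcp.server:main`. The actual server module is not shown in this listing, so the sketch below is hypothetical: it only illustrates how such an entry point could wire up an MCP server using the declared `mcp` dependency's FastMCP helper. The `critique` tool name and its behavior are illustrative placeholders, not the project's real tools.

# Hypothetical sketch of elrond_mcp/server.py, the target of [project.scripts].
# The real implementation is not part of this listing.
from mcp.server.fastmcp import FastMCP

mcp = FastMCP("elrond-mcp")


@mcp.tool()
def critique(prompt: str) -> str:
    """Placeholder tool: return a critique of the given prompt."""
    return f"Critique of: {prompt}"


def main() -> None:
    # Installed as the `elrond-mcp` console script; runs over stdio by default.
    mcp.run()


if __name__ == "__main__":
    main()

With the package installed, the metadata above means running `elrond-mcp` on the command line calls this `main()` function.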

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/dogonthehorizon/elrond-mcp'
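The same endpoint can be queried from Python using only the standard library; the sketch below assumes nothing beyond the endpoint returning JSON over HTTPS, and does not describe the response schema.

# Minimal sketch: fetch this server's directory entry from the Glama MCP API.
# The response field names are not shown here because the schema is not part
# of this listing.
import json
import urllib.request

URL = "https://glama.ai/api/mcp/v1/servers/dogonthehorizon/elrond-mcp"

with urllib.request.urlopen(URL) as response:
    entry = json.load(response)

print(json.dumps(entry, indent=2))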

If you have feedback or need assistance with the MCP directory API, please join our Discord server.