Skip to main content
Glama

Yellhorn MCP

by msnidal
pyproject.toml (1.53 kB)
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "yellhorn-mcp"
version = "0.8.1"
authors = [{ name = "Author" }]
description = "Yellhorn offers MCP tools to generate detailed workplans with Gemini 2.5 Pro or OpenAI models and to review diffs against them using your entire codebase as context. Features unified LLM management, automatic chunking, and robust retry logic."
readme = "README.md"
requires-python = ">=3.10"
classifiers = [
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.10",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
]
# Runtime dependencies (PEP 508 specifiers), sorted alphabetically.
dependencies = [
    "aiohttp~=3.12.13",
    "google-api-core~=2.25.1",
    "google-genai~=1.38.0",
    "jedi~=0.19.2",
    "mcp[cli]~=1.15.0",
    "openai~=1.108.1",
    "pydantic~=2.11.7",
    # NOTE(review): tenacity is upper-bounded rather than ~=-pinned like the
    # rest — presumably to avoid a breaking change in 9.x; confirm intent.
    "tenacity<9.0.0",
    "tiktoken~=0.11.0",
    "xai-sdk~=1.2.0",
]

[project.scripts]
yellhorn-mcp = "yellhorn_mcp.cli:main"

[tool.black]
line-length = 100
target-version = ["py310"]

[tool.isort]
profile = "black"
line_length = 100

# NOTE(review): stock flake8 does not read configuration from pyproject.toml;
# this section is only honored via a plugin (e.g. Flake8-pyproject) or a
# wrapper such as pflake8 — verify one is in use, otherwise move to setup.cfg.
[tool.flake8]
max-line-length = 100
exclude = ["venv", ".git", "__pycache__", "build", "dist"]

[tool.pytest.ini_options]
asyncio_default_fixture_loop_scope = "function"

[tool.hatch.build.targets.wheel]
packages = ["yellhorn_mcp"]

# PEP 735 dependency groups (not installed with the package itself).
[dependency-groups]
dev = [
    "black>=25.9.0",
    "flake8>=7.1.1",
    "httpx>=0.28.1",
    "isort>=6.0.1",
    "jedi>=0.19.2",
    "pytest>=8.3.3",
    "pytest-asyncio>=1.2.0",
    "pytest-cov>=7.0.0",
]

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/msnidal/yellhorn-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.