pyproject.toml
[project]
name = "shepherd-mcp"
version = "0.0.1"
description = "MCP server for Shepherd - Debug your AI agents like you debug your code"
readme = "README.md"
license = { text = "MIT" }
requires-python = ">=3.10"
authors = [{ name = "Neuralis", email = "hello@neuralis.ai" }]
keywords = ["mcp", "ai", "agents", "debugging", "observability", "llm"]
classifiers = [
    "Development Status :: 4 - Beta",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Topic :: Software Development :: Debuggers",
]
dependencies = [
    "mcp>=1.0.0",
    "httpx>=0.27.0",
    "pydantic>=2.0.0",
]

[project.optional-dependencies]
dev = [
    "pytest>=8.0.0",
    "pytest-asyncio>=0.23.0",
    "ruff>=0.5.0",
    "build>=1.0.0",
    "twine>=5.0.0",
]
docs = [
    "sphinx>=7.0.0",
    "sphinx-autobuild>=2024.0.0",
]

[project.scripts]
shepherd-mcp = "shepherd_mcp:main"

[project.urls]
Homepage = "https://github.com/neuralis/shepherd-mcp"
Repository = "https://github.com/neuralis/shepherd-mcp"
Documentation = "https://neuralis.github.io/shepherd-mcp/"

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[tool.hatch.build.targets.wheel]
packages = ["src/shepherd_mcp"]

[tool.ruff]
target-version = "py310"
line-length = 100

[tool.ruff.lint]
select = ["E", "F", "I", "UP", "B", "SIM"]
ignore = ["E501"]

[tool.pytest.ini_options]
asyncio_mode = "auto"
testpaths = ["tests"]
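The [project.scripts] table maps a shepherd-mcp console command to the shepherd_mcp:main entry point, so installing the package (for example with pip install shepherd-mcp, or running it via uvx shepherd-mcp) gives an MCP client a command it can spawn over stdio. The package source is not shown on this page; the following is only a hypothetical sketch of what such an entry point could look like, built on the declared mcp dependency and assuming a recent SDK release that ships FastMCP. The ping tool and every name below are placeholders, not shepherd-mcp's actual code.

# Hypothetical sketch of a console-script entry point like shepherd_mcp:main.
# Assumes a recent mcp SDK release that provides FastMCP; not the real package code.
from mcp.server.fastmcp import FastMCP

server = FastMCP("shepherd")

@server.tool()
def ping() -> str:
    """Placeholder tool so the server exposes something over MCP."""
    return "pong"

def main() -> None:
    # [project.scripts] points the shepherd-mcp command at this function;
    # run() defaults to the stdio transport that MCP clients expect.
    server.run()

if __name__ == "__main__":
    main()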

MCP directory API

We provide all the information about listed MCP servers via our MCP directory API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/neuralis-in/shepherd-mcp'
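The same endpoint can be queried from Python with any HTTP client. Below is a minimal sketch using httpx (already a declared dependency of shepherd-mcp); the response schema is not documented on this page, so the example simply returns the parsed JSON as-is.

# Minimal sketch: fetch this server's directory entry from the Glama MCP API.
# The response schema is not documented here, so we just print the raw JSON.
import httpx

def fetch_server_info() -> dict:
    url = "https://glama.ai/api/mcp/v1/servers/neuralis-in/shepherd-mcp"
    response = httpx.get(url, timeout=10.0)
    response.raise_for_status()  # surface HTTP errors instead of continuing silently
    return response.json()

if __name__ == "__main__":
    print(fetch_server_info())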

If you have feedback or need assistance with the MCP directory API, please join our Discord server.