
Execution System MCP Server

by elinsky
pyproject.toml (742 B)

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "execution-system-mcp"
version = "0.1.0"
description = "MCP server for project creation"
readme = "README.md"
requires-python = ">=3.11"
dependencies = [
    "mcp>=0.9.0",
    "pyyaml>=6.0",
]

[project.optional-dependencies]
dev = [
    "pytest>=7.4.0",
    "pytest-mock>=3.12.0",
    "pytest-cov>=4.1.0",
]

[tool.pytest.ini_options]
testpaths = ["tests"]
python_files = ["test_*.py"]
python_classes = ["Test*"]
python_functions = ["test_*"]

[tool.coverage.run]
source = ["src"]
omit = ["tests/*"]

[tool.coverage.report]
exclude_lines = [
    "pragma: no cover",
    "def __repr__",
    "raise AssertionError",
    "raise NotImplementedError",
]
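The pyproject above declares the mcp SDK and PyYAML as runtime dependencies, but the page does not show the server code itself. As a rough sketch only, a stdio-based entry point built on the mcp package's low-level Server API might look like the following; the create_project tool name and its schema are hypothetical placeholders for whatever tools the package actually exposes.

import asyncio

import mcp.types as types
from mcp.server import Server
from mcp.server.stdio import stdio_server

# Server name taken from the package name; the tool below is illustrative only.
server = Server("execution-system-mcp")


@server.list_tools()
async def list_tools() -> list[types.Tool]:
    # Advertise a single hypothetical "create_project" tool to clients.
    return [
        types.Tool(
            name="create_project",
            description="Create a new project from a template",
            inputSchema={
                "type": "object",
                "properties": {"name": {"type": "string"}},
                "required": ["name"],
            },
        )
    ]


@server.call_tool()
async def call_tool(name: str, arguments: dict) -> list[types.TextContent]:
    # Dispatch on the tool name; only the illustrative tool is handled here.
    if name == "create_project":
        return [types.TextContent(type="text", text=f"Created project {arguments['name']}")]
    raise ValueError(f"Unknown tool: {name}")


async def main() -> None:
    # Serve over stdio, the transport MCP clients typically use to launch servers.
    async with stdio_server() as (read_stream, write_stream):
        await server.run(read_stream, write_stream, server.create_initialization_options())


if __name__ == "__main__":
    asyncio.run(main())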

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/elinsky/execution-system-mcp'
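The same endpoint can be queried from a script. A minimal sketch using Python's standard library, assuming only that the endpoint returns JSON (the response fields are not documented here):

import json
import urllib.request

# Directory entry for this server; assumes a JSON response body.
URL = "https://glama.ai/api/mcp/v1/servers/elinsky/execution-system-mcp"

with urllib.request.urlopen(URL) as response:
    entry = json.load(response)

print(json.dumps(entry, indent=2))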

If you have feedback or need assistance with the MCP directory API, please join our Discord server.