Skip to main content
Glama

n8n MCP Server

by CodeHalwell
pyproject.toml (943 B)
# pyproject.toml — PEP 621 metadata + tool configuration for the n8n-mcp package.
# Sections ordered per convention: [build-system], [project], then [tool.*].

[build-system]
requires = ["hatchling>=1.18.0"]
build-backend = "hatchling.build"

[project]
name = "n8n-mcp"
version = "0.1.0"
description = "MCP server + n8n workflow builder and client"
readme = "README.md"
requires-python = ">=3.9,<3.13"
# NOTE(review): placeholder author metadata — replace before publishing.
authors = [
    { name = "You", email = "you@example.com" },
]
# Runtime dependencies (PEP 508 specifiers), sorted alphabetically.
dependencies = [
    "fastapi>=0.111.0",
    "gradio>=4.38.1",
    "pydantic>=2.7.4",
    "python-dotenv>=1.0.1",
    "requests>=2.32.3",
    "uvicorn[standard]>=0.30.1",
]

# Development-only dependencies, installed by uv (not part of the wheel).
[tool.uv]
dev-dependencies = [
    "mypy>=1.10.0",
    "pytest>=8.2.2",
    "pytest-cov>=5.0.0",
    "ruff>=0.5.0",
]

[tool.ruff]
line-length = 100
# Vendored bootstrap script — not project code, skip linting it.
exclude = ["get-pip.py"]

[tool.mypy]
python_version = "3.11"
strict = true
# UI packages are excluded from strict checking; their modules are also
# listed below with ignore_errors so indirect imports stay quiet.
exclude = ["^ui_gradio/", "^ui/"]

[[tool.mypy.overrides]]
module = ["builder.*", "n8n_client.*", "ui.*", "ui_gradio.*", "get-pip"]
ignore_errors = true

# Packages shipped in the wheel built by hatchling.
[tool.hatch.build.targets.wheel]
packages = [
    "builder",
    "mcp_server",
    "n8n_client",
    "ui_gradio",
]

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/CodeHalwell/n8n-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.