
n8n MCP Server

by CodeHalwell
test_mcp_server_tools.py
from __future__ import annotations

from typing import Any, Dict

import pytest
from pytest import MonkeyPatch


@pytest.mark.asyncio
async def test_execute_workflow_tool_runs_client(monkeypatch: MonkeyPatch) -> None:
    # Provide the environment the server expects before (re)importing it.
    monkeypatch.setenv("N8N_API_URL", "https://example.com")
    monkeypatch.setenv("N8N_API_KEY", "dummy")

    from importlib import reload

    from mcp_server import server

    reload(server)

    calls: Dict[str, object] = {}

    # Stub client that records calls instead of talking to a real n8n instance.
    class DummyClient:
        def __init__(self, settings: object) -> None:  # pragma: no cover - simple holder
            calls["settings"] = settings

        async def list_workflows(self) -> list[Dict[str, str]]:
            return [{"id": "1", "name": "Example"}]

        async def execute_workflow(
            self, workflow_id: str, payload: Dict[str, Any]
        ) -> Dict[str, str]:
            calls["executed"] = (workflow_id, payload)
            return {"status": "ok"}

        async def close(self) -> None:
            calls["closed"] = True

    monkeypatch.setattr(server, "N8nClient", DummyClient)

    result = await server.execute_workflow_action("Example", {"foo": "bar"})

    # The workflow name "Example" should resolve to id "1", the payload should be
    # forwarded unchanged, and the client should be closed afterwards.
    assert calls["executed"] == ("1", {"foo": "bar"})
    assert calls.get("closed") is True
    assert result["workflow"]["status"] == "ok"
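For context, the behaviour of server.execute_workflow_action that this test implies can be sketched as follows. This is only an illustration inferred from the assertions above; the import paths mcp_server.client and mcp_server.settings, the Settings class, and the exact constructor arguments are assumptions, not the repository's actual API.

from typing import Any, Dict

from mcp_server.client import N8nClient  # assumed import path
from mcp_server.settings import Settings  # assumed settings object


async def execute_workflow_action(name: str, payload: Dict[str, Any]) -> Dict[str, Any]:
    """Resolve a workflow by name, execute it, and always close the client."""
    client = N8nClient(Settings())  # constructor shape mirrors the DummyClient above
    try:
        # Look up the workflow id for the given human-readable name.
        workflows = await client.list_workflows()
        workflow_id = next(w["id"] for w in workflows if w["name"] == name)
        result = await client.execute_workflow(workflow_id, payload)
        # The test expects the execution result nested under the "workflow" key.
        return {"workflow": result}
    finally:
        # close() must run even if the lookup or execution fails.
        await client.close()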

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/CodeHalwell/n8n-mcp'
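If you prefer to call the endpoint from Python, a minimal sketch using the requests library is shown below; the response schema is not documented on this page, so the JSON body is printed as-is.

import requests

# Fetch the directory entry for this server from the Glama MCP API.
response = requests.get(
    "https://glama.ai/api/mcp/v1/servers/CodeHalwell/n8n-mcp",
    timeout=10,
)
response.raise_for_status()
print(response.json())  # server metadata as returned by the directory API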

If you have feedback or need assistance with the MCP directory API, please join our Discord server.