Skip to main content
Glama

n8n-workflow-builder-mcp

by ifmelate
langchain_chatTrigger.json4.51 kB
{ "nodeType": "@n8n/n8n-nodes-langchain.chatTrigger", "displayName": "Chat Trigger", "description": "Runs the workflow when an n8n generated webchat is submitted", "version": [ 1, 1.1 ], "properties": [ { "name": "public", "displayName": "Make Chat Publicly Available", "type": "boolean", "default": false, "description": "Whether the chat should be publicly available or only accessible through the manual chat interface" }, { "name": "mode", "displayName": "Mode", "type": "options", "default": "hostedChat", "description": "Chat on a page served by n8n", "options": [ { "name": "Hosted Chat", "value": "hostedChat", "description": "Chat on a page served by n8n" }, { "name": "Embedded Chat", "value": "webhook", "description": "Chat through a widget embedded in another page, or by calling a webhook" } ], "displayOptions": { "show": { "public": [ true ] } } }, { "name": "hostedChatNotice", "displayName": "Chat will be live at the URL above once you activate this workflow. Live executions will show up in the ‘executions’ tab", "type": "notice", "default": "", "displayOptions": { "show": { "mode": [ "hostedChat" ], "public": [ true ] } } }, { "name": "embeddedChatNotice", "displayName": "Follow the instructions <a href=\"https://www.npmjs.com/package/@n8n/chat\" target=\"_blank\">here</a> to embed chat in a webpage (or just call the webhook URL at the top of this section).\nChat will be live once you activate this workflow", "type": "notice", "default": "", "displayOptions": { "show": { "mode": [ "webhook" ], "public": [ true ] } } }, { "name": "authentication", "displayName": "Authentication", "type": "options", "default": "none", "description": "Simple username and password (the same one for all users)", "options": [ { "name": "Basic Auth", "value": "basicAuth", "description": "Simple username and password (the same one for all users)" }, { "name": "n8n User Auth", "value": "n8nUserAuth", "description": "Require user to be logged in with their n8n account" }, { "name": "None", "value": "none" } ], "displayOptions": { "show": { "public": [ true ] } } }, { "name": "initialMessages", "displayName": "Initial Message(s)", "type": "string", "default": "Hi there! 👋\nMy name is Nathan. How can I assist you today?", "description": "Default messages shown at the start of the chat, one per line", "typeOptions": { "rows": 3 }, "displayOptions": { "show": { "mode": [ "hostedChat" ], "public": [ true ] } } }, { "name": "options", "displayName": "Options", "type": "collection", "default": {}, "placeholder": "Add Field", "displayOptions": { "show": { "public": [ false ], "@version": [ { "_cnd": { "gte": 1.1 } } ] } } } ], "credentialsConfig": [ { "name": "httpBasicAuth", "required": true }, { "name": "setup", "required": false }, { "name": "public", "required": false }, { "name": "hostedChatNotice", "required": false }, { "name": "initialMessages", "required": false }, { "name": "options", "required": false }, { "name": "responseMode", "required": false }, { "name": "showWelcomeScreen", "required": false } ], "io": { "inputs": [ "AiMemory" ], "outputs": [ "Main" ], "outputNames": [], "hints": {} }, "wiring": { "role": "chatTrigger", "requires": [], "optional": [ "AiMemory" ], "consumedBy": [], "consumes": [ "AiMemory" ], "produces": [ "Main" ] } }

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/ifmelate/n8n-workflow-builder-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.