Skip to main content
Glama

n8n-workflow-builder-mcp

by ifmelate
langchain_openAiAssistant.json4.55 kB
{ "nodeType": "@n8n/n8n-nodes-langchain.openAiAssistant", "displayName": "OpenAI Assistant", "description": "Utilizes Assistant API from Open AI.", "version": [ 1, 1.1 ], "properties": [ { "name": "mode", "displayName": "Operation", "type": "options", "default": "existing", "options": [ { "name": "Use New Assistant", "value": "new" }, { "name": "Use Existing Assistant", "value": "existing" } ] }, { "name": "name", "displayName": "Name", "type": "string", "default": "", "required": true, "displayOptions": { "show": { "/mode": [ "new" ] } } }, { "name": "instructions", "displayName": "Instructions", "type": "string", "default": "", "description": "How the Assistant and model should behave or respond", "typeOptions": { "rows": 5 }, "displayOptions": { "show": { "/mode": [ "new" ] } } }, { "name": "model", "displayName": "Model", "type": "options", "default": "gpt-3.5-turbo-1106", "description": "The model which will be used to power the assistant. <a href=\"https://beta.openai.com/docs/models/overview\">Learn more</a>. The Retrieval tool requires gpt-3.5-turbo-1106 and gpt-4-1106-preview models.", "required": true, "displayOptions": { "show": { "/mode": [ "new" ] } } }, { "name": "assistantId", "displayName": "Assistant", "type": "options", "default": "", "description": "The assistant to use. <a href=\"https://beta.openai.com/docs/assistants/overview\">Learn more</a>.", "required": true, "displayOptions": { "show": { "/mode": [ "existing" ] } } }, { "name": "text", "displayName": "Text", "type": "string", "default": "={{ $json.chat_input }}", "required": true, "displayOptions": { "show": { "@version": [ 1 ] } } }, { "name": "nativeTools", "displayName": "OpenAI Tools", "type": "multiOptions", "default": [], "options": [ { "name": "Code Interpreter", "value": "code_interpreter" }, { "name": "Knowledge Retrieval", "value": "retrieval" } ] }, { "name": "noticeTools", "displayName": "Connect your own custom tools to this node on the canvas", "type": "notice", "default": "" }, { "name": "options", "displayName": "Options", "type": "collection", "default": {}, "description": "Additional options to add", "placeholder": "Add Option", "options": [ { "name": "baseURL", "displayName": "Base URL", "type": "string", "default": "https://api.openai.com/v1", "description": "Override the default base URL for the API" }, { "name": "maxRetries", "displayName": "Max Retries", "type": "number", "default": 2, "description": "Maximum number of retries to attempt" }, { "name": "timeout", "displayName": "Timeout", "type": "number", "default": 10000, "description": "Maximum amount of time a request is allowed to take in milliseconds" } ] } ], "credentialsConfig": [ { "name": "openAiApi", "required": true }, { "name": "mode", "required": false }, { "name": "name", "required": true }, { "name": "assistantId", "required": false }, { "name": "text", "required": true }, { "name": "noticeTools", "required": false } ], "io": { "inputs": [ "Main", "AiTool" ], "outputs": [ "Main" ], "outputNames": [], "hints": {} }, "wiring": { "role": "agent", "requires": [ "AiLanguageModel" ], "optional": [ "AiMemory", "AiOutputParser", "AiTool" ], "consumedBy": [], "consumes": [ "Main", "AiTool" ], "produces": [ "Main" ] } }

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/ifmelate/n8n-workflow-builder-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server