Data Enterprise Tool
data_enterprise_tool — Manage 301 redirects and robots.txt for Webflow enterprise sites. Add well-known files and view activity logs to control site behavior.
Instructions
Data tool - Enterprise tool to perform actions like manage 301 redirects, manage robots.txt and more. This tool only works if User's workspace plan is Enterprise or higher, else tool will return an error.
Input Schema
| Name | Required | Description | Default |
|---|---|---|---|
| actions | Yes | The actions to perform on the enterprise tool. | — |
Implementation Reference
- src/tools/enterprise.ts:189-572 (handler) — The handler for the data_enterprise_tool, registered via server.registerTool; it iterates over an array of actions (list_301_redirects, create_301_redirect, update_301_redirect, delete_301_redirect, get_robots_txt, update_robots_txt, replace_robots_txt, delete_robots_txt, add_well_known_file, remove_well_known_files, list_site_activity_logs) and executes each with per-action error handling for non-Enterprise plan errors.
server.registerTool( "data_enterprise_tool", { title: "Data Enterprise Tool", description: "Data tool - Enterprise tool to perform actions like manage 301 redirects, manage robots.txt and more. This tool only works if User's workspace plan is Enterprise or higher, else tool will return an error.", annotations: { readOnlyHint: false, }, inputSchema: { actions: z .array( z .object({ list_301_redirects: z .object({ site_id: z .string() .describe( "The site's unique ID, used to list its 301 redirects.", ), }) .optional() .describe("List all 301 redirects for a site."), create_301_redirect: z .object({ site_id: z .string() .describe( "The site's unique ID, used to create a 301 redirect.", ), fromUrl: z .string() .describe( "The source URL path that will be redirected (e.g., '/old-page').", ), toUrl: z .string() .describe( "The destination URL path where requests will be redirected to (e.g., '/new-page').", ), }) .optional() .describe("Create a new 301 redirect for a site."), update_301_redirect: z .object({ site_id: z .string() .describe( "The site's unique ID, used to update a 301 redirect.", ), redirect_id: z .string() .describe( "The redirect's unique ID, used to identify which redirect to update.", ), fromUrl: z .string() .describe( "The source URL path that will be redirected (e.g., '/old-page').", ), toUrl: z .string() .describe( "The destination URL path where requests will be redirected to (e.g., '/new-page').", ), }) .optional() .describe("Update an existing 301 redirect."), delete_301_redirect: z .object({ site_id: z .string() .describe( "The site's unique ID, used to delete a 301 redirect.", ), redirect_id: z .string() .describe( "The redirect's unique ID, used to identify which redirect to delete.", ), }) .optional() .describe("Delete a 301 redirect from a site."), get_robots_txt: z .object({ site_id: z .string() .describe( "The site's unique ID, used to get its robots.txt configuration.", ), }) .optional() .describe("Get the robots.txt configuration for a 
site."), update_robots_txt: z .object({ site_id: z .string() .describe( "The site's unique ID, used to update its robots.txt.", ), rules: z .array( z.object({ userAgent: z .string() .describe( "The user agent to apply rules to (e.g., '*', 'Googlebot').", ), allow: z .array(z.string()) .describe("Array of URL paths to allow."), disallow: z .array(z.string()) .describe("Array of URL paths to disallow."), }), ) .optional() .describe( "Array of rules to apply to the robots.txt file.", ), sitemap: z .string() .optional() .describe( "URL to the sitemap (e.g., 'https://example.com/sitemap.xml').", ), }) .optional() .describe( "Partially update the robots.txt file (PATCH operation).", ), replace_robots_txt: z .object({ site_id: z .string() .describe( "The site's unique ID, used to replace its robots.txt.", ), rules: z .array( z.object({ userAgent: z .string() .describe( "The user agent to apply rules to (e.g., '*', 'Googlebot').", ), allow: z .array(z.string()) .describe("Array of URL paths to allow."), disallow: z .array(z.string()) .describe("Array of URL paths to disallow."), }), ) .optional() .describe( "Array of rules to apply to the robots.txt file.", ), sitemap: z .string() .optional() .describe( "URL to the sitemap (e.g., 'https://example.com/sitemap.xml').", ), }) .optional() .describe( "Completely replace the robots.txt file (PUT operation).", ), delete_robots_txt: z .object({ site_id: z .string() .describe( "The site's unique ID, used to delete rules from its robots.txt.", ), rules: z .array( z.object({ userAgent: z .string() .describe( "The user agent to apply rules to (e.g., '*', 'Googlebot').", ), allow: z .array(z.string()) .describe("Array of URL paths to allow."), disallow: z .array(z.string()) .describe("Array of URL paths to disallow."), }), ) .optional() .describe( "Array of rules to remove from the robots.txt file.", ), sitemap: z .string() .optional() .describe("Sitemap URL to remove."), }) .optional() .describe("Delete specific rules from the 
robots.txt file."), add_well_known_file: z .object({ site_id: z .string() .describe( "The site's unique ID, used to add a well-known file.", ), fileName: z .string() .describe( `The name of the well-known file (e.g., 'apple-app-site-association', 'assetlinks.json'). ".noext" is a special file extension that removes other extensions. For example, apple-app-site-association.noext.txt will be uploaded as apple-app-site-association. Use this extension for tools that have trouble uploading extensionless files.`, ), fileData: z .string() .describe( "The content/data of the well-known file as a string.", ), contentType: z .enum(["application/json", "text/plain"]) .describe( "The MIME type of the file content (application/json or text/plain).", ), }) .optional() .describe( "Add or update a well-known file to the site's /.well-known/ directory.", ), remove_well_known_files: z .object({ site_id: z .string() .describe( "The site's unique ID, used to remove well-known files.", ), fileNames: z .array(z.string()) .describe( "Array of file names to remove from the /.well-known/ directory.", ), }) .optional() .describe( "Remove one or more well-known files from the site.", ), list_site_activity_logs: z .object({ site_id: z .string() .describe( "The site's unique ID, used to list its activity log events.", ), limit: z .number() .optional() .describe( "Maximum number of records to return (max 100).", ), offset: z .number() .optional() .describe( "Offset used for pagination if the results have more than limit records.", ), }) .optional() .describe( "List activity log events for a site. Requires Enterprise hosting plan. 
To get logs for multiple sites, include multiple actions in the array — each is handled independently so a failure for one site won't prevent results from others.", ), }) .strict() .refine( (d) => [ d.list_301_redirects, d.create_301_redirect, d.update_301_redirect, d.delete_301_redirect, d.get_robots_txt, d.update_robots_txt, d.replace_robots_txt, d.delete_robots_txt, d.add_well_known_file, d.remove_well_known_files, d.list_site_activity_logs, ].filter(Boolean).length >= 1, { message: "Provide at least one of list_301_redirects, create_301_redirect, update_301_redirect, delete_301_redirect, get_robots_txt, update_robots_txt, replace_robots_txt, delete_robots_txt, add_well_known_file, remove_well_known_files, list_site_activity_logs.", }, ), ) .min(1) .describe("The actions to perform on the enterprise tool."), }, }, async ({ actions }) => { // Per-action try/catch (unlike other tools which use an outer try/catch) // because a workspace can contain a mix of Enterprise and non-Enterprise // sites. A 403 from a non-Enterprise site should not prevent results from // Enterprise sites in the same batch. 
const result: Content[] = []; let hasError = false; for (const action of actions) { try { if (action.list_301_redirects) { const content = await list301Redirects(action.list_301_redirects); result.push(textContent(content)); } if (action.create_301_redirect) { const content = await create301Redirect(action.create_301_redirect); result.push(textContent(content)); } if (action.update_301_redirect) { const content = await update301Redirect(action.update_301_redirect); result.push(textContent(content)); } if (action.delete_301_redirect) { const content = await delete301Redirect(action.delete_301_redirect); result.push(textContent(content)); } if (action.get_robots_txt) { const content = await getRobotsDotTxt(action.get_robots_txt); result.push(textContent(content)); } if (action.update_robots_txt) { const content = await updateRobotsDotTxt(action.update_robots_txt); result.push(textContent(content)); } if (action.replace_robots_txt) { const content = await replaceRobotsDotTxt( action.replace_robots_txt, ); result.push(textContent(content)); } if (action.delete_robots_txt) { const content = await deleteRobotsDotTxt(action.delete_robots_txt); result.push(textContent(content)); } if (action.add_well_known_file) { const content = await addWellKnownFile(action.add_well_known_file); result.push(textContent(content)); } if (action.remove_well_known_files) { const content = await removeWellKnownFiles( action.remove_well_known_files, ); result.push(textContent(content)); } if (action.list_site_activity_logs) { const content = await listSiteActivityLogs( action.list_site_activity_logs, ); result.push(textContent(content)); } } catch (error) { hasError = true; const err = error as { statusCode?: number; body?: { code?: string }; }; if ( err.statusCode === 403 && (err.body?.code === "not_enterprise_plan_site" || err.body?.code === "not_enterprise_plan_workspace") ) { result.push( textContent({ error: true, message: "This site does not have an Enterprise hosting plan. 
This action requires an Enterprise site plan.", }), ); } else { result.push(...formatErrorResponse(error).content); } } } return { ...toolResponse(result), isError: hasError }; }, ); - src/tools/enterprise.ts:198-486 (schema) — The Zod input schema defining the actions array with 11 possible sub-actions (301 redirect CRUD, robots.txt operations, well-known file management, site activity logs). Uses .strict() and .refine() to ensure at least one action is provided.
inputSchema: { actions: z .array( z .object({ list_301_redirects: z .object({ site_id: z .string() .describe( "The site's unique ID, used to list its 301 redirects.", ), }) .optional() .describe("List all 301 redirects for a site."), create_301_redirect: z .object({ site_id: z .string() .describe( "The site's unique ID, used to create a 301 redirect.", ), fromUrl: z .string() .describe( "The source URL path that will be redirected (e.g., '/old-page').", ), toUrl: z .string() .describe( "The destination URL path where requests will be redirected to (e.g., '/new-page').", ), }) .optional() .describe("Create a new 301 redirect for a site."), update_301_redirect: z .object({ site_id: z .string() .describe( "The site's unique ID, used to update a 301 redirect.", ), redirect_id: z .string() .describe( "The redirect's unique ID, used to identify which redirect to update.", ), fromUrl: z .string() .describe( "The source URL path that will be redirected (e.g., '/old-page').", ), toUrl: z .string() .describe( "The destination URL path where requests will be redirected to (e.g., '/new-page').", ), }) .optional() .describe("Update an existing 301 redirect."), delete_301_redirect: z .object({ site_id: z .string() .describe( "The site's unique ID, used to delete a 301 redirect.", ), redirect_id: z .string() .describe( "The redirect's unique ID, used to identify which redirect to delete.", ), }) .optional() .describe("Delete a 301 redirect from a site."), get_robots_txt: z .object({ site_id: z .string() .describe( "The site's unique ID, used to get its robots.txt configuration.", ), }) .optional() .describe("Get the robots.txt configuration for a site."), update_robots_txt: z .object({ site_id: z .string() .describe( "The site's unique ID, used to update its robots.txt.", ), rules: z .array( z.object({ userAgent: z .string() .describe( "The user agent to apply rules to (e.g., '*', 'Googlebot').", ), allow: z .array(z.string()) .describe("Array of URL paths to allow."), disallow: 
z .array(z.string()) .describe("Array of URL paths to disallow."), }), ) .optional() .describe( "Array of rules to apply to the robots.txt file.", ), sitemap: z .string() .optional() .describe( "URL to the sitemap (e.g., 'https://example.com/sitemap.xml').", ), }) .optional() .describe( "Partially update the robots.txt file (PATCH operation).", ), replace_robots_txt: z .object({ site_id: z .string() .describe( "The site's unique ID, used to replace its robots.txt.", ), rules: z .array( z.object({ userAgent: z .string() .describe( "The user agent to apply rules to (e.g., '*', 'Googlebot').", ), allow: z .array(z.string()) .describe("Array of URL paths to allow."), disallow: z .array(z.string()) .describe("Array of URL paths to disallow."), }), ) .optional() .describe( "Array of rules to apply to the robots.txt file.", ), sitemap: z .string() .optional() .describe( "URL to the sitemap (e.g., 'https://example.com/sitemap.xml').", ), }) .optional() .describe( "Completely replace the robots.txt file (PUT operation).", ), delete_robots_txt: z .object({ site_id: z .string() .describe( "The site's unique ID, used to delete rules from its robots.txt.", ), rules: z .array( z.object({ userAgent: z .string() .describe( "The user agent to apply rules to (e.g., '*', 'Googlebot').", ), allow: z .array(z.string()) .describe("Array of URL paths to allow."), disallow: z .array(z.string()) .describe("Array of URL paths to disallow."), }), ) .optional() .describe( "Array of rules to remove from the robots.txt file.", ), sitemap: z .string() .optional() .describe("Sitemap URL to remove."), }) .optional() .describe("Delete specific rules from the robots.txt file."), add_well_known_file: z .object({ site_id: z .string() .describe( "The site's unique ID, used to add a well-known file.", ), fileName: z .string() .describe( `The name of the well-known file (e.g., 'apple-app-site-association', 'assetlinks.json'). ".noext" is a special file extension that removes other extensions. 
For example, apple-app-site-association.noext.txt will be uploaded as apple-app-site-association. Use this extension for tools that have trouble uploading extensionless files.`, ), fileData: z .string() .describe( "The content/data of the well-known file as a string.", ), contentType: z .enum(["application/json", "text/plain"]) .describe( "The MIME type of the file content (application/json or text/plain).", ), }) .optional() .describe( "Add or update a well-known file to the site's /.well-known/ directory.", ), remove_well_known_files: z .object({ site_id: z .string() .describe( "The site's unique ID, used to remove well-known files.", ), fileNames: z .array(z.string()) .describe( "Array of file names to remove from the /.well-known/ directory.", ), }) .optional() .describe( "Remove one or more well-known files from the site.", ), list_site_activity_logs: z .object({ site_id: z .string() .describe( "The site's unique ID, used to list its activity log events.", ), limit: z .number() .optional() .describe( "Maximum number of records to return (max 100).", ), offset: z .number() .optional() .describe( "Offset used for pagination if the results have more than limit records.", ), }) .optional() .describe( "List activity log events for a site. Requires Enterprise hosting plan. 
To get logs for multiple sites, include multiple actions in the array — each is handled independently so a failure for one site won't prevent results from others.", ), }) .strict() .refine( (d) => [ d.list_301_redirects, d.create_301_redirect, d.update_301_redirect, d.delete_301_redirect, d.get_robots_txt, d.update_robots_txt, d.replace_robots_txt, d.delete_robots_txt, d.add_well_known_file, d.remove_well_known_files, d.list_site_activity_logs, ].filter(Boolean).length >= 1, { message: "Provide at least one of list_301_redirects, create_301_redirect, update_301_redirect, delete_301_redirect, get_robots_txt, update_robots_txt, replace_robots_txt, delete_robots_txt, add_well_known_file, remove_well_known_files, list_site_activity_logs.", }, ), ) .min(1) .describe("The actions to perform on the enterprise tool."), }, - src/mcp.ts:48-62 (registration)The function registerEnterpriseTools is called from registerTools in src/mcp.ts, which is the entry point for registering all tools including the data_enterprise_tool.
export function registerTools( server: McpServer, getClient: () => WebflowClient, getAccessToken: () => string, ) { registerAiChatTools(server); registerCmsTools(server, getClient); registerComponentsTools(server, getClient); registerPagesTools(server, getClient); registerScriptsTools(server, getClient); registerSiteTools(server, getClient); registerCommentsTools(server, getClient); registerEnterpriseTools(server, getClient); registerWebhookTools(server, getClient); } - src/tools/enterprise.ts:17-23 (helper) — One of the 11 helper functions that perform the actual API calls via the Webflow client (redirects CRUD, robots.txt operations, well-known file management, activity logs); shown below is list301Redirects, and the other helpers follow the same pattern. These are called by the tool handler.
const list301Redirects = async (arg: { site_id: string }) => { const response = await getClient().sites.redirects.list( arg.site_id, requestOptions, ); return response; };