aws_s3
Manage AWS S3 buckets and objects by generating and running Ansible playbooks that list, create, and delete buckets, list objects, and upload and download files.
Instructions
Manage AWS S3 buckets and objects
Input Schema
| Name | Required | Description | Default |
|---|---|---|---|
| action | Yes | S3 operation to perform: `list_buckets`, `create_bucket`, `delete_bucket`, `list_objects`, `upload`, or `download` | |
| region | Yes | AWS region in which to operate | |
| bucket | No | Bucket name; required for every action except `list_buckets` | |
| objectKey | No | Object key; required for `upload` and `download` | |
| localPath | No | Local file path used as the upload source or the download destination | |
| acl | No | Canned ACL applied to the created bucket or uploaded object | |
| tags | No | Key/value tags (string map) applied on bucket creation or upload | |
| metadata | No | Object metadata key/value pairs, applied on `upload` | |
| contentType | No | Content type (MIME type) of the uploaded object | |
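To illustrate how the fields combine, here are two hypothetical argument objects (bucket names and paths are placeholders); the field names follow the schema shown under Implementation Reference below.

```typescript
// Illustrative aws_s3 arguments; all values are placeholders.
const listBucketsArgs = {
  action: 'list_buckets',
  region: 'us-east-1', // only action and region are needed to list buckets
};

const uploadArgs = {
  action: 'upload',
  region: 'us-east-1',
  bucket: 'my-example-bucket',     // target bucket
  objectKey: 'backups/db.sql.gz',  // key under which the object is stored
  localPath: '/tmp/db.sql.gz',     // local file to upload
  acl: 'private',                  // optional canned ACL
  contentType: 'application/gzip', // optional content type
  tags: { env: 'dev' },            // optional string tags
};
```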
Implementation Reference
- The main handler function for the 'aws_s3' tool. It verifies AWS credentials, destructures input args, dynamically generates an Ansible playbook YAML based on the specified S3 action (list_buckets, create_bucket, etc.), and executes it using the executeAwsPlaybook helper.

```typescript
export async function s3Operations(args: S3Options): Promise<string> {
  await verifyAwsCredentials();

  const { action, region, bucket, objectKey, localPath, acl, tags, metadata, contentType } = args;

  let playbookContent = `---
- name: AWS S3 ${action} operation
  hosts: localhost
  connection: local
  gather_facts: no
  tasks:`;

  switch (action) {
    case 'list_buckets':
      playbookContent += `
    - name: List S3 buckets
      amazon.aws.s3_bucket_info:
        region: "${region}"
      register: s3_buckets

    - name: Display buckets
      debug:
        var: s3_buckets.buckets`;
      break;

    case 'create_bucket':
      playbookContent += `
    - name: Create S3 bucket
      amazon.aws.s3_bucket:
        region: "${region}"
        name: "${bucket}"
        state: present
${formatYamlParams({ tags, acl })}
      register: s3_create

    - name: Display creation result
      debug:
        var: s3_create`;
      break;

    case 'delete_bucket':
      playbookContent += `
    - name: Delete S3 bucket
      amazon.aws.s3_bucket:
        region: "${region}"
        name: "${bucket}"
        state: absent
        force: true
      register: s3_delete

    - name: Display deletion result
      debug:
        var: s3_delete`;
      break;

    case 'list_objects':
      playbookContent += `
    - name: List S3 objects
      amazon.aws.s3_object:
        region: "${region}"
        bucket: "${bucket}"
        mode: list
      register: s3_objects

    - name: Display objects
      debug:
        var: s3_objects.keys`;
      break;

    case 'upload':
      playbookContent += `
    - name: Upload file to S3
      amazon.aws.s3_object:
        region: "${region}"
        bucket: "${bucket}"
        object: "${objectKey}"
        src: "${localPath}"
        mode: put
${formatYamlParams({ acl, tags, metadata, content_type: contentType })}
      register: s3_upload

    - name: Display upload result
      debug:
        var: s3_upload`;
      break;

    case 'download':
      playbookContent += `
    - name: Download file from S3
      amazon.aws.s3_object:
        region: "${region}"
        bucket: "${bucket}"
        object: "${objectKey}"
        dest: "${localPath}"
        mode: get
      register: s3_download

    - name: Display download result
      debug:
        var: s3_download`;
      break;

    default:
      throw new AnsibleError(`Unsupported S3 action: ${action}`);
  }

  // Execute the generated playbook
  return executeAwsPlaybook(`s3-${action}`, playbookContent);
}
```
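For orientation, here is a hedged usage sketch: the import path and the bucket/path values are assumptions, but the argument shape follows S3Options and the `upload` branch above. The handler renders a playbook containing an `amazon.aws.s3_object` task with `mode: put` plus a debug task, then returns the ansible-playbook stdout.

```typescript
// Illustrative only; assumes s3Operations is exported from an "aws" module as in the registration below.
import { s3Operations } from './aws';

const output = await s3Operations({
  action: 'upload',
  region: 'eu-west-1',                    // required by the schema
  bucket: 'my-example-bucket',            // placeholder bucket name
  objectKey: 'reports/latest.csv',        // destination key in the bucket
  localPath: '/data/reports/latest.csv',  // local file passed as src in the playbook
  contentType: 'text/csv',                // forwarded via formatYamlParams as content_type
});
console.log(output); // stdout from ansible-playbook, as returned by executeAwsPlaybook
```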
- Zod schema defining the input parameters for the aws_s3 tool, including the action enum and optional fields like bucket, objectKey, etc., used for validation in the tool handler.

```typescript
export const S3ActionEnum = z.enum(['list_buckets', 'create_bucket', 'delete_bucket', 'list_objects', 'upload', 'download']);
export type S3Action = z.infer<typeof S3ActionEnum>;

export const VPCActionEnum = z.enum(['list', 'create', 'delete']);
export type VPCAction = z.infer<typeof VPCActionEnum>;

export const CloudFormationActionEnum = z.enum(['list', 'create', 'update', 'delete']);
export type CloudFormationAction = z.infer<typeof CloudFormationActionEnum>;

export const IAMActionEnum = z.enum(['list_roles', 'list_policies', 'create_role', 'create_policy', 'delete_role', 'delete_policy']);
export type IAMAction = z.infer<typeof IAMActionEnum>;

export const RDSActionEnum = z.enum(['list', 'create', 'delete', 'start', 'stop']);
export type RDSAction = z.infer<typeof RDSActionEnum>;

export const Route53ActionEnum = z.enum(['list_zones', 'list_records', 'create_zone', 'create_record', 'delete_record', 'delete_zone']);
export type Route53Action = z.infer<typeof Route53ActionEnum>;

export const ELBActionEnum = z.enum(['list', 'create', 'delete']);
export type ELBAction = z.infer<typeof ELBActionEnum>;

export const LambdaActionEnum = z.enum(['list', 'create', 'update', 'delete', 'invoke']);
export type LambdaAction = z.infer<typeof LambdaActionEnum>;

// AWS EC2 Schema
export const EC2InstanceSchema = z.object({
  action: EC2InstanceActionEnum,
  region: z.string().min(1, 'AWS region is required'),
  instanceIds: z.array(z.string()).optional(),
  filters: z.record(z.any()).optional(),
  instanceType: z.string().optional(),
  imageId: z.string().optional(),
  keyName: z.string().optional(),
  securityGroups: z.array(z.string()).optional(),
  userData: z.string().optional(),
  count: z.number().optional(),
  tags: z.record(z.string()).optional(),
  waitForCompletion: z.boolean().optional().default(true),
  terminationProtection: z.boolean().optional()
});
export type EC2InstanceOptions = z.infer<typeof EC2InstanceSchema>;

// AWS S3 Schema
export const S3Schema = z.object({
  action: S3ActionEnum,
  region: z.string().min(1, 'AWS region is required'),
  bucket: z.string().optional(),
  objectKey: z.string().optional(),
  localPath: z.string().optional(),
  acl: z.string().optional(),
  tags: z.record(z.string()).optional(),
  metadata: z.record(z.string()).optional(),
  contentType: z.string().optional()
});
export type S3Options = z.infer<typeof S3Schema>;
```
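A minimal sketch of how the schema is typically applied before the handler runs; the import path is an assumption, but `safeParse` and the issue list are standard zod API.

```typescript
import { S3Schema, type S3Options } from './schemas'; // assumed module path

// Validate raw tool input before dispatching to s3Operations.
const parsed = S3Schema.safeParse({
  action: 'create_bucket',
  region: 'us-west-2',
  bucket: 'my-example-bucket',
  tags: { team: 'platform' },
});

if (!parsed.success) {
  // zod reports which field failed, e.g. an empty region triggers "AWS region is required"
  throw new Error(parsed.error.issues.map((i) => i.message).join('; '));
}

const args: S3Options = parsed.data; // typed and validated
```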
- src/sysoperator/index.ts:96-100 (registration): Registration of the 'aws_s3' tool in the toolDefinitions record, mapping the tool name to its description, input schema (aws.S3Schema), and handler function (aws.s3Operations). This makes it available via the MCP server.

```typescript
aws_s3: {
  description: 'Manage AWS S3 buckets and objects',
  schema: aws.S3Schema,
  handler: aws.s3Operations,
},
```
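The snippet below is a simplified sketch of how such a registration can be consumed; the dispatch helper is illustrative rather than taken from the project, but it mirrors the description/schema/handler shape shown above.

```typescript
// Illustrative dispatcher over the toolDefinitions record (not the actual MCP server code).
async function callTool(name: string, rawArgs: unknown): Promise<string> {
  const def = (toolDefinitions as Record<string, any>)[name];
  if (!def) throw new Error(`Unknown tool: ${name}`);
  // Validate input against the registered zod schema, then invoke the handler.
  const args = def.schema.parse(rawArgs);
  return def.handler(args);
}

// Example: callTool('aws_s3', { action: 'list_buckets', region: 'us-east-1' });
```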
- Helper function executeAwsPlaybook used by the aws_s3 handler (and other AWS handlers) to create temporary directories, write playbook files, execute the ansible-playbook command, handle errors, and clean up.

```typescript
async function executeAwsPlaybook(
  operationName: string,
  playbookContent: string,
  extraParams: string = '',
  tempFiles: { filename: string, content: string }[] = [] // For additional files like templates, policies
): Promise<string> {
  let tempDir: string | undefined;

  try {
    // Create a unique temporary directory
    tempDir = await createTempDirectory(`ansible-aws-${operationName}`);

    // Write the main playbook file
    const playbookPath = await writeTempFile(tempDir, 'playbook.yml', playbookContent);

    // Write any additional temporary files
    for (const file of tempFiles) {
      await writeTempFile(tempDir, file.filename, file.content);
    }

    // Build the command
    const command = `ansible-playbook ${playbookPath} ${extraParams}`;
    console.error(`Executing: ${command}`);

    // Execute the playbook asynchronously
    const { stdout, stderr } = await execAsync(command);

    // Return stdout, or a success message if stdout is empty
    return stdout || `${operationName} completed successfully (no output).`;
  } catch (error: any) {
    // Handle execution errors
    const errorMessage = error.stderr || error.message || 'Unknown error';
    throw new AnsibleExecutionError(`Ansible execution failed for ${operationName}: ${errorMessage}`, error.stderr);
  } finally {
    // Ensure cleanup happens even if errors occur
    if (tempDir) {
      await cleanupTempDirectory(tempDir);
    }
  }
}
```
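formatYamlParams is referenced by the s3Operations handler but not shown in this reference. Below is a plausible sketch; it is hypothetical (the project's actual helper may differ) and simply drops unset values and renders the remaining keys as indented YAML parameter lines.

```typescript
// Hypothetical sketch of formatYamlParams; the real helper may behave differently.
function formatYamlParams(params: Record<string, unknown>, indent = '        '): string {
  return Object.entries(params)
    .filter(([, value]) => value !== undefined && value !== null) // omit unset optional fields
    .map(([key, value]) =>
      typeof value === 'object'
        ? `${indent}${key}: ${JSON.stringify(value)}` // maps (tags, metadata) rendered as YAML flow mappings
        : `${indent}${key}: "${value}"`
    )
    .join('\n');
}

// e.g. formatYamlParams({ acl: 'private', tags: { env: 'dev' } }) yields two
// 8-space-indented lines: acl: "private" and tags: {"env":"dev"}
```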