Skip to main content
Glama

Google Drive MCP Server

by ducla5
automation-scripts.sh (11.2 kB)
#!/bin/bash
#
# Google Drive MCP Server - Automation Scripts
# Example automation scripts for common workflows.
#
# Usage:    ./automation-scripts.sh <command> [arguments]
# Requires: npx (google-drive-mcp CLI), jq.
#           weekly-backup additionally needs GNU date (`date -d`).

set -euo pipefail

#######################################
# Script 1: Daily Report Processing
# Searches a fixed Drive folder for reports modified today and saves,
# per report: metadata JSON, a "summary" excerpt, and the full content.
# Outputs: files under ./daily_reports_<YYYY-MM-DD>/
#######################################
daily_report_processing() {
  echo "=== Daily Report Processing ==="

  local date report_folder output_dir
  date=$(date +%Y-%m-%d)
  report_folder="1BxiMVs0XRA5nFMdKvBdBZjgmUUqptlbs74OgvE2upms"  # Replace with actual folder ID
  output_dir="./daily_reports_${date}"

  echo "Processing daily reports for $date"
  mkdir -p "$output_dir"

  echo "Searching for reports modified today..."
  npx google-drive-mcp search \
    --query "daily report" \
    --folder-id "$report_folder" \
    --modified-after "$date" \
    --format json > "${output_dir}/reports.json"

  # Split declaration from assignment so a jq failure is not masked.
  local report_count
  report_count=$(jq '.files | length' "${output_dir}/reports.json")
  echo "Found $report_count reports"

  if [ "$report_count" -gt 0 ]; then
    # NOTE: the loop runs in a pipeline subshell; no variables set inside
    # need to survive it, so that is fine here.
    jq -r '.files[].id' "${output_dir}/reports.json" | while IFS= read -r file_id; do
      [[ -n "$file_id" ]] || continue
      echo "Processing report: $file_id"

      # Get file metadata
      npx google-drive-mcp info --file-id "$file_id" --format json \
        > "${output_dir}/metadata_${file_id}.json"

      # Extract summary section
      npx google-drive-mcp content \
        --file-id "$file_id" \
        --search "summary" \
        --context-lines 5 \
        --output "${output_dir}/summary_${file_id}.md"

      # Extract full content for backup
      npx google-drive-mcp content \
        --file-id "$file_id" \
        --output "${output_dir}/full_${file_id}.md"
    done
    echo "Daily report processing complete. Files saved to $output_dir"
  else
    echo "No reports found for today"
  fi
}

#######################################
# Script 2: Weekly Document Backup
# Backs up documents modified in the last 7 days: content as Markdown
# plus a metadata JSON per file, and writes a BACKUP_SUMMARY.txt.
# Outputs: files under ./backup_<YYYY-MM-DD>/
# NOTE: `date -d "7 days ago"` is GNU-date-only (not macOS/BSD).
#######################################
weekly_backup() {
  echo "=== Weekly Document Backup ==="

  local week_start today backup_dir
  week_start=$(date -d "7 days ago" +%Y-%m-%d)
  today=$(date +%Y-%m-%d)
  backup_dir="./backup_${today}"

  echo "Backing up documents modified between $week_start and $today"
  mkdir -p "$backup_dir"

  # Search for recently modified documents
  npx google-drive-mcp search \
    --query "*" \
    --type "document,pdf,spreadsheet,presentation" \
    --modified-after "$week_start" \
    --modified-before "$today" \
    --limit 50 \
    --format json > "${backup_dir}/files_to_backup.json"

  local file_count
  file_count=$(jq '.files | length' "${backup_dir}/files_to_backup.json")
  echo "Found $file_count files to backup"

  if [ "$file_count" -gt 0 ]; then
    # @base64 round-trip keeps each file object on one line so names with
    # whitespace/newlines survive the read loop.
    jq -r '.files[] | @base64' "${backup_dir}/files_to_backup.json" \
      | while IFS= read -r file_data; do
        local file_info file_id file_name safe_name
        file_info=$(echo "$file_data" | base64 --decode)
        file_id=$(echo "$file_info" | jq -r '.id')
        file_name=$(echo "$file_info" | jq -r '.name' | tr '/' '_')

        echo "Backing up: $file_name"

        # Create safe filename (whitelist: alnum, dot, underscore, dash)
        safe_name=$(echo "$file_name" | sed 's/[^a-zA-Z0-9._-]/_/g')

        # Backup content
        npx google-drive-mcp content \
          --file-id "$file_id" \
          --output "${backup_dir}/${safe_name}.md"

        # Backup metadata
        npx google-drive-mcp info \
          --file-id "$file_id" \
          --format json > "${backup_dir}/${safe_name}_metadata.json"
      done
    echo "Weekly backup complete. Files saved to $backup_dir"

    # Create backup summary
    {
      echo "Backup Summary - $(date)"
      echo "Period: $week_start to $today"
      echo "Files backed up: $file_count"
      echo "Backup location: $backup_dir"
    } > "${backup_dir}/BACKUP_SUMMARY.txt"
  else
    echo "No files found for backup"
  fi
}

#######################################
# Script 3: Content Analysis Pipeline
# Finds documents/PDFs matching a search term, extracts the matching
# context from each, and builds a simple word/line-count report.
# Arguments: $1 - search term (required)
# Returns:   1 when no search term is given.
# Outputs:   files under ./analysis_<timestamp>/
#######################################
content_analysis() {
  echo "=== Content Analysis Pipeline ==="

  local search_term analysis_dir
  search_term="${1:-}"
  analysis_dir="./analysis_$(date +%Y%m%d_%H%M%S)"

  if [ -z "$search_term" ]; then
    echo "Usage: content_analysis <search_term>"
    return 1
  fi

  echo "Analyzing content for term: $search_term"
  mkdir -p "$analysis_dir"

  echo "Searching for files containing '$search_term'..."
  npx google-drive-mcp search \
    --query "$search_term" \
    --type "document,pdf" \
    --limit 20 \
    --format json > "${analysis_dir}/search_results.json"

  local file_count
  file_count=$(jq '.files | length' "${analysis_dir}/search_results.json")
  echo "Found $file_count relevant files"

  if [ "$file_count" -gt 0 ]; then
    # Initialize analysis results
    {
      echo "File Analysis Results"
      echo "Search Term: $search_term"
      echo "Date: $(date)"
      echo "Files Analyzed: $file_count"
      echo ""
    } > "${analysis_dir}/analysis_report.txt"

    # Analyze each file
    jq -r '.files[] | @base64' "${analysis_dir}/search_results.json" \
      | while IFS= read -r file_data; do
        local file_info file_id file_name
        file_info=$(echo "$file_data" | base64 --decode)
        file_id=$(echo "$file_info" | jq -r '.id')
        file_name=$(echo "$file_info" | jq -r '.name')

        echo "Analyzing: $file_name"

        # Extract content around search term
        npx google-drive-mcp content \
          --file-id "$file_id" \
          --search "$search_term" \
          --context-lines 3 \
          --output "${analysis_dir}/content_${file_id}.md"

        # Get file metadata
        npx google-drive-mcp info \
          --file-id "$file_id" \
          --format json > "${analysis_dir}/meta_${file_id}.json"

        # Add to analysis report
        echo "File: $file_name" >> "${analysis_dir}/analysis_report.txt"
        echo "ID: $file_id" >> "${analysis_dir}/analysis_report.txt"

        # Extract word count and other metrics
        if [ -f "${analysis_dir}/content_${file_id}.md" ]; then
          local word_count line_count
          word_count=$(wc -w < "${analysis_dir}/content_${file_id}.md")
          line_count=$(wc -l < "${analysis_dir}/content_${file_id}.md")
          echo "Word Count: $word_count" >> "${analysis_dir}/analysis_report.txt"
          echo "Line Count: $line_count" >> "${analysis_dir}/analysis_report.txt"
        fi
        echo "" >> "${analysis_dir}/analysis_report.txt"
      done

    echo "Content analysis complete. Results saved to $analysis_dir"
    echo "View the analysis report: cat ${analysis_dir}/analysis_report.txt"
  else
    echo "No files found containing '$search_term'"
  fi
}

#######################################
# Script 4: Cache Maintenance
# Shows cache stats, prunes entries older than 7 days, and shows stats
# again. (Orphaned-entry detection is a TODO — needs per-file existence
# checks against Drive.)
#######################################
cache_maintenance() {
  echo "=== Cache Maintenance ==="

  # Show current cache status
  echo "Current cache status:"
  npx google-drive-mcp cache stats
  echo

  # Clean up old cache entries
  echo "Cleaning up cache entries older than 7 days..."
  npx google-drive-mcp cache cleanup --older-than 7d
  echo

  # Clear cache for files that no longer exist
  echo "Checking for orphaned cache entries..."
  # This would require custom logic to check if files still exist

  # Show updated cache status
  echo "Updated cache status:"
  npx google-drive-mcp cache stats
  echo

  echo "Cache maintenance complete"
}

#######################################
# Script 5: Health Check and Monitoring
# Runs auth/search/cache/config checks plus disk and memory snapshots,
# logging everything to a timestamped report file.
# Outputs: ./health_check_<timestamp>.log
#######################################
health_monitoring() {
  echo "=== Health Check and Monitoring ==="

  local log_file
  log_file="./health_check_$(date +%Y%m%d_%H%M%S).log"

  echo "Running comprehensive health check..."
  {
    echo "Health Check Report - $(date)"
    echo "================================="
    echo ""
  } > "$log_file"

  # Test authentication
  echo "Testing authentication..." | tee -a "$log_file"
  if npx google-drive-mcp test auth >> "$log_file" 2>&1; then
    echo "✓ Authentication: OK" | tee -a "$log_file"
  else
    echo "✗ Authentication: FAILED" | tee -a "$log_file"
  fi
  echo "" >> "$log_file"

  # Test basic functionality
  echo "Testing basic search functionality..." | tee -a "$log_file"
  if npx google-drive-mcp search --query "test" --limit 1 >> "$log_file" 2>&1; then
    echo "✓ Search: OK" | tee -a "$log_file"
  else
    echo "✗ Search: FAILED" | tee -a "$log_file"
  fi
  echo "" >> "$log_file"

  # Check cache health
  echo "Checking cache health..." | tee -a "$log_file"
  npx google-drive-mcp cache stats >> "$log_file" 2>&1
  echo "" >> "$log_file"

  # Check configuration
  echo "Validating configuration..." | tee -a "$log_file"
  if npx google-drive-mcp config validate >> "$log_file" 2>&1; then
    echo "✓ Configuration: OK" | tee -a "$log_file"
  else
    echo "✗ Configuration: ISSUES FOUND" | tee -a "$log_file"
  fi
  echo "" >> "$log_file"

  # System resources
  {
    echo "System Resources:"
    echo "Disk Usage:"
    df -h 2>&1
    echo ""
    echo "Memory Usage:"
    free -h 2>&1
    echo ""
  } >> "$log_file"

  echo "Health check complete. Report saved to: $log_file"
}

#######################################
# Dispatcher: map the first CLI argument to a workflow.
# "${1:-}" keeps the no-argument case safe under `set -u`.
#######################################
main() {
  case "${1:-}" in
    "daily-reports")
      daily_report_processing
      ;;
    "weekly-backup")
      weekly_backup
      ;;
    "analyze")
      content_analysis "${2:-}"
      ;;
    "cache-maintenance")
      cache_maintenance
      ;;
    "health-check")
      health_monitoring
      ;;
    *)
      echo "Google Drive MCP Server - Automation Scripts"
      echo ""
      echo "Usage: $0 <command> [arguments]"
      echo ""
      echo "Available commands:"
      echo "  daily-reports     - Process daily reports"
      echo "  weekly-backup     - Backup weekly documents"
      echo "  analyze <term>    - Analyze content for specific term"
      echo "  cache-maintenance - Perform cache maintenance"
      echo "  health-check      - Run comprehensive health check"
      echo ""
      echo "Examples:"
      echo "  $0 daily-reports"
      echo "  $0 weekly-backup"
      echo "  $0 analyze 'quarterly results'"
      echo "  $0 cache-maintenance"
      echo "  $0 health-check"
      ;;
  esac
}

main "$@"

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/ducla5/gdriver-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.