restore_batch_backup

Restore multiple notes from a batch backup to undo previous bulk operations in your Obsidian vault.

Instructions

Restore notes from a batch backup (undo batch operation)

Input Schema

Name       Required  Description                             Default
---------  --------  --------------------------------------  -------
backup_id  Yes       Backup ID (timestamp) to restore from   (none)
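
A call to this tool passes a single argument: the backup ID returned by create_batch_backup, which (per the helper below) is a YYYYMMDD_HHMMSS timestamp. The value here is illustrative:

    {
      "backup_id": "20240115_143052"
    }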

Implementation Reference

  • Core handler function that executes the restore logic: it locates the backup directory, finds all .md files recursively, restores them concurrently via asyncio.gather with async file I/O, preserves file metadata, and returns the list of restored paths.
    async def restore_batch_backup(self, backup_id: str) -> list[str]:
        """
        Restore notes from a batch backup asynchronously.

        Args:
            backup_id: Backup ID (timestamp) to restore from

        Returns:
            List of restored note paths

        Raises:
            FileNotFoundError: If backup doesn't exist
        """
        backup_dir = self.vault_path / ".batch_backups" / backup_id
        if not backup_dir.exists():
            raise FileNotFoundError(f"Backup not found: {backup_id}")

        logger.info(f"Restoring batch backup {backup_id}...")

        # Get all backup files
        backup_files = list(backup_dir.rglob("*.md"))

        # Restore all files asynchronously
        async def restore_file(i: int, backup_file: Path) -> str:
            # Get relative path from backup directory
            rel_path = backup_file.relative_to(backup_dir)
            target_file = self.vault_path / rel_path

            # Ensure parent directory exists
            target_file.parent.mkdir(parents=True, exist_ok=True)

            # Restore file
            async with aiofiles.open(backup_file, "rb") as src:
                content = await src.read()
            async with aiofiles.open(target_file, "wb") as dst:
                await dst.write(content)

            # Preserve metadata
            shutil.copystat(backup_file, target_file)
            logger.debug(f"Restored ({i}): {rel_path}")
            return str(rel_path)

        # Run all restores concurrently
        restored = await asyncio.gather(
            *[restore_file(i, backup_file) for i, backup_file in enumerate(backup_files, 1)]
        )

        logger.info(f"Completed batch restore: {backup_id} ({len(restored)} notes)")
        return list(restored)
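
The directory layout the restore walks is the one create_batch_backup produces under the vault root: each backup ID names a folder that mirrors the notes' vault-relative paths. The file names here are illustrative:

    .batch_backups/
    └── 20240115_143052/          # backup_id (timestamp)
        └── projects/
            └── roadmap.md        # mirrors the note's vault-relative path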
  • MCP tool registration using @mcp.tool(name="restore_batch_backup") and a wrapper handler that validates input, delegates to ObsidianVault.restore_batch_backup, formats the result as a human-readable string list, and handles errors.
    @mcp.tool(
        name="restore_batch_backup",
        description="Restore notes from a batch backup (undo batch operation)",
    )
    async def restore_batch_backup(backup_id: str) -> str:
        """
        Restore notes from a batch backup.

        Args:
            backup_id: Backup ID (timestamp) to restore from

        Returns:
            Success message with restored note count
        """
        if not backup_id or not backup_id.strip():
            return "Error: Backup ID cannot be empty"

        context = _get_context()

        try:
            restored = await context.vault.restore_batch_backup(backup_id)
            result = [f"✅ Restored {len(restored)} notes from backup `{backup_id}`\n"]
            for path in restored:
                result.append(f"- `{path}`")
            return "\n".join(result)
        except FileNotFoundError as e:
            return f"Error: {e}"
        except Exception as e:
            logger.exception("Error restoring backup")
            return f"Error: {e}"
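
Following the string formatting in the wrapper, a successful call returns a header line followed by one entry per restored note (paths here are illustrative):

    ✅ Restored 2 notes from backup `20240115_143052`

    - `projects/roadmap.md`
    - `projects/notes.md`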
  • Companion helper method create_batch_backup that creates the timestamped backup directories used by restore_batch_backup, enabling atomic batch operations with rollback support.
    async def create_batch_backup(self, relative_paths: list[str]) -> str:
        """
        Create a backup of multiple notes asynchronously.

        Args:
            relative_paths: List of note paths to backup

        Returns:
            Backup ID (timestamp) for later restoration

        Raises:
            VaultSecurityError: If any path is invalid
            FileNotFoundError: If any note doesn't exist
        """
        # Validate all paths first
        file_paths = []
        for rel_path in relative_paths:
            file_path = self._validate_path(rel_path)
            if not file_path.exists():
                raise FileNotFoundError(f"Note not found: {rel_path}")
            file_paths.append((rel_path, file_path))

        # Create backup directory with timestamp
        backup_id = datetime.now().strftime("%Y%m%d_%H%M%S")
        backup_dir = self.vault_path / ".batch_backups" / backup_id
        backup_dir.mkdir(parents=True, exist_ok=True)

        logger.info(f"Creating batch backup {backup_id}: {len(relative_paths)} files...")

        # Copy all files to backup asynchronously
        async def copy_file(i: int, rel_path: str, file_path: Path) -> None:
            backup_file = backup_dir / rel_path
            backup_file.parent.mkdir(parents=True, exist_ok=True)

            # Use async file operations
            async with aiofiles.open(file_path, "rb") as src:
                content = await src.read()
            async with aiofiles.open(backup_file, "wb") as dst:
                await dst.write(content)

            # Preserve metadata
            shutil.copystat(file_path, backup_file)
            logger.debug(f"Backed up ({i}/{len(file_paths)}): {rel_path}")

        # Run all copies concurrently
        await asyncio.gather(
            *[
                copy_file(i, rel_path, file_path)
                for i, (rel_path, file_path) in enumerate(file_paths, 1)
            ]
        )

        logger.info(f"Completed batch backup: {backup_id} ({len(relative_paths)} notes)")
        return backup_id
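
As a rough end-to-end sketch of the backup/restore pairing: this assumes an ObsidianVault class exposing both methods and taking the vault path in its constructor, which this page does not show, and the note paths are hypothetical:

    import asyncio

    async def main() -> None:
        # Assumed constructor; the page documents only the two methods.
        vault = ObsidianVault("/path/to/vault")

        # Snapshot the notes before a risky bulk edit.
        paths = ["projects/roadmap.md", "projects/notes.md"]
        backup_id = await vault.create_batch_backup(paths)

        try:
            ...  # perform the bulk operation here
        except Exception:
            # Roll back to the pre-operation snapshot.
            restored = await vault.restore_batch_backup(backup_id)
            print(f"Rolled back {len(restored)} notes from backup {backup_id}")

    asyncio.run(main())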
