Skip to main content
Glama

cloud_sync

Upload your codebase to Relace Repos for semantic indexing, enabling cloud search functionality. Perform this action before using cloud_search or after significant code changes to maintain accurate search results.

Instructions

Upload codebase to Relace Repos for cloud_search semantic indexing.

Call this ONCE per session before using cloud_search, or after significant code changes. Incremental sync is fast (only changed files).

Sync Modes:

  • Incremental (default): only uploads new/modified files, deletes removed files

  • Safe Full: triggered by force=True OR first sync (no cached state) OR git HEAD changed (e.g., branch switch, rebase, commit amend). Uploads all files; suppresses delete operations UNLESS HEAD changed, in which case zombie files from the old ref are deleted to prevent stale results.

  • Mirror Full (force=True, mirror=True): completely overwrites cloud to match local

Args: force: If True, force full sync (ignore cached state). mirror: If True (with force=True), use Mirror Full mode to completely overwrite cloud repo (removes files not in local).

Input Schema

Table / JSON Schema

| Name   | Required | Description | Default |
|--------|----------|-------------|---------|
| force  | No       |             |         |
| mirror | No       |             |         |

Implementation Reference

  • MCP tool registration and handler for 'cloud_sync'. Resolves base directory dynamically and delegates to cloud_sync_logic.
    @mcp.tool async def cloud_sync( force: bool = False, mirror: bool = False, ctx: Context | None = None ) -> dict[str, Any]: """Upload codebase to Relace Repos for cloud_search semantic indexing. Call this ONCE per session before using cloud_search, or after significant code changes. Incremental sync is fast (only changed files). Sync Modes: - Incremental (default): only uploads new/modified files, deletes removed files - Safe Full: triggered by force=True OR first sync (no cached state) OR git HEAD changed (e.g., branch switch, rebase, commit amend). Uploads all files; suppresses delete operations UNLESS HEAD changed, in which case zombie files from the old ref are deleted to prevent stale results. - Mirror Full (force=True, mirror=True): completely overwrites cloud to match local Args: force: If True, force full sync (ignore cached state). mirror: If True (with force=True), use Mirror Full mode to completely overwrite cloud repo (removes files not in local). """ base_dir, _ = await resolve_base_dir(config.base_dir, ctx) return cloud_sync_logic(repo_client, base_dir, force=force, mirror=mirror)
  • Core cloud_sync implementation: handles git detection, file scanning, hashing, diff computation (create/update/delete), sync modes (incremental/safe_full/mirror_full), API calls, and state persistence.
def cloud_sync_logic(
    client: RelaceRepoClient,
    base_dir: str,
    force: bool = False,
    mirror: bool = False,
) -> dict[str, Any]:
    """Synchronize local codebase to Relace Cloud with incremental support.

    Args:
        client: RelaceRepoClient instance.
        base_dir: Base directory to sync.
        force: If True, force full sync ignoring cached state.
        mirror: If True (with force=True), use type="files" to completely
            overwrite cloud repo (removes files not in local).

    Returns:
        Dict containing:
        - repo_id: Repository ID
        - repo_name: Repository name
        - repo_head: New repo head after sync
        - is_incremental: Whether incremental sync was used
        - files_created: Number of new files
        - files_updated: Number of modified files
        - files_deleted: Number of deleted files
        - files_unchanged: Number of unchanged files
        - total_files: Total files in sync
        - local_git_branch: Current git branch name
        - local_git_head: Current git HEAD SHA (first 8 chars)
        - ref_changed: Whether git ref changed since last sync
        - sync_mode: "incremental" | "safe_full" | "mirror_full"
        - deletes_suppressed: Number of delete operations suppressed (safe_full mode)
        - error: Error message if failed (optional)
    """
    # Short correlation id so every log line of one sync run can be grepped.
    trace_id = str(uuid.uuid4())[:8]
    logger.info(
        "[%s] Starting cloud sync from %s (force=%s, mirror=%s)",
        trace_id,
        base_dir,
        force,
        mirror,
    )

    # Get current git info
    current_branch, current_head = get_current_git_info(base_dir)
    ref_changed = False
    deletes_suppressed = 0

    repo_name = client.get_repo_name_from_base_dir(base_dir)

    try:
        # Ensure repo exists
        repo_id = client.ensure_repo(repo_name, trace_id=trace_id)
        logger.info("[%s] Using repo '%s' (id=%s)", trace_id, repo_name, repo_id)

        # Load cached sync state (unless force)
        cached_state: SyncState | None = None
        if not force:
            cached_state = load_sync_state(repo_name)
            if cached_state and cached_state.repo_id != repo_id:
                # Repo ID mismatch, force full sync (cached state belongs to a
                # different cloud repo and cannot be trusted for diffing).
                logger.warning(
                    "[%s] Cached repo_id mismatch, forcing full sync",
                    trace_id,
                )
                cached_state = None

        # Detect git ref change -> trigger safe full sync.
        # Keep a reference to cached state for diff calculation even when
        # forcing safe_full, so deletes of old-ref files can still be computed.
        force_safe_full = False
        diff_state = cached_state  # State to use for diff operations
        if cached_state and current_head:
            old_head = cached_state.git_head_sha
            if old_head and old_head != current_head:
                logger.warning(
                    "[%s] Git HEAD changed (%s -> %s), switching to safe full sync",
                    trace_id,
                    old_head[:8],
                    current_head[:8],
                )
                ref_changed = True
                force_safe_full = True  # Mark for safe_full mode but keep diff_state

        # Determine sync mode (mirror_full > safe_full > incremental)
        if mirror and force:
            sync_mode = "mirror_full"
        elif cached_state is None or force_safe_full:
            sync_mode = "safe_full"
        else:
            sync_mode = "incremental"
        logger.info("[%s] Sync mode: %s", trace_id, sync_mode)
        is_incremental = sync_mode == "incremental"

        # Get file list (prefer git, fallback to directory scan)
        files = _get_git_tracked_files(base_dir)
        if files is None:
            logger.info("[%s] Git not available, using directory scan", trace_id)
            files = _scan_directory(base_dir)
        else:
            # Filter git files by extension or special filename
            files = [
                f
                for f in files
                if Path(f).suffix.lower() in CODE_EXTENSIONS
                or Path(f).name.lower() in SPECIAL_FILENAMES
            ]
        logger.info("[%s] Found %d files to process", trace_id, len(files))

        # Limit file count
        if len(files) > REPO_SYNC_MAX_FILES:
            logger.warning(
                "[%s] File count %d exceeds limit %d, truncating",
                trace_id,
                len(files),
                REPO_SYNC_MAX_FILES,
            )
            files = files[:REPO_SYNC_MAX_FILES]

        # Compute file hashes
        logger.info("[%s] Computing file hashes...", trace_id)
        current_hashes = _compute_file_hashes(base_dir, files)

        # Compute diff operations (use diff_state to include deletes even in
        # safe_full mode)
        operations, new_hashes, new_skipped = _compute_diff_operations(
            base_dir, current_hashes, diff_state
        )

        # Count operation types before filtering
        writes = [op for op in operations if op["type"] == "write"]
        deletes = [op for op in operations if op["type"] == "delete"]

        # Safe Full mode delete handling:
        # - ref_changed: ALLOW deletes to clean up zombie files from old branch
        # - force=True (without ref_changed): SUPPRESS deletes for safety
        if sync_mode == "safe_full" and deletes:
            if ref_changed:
                # Branch switch detected: execute deletes to clean zombie files
                logger.info(
                    "[%s] Branch switch detected: cleaning %d zombie files from cloud",
                    trace_id,
                    len(deletes),
                )
                # deletes are kept, no suppression
            else:
                # force=True without branch switch: suppress deletes for safety
                deletes_suppressed = len(deletes)
                logger.warning(
                    "[%s] Safe full sync: suppressing %d delete operations",
                    trace_id,
                    deletes_suppressed,
                )
                operations = [op for op in operations if op["type"] != "delete"]
                deletes = []

        # Determine creates vs updates (a write to a file absent from the
        # cached state is a create; otherwise it is an update)
        cached_files = cached_state.files if cached_state else {}
        files_created = sum(1 for op in writes if op["filename"] not in cached_files)
        files_updated = sum(1 for op in writes if op["filename"] in cached_files)
        files_deleted = len(deletes)
        files_skipped = len(new_skipped)
        # Unchanged = total tracked - writes - skipped (skipped files are
        # tracked but not uploaded)
        files_unchanged = len(new_hashes) - len(writes) - files_skipped

        logger.info(
            "[%s] Diff computed: %d created, %d updated, %d deleted, %d unchanged, %d skipped",
            trace_id,
            files_created,
            files_updated,
            files_deleted,
            files_unchanged,
            files_skipped,
        )

        # Apply changes
        repo_head = ""
        if sync_mode == "mirror_full":
            # Mirror mode: use type="files" to completely overwrite
            logger.info("[%s] Mirror full sync: uploading %d files...", trace_id, len(writes))
            file_contents = [
                {"filename": op["filename"], "content": op["content"]} for op in writes
            ]
            # Always call API even with empty list to ensure cloud repo is cleared
            # and we get a valid repo_head for consistent sync state
            result = client.update_repo_files(repo_id, file_contents, trace_id=trace_id)
            repo_head = result.get("repo_head", "")
            if not file_contents:
                logger.warning(
                    "[%s] Mirror sync with empty file list - cloud repo cleared",
                    trace_id,
                )
            logger.info(
                "[%s] Mirror sync completed, new head=%s",
                trace_id,
                repo_head[:8] if repo_head else "none",
            )
        elif operations:
            logger.info("[%s] Applying %d operations via update API...", trace_id, len(operations))
            result = client.update_repo(repo_id, operations, trace_id=trace_id)
            repo_head = result.get("repo_head", "")
            logger.info(
                "[%s] Update completed, new head=%s",
                trace_id,
                repo_head[:8] if repo_head else "none",
            )
        else:
            logger.info("[%s] No changes detected, skipping update", trace_id)
            # No API call made: carry forward the previously known head.
            repo_head = cached_state.repo_head if cached_state else ""

        # Save new sync state with git info
        new_state = SyncState(
            repo_id=repo_id,
            repo_head=repo_head,
            last_sync="",  # Will be set by save_sync_state
            git_branch=current_branch,
            git_head_sha=current_head,
            files=new_hashes,
            skipped_files=new_skipped,
        )
        save_sync_state(repo_name, new_state)

        return {
            "repo_id": repo_id,
            "repo_name": repo_name,
            "repo_head": repo_head,
            "is_incremental": is_incremental,
            "files_created": files_created,
            "files_updated": files_updated,
            "files_deleted": files_deleted,
            "files_unchanged": files_unchanged,
            "files_skipped": files_skipped,
            "total_files": len(new_hashes),
            # Debug fields
            "local_git_branch": current_branch,
            "local_git_head": current_head[:8] if current_head else "",
            "ref_changed": ref_changed,
            "sync_mode": sync_mode,
            "deletes_suppressed": deletes_suppressed,
        }

    except Exception as exc:
        # Broad catch is deliberate: this is the tool boundary; the error is
        # logged and reported back to the caller in the result dict.
        logger.error("[%s] Cloud sync failed: %s", trace_id, exc)
        return {
            "repo_id": None,
            "repo_name": repo_name,
            "repo_head": None,
            "is_incremental": False,
            "files_created": 0,
            "files_updated": 0,
            "files_deleted": 0,
            "files_unchanged": 0,
            "files_skipped": 0,
            "total_files": 0,
            "local_git_branch": current_branch,
            "local_git_head": current_head[:8] if current_head else "",
            "ref_changed": ref_changed,
            "sync_mode": "error",
            "deletes_suppressed": 0,
            "error": str(exc),
        }

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/possible055/relace-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.