prepare_install_source
Searches Prowlarr for a game, downloads via qBittorrent, polls until complete, then classifies and returns a path for Lutris installation.
Instructions
Search Prowlarr, hand off the best candidate to qBittorrent, poll until complete (5-min stall timeout, configurable), classify the tree, and return a path lutris-mcp's install_from_yaml or install_from_directory consumes directly. mutates: true
Input Schema
| Name | Required | Description | Default |
|---|---|---|---|
| query | Yes | Search string sent to Prowlarr. | |
| indexer_categories | No | Prowlarr category ids to search within. | |
| indexer_ids | No | Restrict the search to these indexer ids; falls back to the policy allow-list when omitted. | |
| max_size_gb | No | Reject releases larger than this size; policy default when omitted. | |
| min_seeders | No | Reject releases with fewer seeders; policy default when omitted. | |
| freeleech_only | No | Only consider freeleech releases; policy default when omitted. | |
| stall_timeout_seconds | No | Abort the download if no progress is made for this long. | DEFAULT_STALL_TIMEOUT_S |
| poll_interval_seconds | No | Seconds between qBittorrent status polls. | DEFAULT_POLL_INTERVAL_S |
| confirm | No | Two-step confirmation flag enforced by @confirm_required. | false |
Output Schema
| Name | Required | Description | Default |
|---|---|---|---|
| No arguments | | | |
Implementation Reference
- The main handler function for the 'prepare_install_source' MCP tool. Decorated with @mcp.tool to register as an MCP tool and @confirm_required to enforce a two-step confirm flow. Searches Prowlarr, filters/ranks candidates, hands off the best to qBittorrent, polls until download completes (with stall detection), classifies the result, and returns path/kind/source metadata.
@mcp.tool( description="Search Prowlarr, hand off the best candidate to qBittorrent, " "poll until complete (5-min stall timeout, configurable), classify the " "tree, and return a path lutris-mcp's install_from_yaml or " "install_from_directory consumes directly. mutates: true" ) @confirm_required("prepare_install_source") def prepare_install_source( query: str, *, indexer_categories: list[int] | None = None, indexer_ids: list[int] | None = None, max_size_gb: float | None = None, min_seeders: int | None = None, freeleech_only: bool | None = None, stall_timeout_seconds: float = DEFAULT_STALL_TIMEOUT_S, poll_interval_seconds: float = DEFAULT_POLL_INTERVAL_S, confirm: bool = False, ) -> dict[str, Any]: cfg = _cfg.load() pol = cfg.policy eff_max_bytes = int((max_size_gb if max_size_gb is not None else pol.max_size_gb) * 1024**3) eff_min_seeders = min_seeders if min_seeders is not None else pol.min_seeders eff_freeleech = freeleech_only if freeleech_only is not None else pol.freeleech_only pw = Prowlarr(cfg.prowlarr) raw_releases = pw.search( query, indexer_ids=indexer_ids or (pol.allow_indexers or None), categories=indexer_categories, ) candidates = [normalize_release(r) for r in raw_releases] ranked = _ranking.filter_and_rank( candidates, blocklist=pol.blocklist, max_size_bytes=eff_max_bytes, min_seeders=eff_min_seeders, freeleech_only=eff_freeleech, ) if not ranked: return { "ok": False, "reason": "no_candidates", "considered": len(candidates), "filters": { "blocklist": pol.blocklist, "max_size_gb": eff_max_bytes / 1024**3, "min_seeders": eff_min_seeders, "freeleech_only": eff_freeleech, }, } pick = ranked[0] qb = Qbittorrent(cfg.qbittorrent) started = time.time() qb.add_url(pick["download_url"], save_path=cfg.qbittorrent.download_dir or None) # Wait briefly for metadata so qBittorrent reports the infohash and files. 
infohash = pick["infohash"] if not infohash: infohash = _wait_for_infohash(qb, pick["title"], DEFAULT_METADATA_WAIT_S) if not infohash: return {"ok": False, "reason": "metadata_timeout", "title": pick["title"]} last_dl = 0 last_progress_at = time.time() while True: info = qb.info(infohash) if info is None: time.sleep(poll_interval_seconds) continue dl = int(info.get("downloaded") or 0) elapsed = time.time() - started rate_kib = (dl - last_dl) / max(poll_interval_seconds, 1.0) / 1024.0 _emit_heartbeat(infohash, info, rate_kib, elapsed) state = info.get("state", "") if state in ("uploading", "stalledUP", "queuedUP", "pausedUP", "checkingUP", "forcedUP"): break if dl > last_dl: last_dl = dl last_progress_at = time.time() elif (time.time() - last_progress_at) > stall_timeout_seconds: return { "ok": False, "reason": "stalled", "infohash": infohash, "downloaded_bytes": dl, "elapsed_seconds": round(elapsed, 1), } time.sleep(poll_interval_seconds) final_info = qb.info(infohash) or {} content_path = final_info.get("content_path") or final_info.get("save_path") if not content_path or not _classify.readable(content_path): return { "ok": False, "reason": "path_unreadable", "infohash": infohash, "content_path": content_path, } kind = _classify.classify(content_path) return { "ok": True, "path": str(Path(content_path).resolve()), "kind": kind, "source": { "indexer": pick["indexer"], "title": pick["title"], "size_bytes": pick["size_bytes"], "infohash": infohash, "freeleech": pick["freeleech"], "indexer_priority": pick["indexer_priority"], }, "elapsed_seconds": round(time.time() - started, 1), } - Parameter schema for the tool: query (required), optional indexer filters, size/seeders/freeleech constraints, stall/poll timing, and a confirm flag.
def prepare_install_source( query: str, *, indexer_categories: list[int] | None = None, indexer_ids: list[int] | None = None, max_size_gb: float | None = None, min_seeders: int | None = None, freeleech_only: bool | None = None, stall_timeout_seconds: float = DEFAULT_STALL_TIMEOUT_S, poll_interval_seconds: float = DEFAULT_POLL_INTERVAL_S, confirm: bool = False, ) -> dict[str, Any]: - src/lutris_source_mcp/tools/install_pipeline.py:42-47 (registration) — Registration of the function as an MCP tool via the @mcp.tool() decorator on the FastMCP instance from server.py.
@mcp.tool( description="Search Prowlarr, hand off the best candidate to qBittorrent, " "poll until complete (5-min stall timeout, configurable), classify the " "tree, and return a path lutris-mcp's install_from_yaml or " "install_from_directory consumes directly. mutates: true" ) - Helper that logs and prints a heartbeat line during torrent download polling so a hang is distinguishable from a slow download.
def _emit_heartbeat(infohash: str, info: dict[str, Any], rate_kib: float, elapsed: float) -> None:
    """Emit one progress line (log + stdout) for the download poll loop.

    A periodic, visible heartbeat makes a genuine hang easy to tell apart
    from a download that is merely slow.
    """
    done = int(info.get("downloaded") or 0)
    size = int(info.get("size") or info.get("total_size") or 0)
    percent = 100.0 * done / size if size else 0.0
    swarm = int(info.get("num_seeds") or 0) + int(info.get("num_leechs") or 0)
    msg = (
        f"[prepare_install_source] {infohash[:8]} "
        f"dl={done}/{size} ({percent:.1f}%) peers={swarm} "
        f"rate={rate_kib:.1f}KiB/s elapsed={elapsed:.0f}s"
    )
    log.info(msg)
    print(msg, flush=True)
def _wait_for_infohash(qb: Qbittorrent, title: str, timeout: float) -> str: """Poll qBittorrent's torrent list until we find one whose name matches (qBittorrent doesn't echo the infohash on add).""" deadline = time.time() + timeout title_norm = title.lower() while time.time() < deadline: try: rs = qb._request("GET", "/api/v2/torrents/info").json() except Exception: rs = [] for t in rs: if title_norm in (t.get("name", "").lower()): return (t.get("hash") or "").lower() time.sleep(2.0) return ""