Skip to main content
Glama
workflow_utils.py (1.72 kB)
from __future__ import annotations

from copy import deepcopy
from typing import Any, Dict, Optional

# A raw ComfyUI prompt/workflow: a dict keyed by node-id strings, each value
# a node dict with (typically) "class_type" and "inputs" entries.
Workflow = Dict[str, Any]


def normalize_workflow(workflow: Any) -> Workflow:
    """
    Normalize a workflow payload to a raw prompt dict.

    Accept either:
    - a full ComfyUI API payload containing {"prompt": {...}}
    - a raw prompt/workflow dict {...}

    Args:
        workflow: The parsed JSON payload (must be a dict).

    Returns:
        The raw prompt dict (unwrapped from "prompt" if present).

    Raises:
        TypeError: If ``workflow`` is not a dict.
    """
    if not isinstance(workflow, dict):
        raise TypeError("workflow_json must be a JSON object (dict).")
    if "prompt" in workflow and isinstance(workflow["prompt"], dict):
        return workflow["prompt"]
    # Heuristic: typical prompt is a dict keyed by node-id strings.
    # We do not enforce here; ComfyUI validate_prompt will decide.
    return workflow


def apply_dryrun_overrides(prompt: Workflow, *, steps: Optional[int] = None) -> Workflow:
    """
    Apply best-effort overrides for cheaper (dry-run) executions.

    If ``steps`` is provided, clamp the ``steps`` input of KSampler-like
    nodes to at most that value (and at least 1). This does not guarantee
    low-cost execution, since node behavior varies.

    Args:
        prompt: The raw prompt dict (never mutated; a deep copy is returned).
        steps: Optional upper bound for sampler steps. ``None`` means no change.

    Returns:
        A deep copy of ``prompt`` with overrides applied.
    """
    p = deepcopy(prompt)
    if steps is None:
        return p
    for node in p.values():  # node ids are irrelevant here; iterate values only
        if not isinstance(node, dict):
            continue
        class_type = node.get("class_type")
        inputs = node.get("inputs")
        if not isinstance(inputs, dict):
            continue
        # Common sampling nodes: KSampler, KSamplerAdvanced, etc.
        if isinstance(class_type, str) and "KSampler" in class_type and "steps" in inputs:
            try:
                cur = int(inputs["steps"])
                # Clamp to [1, steps]; never drop below one sampling step.
                inputs["steps"] = max(1, min(cur, int(steps)))
            except (TypeError, ValueError):
                # Narrowed from broad Exception: only int() coercion failures
                # are expected here. Best-effort — leave non-coercible values.
                pass
    return p

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/zturner1/comfyui-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.