Skip to main content
Glama

ComfyUI MCP Server

by neutrinotek
client.py — 2.52 kB
"""Async client for interacting with a ComfyUI instance.""" from __future__ import annotations import asyncio from contextlib import asynccontextmanager from typing import Any, Awaitable, Callable, Dict, Optional import httpx class ComfyUIClient: """Lightweight async HTTP client for ComfyUI's REST API.""" def __init__(self, base_url: str, api_key: Optional[str] = None, timeout: float = 60.0) -> None: self.base_url = base_url.rstrip("/") self.api_key = api_key headers = {"User-Agent": "comfyui-mcp/0.1"} if api_key: headers["Authorization"] = f"Bearer {api_key}" self._client = httpx.AsyncClient(base_url=self.base_url, headers=headers, timeout=timeout) async def close(self) -> None: await self._client.aclose() async def submit_workflow(self, workflow: Dict[str, Any]) -> Dict[str, Any]: response = await self._client.post("/prompt", json={"prompt": workflow}) response.raise_for_status() return response.json() async def get_queue(self) -> Dict[str, Any]: response = await self._client.get("/queue") response.raise_for_status() return response.json() async def get_history(self, prompt_id: str) -> Dict[str, Any]: response = await self._client.get(f"/history/{prompt_id}") response.raise_for_status() return response.json() async def fetch_image(self, image_path: str) -> bytes: response = await self._client.get(f"/view?filename={image_path}") response.raise_for_status() return response.content async def stream_updates( self, prompt_id: str, callback: Callable[[Dict[str, Any]], Awaitable[None]], poll_interval: float = 1.0, ) -> None: """Poll the queue for updates and forward them to ``callback``.""" while True: history = await self.get_history(prompt_id) await callback(history) if _is_prompt_complete(history, prompt_id): break await asyncio.sleep(poll_interval) @asynccontextmanager async def lifecycle(self) -> "ComfyUIClient": try: yield self finally: await self.close() def _is_prompt_complete(history: Dict[str, Any], prompt_id: str) -> bool: prompt_state = 
history.get(prompt_id) if not isinstance(prompt_state, dict): return False return prompt_state.get("status") == "completed" __all__ = ["ComfyUIClient"]

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/neutrinotek/ComfyUI_MCP'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.