We provide all the information about MCP servers via our MCP API.
curl -X GET 'https://glama.ai/api/mcp/v1/servers/MatthewSnow2/comfyui-mcp'
If you have feedback or need assistance with the MCP directory API, please join our Discord server.
models.py
"""Pydantic models for the ComfyUI MCP Server."""
from pydantic import BaseModel, Field
class GenerationRequest(BaseModel):
"""Request to generate an image via ComfyUI."""
prompt: str
negative_prompt: str = ""
model: str | None = None
width: int = 1024
height: int = 1024
steps: int = 20
cfg_scale: float = 7.0
seed: int = -1
class GenerationResult(BaseModel):
"""Result from a ComfyUI generation."""
prompt_id: str
status: str
images: list[str] = Field(default_factory=list)
elapsed_seconds: float | None = None
class QueueStatus(BaseModel):
"""Current state of the ComfyUI queue."""
pending: int = 0
running: int = 0
class ModelInfo(BaseModel):
"""Information about an available model."""
name: str
filename: str
type: str = "checkpoint"
class WorkflowInfo(BaseModel):
"""Information about a workflow template."""
name: str
description: str = ""
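For reference, a minimal sketch of how these Pydantic models can be constructed and round-tripped. The field values, prompt ID, and image filename below are illustrative examples, not defaults or outputs from the server.

from models import GenerationRequest, GenerationResult

# Build a request; any fields left unset fall back to the defaults declared above.
request = GenerationRequest(prompt="a lighthouse at dusk, oil painting", steps=30)
payload = request.model_dump()  # plain dict, suitable for JSON serialization

# Parse a hypothetical response payload back into a typed result.
result = GenerationResult.model_validate(
    {"prompt_id": "abc123", "status": "completed", "images": ["output_00001.png"]}
)
print(result.images, result.elapsed_seconds)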