#!/usr/bin/env node
import { Server } from '@modelcontextprotocol/sdk/server/index.js';
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
import {
CallToolRequestSchema,
ListToolsRequestSchema,
} from '@modelcontextprotocol/sdk/types.js';
import { ComfyUIClient } from './comfyui-client.js';
import { getFluxWorkflow, getFluxDiffusersWorkflow } from './flux-workflow.js';
import { getBackgroundRemovalWorkflow, getBackgroundRemovalWithAlphaWorkflow } from './workflows/background-removal.js';
import { getUpscalingWorkflow, getAvailableUpscaleModels, selectBestModel } from './workflows/upscaling.js';
import { createRateLimitMiddleware } from './utils/rate-limiter.js';
import { InputSanitizer } from './utils/input-sanitizer.js';
import { getSecretsManager } from './utils/secrets-manager.js';
import { setupVRAMManagement } from './services/simple-vram-manager.js';
import path from 'path';
import { fileURLToPath } from 'url';
import fs from 'fs';
import sizeOf from 'image-size';
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
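/**
 * MCP server exposing ComfyUI-backed image tools over stdio:
 * generate_image (Flux), connect_comfyui / disconnect_comfyui, check_models,
 * remove_background, and upscale_image. Logging is written to stderr because
 * stdout carries the MCP protocol stream.
 */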
class ComfyUIMCPServer {
constructor() {
this.server = new Server(
{
name: 'mcp-comfyui-flux',
version: '1.0.0',
},
{
capabilities: {
tools: {},
},
}
);
this.comfyClient = null;
this.vramManager = null;
// Initialize security components
this.initializeSecurity();
this.setupHandlers();
this.autoConnect(); // Auto-connect on startup
}
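/**
 * Set up the security layer: a request rate limiter, an input sanitizer for
 * tool arguments, and a secrets manager. Limits and logging flags are read
 * from environment variables (RATE_LIMIT_TOKENS, RATE_LIMIT_INTERVAL,
 * RATE_LIMIT_BURST, RATE_LIMIT_LOGGING, SANITIZER_LOGGING, SECRETS_LOGGING),
 * falling back to the defaults below.
 */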
initializeSecurity() {
// Initialize rate limiter
const rateLimitConfig = {
tokensPerInterval: parseInt(process.env.RATE_LIMIT_TOKENS) || 20,
interval: parseInt(process.env.RATE_LIMIT_INTERVAL) || 60000,
maxBurst: parseInt(process.env.RATE_LIMIT_BURST) || 30,
enableLogging: process.env.RATE_LIMIT_LOGGING !== 'false'
};
this.rateLimiter = createRateLimitMiddleware(rateLimitConfig);
// Initialize input sanitizer
this.sanitizer = new InputSanitizer({
maxPathLength: 255,
maxPromptLength: 5000,
allowedDirectories: ['output', 'input'],
enableLogging: process.env.SANITIZER_LOGGING !== 'false'
});
// Initialize secrets manager
this.secretsManager = getSecretsManager({
fallbackToEnv: true,
enableCaching: true,
enableLogging: process.env.SECRETS_LOGGING !== 'false'
});
// Log security initialization
console.error('Security features initialized:');
console.error('  - Rate limiting enabled');
console.error('  - Input sanitization active');
console.error('  - Secrets management configured');
}
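/**
 * Connect to ComfyUI at startup. The address defaults to 127.0.0.1:8188 and
 * can be overridden via COMFYUI_HOST / COMFYUI_PORT (e.g. a Docker service
 * name). Auto-reconnect is enabled, so a failed initial connection is kept
 * around and retried on the first tool call rather than treated as fatal.
 */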
async autoConnect() {
try {
const defaultAddress = process.env.COMFYUI_HOST
? `${process.env.COMFYUI_HOST}:${process.env.COMFYUI_PORT || '8188'}`
: '127.0.0.1:8188';
// Create client with resilience options
this.comfyClient = new ComfyUIClient(defaultAddress, {
enableAutoReconnect: true,
maxReconnectAttempts: 10,
reconnectDelay: 1000,
maxReconnectDelay: 30000
});
await this.comfyClient.connect();
console.error(`Auto-connected to ComfyUI at ${defaultAddress}`);
// Setup VRAM management with GPU-safe configuration
this.vramManager = setupVRAMManagement(this.comfyClient);
console.error('VRAM management initialized with GPU protection');
// Setup connection monitoring
this.setupConnectionMonitoring();
} catch (error) {
console.error(`Auto-connect failed: ${error.message}. Will retry on first operation.`);
// Keep the client instance for retry attempts
// Don't set to null, as it has reconnection capabilities
}
}
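/**
 * Forward connection-manager events (state changes, exhausted reconnect
 * attempts) to stderr so connection health is visible in the server log.
 */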
setupConnectionMonitoring() {
if (!this.comfyClient) return;
// Log connection state changes
this.comfyClient.connectionManager.on('stateChange', ({ from, to }) => {
console.error(`ComfyUI connection state: ${from} -> ${to}`);
});
// Handle max reconnect attempts
this.comfyClient.connectionManager.on('maxReconnectAttemptsReached', () => {
console.error('Maximum reconnection attempts reached. Manual intervention may be required.');
});
}
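/**
 * Register the MCP request handlers: ListTools advertises the tool schemas
 * below, and CallTool dispatches each invocation.
 */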
setupHandlers() {
this.server.setRequestHandler(ListToolsRequestSchema, async () => ({
tools: [
{
name: 'generate_image',
description: 'Generate an image using Flux dev model in ComfyUI',
inputSchema: {
type: 'object',
properties: {
prompt: {
type: 'string',
description: 'The text prompt to generate an image from',
},
negative_prompt: {
type: 'string',
description: 'Negative prompt to avoid certain features',
default: '',
},
width: {
type: 'number',
description: 'Width of the generated image',
default: 1024,
},
height: {
type: 'number',
description: 'Height of the generated image',
default: 1024,
},
steps: {
type: 'number',
description: 'Number of sampling steps',
default: 20,
},
cfg_scale: {
type: 'number',
description: 'Classifier-free guidance scale',
default: 7.0,
},
seed: {
type: 'number',
description: 'Random seed for reproducibility (-1 for random)',
default: -1,
},
sampler_name: {
type: 'string',
description: 'Sampling method to use',
default: 'euler',
enum: ['euler', 'euler_ancestral', 'heun', 'dpm_2', 'dpm_2_ancestral', 'lms', 'dpm_fast', 'dpm_adaptive', 'dpmpp_2s_ancestral', 'dpmpp_sde', 'dpmpp_2m', 'dpmpp_3m_sde'],
},
scheduler: {
type: 'string',
description: 'Scheduler to use',
default: 'normal',
enum: ['normal', 'karras', 'exponential', 'simple', 'ddim_uniform'],
},
batch_size: {
type: 'number',
description: 'Number of images to generate in parallel (1-4 recommended)',
default: 1,
minimum: 1,
maximum: 8,
},
},
required: ['prompt'],
},
},
{
name: 'connect_comfyui',
description: 'Connect to ComfyUI server',
inputSchema: {
type: 'object',
properties: {
server_address: {
type: 'string',
description: 'ComfyUI server address (default: 127.0.0.1:8188)',
default: '127.0.0.1:8188',
},
},
},
},
{
name: 'disconnect_comfyui',
description: 'Disconnect from ComfyUI server',
inputSchema: {
type: 'object',
properties: {},
},
},
{
name: 'check_models',
description: 'Check if Flux dev model is available in ComfyUI',
inputSchema: {
type: 'object',
properties: {},
},
},
{
name: 'remove_background',
description: 'Remove background from an image using AI-powered segmentation',
inputSchema: {
type: 'object',
properties: {
image_path: {
type: 'string',
description: 'Path to the input image file',
},
alpha_matting: {
type: 'boolean',
description: 'Use alpha matting for better edge quality (especially for hair/fur)',
default: true,
},
output_format: {
type: 'string',
description: 'Output image format',
default: 'png',
enum: ['png', 'webp'],
},
},
required: ['image_path'],
},
},
{
name: 'upscale_image',
description: 'Upscale an image using AI models',
inputSchema: {
type: 'object',
properties: {
image_path: {
type: 'string',
description: 'Path to the image file to upscale',
},
model: {
type: 'string',
description: 'Upscaling model to use',
default: 'ultrasharp',
enum: ['ultrasharp', 'animesharp'],
},
scale_factor: {
type: 'number',
description: 'Additional scaling factor (1.0 = model native, usually 4x)',
default: 1.0,
minimum: 0.5,
maximum: 2.0,
},
content_type: {
type: 'string',
description: 'Content type for auto model selection',
default: 'general',
enum: ['general', 'anime', 'artwork', 'illustration'],
},
},
required: ['image_path'],
},
},
],
}));
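// CallTool dispatch: every request is rate-limited per client ID, then routed
// by tool name. The image tools additionally sanitize their arguments and
// ensure a live ComfyUI connection before queueing a workflow.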
this.server.setRequestHandler(CallToolRequestSchema, async (request) => {
const { name, arguments: args } = request.params;
// Extract client ID for rate limiting (could be from headers, IP, etc.)
const clientId = request.clientId || 'default-client';
// Check rate limit
const rateLimitResult = await this.rateLimiter.checkRequest(clientId, name);
if (!rateLimitResult.allowed) {
return {
content: [
{
type: 'text',
text: `Rate limit exceeded. ${rateLimitResult.reason}. Retry after ${rateLimitResult.retryAfter}ms`,
},
],
};
}
switch (name) {
case 'connect_comfyui': {
// Use Docker service name if running in Docker, otherwise use provided address
const defaultAddress = process.env.COMFYUI_HOST
? `${process.env.COMFYUI_HOST}:${process.env.COMFYUI_PORT || '8188'}`
: '127.0.0.1:8188';
const serverAddress = args.server_address || defaultAddress;
try {
if (this.comfyClient) {
this.comfyClient.disconnect();
}
// Create new client with resilience
this.comfyClient = new ComfyUIClient(serverAddress, {
enableAutoReconnect: true,
maxReconnectAttempts: 10,
reconnectDelay: 1000,
maxReconnectDelay: 30000
});
await this.comfyClient.connect();
this.setupConnectionMonitoring();
const status = this.comfyClient.getConnectionStatus();
return {
content: [
{
type: 'text',
text: `Successfully connected to ComfyUI at ${serverAddress}\nConnection status: ${status.state}`,
},
],
};
} catch (error) {
const status = this.comfyClient ? this.comfyClient.getConnectionStatus() : null;
return {
content: [
{
type: 'text',
text: `Failed to connect to ComfyUI: ${error.message}\nConnection will retry automatically.\nStatus: ${status ? status.state : 'not initialized'}`,
},
],
};
}
}
case 'disconnect_comfyui': {
if (this.comfyClient) {
this.comfyClient.disconnect();
this.comfyClient = null;
return {
content: [
{
type: 'text',
text: 'Disconnected from ComfyUI',
},
],
};
}
return {
content: [
{
type: 'text',
text: 'No active ComfyUI connection',
},
],
};
}
case 'check_models': {
if (!this.comfyClient) {
return {
content: [
{
type: 'text',
text: 'Not connected to ComfyUI. Please connect first using connect_comfyui',
},
],
};
}
try {
const protocol = this.comfyClient.serverAddress.includes('https') ? 'https' : 'http';
const response = await fetch(`${protocol}://${this.comfyClient.serverAddress}/object_info`);
const objectInfo = await response.json();
const checkpointLoader = objectInfo['CheckpointLoaderSimple'] || objectInfo['UNETLoader'];
const availableModels = checkpointLoader?.input?.required?.ckpt_name?.[0] ||
checkpointLoader?.input?.required?.unet_name?.[0] || [];
const hasFlux = availableModels.some(model =>
model.toLowerCase().includes('flux')
);
if (hasFlux) {
const fluxModels = availableModels.filter(model =>
model.toLowerCase().includes('flux')
);
return {
content: [
{
type: 'text',
text: `Flux models found: ${fluxModels.join(', ')}`,
},
],
};
} else {
return {
content: [
{
type: 'text',
text: `No Flux models found. Available models: ${availableModels.join(', ')}`,
},
],
};
}
} catch (error) {
return {
content: [
{
type: 'text',
text: `Error checking models: ${error.message}`,
},
],
};
}
}
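// generate_image: sanitize -> (re)connect -> VRAM pre-check with one cleanup
// retry -> build the Flux Diffusers workflow (batch size clamped to 1-8) ->
// return each saved PNG inline as base64.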
case 'generate_image': {
// Sanitize inputs
const sanitizationResult = this.sanitizer.sanitizeRequest('generate_image', args);
if (!sanitizationResult.valid) {
return {
content: [
{
type: 'text',
text: `Invalid input: ${sanitizationResult.errors.join(', ')}`,
},
],
};
}
const sanitizedArgs = sanitizationResult.sanitized;
if (!this.comfyClient) {
// Try to auto-connect if not connected
try {
const defaultAddress = process.env.COMFYUI_HOST
? `${process.env.COMFYUI_HOST}:${process.env.COMFYUI_PORT || '8188'}`
: '127.0.0.1:8188';
this.comfyClient = new ComfyUIClient(defaultAddress, {
enableAutoReconnect: true,
maxReconnectAttempts: 10,
reconnectDelay: 1000,
maxReconnectDelay: 30000
});
await this.comfyClient.connect();
this.setupConnectionMonitoring();
// Initialize VRAM manager after connection
if (!this.vramManager) {
this.vramManager = setupVRAMManagement(this.comfyClient);
}
} catch (error) {
return {
content: [
{
type: 'text',
text: `Initial connection failed, but will retry automatically. Error: ${error.message}`,
},
],
};
}
}
// Ensure connection before proceeding
if (!this.comfyClient.isConnected()) {
try {
await this.comfyClient.ensureConnected();
} catch (error) {
const status = this.comfyClient.getConnectionStatus();
return {
content: [
{
type: 'text',
text: `Unable to establish connection to ComfyUI.\nStatus: ${status.state}\nAttempts remaining: ${status.attemptsRemaining}`,
},
],
};
}
}
// VRAM pre-request check for GPU safety
if (this.vramManager) {
const vramCheck = await this.vramManager.preRequestCheck('flux');
if (!vramCheck.success) {
console.error(`FLUX generation refused: ${vramCheck.reason}`);
if (vramCheck.emergency) {
return {
content: [
{
type: 'text',
text: `GPU Protection: Operation refused - ${vramCheck.reason}\nSystem is in emergency mode to protect hardware.`,
},
],
};
}
// Try cleanup and retry once
console.error('Attempting VRAM cleanup before FLUX generation...');
await this.vramManager.forceCleanup();
await new Promise(resolve => setTimeout(resolve, 2000)); // Wait for cleanup
const retryCheck = await this.vramManager.preRequestCheck('flux');
if (!retryCheck.success) {
return {
content: [
{
type: 'text',
text: `GPU Protection: Unable to free sufficient VRAM for FLUX generation.\nCurrent usage too high. Please try again later.`,
},
],
};
}
}
// Update activity tracking
this.vramManager.updateActivity();
}
try {
const batchSize = Math.min(Math.max(sanitizedArgs.batch_size || 1, 1), 8); // Clamp between 1-8
// Use Diffusers workflow for fp8 models with native batch support
const workflow = getFluxDiffusersWorkflow({
prompt: sanitizedArgs.prompt,
negative_prompt: sanitizedArgs.negative_prompt || '',
width: sanitizedArgs.width || 1024,
height: sanitizedArgs.height || 1024,
steps: sanitizedArgs.steps || 4, // Optimized for FLUX schnell
cfg_scale: sanitizedArgs.cfg_scale || 1.0, // Best for FLUX schnell
seed: sanitizedArgs.seed ?? -1, // use ?? so an explicit seed of 0 is not replaced by -1
sampler_name: sanitizedArgs.sampler_name || 'euler',
scheduler: sanitizedArgs.scheduler || 'simple', // Best for FLUX schnell
batch_size: batchSize, // Native batch support
});
const outputDir = path.join(__dirname, '..', 'output');
const allImages = await this.comfyClient.generateImage(workflow, outputDir);
if (allImages.length > 0) {
// Return all generated images
const content = [
{
type: 'text',
text: `Successfully generated ${allImages.length} image(s)! Saved to: ${allImages.map(img => path.basename(img.path)).join(', ')}`,
},
];
// Add each image to the response
for (const image of allImages) {
const imageData = fs.readFileSync(image.path);
const base64Image = imageData.toString('base64');
content.push({
type: 'image',
data: base64Image,
mimeType: 'image/png',
});
}
return { content };
} else {
return {
content: [
{
type: 'text',
text: 'Image generation completed but no images were produced',
},
],
};
}
} catch (error) {
return {
content: [
{
type: 'text',
text: `Error generating image: ${error.message}`,
},
],
};
}
}
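// remove_background: runs either the alpha-matting workflow (better edges for
// hair/fur) or the plain segmentation workflow and returns the cutout as a
// base64 image. Paths under output/ are reduced to a bare filename before
// being handed to the workflow.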
case 'remove_background': {
// Sanitize inputs
const sanitizationResult = this.sanitizer.sanitizeRequest('remove_background', args);
if (!sanitizationResult.valid) {
return {
content: [
{
type: 'text',
text: `Invalid input: ${sanitizationResult.errors.join(', ')}`,
},
],
};
}
const sanitizedArgs = sanitizationResult.sanitized;
if (!this.comfyClient) {
// Try to auto-connect if not connected
try {
const defaultAddress = process.env.COMFYUI_HOST
? `${process.env.COMFYUI_HOST}:${process.env.COMFYUI_PORT || '8188'}`
: '127.0.0.1:8188';
this.comfyClient = new ComfyUIClient(defaultAddress, {
enableAutoReconnect: true,
maxReconnectAttempts: 10,
reconnectDelay: 1000,
maxReconnectDelay: 30000
});
await this.comfyClient.connect();
this.setupConnectionMonitoring();
// Initialize VRAM manager after connection
if (!this.vramManager) {
this.vramManager = setupVRAMManagement(this.comfyClient);
}
} catch (error) {
return {
content: [
{
type: 'text',
text: `Initial connection failed, but will retry automatically. Error: ${error.message}`,
},
],
};
}
}
// Ensure connection before proceeding
if (!this.comfyClient.isConnected()) {
try {
await this.comfyClient.ensureConnected();
} catch (error) {
const status = this.comfyClient.getConnectionStatus();
return {
content: [
{
type: 'text',
text: `Unable to establish connection to ComfyUI.\nStatus: ${status.state}\nAttempts remaining: ${status.attemptsRemaining}`,
},
],
};
}
}
try {
const { image_path, alpha_matting = true, output_format = 'png' } = sanitizedArgs;
// If it's in output directory, use just the filename
let imageFilename = image_path;
if (image_path.includes('output/')) {
imageFilename = path.basename(image_path);
}
// Get the appropriate workflow using the filename
const workflow = alpha_matting
? getBackgroundRemovalWithAlphaWorkflow(imageFilename, true)
: getBackgroundRemovalWorkflow(imageFilename);
// Execute workflow
const outputDir = path.join(__dirname, '..', 'output');
const result = await this.comfyClient.generateImage(workflow, outputDir);
if (result && result.length > 0) {
const outputImage = result[0];
const imageData = fs.readFileSync(outputImage.path);
const base64Output = imageData.toString('base64');
return {
content: [
{
type: 'text',
text: `Successfully removed background! Saved to: ${path.basename(outputImage.path)}`,
},
{
type: 'image',
data: base64Output,
mimeType: `image/${output_format}`,
},
],
};
} else {
return {
content: [
{
type: 'text',
text: 'Background removal completed but no output was produced',
},
],
};
}
} catch (error) {
return {
content: [
{
type: 'text',
text: `Error removing background: ${error.message}`,
},
],
};
}
}
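// upscale_image: resolves the requested upscale model (falling back to
// selectBestModel(content_type) when unknown), runs the upscaling workflow,
// and returns only the output path and dimensions; the result is not inlined
// as base64 because 4x outputs are too large.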
case 'upscale_image': {
// Sanitize inputs
const sanitizationResult = this.sanitizer.sanitizeRequest('upscale_image', args);
if (!sanitizationResult.valid) {
return {
content: [
{
type: 'text',
text: `Invalid input: ${sanitizationResult.errors.join(', ')}`,
},
],
};
}
const sanitizedArgs = sanitizationResult.sanitized;
// Auto-connect if not connected
if (!this.comfyClient || !this.comfyClient.isConnected()) {
const defaultAddress = process.env.COMFYUI_HOST
? `${process.env.COMFYUI_HOST}:${process.env.COMFYUI_PORT || '8188'}`
: '127.0.0.1:8188';
if (!this.comfyClient) {
this.comfyClient = new ComfyUIClient(defaultAddress, {
enableAutoReconnect: true,
maxReconnectAttempts: 10,
reconnectDelay: 1000,
maxReconnectDelay: 30000
});
}
try {
await this.comfyClient.connect();
this.setupConnectionMonitoring();
} catch (error) {
return {
content: [
{
type: 'text',
text: `Initial connection failed, but will retry automatically. Error: ${error.message}`,
},
],
};
}
}
// Ensure connection before proceeding
if (!this.comfyClient.isConnected()) {
try {
await this.comfyClient.ensureConnected();
} catch (error) {
const status = this.comfyClient.getConnectionStatus();
return {
content: [
{
type: 'text',
text: `Unable to establish connection to ComfyUI.\nStatus: ${status.state}\nAttempts remaining: ${status.attemptsRemaining}`,
},
],
};
}
}
try {
const {
image_path,
model = 'ultrasharp',
scale_factor = 1.0,
content_type = 'general'
} = sanitizedArgs;
console.error('Upscale args:', args);
// Get the appropriate model file
const availableModels = getAvailableUpscaleModels();
let modelFile = availableModels[model];
// If model not specified or invalid, auto-select based on content type
if (!modelFile) {
modelFile = selectBestModel(content_type);
}
// If it's in output directory, use just the filename
let imageFilename = image_path;
if (image_path.includes('output/')) {
imageFilename = path.basename(image_path);
}
console.error('Using image:', imageFilename, 'model:', modelFile);
// Get the upscaling workflow
const workflow = getUpscalingWorkflow(imageFilename, modelFile, scale_factor);
// Execute workflow
const outputDir = path.join(__dirname, '..', 'output');
const result = await this.comfyClient.generateImage(workflow, outputDir);
if (result && result.length > 0) {
const outputImage = result[0];
// Get image dimensions
const dimensions = sizeOf(outputImage.path);
// For upscaled images, we'll skip base64 since they're very large (4096x4096)
// This avoids the stack overflow issue
return {
content: [
{
type: 'text',
text: `Successfully upscaled image!\n` +
`Resolution: ${dimensions.width}x${dimensions.height}\n` +
`File: output/${path.basename(outputImage.path)}\n` +
`Model: ${model} (${modelFile})\n` +
`Scale: 4x native upscaling`,
},
],
};
} else {
return {
content: [
{
type: 'text',
text: 'Upscaling completed but no output was produced',
},
],
};
}
} catch (error) {
console.error('Upscaling error:', error);
return {
content: [
{
type: 'text',
text: `Error upscaling image: ${error.message}\nDetails: ${error.stack}`,
},
],
};
}
}
default:
return {
content: [
{
type: 'text',
text: `Unknown tool: ${name}`,
},
],
};
}
});
}
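/**
 * Start the server on a stdio transport. All status logging goes to stderr
 * so it does not interfere with the JSON-RPC stream on stdout.
 */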
async run() {
const transport = new StdioServerTransport();
await this.server.connect(transport);
console.error('ComfyUI Flux MCP server running on stdio');
}
}
const server = new ComfyUIMCPServer();
server.run().catch(console.error);
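// Example client usage (a minimal sketch, not part of this server): assuming
// the standard MCP SDK client API, a caller could start this server and
// invoke generate_image roughly like this. The command path and arguments
// are illustrative only.
//
//   import { Client } from '@modelcontextprotocol/sdk/client/index.js';
//   import { StdioClientTransport } from '@modelcontextprotocol/sdk/client/stdio.js';
//
//   const transport = new StdioClientTransport({ command: 'node', args: ['src/index.js'] });
//   const client = new Client({ name: 'example-client', version: '1.0.0' });
//   await client.connect(transport);
//   const result = await client.callTool({
//     name: 'generate_image',
//     arguments: { prompt: 'a lighthouse at dusk, golden hour', seed: 42 },
//   });
//   // result.content holds a text summary followed by base64-encoded images.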