from pathlib import Path
from typing import Any, Dict, List, Optional
def parse_env_file(env_path: Path) -> Dict[str, str]:
"""Parse a simple KEY=VALUE .env file into a dict.
Ignores comments and blank lines; strips quotes around values.
"""
env_vars: Dict[str, str] = {}
if not env_path.exists():
return env_vars
with env_path.open("r", encoding="utf-8") as file:
for raw_line in file:
line = raw_line.strip()
if not line or line.startswith("#") or "=" not in line:
continue
key, value = line.split("=", 1)
env_vars[key.strip()] = value.strip().strip("\"'")
return env_vars
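
# A quick behavior sketch (file contents below are hypothetical):
#   a .env containing
#       PORT=9004
#       OPENAI_API_KEY="sk-demo"
#   parses to {"PORT": "9004", "OPENAI_API_KEY": "sk-demo"};
#   quotes are stripped, and comment/blank lines are ignored.
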
def has_minimal_llm_config(env_vars: Dict[str, str]) -> bool:
"""Return True if any known LLM provider key is present and non-empty."""
llm_keys = (
"OPENAI_API_KEY",
"OLLAMA_BASE_URL",
"GEMINI_API_KEY",
"DEEPSEEK_API_KEY",
"CLAUDE_API_KEY",
"QWEN_API_KEY",
)
    return any(env_vars.get(k) for k in llm_keys)
def has_minimal_execution_engine_config(env_vars: Dict[str, str]) -> bool:
"""Return True if at least one execution engine (ComfyUI or RunningHub) is configured."""
    has_comfyui = bool(env_vars.get("COMFYUI_BASE_URL"))
    has_runninghub = bool(env_vars.get("RUNNINGHUB_API_KEY"))
return has_comfyui or has_runninghub
def detect_config_status_from_env(env_vars: Dict[str, str]) -> str:
"""Compute config status: 'first_time'|'incomplete'|'complete'."""
# At least one execution engine must be configured
if not has_minimal_execution_engine_config(env_vars):
return "incomplete"
# At least one LLM provider must be configured
if not has_minimal_llm_config(env_vars):
return "incomplete"
return "complete"
def build_env_lines(
    comfyui_config: Optional[Dict[str, Any]],
    runninghub_config: Optional[Dict[str, Any]],
    llm_configs: List[Dict[str, Any]],
    service_config: Dict[str, Any],
    default_model: Optional[str] = None,
) -> List[str]:
"""Build .env file content lines from provided configs. No I/O here."""
env_lines: List[str] = [
"# Pixelle MCP Project Environment Variables Configuration",
"# This file is generated by Pixelle MCP CLI; you may edit it manually.",
"# Copy this file to .env and modify the configuration values.",
"",
"# ======== Basic Service Configuration ========",
"# Service configuration",
f"HOST={service_config['host']}",
f"PORT={service_config['port']}",
"# Optional, used to specify public access URL, generally not needed for local services,",
"# configure when service is not on local machine",
f"PUBLIC_READ_URL=\"{service_config.get('public_read_url', '')}\"",
"",
]
# Prepare ComfyUI configuration values (always included)
if comfyui_config:
comfyui_url = comfyui_config['url']
comfyui_api_key = comfyui_config.get('api_key', '')
else:
comfyui_url = "http://localhost:8188"
comfyui_api_key = ""
# Prepare RunningHub configuration values (always included)
if runninghub_config:
runninghub_base_url = runninghub_config['base_url']
runninghub_api_key = runninghub_config['api_key']
else:
runninghub_base_url = "https://www.runninghub.ai"
runninghub_api_key = ""
# Add execution engine configurations
env_lines.extend([
"# ======== ComfyUI Integration Configuration ========",
"# ComfyUI service address",
f"COMFYUI_BASE_URL={comfyui_url}",
"# ComfyUI API Key (required if API Nodes are used in workflows,",
"# get it from: https://platform.comfy.org/profile/api-keys)",
f"COMFYUI_API_KEY=\"{comfyui_api_key}\"",
"# Cookies used when calling ComfyUI interface, configure if ComfyUI service requires authentication",
"COMFYUI_COOKIES=\"\"",
"# Executor type for calling ComfyUI interface, supports websocket and http (both are generally supported)",
"COMFYUI_EXECUTOR_TYPE=http",
"",
"# ======== RunningHub Cloud Configuration ========",
"# RunningHub cloud execution engine configuration",
"# API base URL for RunningHub service",
"# Global: https://www.runninghub.ai (for international users)",
"# China: https://www.runninghub.cn (recommended for Chinese users)",
f"RUNNINGHUB_BASE_URL=\"{runninghub_base_url}\"",
"# RunningHub API Key",
"# Global users get key from: https://www.runninghub.ai",
"# China users get key from: https://www.runninghub.cn",
f"RUNNINGHUB_API_KEY=\"{runninghub_api_key}\"",
"",
])
env_lines.extend([
"# ======== Chainlit Framework Configuration ========",
"# Chainlit auth secret (used for chainlit auth, can be reused or randomly generated)",
"CHAINLIT_AUTH_SECRET=\"changeme-generate-a-secure-secret-key\"",
f"CHAINLIT_AUTH_ENABLED=true",
"CHAINLIT_SAVE_STARTER_ENABLED=false",
"",
"# ======== CDN Configuration ========",
"# CDN strategy for loading external resources (KaTeX, Google Fonts, etc.)",
"# Options:",
'# - "auto": Automatically detect user language (Chinese users get China CDN, others get global CDN)',
'# - "china": Force all users to use China CDN mirrors (faster for Chinese users)',
'# - "global": Force all users to use original global CDNs (faster for international users)',
"# Default: \"auto\"",
"CDN_STRATEGY=auto",
"",
"# ======== LLM Model Configuration ========",
])
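    # Each configured provider appends its own variable block below;
    # provider names that match none of the branches are silently skipped.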
for llm_config in llm_configs:
provider = llm_config["provider"].upper()
if provider == "OPENAI":
env_lines.extend([
"# OpenAI configuration",
f"OPENAI_BASE_URL=\"{llm_config.get('base_url', 'https://api.openai.com/v1')}\"",
"# Get your API key at: https://platform.openai.com/api-keys",
f"OPENAI_API_KEY=\"{llm_config['api_key']}\"",
"# List OpenAI models to be used, if multiple, separate with English commas",
f"CHAINLIT_CHAT_OPENAI_MODELS=\"{llm_config.get('models', 'gpt-4o-mini')}\"",
"",
])
elif provider == "OLLAMA":
env_lines.extend([
"# Ollama configuration (local models)",
f"OLLAMA_BASE_URL=\"{llm_config.get('base_url', 'http://localhost:11434/v1')}\"",
"# List Ollama models to be used, if multiple, separate with English commas",
f"OLLAMA_MODELS=\"{llm_config.get('models', '')}\"",
"",
])
elif provider == "GEMINI":
env_lines.extend([
"# Gemini configuration",
"GEMINI_BASE_URL=\"https://generativelanguage.googleapis.com/v1beta\"",
"# Get your API key at: https://aistudio.google.com/app/apikey",
f"GEMINI_API_KEY=\"{llm_config['api_key']}\"",
"# List Gemini models to be used, if multiple, separate with English commas",
f"GEMINI_MODELS=\"{llm_config.get('models', '')}\"",
"",
])
elif provider == "DEEPSEEK":
env_lines.extend([
"# DeepSeek configuration",
"DEEPSEEK_BASE_URL=\"https://api.deepseek.com\"",
"# Get your API key at: https://platform.deepseek.com/api_keys",
f"DEEPSEEK_API_KEY=\"{llm_config['api_key']}\"",
"# List DeepSeek models to be used, if multiple, separate with English commas",
f"DEEPSEEK_MODELS=\"{llm_config.get('models', '')}\"",
"",
])
elif provider == "CLAUDE":
env_lines.extend([
"# Claude (Anthropic) configuration",
"CLAUDE_BASE_URL=\"https://api.anthropic.com\"",
"# Get your API key at: https://console.anthropic.com/settings/keys",
f"CLAUDE_API_KEY=\"{llm_config['api_key']}\"",
"# List Claude models to be used, if multiple, separate with English commas",
f"CLAUDE_MODELS=\"{llm_config.get('models', '')}\"",
"",
])
elif provider == "QWEN":
env_lines.extend([
"# Qwen (Alibaba Cloud) configuration",
"QWEN_BASE_URL=\"https://dashscope.aliyun.com/compatible-mode/v1\"",
"# Get your API key at: https://bailian.console.aliyun.com/?tab=model#/api-key",
f"QWEN_API_KEY=\"{llm_config['api_key']}\"",
"# List Qwen models to be used, if multiple, separate with English commas",
f"QWEN_MODELS=\"{llm_config.get('models', '')}\"",
"",
])
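    # Pick the default chat model: an explicit default_model argument wins;
    # otherwise fall back to the first model of the first configured provider.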
if llm_configs:
computed_default_model = default_model
if not computed_default_model:
first_llm = llm_configs[0]
models = first_llm.get("models", "")
if models:
computed_default_model = models.split(",")[0].strip()
if computed_default_model:
env_lines.extend([
"# Optional, default model for conversations (can be from any provider above)",
f"CHAINLIT_CHAT_DEFAULT_MODEL=\"{computed_default_model}\"",
"",
])
return env_lines
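

if __name__ == "__main__":
    # Minimal round-trip sketch: generate lines, parse them back, check status.
    # Every value below is a hypothetical placeholder, not a real credential.
    import tempfile

    lines = build_env_lines(
        comfyui_config={"url": "http://localhost:8188"},
        runninghub_config=None,
        llm_configs=[{"provider": "openai", "api_key": "sk-demo", "models": "gpt-4o-mini"}],
        service_config={"host": "0.0.0.0", "port": 9004},
    )
    with tempfile.NamedTemporaryFile("w", suffix=".env", delete=False) as tmp:
        tmp.write("\n".join(lines))
        tmp_path = Path(tmp.name)
    parsed = parse_env_file(tmp_path)
    print(detect_config_status_from_env(parsed))  # expected: complete
    tmp_path.unlink()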