Skip to main content
Glama

GPT Image MCP Server

by lansespirit
docker-compose.dev.yml (3.17 kB)
services:
  # Main application - Development version
  image-gen-mcp-dev:
    build: .
    container_name: image-gen-mcp-dev
    restart: unless-stopped
    ports:
      - "3001:3001"  # Expose directly for development
    environment:
      - PROVIDERS__OPENAI__API_KEY=${PROVIDERS__OPENAI__API_KEY}
      - PROVIDERS__GEMINI__API_KEY=${PROVIDERS__GEMINI__API_KEY:-}
      - REDIS_URL=redis://redis:6379/0
      - STORAGE_BASE_PATH=/app/storage
      - CACHE_ENABLED=true
      - CACHE_BACKEND=redis
      - LOG_LEVEL=DEBUG
      - SERVER_PORT=3001
    volumes:
      - ./storage:/app/storage
      - ./logs:/app/logs
      # Mount only the source code, not the entire app directory (preserves .venv)
      - ./image_gen_mcp:/app/image_gen_mcp
    depends_on:
      - redis
    networks:
      - app-network
    # Development: Use streamable-http with CORS for web testing
    command: ["python", "-m", "image_gen_mcp.server", "--transport", "streamable-http", "--port", "3001", "--host", "0.0.0.0", "--cors", "--log-level", "DEBUG"]
    healthcheck:
      test: ["CMD", "python", "-c", "import sys, urllib.request; sys.exit(0 if urllib.request.urlopen('http://localhost:3001').getcode() == 200 else 1)"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s

  # Alternative: stdio version for Claude Desktop testing
  image-gen-mcp-stdio:
    build: .
    container_name: image-gen-mcp-stdio
    restart: "no"  # Don't restart automatically for stdio
    environment:
      - PROVIDERS__OPENAI__API_KEY=${PROVIDERS__OPENAI__API_KEY}
      - PROVIDERS__GEMINI__API_KEY=${PROVIDERS__GEMINI__API_KEY:-}
      - REDIS_URL=redis://redis:6379/0
      - STORAGE_BASE_PATH=/app/storage
      - CACHE_ENABLED=true
      - CACHE_BACKEND=redis
      - LOG_LEVEL=DEBUG
    volumes:
      - ./storage:/app/storage
      - ./logs:/app/logs
      # Mount only the source code, not the entire app directory (preserves .venv)
      - ./image_gen_mcp:/app/image_gen_mcp
    depends_on:
      - redis
    networks:
      - app-network
    # STDIO transport for Claude Desktop
    command: ["python", "-m", "image_gen_mcp.server", "--transport", "stdio", "--log-level", "DEBUG"]
    profiles:
      - stdio  # Only start with: docker-compose --profile stdio up

  # Redis cache
  redis:
    image: redis:7-alpine
    container_name: image-gen-mcp-redis-dev
    restart: unless-stopped
    ports:
      - "6379:6379"  # Expose for development tools
    volumes:
      - redis_data:/data
      - ./redis.conf:/usr/local/etc/redis/redis.conf
    command: redis-server /usr/local/etc/redis/redis.conf
    networks:
      - app-network
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 30s
      timeout: 10s
      retries: 3

  # Development tools
  redis-commander:
    image: rediscommander/redis-commander:latest
    container_name: redis-commander
    restart: unless-stopped
    ports:
      - "8081:8081"
    environment:
      - REDIS_HOSTS=local:redis:6379
    depends_on:
      - redis
    networks:
      - app-network
    profiles:
      - tools

volumes:
  redis_data:
    driver: local

networks:
  app-network:
    driver: bridge

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/lansespirit/gpt-image-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.