
Sequential Questioning MCP Server

by bitgeese
docker-compose.yml (2.45 kB)
services:
  app:
    build: .
    restart: always
    ports:
      - "8000:8000"
    volumes:
      - ./app:/app/app
      - ./logs:/app/logs
      - ./scripts:/app/scripts
    env_file:
      - .env
    environment:
      - PYTHONUNBUFFERED=1
      - DATABASE_URL=postgresql+asyncpg://${POSTGRES_USER:-postgres}:${POSTGRES_PASSWORD:-postgres}@db:5432/${POSTGRES_DB:-sequential_questioning}
      - QDRANT_URL=http://qdrant:6333
    command: >
      bash -c "
        cd /app &&
        pip install asyncpg psycopg2-binary &&
        python -c 'import time; time.sleep(2)' &&
        echo 'Initializing database...' &&
        PGPASSWORD=${POSTGRES_PASSWORD:-postgres} psql -h db -U ${POSTGRES_USER:-postgres} -d ${POSTGRES_DB:-sequential_questioning} -f /app/scripts/init_db.sql &&
        echo 'Starting application...' &&
        uvicorn app.main:app --host 0.0.0.0 --port 8000
      "
    depends_on:
      db:
        condition: service_healthy
      qdrant:
        condition: service_started
    networks:
      - app-network
    logging:
      driver: "json-file"
      options:
        max-size: "10m"
        max-file: "3"
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s

  db:
    image: postgres:15
    restart: always
    environment:
      - POSTGRES_USER=${POSTGRES_USER:-postgres}
      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-postgres}
      - POSTGRES_DB=${POSTGRES_DB:-sequential_questioning}
    ports:
      - "5432:5432"
    volumes:
      - postgres_data:/var/lib/postgresql/data
    networks:
      - app-network
    logging:
      driver: "json-file"
      options:
        max-size: "10m"
        max-file: "3"
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U postgres"]
      interval: 10s
      timeout: 5s
      retries: 5
      start_period: 10s

  qdrant:
    image: qdrant/qdrant:latest
    restart: always
    ports:
      - "6333:6333"
      - "6334:6334"
    volumes:
      - qdrant_data:/qdrant/storage
    networks:
      - app-network
    logging:
      driver: "json-file"
      options:
        max-size: "10m"
        max-file: "3"
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:6333/health"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s

networks:
  app-network:
    driver: bridge

volumes:
  postgres_data:
  qdrant_data:
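
As a usage sketch (assuming Docker Compose v2 and a local .env file defining POSTGRES_USER, POSTGRES_PASSWORD, and POSTGRES_DB; the placeholder values below match the defaults the compose file falls back to), the stack can be brought up and its health endpoint checked like this:

# Create a minimal .env (placeholder values; the compose file uses the same defaults if omitted).
cat > .env <<'EOF'
POSTGRES_USER=postgres
POSTGRES_PASSWORD=postgres
POSTGRES_DB=sequential_questioning
EOF

# Build and start the app, Postgres, and Qdrant services in the background.
docker compose up -d --build

# Follow the app logs until "Starting application..." appears.
docker compose logs -f app

# Probe the same health endpoint the compose healthcheck uses.
curl -f http://localhost:8000/health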

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/bitgeese/sequential-questioning'
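
As a small sketch, assuming the endpoint returns JSON and jq is installed locally, the response can be pretty-printed for inspection:

# Fetch this server's directory entry and pretty-print the JSON response
# (jq is an assumption here; any JSON formatter works).
curl -s 'https://glama.ai/api/mcp/v1/servers/bitgeese/sequential-questioning' | jq .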

If you have feedback or need assistance with the MCP directory API, please join our Discord server.