Skip to main content
Glama
compose_dcu.yaml (1.47 kB)
---
# Docker Compose stack for PaddleOCR-VL on Hygon DCU hardware.
# Two services: the OCR pipeline API, and the VLM inference server it depends on.
# Required env vars: API_IMAGE_TAG_SUFFIX, VLM_IMAGE_TAG_SUFFIX; VLM_BACKEND defaults to vllm.
services:
  paddleocr-vl-api:
    image: ccr-2vdh3abv-pub.cnc.bj.baidubce.com/paddlepaddle/paddleocr-vl:${API_IMAGE_TAG_SUFFIX}
    container_name: paddleocr-vl-api
    ports:
      # Quoted: Compose port mappings should always be strings to dodge
      # YAML implicit-typing traps on colon-separated digit pairs.
      - "8080:8080"
    depends_on:
      # Wait for the VLM backend to pass its healthcheck before starting the API.
      paddleocr-vlm-server:
        condition: service_healthy
    user: root
    privileged: true
    devices:
      # DCU device nodes (Hygon ROCm-like stack): compute, render, and mkfd.
      - /dev/kfd
      - /dev/dri
      - /dev/mkfd
    group_add:
      - video
    cap_add:
      - SYS_PTRACE
    security_opt:
      - seccomp=unconfined
    restart: unless-stopped
    environment:
      # Selects which pipeline config the serve command loads below.
      - VLM_BACKEND=${VLM_BACKEND:-vllm}
    command: /bin/bash -c "source ~/.bashrc && paddlex --serve --pipeline /home/paddleocr/pipeline_config_${VLM_BACKEND}.yaml --device dcu"
    healthcheck:
      # NOTE(review): no start_period here, unlike the vlm-server below —
      # confirm the API container is healthy quickly enough at startup.
      test: ["CMD-SHELL", "curl -f http://localhost:8080/health || exit 1"]
    volumes:
      # Hygon HYHAL runtime libraries from the host, read-only.
      - /opt/hyhal/:/opt/hyhal/:ro
    shm_size: 64G

  paddleocr-vlm-server:
    # Backend-specific GenAI server image (e.g. vllm) matching VLM_BACKEND above.
    image: ccr-2vdh3abv-pub.cnc.bj.baidubce.com/paddlepaddle/paddleocr-genai-${VLM_BACKEND}-server:${VLM_IMAGE_TAG_SUFFIX}
    container_name: paddleocr-vlm-server
    user: root
    privileged: true
    devices:
      - /dev/kfd
      - /dev/dri
      - /dev/mkfd
    group_add:
      - video
    cap_add:
      - SYS_PTRACE
    security_opt:
      - seccomp=unconfined
    restart: unless-stopped
    healthcheck:
      test: ["CMD-SHELL", "curl -f http://localhost:8080/health || exit 1"]
      # Model load is slow; give the server 5 minutes before failing health.
      start_period: 300s
    volumes:
      - /opt/hyhal/:/opt/hyhal/:ro
    shm_size: 64G

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/PaddlePaddle/PaddleOCR'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.