# We provide all the information about MCP servers via our MCP API:
#   curl -X GET 'https://glama.ai/api/mcp/v1/servers/cs-qyzhang/glm-ocr-mcp'
# If you have feedback or need assistance with the MCP directory API, please join our Discord server.
[build-system]
requires = ["hatchling>=1.25.0"]
build-backend = "hatchling.build"

[project]
name = "glm-ocr-mcp"
version = "0.1.1"
description = "MCP server for GLM OCR to extract text from images and PDFs"
readme = "README.md"
requires-python = ">=3.12"
# PEP 508 requirement strings, one per line, sorted alphabetically.
dependencies = [
    "httpx>=0.28.0",
    "mcp>=1.26.0",
    "python-dotenv>=1.1.0",
]

# Console entry point: `glm-ocr-mcp` runs glm_ocr_mcp/__main__.py:run().
[project.scripts]
glm-ocr-mcp = "glm_ocr_mcp.__main__:run"

# src-layout: the wheel packages the src/glm_ocr_mcp directory.
[tool.hatch.build.targets.wheel]
packages = ["src/glm_ocr_mcp"]