
LandingAI ADE MCP Server

by avaxia8
pyproject.toml
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "landingai-ade-mcp"
version = "2.0.0"
description = "Model Context Protocol server for LandingAI's Agentic Document Extraction API"
readme = "README.md"
requires-python = ">=3.9"
license = {text = "MIT"}
authors = [
    {name = "LandingAI ADE MCP Contributors"}
]
keywords = ["mcp", "landingai", "ade", "document", "extraction", "parsing", "ai"]
classifiers = [
    "Development Status :: 4 - Beta",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
]
dependencies = [
    "fastmcp>=0.1.0",
    "httpx>=0.24.0",
    "pydantic>=2.0.0",
    "python-multipart>=0.0.6",
    "aiofiles>=23.0.0"
]

[project.urls]
Documentation = "https://github.com/yourusername/landingai-ade-mcp"
Repository = "https://github.com/yourusername/landingai-ade-mcp"

[project.scripts]
landingai-ade-mcp = "server:main"

[tool.uv]
dev-dependencies = [
    "pytest>=7.0.0",
    "pytest-asyncio>=0.21.0",
]
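The [project.scripts] entry points the landingai-ade-mcp command at a server module exposing a main() callable. The actual server code is not shown on this page; the following is only a minimal sketch of what such an entry point could look like with FastMCP, where the parse_document tool, the endpoint URL, and the LANDINGAI_API_KEY variable are illustrative assumptions, not details confirmed by the pyproject.toml above.

# server.py -- hypothetical sketch so that "server:main" resolves.
import os

import httpx
from fastmcp import FastMCP

mcp = FastMCP("landingai-ade-mcp")

@mcp.tool()
async def parse_document(file_path: str) -> dict:
    """Hypothetical tool: send a local document to an ADE parse endpoint."""
    api_key = os.environ["LANDINGAI_API_KEY"]  # assumed env var name
    async with httpx.AsyncClient(timeout=120) as client:
        with open(file_path, "rb") as f:
            response = await client.post(
                "https://api.va.landing.ai/v1/ade/parse",  # placeholder URL
                headers={"Authorization": f"Bearer {api_key}"},
                files={"document": f},
            )
    response.raise_for_status()
    return response.json()

def main() -> None:
    # FastMCP uses the stdio transport by default, which MCP clients expect.
    mcp.run()

if __name__ == "__main__":
    main()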

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/avaxia8/landingai-ade-mcp'
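The same lookup can be done from Python with httpx (already a dependency of this project). This is only a sketch: the endpoint is assumed to return JSON, and the response schema is not documented in this listing.

# Fetch this server's directory entry from the Glama MCP API.
import httpx

response = httpx.get(
    "https://glama.ai/api/mcp/v1/servers/avaxia8/landingai-ade-mcp",
    timeout=30,
)
response.raise_for_status()
print(response.json())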

If you have feedback or need assistance with the MCP directory API, please join our Discord server.