LlamaCloud MCP Server

by run-llama
pyproject.toml
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[dependency-groups]
dev = [
    "pre-commit>=4.2.0",
]

[project]
name = "llamacloud-mcp"
version = "1.0.0"
description = "Expose LlamaCloud services as MCP tools"
readme = "README.md"
requires-python = ">=3.10"
dependencies = [
    "llama-index-indices-managed-llama-cloud>=0.6.9",
    "mcp[cli]>=1.6.0",
    "python-dotenv>=1.1.0",
    "llama-index-tools-mcp>=0.1.0",
    "llama-cloud-services",
    "click",
]
license = "MIT"
authors = [
    {name = "Tuana Celik", email = "tuana@runllama.ai"},
    {name = "Laurie Voss", email = "laurie@runllama.ai"},
    {name = "Logan Markewich", email = "logan@runllama.ai"},
]
keywords = ["mcp", "llama", "llamacloud", "llama-cloud", "llama-cloud-services"]

[project.scripts]
llamacloud-mcp = "llamacloud_mcp.main:main"

[tool.hatch.build.targets.sdist]
include = ["llamacloud_mcp/"]
exclude = ["**/BUILD"]

[tool.hatch.build.targets.wheel]
include = ["llamacloud_mcp/"]
exclude = ["**/BUILD"]
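The [project.scripts] table maps the llamacloud-mcp console command to the main function in llamacloud_mcp/main.py. The following is only a minimal sketch of what such an entry point could look like, based on the declared dependencies (click, mcp[cli], python-dotenv, llama-index-indices-managed-llama-cloud); the option names and tool wiring here are illustrative assumptions, not the package's actual implementation.

# Hypothetical sketch of the entry point declared in [project.scripts].
# Assumed here (not confirmed by the source): the --index-name and
# --project-name options and the query_index tool name.
import click
from dotenv import load_dotenv
from llama_index.indices.managed.llama_cloud import LlamaCloudIndex
from mcp.server.fastmcp import FastMCP


@click.command()
@click.option("--index-name", required=True, help="LlamaCloud index to expose.")
@click.option("--project-name", default="Default", help="LlamaCloud project.")
def main(index_name: str, project_name: str) -> None:
    load_dotenv()  # pick up LLAMA_CLOUD_API_KEY from a local .env file
    mcp = FastMCP("llamacloud-mcp")
    index = LlamaCloudIndex(name=index_name, project_name=project_name)

    @mcp.tool()
    def query_index(query: str) -> str:
        """Retrieve relevant context for a query from the LlamaCloud index."""
        nodes = index.as_retriever().retrieve(query)
        return "\n\n".join(node.get_content() for node in nodes)

    # Serve over stdio so MCP clients (e.g. Claude Desktop) can launch it.
    mcp.run(transport="stdio")


if __name__ == "__main__":
    main()

Once the package is installed (for example with pip install llamacloud-mcp), the llamacloud-mcp command resolves through this entry point, so an MCP client configuration only needs to invoke that command.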

MCP directory API

We provide all the information about MCP servers via our MCP directory API. For example, to fetch this server's entry:

curl -X GET 'https://glama.ai/api/mcp/v1/servers/run-llama/llamacloud-mcp'
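The same endpoint can be called from any HTTP client. Below is a minimal sketch using only Python's standard library; the shape of the JSON payload is not documented here, so the sketch simply pretty-prints whatever the API returns.

# Fetch this server's directory entry. The URL comes from the curl
# example above; no assumptions are made about the response schema.
import json
import urllib.request

URL = "https://glama.ai/api/mcp/v1/servers/run-llama/llamacloud-mcp"

with urllib.request.urlopen(URL) as response:
    data = json.load(response)

print(json.dumps(data, indent=2))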

If you have feedback or need assistance with the MCP directory API, please join our Discord server.