# We provide all the information about MCP servers via our MCP API:
#   curl -X GET 'https://glama.ai/api/mcp/v1/servers/itstanner5216/multi-mcp'
# If you have feedback or need assistance with the MCP directory API, please join our Discord server.
# Core project metadata (PEP 621).
[project]
name = "multi-mcp"
version = "1.0.0"
description = "A production-ready MCP proxy that aggregates multiple backend servers into one endpoint with lazy loading, per-tool filtering, and a unified YAML config."
readme = "README.md"
requires-python = ">=3.10"
license = { text = "MIT" }
keywords = ["mcp", "model-context-protocol", "proxy", "ai", "llm", "tools"]
# Runtime dependencies (PEP 508 specifiers), one per line, sorted alphabetically.
# Lower bounds are pinned only where a minimum API level is known to matter
# (mcp >= 1.26.0; pydantic v2 — see the PydanticDeprecatedSince20 filter below).
dependencies = [
    "anyio",
    "httpx-sse",
    "langchain-mcp-adapters",
    "loguru",
    "mcp>=1.26.0",
    "pydantic>=2.0.0",
    "pydantic-settings>=2.0.0",
    "pyyaml>=6.0",
    "rich",
    "starlette",
    "uvicorn",
]

# Pytest configuration (equivalent to pytest.ini; see pytest's ini_options docs).
[tool.pytest.ini_options]
# Put the repo root on sys.path so tests can import the package without installing it.
pythonpath = ["."]
# Collect tests only from the top-level tests/ directory.
testpaths = ["tests"]
# Silence pydantic v2's deprecation warnings for v1-style usage
# (presumably emitted by a dependency rather than this project — verify before removing).
filterwarnings = ["ignore::pydantic.warnings.PydanticDeprecatedSince20"]