
MCP AI Service Platform

by dkb12138ggg
pyproject.toml
[project]
name = "mcp-production-client"
version = "0.2.0"
description = "Production-ready MCP multi-server client with connection pooling and async processing"
readme = "README.md"
requires-python = ">=3.11"
dependencies = [
    "bs4>=0.0.2",
    "fastapi>=0.115.12",
    "httpx>=0.28.1",
    "mcp[cli]>=1.4.1",
    "openai>=1.66.3",
    "pydantic>=2.10.6",
    "python-dotenv>=1.0.1",
    "uvicorn[standard]>=0.34.0",
    "structlog>=23.2.0",
    "prometheus-client>=0.20.0",
    "redis>=5.0.0",
    "tenacity>=8.2.0",
    "asyncio-pool>=0.6.0",
    "pydantic-settings>=2.1.0",
    "slowapi>=0.1.9",
    "circuitbreaker>=1.4.0",
    # RAG and database dependencies
    "asyncpg>=0.29.0",
    "sqlalchemy[asyncio]>=2.0.25",
    "alembic>=1.13.0",
    "pgvector>=0.2.4",
    "sentence-transformers>=2.2.2",
    "nltk>=3.8.1",
    "tiktoken>=0.5.2",
    "numpy>=1.24.0",
    "scikit-learn>=1.3.0",
    "langchain-core>=0.1.0",
    "langchain-text-splitters>=0.0.1",
]

[[tool.uv.index]]
url = "https://mirrors.aliyun.com/pypi/simple/"
default = true
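The pyproject.toml above only pins dependencies; it does not show how the listed mcp SDK is wired up. As a rough illustration of the "multi-server client with async processing" described in the project metadata, the sketch below connects to several MCP servers concurrently over stdio and lists their tools. The server names, script paths, and the list_server_tools helper are hypothetical; only the mcp ClientSession/stdio_client calls come from the SDK itself.

import asyncio

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

# Hypothetical server definitions; the commands and script paths are illustrative only.
SERVERS = {
    "search": StdioServerParameters(command="python", args=["servers/search_server.py"]),
    "rag": StdioServerParameters(command="python", args=["servers/rag_server.py"]),
}


async def list_server_tools(name: str, params: StdioServerParameters) -> list[str]:
    # Open a stdio transport to one server and run a single MCP session over it.
    async with stdio_client(params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            tools = await session.list_tools()
            return [tool.name for tool in tools.tools]


async def main() -> None:
    # Query every configured server concurrently rather than one at a time.
    results = await asyncio.gather(
        *(list_server_tools(name, params) for name, params in SERVERS.items())
    )
    for name, tool_names in zip(SERVERS, results):
        print(f"{name}: {tool_names}")


if __name__ == "__main__":
    asyncio.run(main())

A real production client would add the pooling, retry (tenacity), and circuit-breaker layers implied by the dependency list; this sketch only shows the basic fan-out over multiple sessions.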

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/dkb12138ggg/python-rag-mcp-client'
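Since httpx is already in the project's dependency list, the same lookup can be made from Python. A minimal sketch, assuming the endpoint from the curl command above returns JSON:

import httpx

# Same request as the curl command above, using httpx.
url = "https://glama.ai/api/mcp/v1/servers/dkb12138ggg/python-rag-mcp-client"

response = httpx.get(url, timeout=10.0)
response.raise_for_status()
print(response.json())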

If you have feedback or need assistance with the MCP directory API, please join our Discord server.