pyproject.toml (1.29 kB)
[project]
name = "xiaohongshu-agent"
version = "0.1.0"
description = "基于 LangGraph 的小红书运营 Agent"  # i.e. "A LangGraph-based Xiaohongshu operations agent"
authors = [
    {name = "Your Name", email = "you@example.com"}
]
readme = "README.md"
requires-python = ">=3.11"
dependencies = [
    "langgraph>=1.0.0,<1.1.0",
    "langchain>=1.0.0,<1.1.0",
    "langchain-core>=1.0.0,<1.1.0",
    "langchain-openai>=1.0.0,<1.1.0",
    "langchain-anthropic>=1.0.0,<1.1.0",
    "fastapi>=0.115.0",
    "uvicorn[standard]>=0.31.0",
    "pydantic>=2.9.0",
    "pydantic-settings>=2.5.0",
    "sqlalchemy>=2.0.0",
    "asyncpg>=0.29.0",
    "pgvector>=0.3.0",
    "redis>=5.1.0",
    "celery[redis]>=5.4.0",
    "apscheduler>=3.10.0",
    "structlog>=24.4.0",
    "httpx>=0.27.0",
    "tenacity>=9.0.0",
    "pillow>=11.0.0",
    "python-dotenv>=1.0.0",
    "grandalf>=0.8",
    "langgraph-supervisor>=0.0.31",
    "langchain-mcp-adapters>=0.1.13",
    "pyyaml>=6.0.0",
]

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[tool.hatch.build.targets.wheel]
packages = ["src/ai_social_scheduler"]

# PEP 735 dependency groups: development-only tooling
[dependency-groups]
dev = [
    "pytest>=8.3.0",
    "pytest-asyncio>=0.24.0",
    "pytest-cov>=6.0.0",
    "ruff>=0.7.0",
    "pre-commit>=4.0.0",
    "ipython>=8.28.0",
]

[tool.ruff]
line-length = 100
target-version = "py311"
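For orientation, here is a minimal sketch of how the pinned langgraph and langchain-openai packages are typically wired together. The graph shape, state fields, and model name below are illustrative assumptions and are not taken from this repository.

# Minimal sketch: the graph layout, state fields, and model name are assumptions,
# not code from this repository.
from typing import TypedDict

from langchain_openai import ChatOpenAI
from langgraph.graph import END, START, StateGraph


class DraftState(TypedDict):
    topic: str
    draft: str


llm = ChatOpenAI(model="gpt-4o-mini")  # assumes OPENAI_API_KEY is set in the environment


def write_draft(state: DraftState) -> dict:
    """Ask the model for a short post draft on the requested topic."""
    reply = llm.invoke(f"Write a short social-media post about: {state['topic']}")
    return {"draft": reply.content}


builder = StateGraph(DraftState)
builder.add_node("write_draft", write_draft)
builder.add_edge(START, "write_draft")
builder.add_edge("write_draft", END)
graph = builder.compile()

if __name__ == "__main__":
    print(graph.invoke({"topic": "morning coffee"})["draft"])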

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/luyike221/xiaohongshu-mcp-python'
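
The same request can be made from Python with httpx (already listed in the dependencies above); this is a direct translation of the curl command and makes no assumptions about the response schema.

# Fetch the server record shown in the curl example and print the raw JSON.
import httpx

url = "https://glama.ai/api/mcp/v1/servers/luyike221/xiaohongshu-mcp-python"
response = httpx.get(url, timeout=10.0)
response.raise_for_status()
print(response.json())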

If you have feedback or need assistance with the MCP directory API, please join our Discord server.