pyproject.toml
[project]
name = "wuwa-mcp-server"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.12"
version = "2.0.1"
dependencies = [
    "asyncio>=3.4.3",
    "beautifulsoup4>=4.13.4",
    "httpx>=0.28.1",
    "mcp[cli]>=1.8.0",
    "smithery>=0.1.23",
    "uvicorn>=0.32.1",
    "starlette>=0.41.3",
]

[project.optional-dependencies]
dev = [
    "ruff>=0.8.0",
]

[project.scripts]
wuwa-mcp-server = "wuwa_mcp_server.server:main"

[build-system]
requires = ["uv_build>=0.8.15,<0.9.0"]
build-backend = "uv_build"

[tool.ruff]
line-length = 120
target-version = "py312"

[tool.ruff.lint]
select = [
    "E",   # pycodestyle errors
    "F",   # pyflakes
    "I",   # isort
    "N",   # pep8-naming
    "W",   # pycodestyle warnings
    "UP",  # pyupgrade
    "B",   # flake8-bugbear
    "C4",  # flake8-comprehensions
    "SIM", # flake8-simplify
    "TCH", # flake8-type-checking
    "RUF", # Ruff-specific rules
]
ignore = [
    "E501", # line too long (handled by formatter)
    "B008", # do not perform function calls in argument defaults
    "N806", # variable in function should be lowercase (conflicts with some APIs)
]

[tool.ruff.lint.isort]
force-single-line = true
known-first-party = ["wuwa_mcp_server"]

[tool.smithery]
server = "wuwa_mcp_server.server:create_server"
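Both entry points declared above ([project.scripts] and [tool.smithery]) point at the wuwa_mcp_server.server module: a console-script main and a create_server factory. The following is a minimal sketch of how such a module could be structured with FastMCP from the mcp package; the get_character tool and its behavior are purely hypothetical and not taken from the project's actual code.

# Hypothetical sketch of wuwa_mcp_server/server.py, inferred from the entry
# points in pyproject.toml; tool names and logic are illustrative assumptions.
from mcp.server.fastmcp import FastMCP


def create_server() -> FastMCP:
    # Factory referenced by [tool.smithery]; builds and returns the server.
    mcp = FastMCP("wuwa-mcp-server")

    @mcp.tool()
    async def get_character(name: str) -> str:
        """Hypothetical example tool: look up character data by name."""
        return f"Data for {name} would be fetched and returned here."

    return mcp


def main() -> None:
    # Console-script entry point referenced by [project.scripts];
    # runs the server over stdio by default.
    create_server().run()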

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/jacksmith3888/wuwa-mcp-server'
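The same request can be made from Python, for example with httpx. This is a minimal sketch assuming the endpoint returns JSON; no specific response fields are assumed.

# Fetch this server's entry from the MCP directory API (assumed JSON response).
import httpx

response = httpx.get("https://glama.ai/api/mcp/v1/servers/jacksmith3888/wuwa-mcp-server")
response.raise_for_status()
server_info = response.json()  # parsed server metadata
print(server_info)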

If you have feedback or need assistance with the MCP directory API, please join our Discord server.