Skip to main content
Glama

Wakapi MCP Server

pyproject.toml (1.09 kB)
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "mcp-wakapi"
version = "0.1.0"
description = "MCP server for collecting logs from Wakapi"
authors = [
    {name = "impure0xntk", email = "219413815+impure0xntk@users.noreply.github.com"},
]
readme = "README.md"
license = {text = "Apache-2.0"}
requires-python = ">=3.11"
# The direct reference ({root:uri}) below relies on
# [tool.hatch.metadata].allow-direct-references = true.
dependencies = [
    "fastmcp>=0.1.0",
    "toml>=0.10.0",
    "wakapi_sdk @ {root:uri}/wakapi_sdk_project",
]

[project.optional-dependencies]
# Aligned pytest-asyncio with [tool.uv].dev-dependencies (was >=0.21.0 here,
# >=0.23.0 there); the stricter pin wins so both installers resolve the same.
dev = [
    "black>=23.0.0",
    "pytest>=7.0.0",
    "pytest-asyncio>=0.23.0",
    "ruff>=0.1.0",
]

[project.scripts]
wakapi-mcp = "main:main"

[tool.uv]
# NOTE(review): this duplicates [project.optional-dependencies].dev (plus
# pytest-mock); keep the two lists in sync, or drop one of them.
dev-dependencies = [
    "black>=23.0.0",
    "pytest>=7.0.0",
    "pytest-asyncio>=0.23.0",
    "pytest-mock>=3.15.1",
    "ruff>=0.1.0",
]

[tool.black]
line-length = 88
target-version = ["py311"]

[tool.ruff]
line-length = 88
target-version = "py311"

[tool.hatch.metadata]
# Required for the direct-reference (wakapi_sdk @ ...) dependency above.
allow-direct-references = true

[tool.hatch.build.targets.wheel]
packages = ["src/mcp_tools", "."]

[tool.pytest.ini_options]
asyncio_mode = "auto"

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/impure0xntk/mcp-wakapi'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.