Skip to main content
Glama

BigBugAI MCP Server

by bigbugAi
pyproject.toml (1.75 kB)
[build-system]
requires = ["hatchling>=1.25.0"]
build-backend = "hatchling.build"

[project]
name = "bigbugai-mcp"
version = "0.1.0"
description = "BigBugAI MCP server exposing tools over stdio and optional HTTP/SSE."
readme = "README.md"
requires-python = ">=3.11"
authors = [{ name = "BigBugAI", email = "dev@bigbug.ai" }]
license = { text = "MIT" }
keywords = ["mcp", "bigbugai", "fastapi", "stdio", "ai-tools"]
classifiers = [
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3 :: Only",
    "Programming Language :: Python :: 3.11",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
]
# Runtime dependencies (PEP 508 specifiers), sorted alphabetically.
dependencies = [
    "fastapi>=0.111.0",
    "httpx>=0.27.0",
    "limits>=3.7.0",
    # MCP server with CLI tooling
    "mcp[cli]>=1.0.0",
    "pydantic>=2.7.0",
    "uvicorn>=0.30.0",
]

[project.urls]
Homepage = "https://github.com/bigbugAi/bigbugai-mcp"
Repository = "https://github.com/bigbugAi/bigbugai-mcp"
Issues = "https://github.com/bigbugAi/bigbugai-mcp/issues"

[project.optional-dependencies]
# Development-only extras: `pip install bigbugai-mcp[dev]`.
dev = [
    "mypy>=1.10.0",
    "pytest>=7.4.0",
    "pytest-asyncio>=0.23.0",
    "ruff>=0.5.0",
]

# Parent table placed before its sub-tables; the TOML spec discourages
# out-of-order definitions (the original declared [tool.hatch.build]
# after [tool.hatch.build.targets.*]).
# NOTE(review): LICENSE/README.md also appear in the sdist include below;
# hatch applies target-level include on top of this global one, so the
# duplication is kept to preserve behavior — confirm before pruning.
[tool.hatch.build]
include = [
    "LICENSE",
    "README.md",
]

[tool.hatch.build.targets.wheel]
packages = ["src/bigbugai_mcp"]

[tool.hatch.build.targets.sdist]
include = [
    "src/bigbugai_mcp",
    "README.md",
    "pyproject.toml",
    "tests",
    "LICENSE",
]

[tool.ruff]
line-length = 100

[tool.ruff.lint]
select = ["E", "F", "I", "B", "UP"]
# E501 (line length) is delegated to the formatter / line-length setting above.
ignore = ["E501"]

[tool.ruff.lint.isort]
known-first-party = ["bigbugai_mcp"]

[tool.mypy]
python_version = "3.11"
# NOTE(review): ignore_missing_imports weakens `strict`; kept as-is since
# some third-party deps may ship without stubs — confirm intent.
ignore_missing_imports = true
strict = true
# The warn_* flags below are already implied by `strict = true`; kept
# explicitly to preserve the original configuration verbatim.
warn_redundant_casts = true
warn_unused_ignores = true
warn_return_any = true
warn_unreachable = true
pretty = true

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/bigbugAi/bigbugai-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.