Skip to main content
Glama
pyproject.toml (1.76 kB)
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "lldb-mcp"
version = "0.1.0"
description = "MCP server for LLDB debugger integration with AI assistants"
readme = "README.md"
requires-python = ">=3.10"
license = "MIT"
keywords = ["lldb", "debugger", "mcp", "model-context-protocol", "claude"]
authors = [
    { name = "Your Name", email = "you@example.com" },
]
classifiers = [
    "Development Status :: 4 - Beta",
    "Environment :: Console",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Topic :: Software Development :: Debuggers",
]
# PEP 508 requirement strings, one per line, sorted alphabetically.
dependencies = [
    "httpx>=0.25.0",
    "mcp[cli]>=1.0.0",
    "pydantic>=2.0.0",
]

[project.optional-dependencies]
dev = [
    "mypy>=1.0",
    "pytest>=7.0",
    "pytest-asyncio>=0.21",
    "ruff>=0.1.0",
]

[project.scripts]
lldb-mcp = "lldb_mcp_server:mcp.run"

[project.urls]
Homepage = "https://github.com/yourusername/lldb-mcp"
Documentation = "https://github.com/yourusername/lldb-mcp#readme"
Repository = "https://github.com/yourusername/lldb-mcp"
Issues = "https://github.com/yourusername/lldb-mcp/issues"

# Ship the package from the repository root (flat layout).
[tool.hatch.build.targets.wheel]
packages = ["."]

[tool.ruff]
line-length = 100
target-version = "py310"

[tool.ruff.lint]
select = ["E", "F", "I", "N", "W", "UP"]
ignore = ["E501"]  # Line length handled by formatter

[tool.mypy]
python_version = "3.10"
strict = true
warn_return_any = true
warn_unused_configs = true

[tool.pytest.ini_options]
asyncio_mode = "auto"
testpaths = ["tests"]

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/benpm/claude_lldb_mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.