
Sequential Thinking MCP Server

pyproject.toml
[project]
name = "sequential-thinking"
version = "0.3.0"
description = "A Sequential Thinking MCP Server for advanced problem solving"
readme = "README.md"
requires-python = ">=3.10"
license = { text = "MIT" }
keywords = ["mcp", "ai", "problem-solving", "sequential-thinking"]
authors = [
    { name = "Arben Ademi", email = "arben.ademi@tuta.io" }
]
dependencies = [
    "mcp[cli]>=1.2.0",
    "rich>=13.7.0",
    "pyyaml>=6.0",
]

[project.scripts]
mcp-sequential-thinking = "mcp_sequential_thinking.server:main"

[project.optional-dependencies]
dev = [
    "pytest>=7.0.0",
    "pytest-cov>=4.0.0",
    "black>=23.0.0",
    "isort>=5.0.0",
    "mypy>=1.0.0",
]
vis = [
    "matplotlib>=3.5.0",
    "numpy>=1.20.0",
]
web = [
    "fastapi>=0.100.0",
    "uvicorn>=0.20.0",
    "pydantic>=2.0.0",
]
all = [
    "sequential-thinking[dev,vis,web]",
]

[project.urls]
Source = "https://github.com/arben-adm/sequential-thinking"

[tool.hatch.build.targets.wheel]
packages = ["mcp_sequential_thinking"]

[tool.pytest.ini_options]
testpaths = ["tests"]
python_files = "test_*.py"
python_classes = "Test*"
python_functions = "test_*"

[tool.black]
line-length = 100
target-version = ['py310']
include = '\.pyi?$'

[tool.isort]
profile = "black"
line_length = 100

[tool.mypy]
python_version = "3.10"
warn_return_any = true
warn_unused_configs = true
disallow_untyped_defs = true
disallow_incomplete_defs = true

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
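The [project.scripts] table registers a console command, mcp-sequential-thinking, that calls main() in mcp_sequential_thinking/server.py. That module is not shown on this page, so the following is only a minimal sketch of what such an entry point could look like, assuming the FastMCP helper from the mcp SDK listed in the dependencies; the tool name, parameters, and return shape are illustrative, not the project's actual API.

# Hypothetical sketch of mcp_sequential_thinking/server.py (assumed, not from this listing).
from mcp.server.fastmcp import FastMCP

mcp = FastMCP("sequential-thinking")

@mcp.tool()
def process_thought(thought: str, thought_number: int, total_thoughts: int) -> dict:
    """Record one step of a sequential-thinking chain (illustrative only)."""
    return {
        "thought_number": thought_number,
        "total_thoughts": total_thoughts,
        "next_thought_needed": thought_number < total_thoughts,
    }

def main() -> None:
    # Invoked by the mcp-sequential-thinking console script defined in [project.scripts].
    mcp.run()

if __name__ == "__main__":
    main()

The optional-dependency groups keep the core install small: dev pulls in test and lint tooling, vis adds matplotlib/numpy, web adds FastAPI/uvicorn/pydantic, and all aggregates the three by depending on the package's own extras.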
