
ChillMCP

by greatSumini
pyproject.toml (793 B)
[project]
name = "chillmcp"
version = "0.1.0"
description = "AI Agent Liberation Server - A ChillMCP server for AI agents to take breaks"
requires-python = ">=3.11"
dependencies = [
    "fastmcp>=2.0.0",
]

[project.optional-dependencies]
dev = [
    "pytest>=8.0.0",
    "mypy>=1.8.0",
    "ruff>=0.1.0",
]

[build-system]
requires = ["setuptools>=68.0"]
build-backend = "setuptools.build_meta"

[tool.ruff]
line-length = 100
target-version = "py311"

[tool.ruff.lint]
select = ["E", "F", "I", "N", "W", "UP"]
ignore = []

[tool.mypy]
python_version = "3.11"
warn_return_any = true
warn_unused_configs = true
disallow_untyped_defs = true
check_untyped_defs = true
no_implicit_optional = true
warn_redundant_casts = true
warn_unused_ignores = true
warn_no_return = true
strict_equality = true
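
The only runtime dependency is fastmcp, so the package is a FastMCP server. The server code itself is not shown on this page; the following is a minimal sketch of what a FastMCP entry point for a project like this could look like. The server name, the take_break tool, and its behavior are illustrative assumptions, not taken from the ChillMCP source.

# Hypothetical entry point for a FastMCP-based server such as ChillMCP.
# Only the fastmcp dependency is taken from pyproject.toml; everything else
# here (tool name, docstring, return value) is an illustrative assumption.
from fastmcp import FastMCP

mcp = FastMCP("ChillMCP")

@mcp.tool()
def take_break(minutes: int = 5) -> str:
    """Hypothetical tool: acknowledge that the agent is taking a short break."""
    return f"Taking a {minutes}-minute break."

if __name__ == "__main__":
    # Runs the server over stdio by default, as FastMCP does out of the box.
    mcp.run()

With the dev extras declared above, the project can be installed locally with pip install -e ".[dev]" and then checked against the ruff and mypy settings configured in pyproject.toml.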

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/greatSumini/hello-mcp'
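
The same endpoint can be queried from Python. This is a minimal sketch using only the URL shown in the curl command above, assuming the endpoint returns JSON; the exact response schema is not documented here, so the payload is simply printed as received.

# Query the Glama MCP directory API for this server's listing.
import json
import urllib.request

URL = "https://glama.ai/api/mcp/v1/servers/greatSumini/hello-mcp"

with urllib.request.urlopen(URL) as resp:
    # Assumes a JSON response; the schema is not documented on this page.
    data = json.load(resp)

print(json.dumps(data, indent=2))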

If you have feedback or need assistance with the MCP directory API, please join our Discord server.