
Simple MCP Server

by ilil1
pyproject.toml

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "simple-mcp-example"
version = "0.1"
description = "Simple MCP server with streamable HTTP & STDIO transports"
readme = "README.md"
requires-python = ">=3.11"
dependencies = [
    "fastmcp>=2.5.1",
    "fastapi>=0.104.0",
    "uvicorn>=0.23.2",
    "pydantic>=2.4.2",
    "python-dotenv>=1.0.0",
    "pyyaml>=6.0.1",
    "httpx>=0.25.0",
]

[project.optional-dependencies]
dev = [
    "black>=23.9.1",
    "ruff>=0.0.291",
    "pytest>=7.4.2",
    "pytest-asyncio>=0.21.1",
]

[tool.hatch.build.targets.wheel]
packages = ["."]

# Simply include the single file directly
[tool.hatch.build.targets.wheel.force-include]
"run_server.py" = "run_server.py"

[tool.black]
line-length = 100
target-version = ["py311"]
include = '\.pyi?$'

[tool.ruff]
line-length = 100
target-version = "py311"
select = ["E", "F", "I", "W", "N", "UP", "B", "C4"]
ignore = ["E203"]
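The project packages a single entry point, run_server.py, and the description promises both streamable HTTP and STDIO transports via fastmcp. The repository's actual run_server.py is not shown here, so the following is only a minimal sketch of what such a server could look like with FastMCP 2.x; the tool name, port, and "http" command-line flag are illustrative assumptions, not taken from the project.

# run_server.py (illustrative sketch only; not the project's actual code)
import sys

from fastmcp import FastMCP

mcp = FastMCP("simple-mcp-example")


@mcp.tool()
def echo(text: str) -> str:
    """Placeholder tool: return the input text unchanged."""
    return text


if __name__ == "__main__":
    # Hypothetical convention: pass "http" to serve streamable HTTP,
    # otherwise fall back to the STDIO transport.
    if len(sys.argv) > 1 and sys.argv[1] == "http":
        mcp.run(transport="streamable-http", host="127.0.0.1", port=8000)
    else:
        mcp.run(transport="stdio")

With a layout like this, an MCP client can either spawn the script directly and talk over STDIO, or connect to the streamable HTTP endpoint when the script is started with the assumed "http" argument.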

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/ilil1/simple-mcp-server'
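The same request can be made from Python with httpx (already listed as a project dependency). This is a small sketch that assumes the endpoint returns JSON; the response structure is not documented here.

import httpx

# Fetch this server's directory entry from the Glama MCP API.
resp = httpx.get("https://glama.ai/api/mcp/v1/servers/ilil1/simple-mcp-server")
resp.raise_for_status()
print(resp.json())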

If you have feedback or need assistance with the MCP directory API, please join our Discord server.