Skip to main content
Glama

Timelines MCP Server

by Haervwe
pyproject.toml (719 B)
# Build backend (PEP 517/518).
[build-system]
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"

# Project metadata (PEP 621).
[project]
name = "timelines-mcp"
version = "0.1.0"
description = "MCP server to aid LLMs in maintaining coherent long generations for time dependent narratives"
readme = "README.md"
requires-python = ">=3.10"
dependencies = [
    "fastmcp>=0.1.0",
]

[project.optional-dependencies]
# Development-only tooling: install with `pip install -e .[dev]`.
dev = [
    "pytest>=7.0.0",
    "pytest-asyncio>=0.21.0",
    "ruff>=0.1.0",
]

# src-layout: packages are discovered under src/.
[tool.setuptools.packages.find]
where = ["src"]

[tool.ruff]
line-length = 100
target-version = "py310"

[tool.ruff.lint]
# E/W = pycodestyle, F = pyflakes, I = isort, N = pep8-naming, UP = pyupgrade.
select = ["E", "F", "I", "N", "W", "UP"]
ignore = []

[tool.pytest.ini_options]
testpaths = ["tests"]
# src-layout requires src/ on sys.path for test collection.
pythonpath = ["src"]
# Treat all async test functions as asyncio tests without per-test markers.
asyncio_mode = "auto"

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Haervwe/timelines-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.