MCP server for LogSeq

by ergut
pyproject.toml (1.26 kB)
[project] name = "mcp-logseq" version = "1.0.1" description = "MCP server to work with LogSeq via the local HTTP server" readme = "README.md" requires-python = ">=3.11" license = "MIT" keywords = ["mcp", "logseq", "model-context-protocol", "knowledge-management"] classifiers = [ "Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Text Processing", ] dependencies = [ "mcp>=1.1.0", "python-dotenv>=1.0.1", "requests>=2.32.3", ] [project.urls] "Homepage" = "https://github.com/ergut/mcp-logseq" "Bug Reports" = "https://github.com/ergut/mcp-logseq/issues" "Source" = "https://github.com/ergut/mcp-logseq" [[project.authors]] name = "Salih Ergüt" email = "salih.ergut@oredata.com" [build-system] requires = ["hatchling"] build-backend = "hatchling.build" [dependency-groups] dev = [ "pyright>=1.1.389", "pytest>=7.0.0", "pytest-mock>=3.10.0", "responses>=0.23.0", "pytest-asyncio>=0.21.0", ] [project.scripts] mcp-logseq = "mcp_logseq:main"

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/ergut/mcp-logseq-server'
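For reference, the same lookup can be made from Python with the requests library already listed in this project's dependencies. The snippet only uses the URL shown above; the structure of the JSON response is not assumed here.

# Fetch this server's entry from the Glama MCP directory API.
import requests

resp = requests.get(
    "https://glama.ai/api/mcp/v1/servers/ergut/mcp-logseq-server",
    timeout=10,
)
resp.raise_for_status()
print(resp.json())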

If you have feedback or need assistance with the MCP directory API, please join our Discord server.