Skip to main content
Glama
pyproject.toml (1.44 kB)
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "obsidian-mcp-server"
version = "1.0.0"
description = "Servidor MCP para interactuar con tu vault de Obsidian desde Claude"
readme = "README.md"
# License matches the OSI classifier below.
license = { text = "MIT" }
requires-python = ">=3.11"
keywords = ["obsidian", "mcp", "claude", "knowledge-management", "notes"]
classifiers = [
    "Development Status :: 5 - Production/Stable",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Programming Language :: Python :: 3.13",
    "Topic :: Text Processing :: Markup :: Markdown",
    "Topic :: Office/Business :: Groupware",
]
# Runtime dependencies only. Development tooling (ruff, mypy) was previously
# listed here — ruff even twice, with conflicting constraints (>=0.14.8 here,
# >=0.5.5 in dev) — and has been consolidated into [dependency-groups].dev.
dependencies = [
    "aiofiles>=25.1.0",
    "fastmcp>=2.9.2",
    "pathspec>=0.12.1",
    "pydantic>=2.11.7",
    "python-dotenv>=1.1.1",
    "youtube-transcript-api>=1.2.3",
]

[project.scripts]
obsidian-mcp-server = "obsidian_mcp.server:main"

# PEP 735 dependency groups: tools needed to develop/test, not to run.
[dependency-groups]
dev = [
    "anyio>=4.4.0",
    "mypy>=1.19.1",
    "pyright>=1.1.372",
    # Kept the stricter of the two constraints that previously coexisted.
    "ruff>=0.14.8",
    "pytest>=8.4.1",
]

[tool.hatch.build.targets.wheel]
packages = ["obsidian_mcp"]

[tool.ruff]
line-length = 88
# Matches requires-python lower bound.
target-version = "py311"

[tool.ruff.lint]
# E/F: pycodestyle/pyflakes, I: import sorting, B: bugbear.
select = ["E", "F", "I", "B"]
ignore = []

[tool.pytest.ini_options]
testpaths = ["tests"]
python_files = ["test_*.py"]
addopts = "-v"

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Vasallo94/obsidian-mcp-server'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.