pyproject.toml
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "libralm-mcp-server"
version = "0.1.2"
description = "LibraLM MCP Server - A Model Context Protocol server for searching and retrieving book information"
readme = "README.md"
requires-python = ">=3.10"
license = {text = "MIT"}
authors = [
    {name = "LibraLM", email = "support@libralm.com"}
]
keywords = ["mcp", "server", "libralm", "books", "library", "search"]
classifiers = [
    "Development Status :: 4 - Beta",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Topic :: Software Development :: Libraries :: Python Modules",
]
dependencies = [
    "fastmcp>=2.11.0",
    "mcp>=1.12.3",
    "pydantic>=2.11.7",
    "requests>=2.32.4",
    "uvicorn>=0.34.0",
    "smithery>=0.1.0",
]

[project.urls]
Homepage = "https://github.com/libralm-ai/libralm_mcp_server"
Documentation = "https://github.com/libralm-ai/libralm_mcp_server#readme"
Repository = "https://github.com/libralm-ai/libralm_mcp_server"
Issues = "https://github.com/libralm-ai/libralm_mcp_server/issues"

[tool.hatch.build.targets.wheel]
only-include = ["libralm_mcp_server.py", "middleware.py"]
sources = ["."]

[project.scripts]
libralm-mcp-server = "libralm_mcp_server:main"
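The [project.scripts] table maps the libralm-mcp-server console command to a main callable in libralm_mcp_server.py, and fastmcp is among the dependencies, so the entry module presumably constructs a FastMCP server and runs it. The actual module is not shown on this page; the sketch below only illustrates that pattern, and the search_books tool, its parameters, and its return value are invented for illustration.

# Hypothetical sketch of libralm_mcp_server.py; the real tools and backend calls differ.
from fastmcp import FastMCP

mcp = FastMCP("LibraLM")

@mcp.tool()
def search_books(query: str) -> list[dict]:
    """Search for books matching a query (illustrative stub only)."""
    # A real implementation would query the LibraLM backend here.
    return [{"title": "Example Book", "query": query}]

def main() -> None:
    # Entry point referenced by [project.scripts]; runs the MCP server (stdio by default).
    mcp.run()

if __name__ == "__main__":
    main()

Installing the wheel puts the libralm-mcp-server command on PATH, which simply invokes main().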

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/libralm-ai/libralm_mcp_server'
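The same lookup can be done from Python with requests. The response is assumed to be JSON; its exact schema is not documented on this page.

import requests

# Fetch this server's directory entry from the Glama MCP API.
resp = requests.get(
    "https://glama.ai/api/mcp/v1/servers/libralm-ai/libralm_mcp_server",
    timeout=10,
)
resp.raise_for_status()
print(resp.json())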

If you have feedback or need assistance with the MCP directory API, please join our Discord server.