pyproject.toml • 1.43 kB
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "lpdp-mcp-server"
version = "0.1.0"
description = "MCP Server untuk FAQ Pencairan Beasiswa LPDP menggunakan RAG dengan Pinecone dan Gemini"
readme = "README.md"
requires-python = ">=3.11"
license = "MIT"
authors = [
    { name = "Aldy" }
]
keywords = ["mcp", "lpdp", "beasiswa", "rag", "pinecone", "gemini"]
classifiers = [
    "Development Status :: 3 - Alpha",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
]
dependencies = [
    "mcp>=1.0.0",
    "pinecone-client>=3.0.0",
    "google-generativeai>=0.8.0",
    "pymupdf>=1.24.0",
    "langchain>=0.3.0",
    "langchain-text-splitters>=0.3.0",
    "python-dotenv>=1.0.0",
    "pydantic>=2.0.0",
]

[project.optional-dependencies]
dev = [
    "pytest>=8.0.0",
    "pytest-asyncio>=0.23.0",
    "black>=24.0.0",
    "ruff>=0.1.0",
]

[project.scripts]
lpdp-mcp = "src.server:main"
index-docs = "scripts.index_documents:main"

[tool.hatch.build.targets.wheel]
packages = ["src"]

[tool.black]
line-length = 100
target-version = ['py311']

[tool.ruff]
line-length = 100
target-version = "py311"
select = ["E", "F", "I", "N", "W"]
ignore = ["E501"]

[tool.pytest.ini_options]
asyncio_mode = "auto"
testpaths = ["tests"]
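
The pyproject above packages an MCP server that answers LPDP scholarship-disbursement FAQs via RAG over Pinecone and Gemini, and [project.scripts] exposes two console entry points: index-docs (the document indexing script) and lpdp-mcp (the server itself). A minimal usage sketch follows, assuming a local checkout and that credentials are read from a .env file through the declared python-dotenv dependency; the PINECONE_API_KEY and GEMINI_API_KEY variable names are assumptions, not taken from this file.

# Install the package together with the dev extras declared in [project.optional-dependencies]
pip install -e ".[dev]"

# Assumed variable names; the server is expected to load its configuration via python-dotenv
cat > .env <<'EOF'
PINECONE_API_KEY=your-pinecone-key
GEMINI_API_KEY=your-gemini-key
EOF

# Index the FAQ documents into Pinecone, then start the MCP server
index-docs
lpdp-mcp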

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/adityaldy/mcp-training'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.