
OpenAPI Lambda MCP Server

by ingeno
pyproject.toml (1.22 kB)
[project]
name = "mcp-openapi-monorepo"
version = "0.2.5"
description = "OpenAPI MCP Server - Monorepo"
requires-python = ">=3.12"
readme = "README.md"
license = {text = "Apache-2.0"}
license-files = ["LICENSE", "NOTICE"]
dependencies = [
    "pytest>=8.4.2",
]

[tool.uv.workspace]
members = [
    "packages/infrastructure",
    "packages/apis/petstore",
    "packages/apis/zoho-crm",
]

[tool.uv.sources]
"mcp-openapi-infrastructure" = { workspace = true }

[project.optional-dependencies]
dev = [
    "commitizen>=4.8.3",
    "pre-commit>=4.2.0",
    "pyright>=1.1.402",
    "ruff>=0.12.2",
]

[tool.ruff]
target-version = "py312"
line-length = 100
exclude = [
    ".venv",
    "**/__pycache__",
    "**/node_modules",
    "**/dist",
    "**/build",
    "**/env",
    "**/.ruff_cache",
    "**/.venv",
    "**/.ipynb_checkpoints",
]
force-exclude = true

[tool.ruff.lint]
exclude = ["__init__.py"]
select = ["C", "D", "E", "F", "I", "W"]
ignore = ["C901", "E501", "E741", "F402", "F823", "D100", "D106"]

[tool.ruff.lint.isort]
lines-after-imports = 2
no-sections = true

[tool.ruff.format]
quote-style = "single"
indent-style = "space"
skip-magic-trailing-comma = false
line-ending = "auto"
docstring-code-format = true
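The [tool.uv.workspace] table declares a uv workspace whose member packages are resolved and locked together, and the [tool.uv.sources] entry tells uv to satisfy the "mcp-openapi-infrastructure" dependency from the workspace rather than from a package index. As a rough illustration only, a member such as packages/apis/petstore might declare that dependency along these lines; the package name, version, and source placement below are assumptions, not taken from the repository:

[project]
# Hypothetical member package name, for illustration only.
name = "mcp-openapi-petstore"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = [
    "mcp-openapi-infrastructure",
]

[tool.uv.sources]
# Resolve the shared infrastructure package from the workspace instead of a registry.
mcp-openapi-infrastructure = { workspace = true }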

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/ingeno/mcp-openapi-lambda'
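The same endpoint can be queried from Python. The sketch below uses only the standard library and prints the raw JSON payload rather than assuming any particular response schema; only the URL comes from the curl example above.

# Minimal sketch of calling the Glama MCP directory API from Python.
import json
import urllib.request

URL = "https://glama.ai/api/mcp/v1/servers/ingeno/mcp-openapi-lambda"

# Fetch the server record and parse it as JSON.
with urllib.request.urlopen(URL) as response:
    server = json.load(response)

# Print the payload as-is; inspect it to see which fields are available.
print(json.dumps(server, indent=2))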

If you have feedback or need assistance with the MCP directory API, please join our Discord server.