Skip to main content
Glama

Gemini MCP Server

pyproject.toml (2.84 kB)
# pyproject.toml for zen-mcp-server — build metadata, packaging layout, and
# formatter/linter/release-automation configuration.
# Section order follows PyPA convention: [build-system], [project], then [tool.*].

[build-system]
requires = ["setuptools>=45", "wheel", "setuptools_scm[toml]>=6.2"]
build-backend = "setuptools.build_meta"

[project]
name = "zen-mcp-server"
version = "5.11.0"
description = "AI-powered MCP server with multiple model providers"
requires-python = ">=3.9"
dependencies = [
    "mcp>=1.0.0",
    "google-genai>=1.19.0",
    "openai>=1.55.2",
    "pydantic>=2.0.0",
    "python-dotenv>=1.0.0",
]

[project.scripts]
zen-mcp-server = "server:run"

# Packaging layout: the server is a flat repo, so top-level modules and a set
# of package globs are listed explicitly rather than using src-layout discovery.
[tool.setuptools]
py-modules = ["server", "config"]

[tool.setuptools.packages.find]
include = ["tools*", "providers*", "systemprompts*", "utils*", "conf*"]

[tool.setuptools.package-data]
"*" = ["conf/*.json"]

[tool.setuptools.data-files]
"conf" = ["conf/custom_models.json"]

[tool.black]
line-length = 120
target-version = ['py39', 'py310', 'py311', 'py312', 'py313']
include = '\.pyi?$'
extend-exclude = '''
/(
    # directories
    \.eggs
  | \.git
  | \.hg
  | \.mypy_cache
  | \.tox
  | \.venv
  | \.zen_venv
  | venv
  | _build
  | buck-out
  | build
  | dist
)/
'''

[tool.isort]
profile = "black"
multi_line_output = 3
include_trailing_comma = true
force_grid_wrap = 0
use_parentheses = true
ensure_newline_before_comments = true
line_length = 120
skip_glob = ["venv/*", ".venv/*", ".zen_venv/*"]

[tool.ruff]
target-version = "py39"
line-length = 120

[tool.ruff.lint]
select = [
    "E",  # pycodestyle errors
    "W",  # pycodestyle warnings
    "F",  # pyflakes
    "I",  # isort
    "B",  # flake8-bugbear
    "C4", # flake8-comprehensions
    "UP", # pyupgrade
]
ignore = [
    "E501", # line too long, handled by black
    "B008", # do not perform function calls in argument defaults
    "C901", # too complex
    "B904", # exception handling with raise from
]

[tool.ruff.lint.per-file-ignores]
"__init__.py" = ["F401"]
"tests/*" = ["B011"]
"tests/conftest.py" = ["E402"] # Module level imports not at top of file - needed for test setup

[tool.semantic_release]
version_toml = ["pyproject.toml:project.version"]
branch = "main"
version_source = "tag"
# Literal string: regex backslashes need no escaping (same pattern as the
# original double-escaped basic string).
version_pattern = 'v(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)'
major_on_zero = false
build_command = "python -m pip install --upgrade build && python -m build"
dist_path = "dist/"
upload_to_vcs_release = true
upload_to_repository = false
remove_dist = false
commit_version_number = true
commit_message = "chore(release): {version}\n\nAutomatically generated by python-semantic-release"
tag_format = "v{version}"

[tool.semantic_release.branches.main]
match = "main"
prerelease = false

[tool.semantic_release.changelog]
exclude_commit_patterns = []

[tool.semantic_release.commit_parser_options]
allowed_tags = ["build", "chore", "ci", "docs", "feat", "fix", "perf", "style", "refactor", "test"]
minor_tags = ["feat"]
patch_tags = ["fix", "perf"]

[tool.semantic_release.remote.token]
env = "GH_TOKEN"

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/BeehiveInnovations/gemini-mcp-server'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.