
Gemini MCP Server

pyproject.toml (1.3 kB)
[project]
name = "aistudio-gemini-mcp"
version = "1.0.0"
description = "MCP server for Google Gemini via AIStudioProxyAPI"
readme = "README.md"
requires-python = ">=3.10"
license = {text = "MIT"}
authors = [
    {name = "AIStudioProxyAPI Contributors"}
]
keywords = ["mcp", "gemini", "ai-studio", "llm", "claude"]
classifiers = [
    "Development Status :: 4 - Beta",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Topic :: Software Development :: Libraries :: Python Modules",
]
dependencies = [
    "mcp>=1.0.0",
    "httpx>=0.25.0",
    "pydantic>=2.0.0",
]

[project.optional-dependencies]
dev = [
    "pytest>=7.0.0",
    "pytest-asyncio>=0.21.0",
    "ruff>=0.1.0",
]

[project.scripts]
aistudio-gemini-mcp = "server:main"

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[tool.hatch.build.targets.wheel]
packages = ["."]

[dependency-groups]
dev = [
    "pytest>=7.0.0",
    "pytest-asyncio>=0.21.0",
    "ruff>=0.1.0",
]

[tool.ruff]
line-length = 100
target-version = "py310"

[tool.ruff.lint]
select = ["E", "F", "I", "W"]
ignore = ["E501"]
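
The [project.scripts] table maps the aistudio-gemini-mcp command to a main function in server.py, which is not shown on this page. Below is a minimal sketch of how such an entry point could be written with the FastMCP helper from the mcp package and httpx (both listed in dependencies). The tool name, default model, proxy port, and the OpenAI-compatible /v1/chat/completions path are illustrative assumptions about an AIStudioProxyAPI deployment, not code taken from this project.

import os

import httpx
from mcp.server.fastmcp import FastMCP

# Assumed base URL for a locally running AIStudioProxyAPI instance;
# override via AISTUDIO_PROXY_URL to match your deployment.
PROXY_URL = os.environ.get("AISTUDIO_PROXY_URL", "http://localhost:2048")

mcp = FastMCP("aistudio-gemini")


@mcp.tool()
async def ask_gemini(prompt: str, model: str = "gemini-1.5-pro") -> str:
    """Send a prompt to Gemini through the proxy and return the reply text."""
    async with httpx.AsyncClient(timeout=120) as client:
        resp = await client.post(
            f"{PROXY_URL}/v1/chat/completions",
            json={
                "model": model,
                "messages": [{"role": "user", "content": prompt}],
            },
        )
        resp.raise_for_status()
        # Assumes an OpenAI-compatible response shape: choices[0].message.content.
        return resp.json()["choices"][0]["message"]["content"]


def main() -> None:
    # Serve over stdio, FastMCP's default transport for local MCP clients.
    mcp.run()


if __name__ == "__main__":
    main()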

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/xumingjun5208/aistudio-gemini-mcp'
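
The same lookup can be done from Python; here is a small sketch using httpx, which is already one of this project's dependencies.

import httpx

# Fetch this server's metadata from the Glama MCP directory API.
url = "https://glama.ai/api/mcp/v1/servers/xumingjun5208/aistudio-gemini-mcp"
response = httpx.get(url)
response.raise_for_status()
print(response.json())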

If you have feedback or need assistance with the MCP directory API, please join our Discord server.