
A2A MCP Server

pyproject.toml (1.97 kB)
[project]
name = "a2a_mcp_server"
version = "0.1.5"
description = "A bridge server that connects Model Context Protocol (MCP) with Agent-to-Agent (A2A) protocol"
readme = "README.md"
requires-python = ">=3.11"
license = {text = "Apache-2.0"}
authors = [
    {name = "GongRzhe", email = "gongrzhe@gmail.com"}
]
maintainers = [
    {name = "GongRzhe", email = "gongrzhe@gmail.com"}
]
keywords = ["MCP", "A2A", "Agent-to-Agent", "Model Context Protocol", "AI", "LLM"]
classifiers = [
    "Development Status :: 4 - Beta",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: Apache Software License",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.11",
    "Topic :: Software Development :: Libraries :: Python Modules",
    "Topic :: Scientific/Engineering :: Artificial Intelligence",
]
dependencies = [
    "httpx>=0.28.1",
    "httpx-sse>=0.4.0",
    "jwcrypto>=1.5.6",
    "pydantic>=2.10.6",
    "pyjwt>=2.10.1",
    "sse-starlette>=2.2.1",
    "starlette>=0.46.1",
    "typing-extensions>=4.12.2",
    "uvicorn>=0.34.0",
    "fastmcp>=2.8.1",
]

[project.urls]
"Homepage" = "https://github.com/GongRzhe/A2A-MCP-Server"
"Bug Tracker" = "https://github.com/GongRzhe/A2A-MCP-Server/issues"
"Repository" = "https://github.com/GongRzhe/A2A-MCP-Server"
"Documentation" = "https://github.com/GongRzhe/A2A-MCP-Server/blob/main/README.md"

# This section creates the CLI command
[project.scripts]
a2a-mcp-server = "a2a_mcp_server:main"

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

# This section tells hatchling to include both your main script and the common module
[tool.hatch.build]
include = [
    "a2a_mcp_server.py",
    "persistence_utils.py",
    "common/**/*.py",
]

# No longer needed - using the include directive above
# [tool.hatch.build.targets.wheel]
# packages = ["common"]

[project.optional-dependencies]
dev = ["pytest>=8.3.5", "pytest-mock>=3.14.0", "ruff>=0.11.2"]
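
The [project.scripts] entry a2a-mcp-server = "a2a_mcp_server:main" means the installed console command imports the a2a_mcp_server module and calls its main() function. The real implementation lives in the repository; the sketch below only illustrates the shape such an entry point could take, assuming a FastMCP-based server (fastmcp is a declared dependency) and a purely hypothetical example_tool.

# a2a_mcp_server.py -- illustrative sketch only, not the project's actual code
from fastmcp import FastMCP

mcp = FastMCP("A2A MCP Server")

@mcp.tool()
def example_tool(message: str) -> str:
    """Hypothetical placeholder; the real server registers A2A bridging tools."""
    return message

def main() -> None:
    # The console script generated from [project.scripts] calls this function.
    mcp.run()  # FastMCP defaults to the stdio transport

if __name__ == "__main__":
    main()

After installing the package (for example with pip install . from the repository root), the a2a-mcp-server command is available on PATH and runs main().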

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/GongRzhe/A2A-MCP-Server'
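
The same lookup can be done from Python. A minimal sketch using httpx; the response schema is not documented here, so the raw JSON is simply printed.

import httpx

# Fetch this server's directory entry from the Glama MCP API.
resp = httpx.get("https://glama.ai/api/mcp/v1/servers/GongRzhe/A2A-MCP-Server")
resp.raise_for_status()
print(resp.json())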

If you have feedback or need assistance with the MCP directory API, please join our Discord server.