
All-Search MCP Server

by HyunjunJeon
pyproject.toml (1.06 kB)
[project]
name = "ttimes-guide-coding"
version = "0.1.0"
description = "TTimes vibe coding demonstration example"
authors = [{ name = "Hyunjun Jeon", email = "jeonhj920@gmail.com" }]
# readme = "README.md"
requires-python = ">=3.13"
dependencies = [
    "a2a-sdk>=0.2.9",
    "fastmcp>=2.9.2",
    "langchain>=0.3.26",
    "langchain-google-genai>=2.1.5",
    "langchain-openai>=0.3.27",
    "langgraph>=0.5.0",
    "langsmith>=0.4.4",
    "pydantic-settings>=2.6.0",
    "python-dotenv>=1.0.0",
    "httpx>=0.27.0",
    "langchain-community>=0.3.0",
    "langgraph-checkpoint-redis>=0.0.8",
    "langchain-mcp-adapters>=0.1.7",
    "google-adk>=1.5.0",
    "tavily-python>=0.7.8",
    "uvloop>=0.21.0",
    "litellm>=1.73.6",
]

[dependency-groups]
dev = [
    "langgraph-cli[inmem]>=0.3.3",
    "ruff>=0.12.1",
    "pytest>=8.3.0",
    "pytest-asyncio>=0.24.0",
    "pytest-cov>=5.0.0",
]

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[tool.hatch.build.targets.wheel]
packages = ["agents", "a2a_client", "all-search-mcp"]
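
The dependency list pairs fastmcp with tavily-python, and the wheel ships an all-search-mcp package, which suggests the project exposes web search as an MCP tool. The sketch below is a minimal, hypothetical illustration of that pattern, not the project's actual code: the server name, tool name, and TAVILY_API_KEY handling are assumptions; only the fastmcp and tavily-python calls shown are standard usage of those libraries.

# Hypothetical sketch of a search tool served over MCP with FastMCP;
# not the project's actual implementation.
import os

from fastmcp import FastMCP
from tavily import TavilyClient

mcp = FastMCP("all-search")  # assumed server name

@mcp.tool()
def web_search(query: str, max_results: int = 5) -> list[dict]:
    """Search the web via Tavily and return the raw result entries."""
    client = TavilyClient(api_key=os.environ["TAVILY_API_KEY"])  # assumed env var
    response = client.search(query, max_results=max_results)
    return response.get("results", [])

if __name__ == "__main__":
    mcp.run()  # FastMCP defaults to the stdio transport

An MCP client (for example, one built with langchain-mcp-adapters, also listed in the dependencies) would launch this script over stdio and call the web_search tool by name.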

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/HyunjunJeon/vibecoding-lg-mcp-a2a'
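
The same request can be made from Python with httpx, which is already among the project's dependencies. This is only a usage sketch: the endpoint URL is copied from the curl command above, and the response is printed as-is because its schema is not documented here.

# Fetch this server's entry from the Glama MCP directory API.
import httpx

url = "https://glama.ai/api/mcp/v1/servers/HyunjunJeon/vibecoding-lg-mcp-a2a"
response = httpx.get(url, timeout=10.0)
response.raise_for_status()
print(response.json())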

If you have feedback or need assistance with the MCP directory API, please join our Discord server.