Skip to main content
Glama

Model Context Protocol (MCP) Server

by hideya
pyproject.toml (1.04 kB)
# Build backend configuration (PEP 517/518). Hatchling builds the wheel/sdist.
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

# Project metadata (PEP 621).
[project]
name = "mcp-chat"
version = "0.2.14"
description = "Simple MCP Client to quickly test and explore MCP servers from the command line"
readme = "README.md"
# langgraph>=0.5.0 and langchain>=0.3.26 require Python 3.11+.
requires-python = ">=3.11"
keywords = [
    "mcp",
    "model-context-protocol",
    "client",
    "cli",
    "langchain",
    "tools",
    "python",
    "simple",
    "quick",
    "explore",
    "try",
    "test",
]
# PEP 508 requirement strings, one per line, sorted alphabetically.
dependencies = [
    "langchain>=0.3.26",
    "langchain-anthropic>=0.3.1",
    "langchain-cerebras>=0.5.0",
    "langchain-google-genai>=2.1.5",
    "langchain-groq>=0.3.7",
    "langchain-mcp-tools>=0.2.12",
    "langchain-openai>=0.3.0",
    "langchain-xai>=0.2.4",
    "langgraph>=0.5.0",
    "pyjson5>=1.6.8",
    "python-dotenv>=1.0.1",
    "websockets>=15.0.1",
]

[project.urls]
# Quoted keys are required here: the labels contain spaces.
"Bug Tracker" = "https://github.com/hideya/mcp-client-langchain-py/issues"
"Source Code" = "https://github.com/hideya/mcp-client-langchain-py"

# Console entry point: installs an `mcp-chat` command wired to mcp_chat.cli_chat:main.
[project.scripts]
mcp-chat = "mcp_chat.cli_chat:main"

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/hideya/mcp-client-langchain-py'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.