
EX MCP Server

by Zazzles2908
pyproject.toml
[project]
name = "ex-mcp-server"
version = "0.1.0"
description = "AI-powered MCP (Model Context Protocol) server with Moonshot/Kimi, GLM, custom OpenAI-compatible endpoints, and OpenRouter support"
readme = "README.md"
license = { file = "LICENSE" }
requires-python = ">=3.9"
authors = [{ name = "Zazzles", email = "jajireen1@gmail.com" }]
keywords = [
    "mcp",
    "model context protocol",
    "ai",
    "moonshot",
    "kimi",
    "glm",
    "openrouter",
    "ollama",
    "vllm",
]
classifiers = [
    "License :: OSI Approved :: Apache Software License",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3 :: Only",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Intended Audience :: Developers",
    "Topic :: Software Development :: Libraries",
    "Environment :: Console",
]
dependencies = [
    "mcp>=1.0.0",
    "openai>=1.55.2",
    "pydantic>=2.0.0",
    "python-dotenv>=1.0.0",
]

[project.urls]
Homepage = "https://github.com/Zazzles2908/ex-mcp-server"
Issues = "https://github.com/Zazzles2908/ex-mcp-server/issues"

[project.optional-dependencies]
# Provider-specific extras (isolated installation)
moonshot = [
    # Kimi/Moonshot uses the OpenAI-compatible client; no extra SDK required
]
zhipuai = [
    # Official Z.AI SDK; install in an isolated venv to avoid PyJWT conflicts
    "zhipuai>=2.0.0",
]
# Remote server extras for network/SSE deployment
remote = [
    "fastapi>=0.110.0",
    "uvicorn[standard]>=0.30.0",
    "sse-starlette>=2.0.0",
    "fastmcp>=1.0.0",
]

[tool.setuptools.packages.find]
include = ["tools*", "providers*", "systemprompts*", "utils*", "conf*", "scripts*"]

[tool.setuptools]
py-modules = ["server", "config"]

[tool.setuptools.package-data]
"*" = ["conf/*.json"]

[tool.setuptools.data-files]
"conf" = ["conf/custom_models.json"]

[project.scripts]
ex-mcp-server = "server:run"

[tool.black]
line-length = 120
target-version = ['py39', 'py310', 'py311', 'py312', 'py313']
include = '\.pyi?$'
extend-exclude = '''
/(
    # directories
    \.eggs
  | \.git
  | \.hg
  | \.mypy_cache
  | \.tox
  | \.venv
  | venv
  | _build
  | buck-out
  | build
  | dist
)/
'''

[tool.isort]
profile = "black"
multi_line_output = 3
include_trailing_comma = true
force_grid_wrap = 0
use_parentheses = true
ensure_newline_before_comments = true
line_length = 120
skip_glob = ["venv/*", ".venv/*"]

[tool.ruff]
target-version = "py39"
line-length = 120

[tool.ruff.lint]
select = [
    "E",  # pycodestyle errors
    "W",  # pycodestyle warnings
    "F",  # pyflakes
    "I",  # isort
    "B",  # flake8-bugbear
    "C4", # flake8-comprehensions
    "UP", # pyupgrade
]
ignore = [
    "E501", # line too long, handled by black
    "B008", # do not perform function calls in argument defaults
    "C901", # too complex
    "B904", # exception handling with raise from
]

[tool.ruff.lint.per-file-ignores]
"__init__.py" = ["F401"]
"tests/*" = ["B011"]
"tests/conftest.py" = ["E402"] # module-level imports not at top of file; needed for test setup

[build-system]
requires = ["setuptools>=45", "wheel", "setuptools_scm[toml]>=6.2"]
build-backend = "setuptools.build_meta"
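The [project.optional-dependencies] table keeps the base install lean: the four core dependencies are always installed, while provider SDKs and the FastAPI/SSE remote stack are opt-in extras. Assuming an install from a repository checkout (the package name on PyPI is not confirmed here), the extras would be selected like this:

pip install .                     # core server only
pip install ".[zhipuai,remote]"   # add the Z.AI SDK plus the remote/SSE stack

Note that the moonshot extra is intentionally empty; it exists as a named marker, since Kimi/Moonshot is served through the OpenAI-compatible client already pulled in by the core dependencies.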
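To illustrate why no extra SDK is needed for Moonshot/Kimi, here is a minimal Python sketch using the openai package from the core dependency list. The environment variable name, base URL, and model id below are illustrative assumptions, not values taken from this project; consult Moonshot's documentation and the server's own configuration for the real ones.

import os
from openai import OpenAI

# Assumed env var name and endpoint; Moonshot exposes an OpenAI-compatible API.
client = OpenAI(
    api_key=os.environ["MOONSHOT_API_KEY"],
    base_url="https://api.moonshot.ai/v1",
)

# "moonshot-v1-8k" is an assumed model id for illustration.
reply = client.chat.completions.create(
    model="moonshot-v1-8k",
    messages=[{"role": "user", "content": "Hello, Kimi!"}],
)
print(reply.choices[0].message.content)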

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Zazzles2908/EX_AI-mcp-server'
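The same lookup can be done from Python; a minimal sketch with the standard library, assuming the endpoint returns a JSON document:

import json
import urllib.request

URL = "https://glama.ai/api/mcp/v1/servers/Zazzles2908/EX_AI-mcp-server"

# Fetch the server's directory entry and pretty-print the JSON response.
with urllib.request.urlopen(URL) as resp:
    server_info = json.load(resp)

print(json.dumps(server_info, indent=2))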

If you have feedback or need assistance with the MCP directory API, please join our Discord server.