Skip to main content
Glama

Blender MCP Router

pyproject.toml (708 B)
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "blender-mcp-router"
version = "0.1.0"
description = "FastMCP server that bridges LiteLLM routing with the Blender MCP add-on."
readme = "README.md"
authors = [{ name = "Mustafa Booren", email = "drmustafa@bdqholdings.com" }]
license = { text = "MIT" }
requires-python = ">=3.10"
# PEP 508 specifiers, one per line, sorted alphabetically.
dependencies = [
    "fastapi>=0.110",
    "fastmcp>=0.3",
    "httpx>=0.27",
    # litellm pinned below 1.77; newer releases are untested with this bridge.
    "litellm>=1.40,<1.77",
    "pydantic>=2.3",
    "python-dotenv>=1.0",
    "uvicorn>=0.30",
]

[project.optional-dependencies]
extras = ["coloredlogs>=15"]

[project.scripts]
# Console entry point: runs server.main() from the bundled server.py module.
blender-mcp-router = "server:main"

[tool.hatch.build.targets.wheel]
# Single-module project: ship only server.py in the wheel.
include = ["server.py"]

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/mustafa-boorenie/blessed-mcp-server'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.