
Blender Open MCP

pyproject.toml
[project]
name = "blender-open-mcp"
version = "0.2.0"
description = "Blender integration with local AI models via MCP and Ollama"
readme = "README.md"
requires-python = ">=3.10"
authors = [
    {name = "Nirajan Dhakal"}
]
license = {text = "MIT"}
classifiers = [
    "Programming Language :: Python :: 3",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
]
dependencies = [
    "mcp[cli]>=1.3.0",
    "httpx>=0.24.0",
    "ollama>=0.4.7",
    "requests",
]

[project.scripts]
blender-open-mcp = "blender_open_mcp.server:main"

[build-system]
requires = ["setuptools>=61.0", "wheel"]
build-backend = "setuptools.build_meta"

[tool.setuptools]
package-dir = {"" = "src"}

[project.urls]
"Homepage" = "https://github.com/dhakalnirajan/blender-open-mcp"
"Bug Tracker" = "https://github.com/dhakalnirajan/blender-open-mcp/issues"
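The [project.scripts] table exposes a blender-open-mcp console command that calls blender_open_mcp.server:main. The repository's actual server code is not shown here; the following is only a minimal sketch of what such an entry point could look like, assuming the FastMCP API from the mcp package and the ollama client listed in the dependencies. The tool name and model name are illustrative assumptions.

import ollama
from mcp.server.fastmcp import FastMCP

# Create an MCP server instance (name chosen for illustration).
mcp = FastMCP("blender-open-mcp")


@mcp.tool()
def ask_model(prompt: str) -> str:
    """Hypothetical tool: send a prompt to a local Ollama model and return its reply."""
    response = ollama.chat(
        model="llama3.2",  # assumed default model; not specified by the project
        messages=[{"role": "user", "content": prompt}],
    )
    return response.message.content


def main() -> None:
    """Console-script entry point declared in [project.scripts]."""
    mcp.run()


if __name__ == "__main__":
    main()

Installing the package (for example with pip install .) would then make the blender-open-mcp command available on the PATH, invoking main() above.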

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/dhakalnirajan/blender-open-mcp'
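The same endpoint can be queried from Python. The sketch below uses the httpx client (already one of the project's dependencies) and assumes the API returns a JSON body:

import httpx

# Fetch this server's directory entry from the Glama MCP API
# (same endpoint as the curl command above).
url = "https://glama.ai/api/mcp/v1/servers/dhakalnirajan/blender-open-mcp"
response = httpx.get(url)
response.raise_for_status()
print(response.json())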

If you have feedback or need assistance with the MCP directory API, please join our Discord server.