Skip to main content
Glama

MCP-Blender

by shdann
server.py17.2 kB
#!/usr/bin/env python3
"""
Blender MCP Server

A Model Context Protocol server that enables Claude to interact with Blender
by communicating with the Blender addon through a socket connection.
"""

import asyncio
import json
import logging
import socket
import sys
from typing import Any, Dict, List

import anyio
from mcp.server import Server
from mcp.server.stdio import stdio_server
from mcp.types import (
    CallToolRequest,
    CallToolResult,
    ListToolsRequest,
    ListToolsResult,
    Tool,
    TextContent,
)
from pydantic import BaseModel

# Set up logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class BlenderMCPServer:
    """MCP Server that communicates with Blender addon via socket."""

    def __init__(self, blender_host: str = "localhost", blender_port: int = 9876):
        """Create the MCP server and register its request handlers.

        Args:
            blender_host: Host where the Blender addon's socket server listens.
            blender_port: TCP port of the Blender addon's socket server.
        """
        self.blender_host = blender_host
        self.blender_port = blender_port
        self.server = Server("blender-mcp")
        self._setup_handlers()

    def _setup_handlers(self):
        """Set up MCP server handlers (tool listing and tool dispatch)."""

        @self.server.list_tools()
        async def list_tools() -> List[Tool]:
            """List available Blender tools."""
            return [
                Tool(
                    name="get_scene_info",
                    description="Get information about the current Blender scene",
                    inputSchema={
                        "type": "object",
                        "properties": {},
                        "required": [],
                    },
                ),
                Tool(
                    name="get_object_info",
                    description="Get detailed information about a specific object",
                    inputSchema={
                        "type": "object",
                        "properties": {
                            "name": {
                                "type": "string",
                                "description": "Name of the object to inspect",
                            }
                        },
                        "required": ["name"],
                    },
                ),
                Tool(
                    name="execute_code",
                    description="Execute Python code in Blender",
                    inputSchema={
                        "type": "object",
                        "properties": {
                            "code": {
                                "type": "string",
                                "description": "Python code to execute in Blender",
                            }
                        },
                        "required": ["code"],
                    },
                ),
                Tool(
                    name="get_polyhaven_status",
                    description="Check if PolyHaven integration is enabled",
                    inputSchema={
                        "type": "object",
                        "properties": {},
                        "required": [],
                    },
                ),
                Tool(
                    name="get_polyhaven_categories",
                    description="Get asset categories from PolyHaven",
                    inputSchema={
                        "type": "object",
                        "properties": {
                            "asset_type": {
                                "type": "string",
                                "enum": ["hdris", "textures", "models", "all"],
                                "description": "Type of assets to get categories for",
                            }
                        },
                        "required": ["asset_type"],
                    },
                ),
                Tool(
                    name="search_polyhaven_assets",
                    description="Search for assets on PolyHaven",
                    inputSchema={
                        "type": "object",
                        "properties": {
                            "asset_type": {
                                "type": "string",
                                "enum": ["hdris", "textures", "models", "all"],
                                "description": "Type of assets to search for",
                            },
                            "categories": {
                                "type": "string",
                                "description": "Category filter (optional)",
                            },
                        },
                        "required": [],
                    },
                ),
                Tool(
                    name="load_vehicle_context",
                    description="Load VeFrank vehicle configuration for context-aware component generation",
                    inputSchema={
                        "type": "object",
                        "properties": {
                            "vehicle_config_path": {
                                "type": "string",
                                "description": "Path to vehicle JSON config file",
                                "default": "cars/demo_vehicle_2024.json",
                            }
                        },
                        "required": [],
                    },
                ),
                Tool(
                    name="generate_automotive_component",
                    description="Generate automotive component with vehicle-specific connector layouts",
                    inputSchema={
                        "type": "object",
                        "properties": {
                            "component_type": {
                                "type": "string",
                                "enum": [
                                    "ecu",
                                    "throttle_body",
                                    "sensor_ckp",
                                    "sensor_o2",
                                    "fuel_pump",
                                    "alternator",
                                    "gateway",
                                    "obd_port",
                                ],
                                "description": "Type of automotive component to generate",
                            },
                            "use_vehicle_context": {
                                "type": "boolean",
                                "description": "Use loaded vehicle context for accurate connector placement",
                                "default": True,
                            },
                            "include_pin_labels": {
                                "type": "boolean",
                                "description": "Add text labels for pin assignments",
                                "default": True,
                            },
                            "wire_color_coding": {
                                "type": "boolean",
                                "description": "Apply wire color coding to connector pins",
                                "default": True,
                            },
                        },
                        "required": ["component_type"],
                    },
                ),
                Tool(
                    name="generate_vehicle_assembly",
                    description="Generate complete vehicle electrical assembly with wiring",
                    inputSchema={
                        "type": "object",
                        "properties": {
                            "include_wires": {
                                "type": "boolean",
                                "description": "Include wire routing visualization",
                                "default": True,
                            },
                            "show_bus_networks": {
                                "type": "boolean",
                                "description": "Highlight CAN/LIN/K-Line networks",
                                "default": True,
                            },
                            "component_spacing": {
                                "type": "number",
                                "description": "Spacing between components in Blender units",
                                "default": 2.0,
                            },
                        },
                        "required": [],
                    },
                ),
                Tool(
                    name="download_polyhaven_asset",
                    description="Download and import a PolyHaven asset",
                    inputSchema={
                        "type": "object",
                        "properties": {
                            "asset_id": {
                                "type": "string",
                                "description": "ID of the asset to download",
                            },
                            "asset_type": {
                                "type": "string",
                                "enum": ["hdris", "textures", "models"],
                                "description": "Type of asset",
                            },
                            "resolution": {
                                "type": "string",
                                "default": "1k",
                                "description": "Resolution (1k, 2k, 4k, etc.)",
                            },
                            "file_format": {
                                "type": "string",
                                "description": "File format (hdr, exr for HDRIs; jpg, png for textures; gltf, fbx for models)",
                            },
                        },
                        "required": ["asset_id", "asset_type"],
                    },
                ),
                Tool(
                    name="set_texture",
                    description="Apply a downloaded texture to an object",
                    inputSchema={
                        "type": "object",
                        "properties": {
                            "object_name": {
                                "type": "string",
                                "description": "Name of the object to apply texture to",
                            },
                            "texture_id": {
                                "type": "string",
                                "description": "ID of the texture asset",
                            },
                        },
                        "required": ["object_name", "texture_id"],
                    },
                ),
                Tool(
                    name="get_hyper3d_status",
                    description="Check if Hyper3D Rodin integration is enabled",
                    inputSchema={
                        "type": "object",
                        "properties": {},
                        "required": [],
                    },
                ),
                Tool(
                    name="create_rodin_job",
                    description="Create a 3D model generation job using Hyper3D Rodin",
                    inputSchema={
                        "type": "object",
                        "properties": {
                            "text_prompt": {
                                "type": "string",
                                "description": "Text description of the 3D model to generate",
                            },
                            "images": {
                                "type": "array",
                                "items": {
                                    "type": "array",
                                    "items": {"type": "string"},
                                    "minItems": 2,
                                    "maxItems": 2,
                                },
                                "description": "Array of [suffix, image_data] pairs for input images",
                            },
                            "bbox_condition": {
                                "type": "object",
                                "description": "Bounding box conditions for generation",
                            },
                        },
                        "required": [],
                    },
                ),
                Tool(
                    name="poll_rodin_job_status",
                    description="Check the status of a Rodin generation job",
                    inputSchema={
                        "type": "object",
                        "properties": {
                            "subscription_key": {
                                "type": "string",
                                "description": "Subscription key for main site mode",
                            },
                            "request_id": {
                                "type": "string",
                                "description": "Request ID for fal.ai mode",
                            },
                        },
                        "required": [],
                    },
                ),
                Tool(
                    name="import_generated_asset",
                    description="Import a generated 3D asset from Rodin",
                    inputSchema={
                        "type": "object",
                        "properties": {
                            "task_uuid": {
                                "type": "string",
                                "description": "Task UUID for main site mode",
                            },
                            "request_id": {
                                "type": "string",
                                "description": "Request ID for fal.ai mode",
                            },
                            "name": {
                                "type": "string",
                                "description": "Name for the imported asset",
                            },
                        },
                        "required": ["name"],
                    },
                ),
            ]

        @self.server.call_tool()
        async def call_tool(name: str, arguments: Dict[str, Any]) -> CallToolResult:
            """Call a Blender tool.

            Forwards the tool invocation to the Blender addon and wraps the
            addon's JSON response (or any transport failure) in a
            CallToolResult.
            """
            try:
                # The addon dispatches on "type" and receives tool args as "params".
                result = await self._send_command_to_blender(
                    {"type": name, "params": arguments}
                )

                if result.get("status") == "success":
                    return CallToolResult(
                        content=[
                            TextContent(
                                type="text",
                                text=json.dumps(result.get("result", {}), indent=2),
                            )
                        ]
                    )
                return CallToolResult(
                    content=[
                        TextContent(
                            type="text",
                            text=f"Error: {result.get('message', 'Unknown error')}",
                        )
                    ],
                    isError=True,
                )
            except Exception as e:
                logger.error("Error calling tool %s: %s", name, e)
                return CallToolResult(
                    content=[
                        TextContent(
                            type="text",
                            text=f"Error communicating with Blender: {str(e)}",
                        )
                    ],
                    isError=True,
                )

    async def _send_command_to_blender(self, command: Dict[str, Any]) -> Dict[str, Any]:
        """Send a command to the Blender addon via socket.

        Opens a fresh TCP connection per command, writes the JSON-encoded
        command, half-closes the write side so the addon sees end-of-request,
        then reads the JSON response until the addon closes the connection.

        Args:
            command: Dict with "type" (tool name) and "params" (tool args).

        Returns:
            The addon's decoded JSON response, or an error dict with
            ``{"status": "error", "message": ...}`` on any transport failure.
        """
        try:
            # anyio.connect_tcp returns a single SocketStream — NOT an
            # asyncio-style (reader, writer) pair.
            stream = await anyio.connect_tcp(self.blender_host, self.blender_port)
            async with stream:
                await stream.send(json.dumps(command).encode("utf-8"))
                # Half-close the write side so the addon sees EOF and replies;
                # closing the whole stream here would prevent reading the response.
                await stream.send_eof()

                # Accumulate the response until the peer closes the connection.
                response_data = bytearray()
                while True:
                    try:
                        response_data.extend(await stream.receive())
                    except anyio.EndOfStream:
                        break

            return json.loads(bytes(response_data).decode("utf-8"))
        except Exception as e:
            logger.error("Failed to communicate with Blender: %s", e)
            return {
                "status": "error",
                "message": f"Failed to communicate with Blender addon. Make sure Blender is running with the MCP addon enabled and connected. Error: {str(e)}",
            }

    async def run(self):
        """Run the MCP server over stdio until the client disconnects."""
        async with stdio_server() as (read_stream, write_stream):
            await self.server.run(
                read_stream,
                write_stream,
                self.server.create_initialization_options(),
            )


def main():
    """Main entry point: parse CLI args and run the server."""
    import argparse

    parser = argparse.ArgumentParser(description="Blender MCP Server")
    parser.add_argument("--host", default="localhost", help="Blender host")
    parser.add_argument("--port", type=int, default=9876, help="Blender port")
    args = parser.parse_args()

    server = BlenderMCPServer(args.host, args.port)

    try:
        anyio.run(server.run)
    except KeyboardInterrupt:
        logger.info("Server stopped by user")
    except Exception as e:
        logger.error("Server error: %s", e)
        sys.exit(1)


if __name__ == "__main__":
    main()

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/shdann/mcp-blend'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.