#!/usr/bin/env python3
"""
VeFrank-Blender Bridge: Enhanced Integration
Combines VeFrank's zero-token asset generation with Blender's visualization
"""
import json
import sys
import asyncio
from pathlib import Path
from typing import Dict, Any, Optional, List
import socket
import subprocess
import time
# Add parent directory to path for VeFrank imports
sys.path.insert(0, str(Path(__file__).parent.parent))
from utils.asset_generator_dynamic import (
    DynamicAssetGenerator,
    list_available_components,
)
class VeFrankBlenderBridge:
"""Bridge between VeFrank's efficient generation and Blender's visualization."""
def __init__(self):
self.generator = DynamicAssetGenerator()
self.blender_host = "localhost"
self.blender_port = 9876
self.assets_cache = {}
self.generation_stats = {
"total_generated": 0,
"cache_hits": 0,
"generation_time_ms": [],
"token_usage": 0 # Always 0 with our approach
}
def is_blender_connected(self) -> bool:
"""Check if Blender addon is connected."""
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(1)
result = sock.connect_ex((self.blender_host, self.blender_port))
sock.close()
return result == 0
        except OSError:
return False
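    # The Blender addon is assumed to speak a one-shot JSON-over-TCP protocol:
    # each request is a single JSON object such as
    #   {"type": "import_obj", "params": {"filepath": "...", "name": "..."}}
    # and each response is a JSON object carrying at least a "status" field,
    # e.g. {"status": "success", ...} or {"status": "error", "message": "..."}.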
    async def send_to_blender(self, command: Dict[str, Any]) -> Dict[str, Any]:
        """Send a command to the Blender addon and return its JSON response."""
        try:
            reader, writer = await asyncio.open_connection(
                self.blender_host, self.blender_port
            )
            # Send the command as a single JSON payload
            command_json = json.dumps(command)
            writer.write(command_json.encode('utf-8'))
            await writer.drain()
            # Read the response before closing the connection
            response_data = await reader.read(8192)
            writer.close()
            await writer.wait_closed()
            return json.loads(response_data.decode('utf-8'))
except Exception as e:
return {
"status": "error",
"message": f"Failed to communicate with Blender: {str(e)}"
}
def generate_component_locally(self, component_type: str,
variant: str = "auto") -> Dict[str, Any]:
"""Generate component using VeFrank's zero-token approach."""
start_time = time.time()
# Check cache first
cache_key = f"{component_type}_{variant}"
if cache_key in self.assets_cache:
self.generation_stats["cache_hits"] += 1
return self.assets_cache[cache_key]
# Generate using VeFrank's dynamic generator
try:
# Use the generator's main method
result = self.generator.generate_component(component_type, variant)
if result["status"] != "success":
return result
# Read the generated files
files = result.get("files_created", [])
obj_file = next((f for f in files if f.endswith(".obj")), None)
mtl_file = next((f for f in files if f.endswith(".mtl")), None)
if not obj_file or not mtl_file:
return {
"status": "error",
"message": "Failed to generate OBJ/MTL files"
}
with open(obj_file, 'r') as f:
obj_content = f.read()
with open(mtl_file, 'r') as f:
mtl_content = f.read()
shape = result.get("shape", "unknown")
            dimensions = {"generated": True}  # placeholder; dimensions are not read back from the generator result
            # Copy the generated files into the shared assets directory,
            # anchored to the repo layout rather than the current working directory
            assets_dir = Path(__file__).parent.parent / "assets" / "3d_models" / "automotive"
            assets_dir.mkdir(parents=True, exist_ok=True)
obj_path = assets_dir / f"{component_type}.obj"
mtl_path = assets_dir / f"{component_type}.mtl"
# Write OBJ file
with open(obj_path, 'w') as f:
f.write(obj_content)
# Write MTL file
with open(mtl_path, 'w') as f:
f.write(mtl_content)
# Track statistics
generation_time = (time.time() - start_time) * 1000
self.generation_stats["generation_time_ms"].append(generation_time)
self.generation_stats["total_generated"] += 1
result = {
"status": "success",
"component_type": component_type,
"variant": variant,
"shape": shape,
"dimensions": dimensions,
"connectors": [],
"files": {
"obj": str(obj_path),
"mtl": str(mtl_path)
},
"stats": {
"generation_time_ms": generation_time,
"token_usage": 0,
"method": "dynamic_discovery"
}
}
# Cache result
self.assets_cache[cache_key] = result
return result
except Exception as e:
return {
"status": "error",
"message": str(e)
}
async def import_to_blender(self, obj_path: str,
component_name: str) -> Dict[str, Any]:
"""Import generated OBJ file into Blender."""
if not self.is_blender_connected():
return {
"status": "error",
"message": "Blender not connected"
}
# Send import command to Blender
command = {
"type": "import_obj",
"params": {
"filepath": str(Path(obj_path).absolute()),
"name": component_name
}
}
return await self.send_to_blender(command)
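    # Note: the material presets below are assumed to map onto Blender's
    # Principled BSDF inputs (Base Color, Metallic, Roughness); building the
    # actual node setup is left to the addon's "apply_material" handler.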
async def apply_materials(self, component_name: str,
material_type: str = "metal") -> Dict[str, Any]:
"""Apply realistic materials in Blender."""
if not self.is_blender_connected():
return {
"status": "error",
"message": "Blender not connected"
}
# Material presets for automotive components
material_presets = {
"metal": {
"base_color": [0.7, 0.7, 0.7, 1.0],
"metallic": 0.9,
"roughness": 0.4
},
"plastic": {
"base_color": [0.1, 0.1, 0.1, 1.0],
"metallic": 0.0,
"roughness": 0.6
},
"ceramic": {
"base_color": [0.9, 0.9, 0.9, 1.0],
"metallic": 0.0,
"roughness": 0.3
},
"aluminum": {
"base_color": [0.85, 0.85, 0.88, 1.0],
"metallic": 0.95,
"roughness": 0.25
}
}
material = material_presets.get(material_type, material_presets["metal"])
command = {
"type": "apply_material",
"params": {
"object_name": component_name,
"material": material
}
}
return await self.send_to_blender(command)
async def add_connector_details(self, component_name: str,
connectors: List[Dict]) -> Dict[str, Any]:
"""Add detailed connectors in Blender."""
if not self.is_blender_connected():
return {
"status": "error",
"message": "Blender not connected"
}
command = {
"type": "add_connectors",
"params": {
"object_name": component_name,
"connectors": connectors
}
}
return await self.send_to_blender(command)
    async def capture_screenshot(self, output_path: Optional[str] = None) -> Dict[str, Any]:
"""Capture screenshot of current Blender viewport."""
if not self.is_blender_connected():
return {
"status": "error",
"message": "Blender not connected"
}
        if not output_path:
            output_path = f"screenshots/component_{int(time.time())}.png"
        # Ensure the target directory exists before asking Blender to write there
        output_file = Path(output_path).absolute()
        output_file.parent.mkdir(parents=True, exist_ok=True)
        command = {
            "type": "capture_viewport",
            "params": {
                "output_path": str(output_file)
            }
        }
return await self.send_to_blender(command)
async def generate_and_visualize(self, component_type: str,
variant: str = "auto",
auto_import: bool = True) -> Dict[str, Any]:
"""Complete pipeline: Generate locally and visualize in Blender."""
print(f"VeFrank-Blender Bridge: Generating {component_type}")
print("=" * 50)
# Step 1: Generate component locally (zero tokens)
print("Step 1: Local generation using dynamic discovery...")
local_result = self.generate_component_locally(component_type, variant)
if local_result["status"] != "success":
return local_result
print(f"SUCCESS: Generated in {local_result['stats']['generation_time_ms']:.1f}ms")
print(f" Shape: {local_result['shape']}")
print(f" Tokens: {local_result['stats']['token_usage']}")
# Step 2: Import to Blender if connected
if auto_import and self.is_blender_connected():
print("\nStep 2: Importing to Blender...")
import_result = await self.import_to_blender(
local_result["files"]["obj"],
component_type
)
if import_result.get("status") == "success":
print("SUCCESS: Imported to Blender")
# Step 3: Apply materials
print("\nStep 3: Applying materials...")
material_type = "aluminum" if "alternator" in component_type else "plastic"
material_result = await self.apply_materials(
component_type, material_type
)
if material_result.get("status") == "success":
print(f"SUCCESS: Applied {material_type} material")
# Step 4: Add connectors if available
if local_result.get("connectors"):
print("\nStep 4: Adding connector details...")
connector_result = await self.add_connector_details(
component_type, local_result["connectors"]
)
if connector_result.get("status") == "success":
print(f"SUCCESS: Added {len(local_result['connectors'])} connectors")
# Step 5: Capture screenshot for verification
print("\nStep 5: Capturing screenshot...")
screenshot_result = await self.capture_screenshot()
if screenshot_result.get("status") == "success":
print(f"SUCCESS: Screenshot saved")
local_result["screenshot"] = screenshot_result.get("path")
else:
print("\nINFO: Blender not connected - files saved locally")
# Print efficiency report
print("\nEfficiency Report:")
print(f" Method: Dynamic discovery from vehicle JSONs")
print(f" Generation Time: {local_result['stats']['generation_time_ms']:.1f}ms")
print(f" Token Usage: {local_result['stats']['token_usage']}")
print(f" Cache Hits: {self.generation_stats['cache_hits']}")
print(f" Total Generated: {self.generation_stats['total_generated']}")
return local_result
    async def batch_generate_vehicle(self, vehicle_json: Optional[str] = None) -> Dict[str, Any]:
"""Generate all components for a vehicle."""
if vehicle_json:
components = self.generator.discover_from_json(vehicle_json)
else:
components = list_available_components()
print(f"Batch Generation: {len(components)} components")
print("=" * 50)
results = []
for component in components:
print(f"\nGenerating: {component}")
result = await self.generate_and_visualize(component, auto_import=False)
results.append(result)
# Summary
successful = sum(1 for r in results if r.get("status") == "success")
total_time = sum(r.get("stats", {}).get("generation_time_ms", 0)
for r in results if r.get("status") == "success")
print("\n" + "=" * 50)
print(f"Batch Generation Complete:")
print(f" Successful: {successful}/{len(components)}")
print(f" Total Time: {total_time:.1f}ms")
print(f" Average Time: {total_time/successful if successful else 0:.1f}ms per component")
print(f" Total Tokens: 0")
return {
"status": "success",
"components_generated": successful,
"total_time_ms": total_time,
"results": results
}
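# Programmatic use, a minimal sketch (assumes no event loop is already running;
# files are still written locally when Blender is offline):
#
#   bridge = VeFrankBlenderBridge()
#   result = asyncio.run(bridge.generate_and_visualize("alternator"))
#   if result["status"] == "success":
#       print(result["files"]["obj"])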
async def main():
"""Main entry point for testing."""
import argparse
parser = argparse.ArgumentParser(description="VeFrank-Blender Bridge")
parser.add_argument("component", nargs="?", default="alternator",
help="Component type to generate")
parser.add_argument("--variant", default="auto",
help="Component variant")
parser.add_argument("--batch", action="store_true",
help="Generate all components")
parser.add_argument("--vehicle", type=str,
help="Vehicle JSON file for batch generation")
parser.add_argument("--list", action="store_true",
help="List available components")
args = parser.parse_args()
bridge = VeFrankBlenderBridge()
if args.list:
components = list_available_components()
print(f"\nAvailable Components ({len(components)}):")
for comp in components:
print(f" - {comp}")
    elif args.batch:
        await bridge.batch_generate_vehicle(args.vehicle)
    else:
        result = await bridge.generate_and_visualize(
            args.component, args.variant
        )
        if result["status"] == "success":
            print("\nFiles created:")
            for file_type, path in result["files"].items():
                print(f"  {file_type}: {Path(path).name}")
if __name__ == "__main__":
asyncio.run(main())