Skip to content

Instantly share code, notes, and snippets.

@jmanhype
Last active May 7, 2026 15:58
Show Gist options
  • Select an option

  • Save jmanhype/5f4b852332527260b479c9704ddf63a1 to your computer and use it in GitHub Desktop.

Select an option

Save jmanhype/5f4b852332527260b479c9704ddf63a1 to your computer and use it in GitHub Desktop.
CEBSam3d — Two Motion Capture Pipelines (Option A: Headless Blender 3D | Option B: ComfyUI Pixel)
import torch
import json
import os
import numpy as np
def export_mhr_from_pt(pt_path="mhr_model.pt"):
    """Export the MHR TorchScript model's rest skeleton, skin weights and mesh.

    Runs a forward pass through the scripted model at ``pt_path`` with
    all-zero identity (45), pose (204) and expression (72) inputs, then
    writes two files to the current working directory:

    * ``mhr_skeleton.json`` — joint names/parents, rest-pose global joint
      transforms (``[tx, ty, tz, qx, qy, qz, qw, scale]`` per joint) and
      per-vertex ``(joint_index, weight)`` influence lists.
    * ``mhr_mesh.obj`` — the rest-pose mesh as a plain Wavefront OBJ
      (raw model coordinates; axis conversion is done on the Blender side).

    Returns None; prints an error and bails out if ``pt_path`` is missing.
    """
    if not os.path.exists(pt_path):
        print(f"Error: {pt_path} not found.")
        return
    print(f"Loading MHR model from {pt_path}...")
    mhr = torch.jit.load(pt_path)
    # Neutral inputs: identity (45), pose (204), expressions (72).
    zero_identity = torch.zeros(1, 45)
    zero_pose = torch.zeros(1, 204)
    zero_expr = torch.zeros(1, 72)
    print("Running forward pass...")
    with torch.no_grad():
        verts, skel_state = mhr(zero_identity, zero_pose, zero_expr)
    skeleton = mhr.character_torch.skeleton
    names = list(skeleton.joint_names)
    parents = [int(p) for p in skeleton.joint_parents]
    # Per-joint global rest transforms: [tx, ty, tz, qx, qy, qz, qw, scale].
    rest_transforms = skel_state[0].cpu().tolist()
    # Skinning is stored flattened (sparse): vert_indices_flattened names
    # the owning vertex of each parallel (joint index, weight) pair.
    lbs = mhr.character_torch.linear_blend_skinning
    joint_ids = lbs.skin_indices_flattened.cpu().tolist()
    weight_values = lbs.skin_weights_flattened.cpu().tolist()
    owners = lbs.vert_indices_flattened.cpu().tolist()
    # Regroup the flat triples into one influence list per vertex.
    vertex_count = verts.shape[1]
    influences = [[] for _ in range(vertex_count)]
    for owner, joint_id, w in zip(owners, joint_ids, weight_values):
        influences[owner].append((joint_id, w))
    payload = {
        "joint_names": names,
        "joint_parents": parents,
        "transforms": rest_transforms,
        "weights": influences
    }
    with open("mhr_skeleton.json", "w") as f:
        json.dump(payload, f, indent=4)
    # Export the rest mesh as raw OBJ; OBJ face indices are 1-based.
    vertex_array = verts[0].cpu().numpy()
    face_array = mhr.character_torch.mesh.faces.cpu().numpy()
    with open("mhr_mesh.obj", "w") as f:
        for v in vertex_array:
            f.write(f"v {v[0]} {v[1]} {v[2]}\n")
        for face in face_array:
            f.write(f"f {face[0]+1} {face[1]+1} {face[2]+1}\n")
    print(f"Exported mhr_skeleton.json and mhr_mesh.obj with weights.")
# Script entry point: export skeleton/mesh/weights from the default model path.
if __name__ == "__main__":
    export_mhr_from_pt()
import torch
import json
import pickle
import os
def export_pose(pkl_path="sam3dbody_pose.pkl", pt_path="mhr_model.pt"):
    """Convert SAM3D-Body pose params (pickle) into global joint transforms.

    Reads the first detected person's 204-dim MHR pose vector from
    ``pkl_path``, pushes it through the TorchScript MHR model at ``pt_path``
    with neutral identity/expression inputs, and writes the resulting
    per-joint global transforms (``[tx, ty, tz, qx, qy, qz, qw, scale]``)
    to ``mhr_pose.json``.

    Returns None; prints an error and bails out if either file is missing.
    """
    both_present = os.path.exists(pkl_path) and os.path.exists(pt_path)
    if not both_present:
        print("Error: Files not found.")
        return
    # 1. Pose parameters come from the SAM3D-Body pickle; take person 0.
    # NOTE: pickle.load is only safe on trusted, locally-produced files.
    with open(pkl_path, 'rb') as f:
        detections = pickle.load(f)
    params = detections[0]['mhr_model_params']
    pose_batch = torch.from_numpy(params).unsqueeze(0)  # [1, 204]
    # 2. Forward pass with neutral identity (45) and expressions (72).
    print(f"Loading MHR model to compute pose transforms...")
    mhr = torch.jit.load(pt_path)
    neutral_identity = torch.zeros(1, 45)
    neutral_expr = torch.zeros(1, 72)
    with torch.no_grad():
        _, skel_state = mhr(neutral_identity, pose_batch, neutral_expr)
    # skel_state: [batch, num_joints, 8] = [tx, ty, tz, qx, qy, qz, qw, scale]
    posed = {
        "transforms": skel_state[0].cpu().tolist()
    }
    with open("mhr_pose.json", "w") as f:
        json.dump(posed, f, indent=4)
    print("Exported mhr_pose.json")
# Script entry point: convert the default pickle into mhr_pose.json.
if __name__ == "__main__":
    export_pose()
import bpy
import json
import os
import mathutils
def import_and_pose_mhr(skeleton_json_path, mesh_obj_path, pose_json_path):
    """Build a rigged MHR character in Blender and apply a single pose.

    Loads the skeleton/weights JSON and OBJ mesh produced by the export
    script, creates a Blender armature + skinned mesh (converting from the
    model's Y-up frame to Blender's Z-up), then applies the pose from
    ``pose_json_path`` via global-space delta transforms per bone.

    Prints an error and returns early if the skeleton JSON or mesh OBJ is
    missing; the pose step is skipped silently if ``pose_json_path`` is absent.
    """
    # F matrix: MHR Y-up to Blender Z-up conversion
    F = mathutils.Matrix((
        (1, 0, 0, 0),
        (0, 0, -1, 0),
        (0, 1, 0, 0),
        (0, 0, 0, 1)
    ))
    def fix_coords(vec):
        # Point-level version of F: (x, y, z) -> (x, -z, y).
        return mathutils.Vector((vec[0], -vec[2], vec[1]))
    def get_mhr_matrix(t_mhr):
        # t_mhr: [tx, ty, tz, qx, qy, qz, qw, scale]
        # Builds a 4x4 rigid transform; the trailing scale entry is ignored.
        tx, ty, tz = t_mhr[0], t_mhr[1], t_mhr[2]
        qx, qy, qz, qw = t_mhr[3], t_mhr[4], t_mhr[5], t_mhr[6]
        # mathutils.Quaternion is (w, x, y, z)
        rot = mathutils.Quaternion((qw, qx, qy, qz)).to_matrix().to_4x4()
        rot.translation = mathutils.Vector((tx, ty, tz))
        return rot
    # 1. Load Data
    if not os.path.exists(skeleton_json_path):
        print(f"Error: {skeleton_json_path} not found.")
        return
    with open(skeleton_json_path, "r") as f:
        data = json.load(f)
    joint_names = data["joint_names"]
    joint_parents = data["joint_parents"]
    rest_transforms = data["transforms"]  # Global transforms at rest
    all_weights = data["weights"]
    # 2. Import Mesh (minimal OBJ parser: 'v' and 'f' records only)
    if not os.path.exists(mesh_obj_path):
        print(f"Error: {mesh_obj_path} not found.")
        return
    mesh_data = bpy.data.meshes.new("MHR_Mesh")
    mesh_obj = bpy.data.objects.new("MHR_Mesh", mesh_data)
    bpy.context.collection.objects.link(mesh_obj)
    verts = []
    faces = []
    with open(mesh_obj_path, 'r') as f:
        for line in f:
            if line.startswith('v '):
                v = [float(x) for x in line.split()[1:]]
                verts.append(fix_coords(v))
            elif line.startswith('f '):
                # OBJ faces are 1-based and may carry v/vt/vn slashes.
                f_indices = [int(x.split('/')[0]) - 1 for x in line.split()[1:]]
                faces.append(f_indices)
    mesh_data.from_pydata(verts, [], faces)
    mesh_data.update()
    # 3. Create Armature (edit mode required for edit_bones)
    arm_data = bpy.data.armatures.new("MHR_Armature")
    arm_obj = bpy.data.objects.new("MHR_Armature", arm_data)
    bpy.context.collection.objects.link(arm_obj)
    bpy.context.view_layer.objects.active = arm_obj
    bpy.ops.object.mode_set(mode='EDIT')
    bones = []
    for i, name in enumerate(joint_names):
        bone = arm_data.edit_bones.new(name)
        bone.head = fix_coords(rest_transforms[i][0:3])
        bones.append(bone)
    for i, p in enumerate(joint_parents):
        if p != -1:
            bones[i].parent = bones[p]
    # Set tails for better visualization
    for i, bone in enumerate(bones):
        children = [j for j, p in enumerate(joint_parents) if p == i]
        if children:
            # Point the tail at the first child; pad zero-length bones.
            bone.tail = fix_coords(rest_transforms[children[0]][0:3])
            if (bone.tail - bone.head).length < 1e-4:
                bone.tail += mathutils.Vector((0, 0, 0.01))
        else:
            # Leaf bone: extend along the parent->head direction, or up.
            if bone.parent:
                direction = bone.head - bone.parent.head
                if direction.length > 1e-4:
                    bone.tail = bone.head + direction.normalized() * 0.05
                else:
                    bone.tail = bone.head + mathutils.Vector((0, 0, 0.05))
            else:
                bone.tail = bone.head + mathutils.Vector((0, 0, 0.05))
    bpy.ops.object.mode_set(mode='OBJECT')
    # 4. Weight Painting (one vertex group per joint)
    if mesh_obj:
        for name in joint_names:
            mesh_obj.vertex_groups.new(name=name)
        for v_idx, v_weights in enumerate(all_weights):
            for j_idx, weight in v_weights:
                if weight > 0:
                    mesh_obj.vertex_groups[joint_names[j_idx]].add([v_idx], weight, 'REPLACE')
        mesh_obj.parent = arm_obj
        modifier = mesh_obj.modifiers.new(name="Armature", type='ARMATURE')
        modifier.object = arm_obj
    # 5. Apply Pose (Relative Transformation Approach)
    if os.path.exists(pose_json_path):
        with open(pose_json_path, "r") as f:
            pose_data = json.load(f)
        pose_transforms = pose_data["transforms"]
        bpy.ops.object.mode_set(mode='POSE')
        for i, name in enumerate(joint_names):
            bone = arm_obj.pose.bones.get(name)
            if not bone or i >= len(pose_transforms):
                continue
            # Get global matrices in MHR space
            m_rest = get_mhr_matrix(rest_transforms[i])
            m_pose = get_mhr_matrix(pose_transforms[i])
            # Calculate the transformation that takes the joint from rest to pose
            # T_mhr = M_pose * M_rest.inverted()
            delta_mhr = m_pose @ m_rest.inverted()
            # Convert this transformation to Blender space
            # T_blender = F * T_mhr * F_inv
            delta_b = F @ delta_mhr @ F.inverted()
            # Apply the transformation to the Edit Bone matrix to get target world matrix
            # TargetWorld = T_blender * EditMatrix
            target_world_matrix = delta_b @ bone.bone.matrix_local
            if joint_parents[i] == -1:
                # Root bone: allow translation
                bone.matrix = target_world_matrix
            else:
                # Non-root bone: keep rigid distance from parent.
                # view_layer.update() flushes parent pose changes so
                # bone.matrix reflects the already-posed hierarchy.
                bpy.context.view_layer.update()
                current_pos = bone.matrix.to_translation()
                new_mat = target_world_matrix.copy()
                new_mat.translation = current_pos
                bone.matrix = new_mat
            # Force update for hierarchy
            bpy.context.view_layer.update()
        bpy.ops.object.mode_set(mode='OBJECT')
        print("MHR Pose applied successfully.")
# Script entry point (run inside Blender): all paths are resolved relative
# to the current working directory.
if __name__ == "__main__":
    cwd = os.getcwd()
    skeleton_path = os.path.join(cwd, "mhr_skeleton.json")
    mesh_path = os.path.join(cwd, "mhr_mesh.obj")
    pose_path = os.path.join(cwd, "mhr_pose.json")
    import_and_pose_mhr(skeleton_path, mesh_path, pose_path)
import bpy
import json
import os
import sys
import mathutils
def import_and_pose_mhr(skeleton_json_path, mesh_obj_path, pose_dir, output_blend):
    """Batch variant: rig the MHR character and keyframe one pose per frame.

    Like the single-pose importer, but reads every ``*.json`` pose file in
    ``pose_dir`` (sorted by filename), applies each on successive scene
    frames with keyframes, and saves the result as ``output_blend``.
    Intended to be run headlessly inside Blender.
    """
    # MHR Y-up -> Blender Z-up basis change.
    F = mathutils.Matrix(((1, 0, 0, 0), (0, 0, -1, 0), (0, 1, 0, 0), (0, 0, 0, 1)))
    def fix_coords(vec):
        # Point-level version of F: (x, y, z) -> (x, -z, y).
        return mathutils.Vector((vec[0], -vec[2], vec[1]))
    def get_mhr_matrix(t_mhr):
        # t_mhr: [tx, ty, tz, qx, qy, qz, qw, scale]; Quaternion wants (w, x, y, z).
        rot = mathutils.Quaternion((t_mhr[6], t_mhr[3], t_mhr[4], t_mhr[5])).to_matrix().to_4x4()
        rot.translation = mathutils.Vector((t_mhr[0], t_mhr[1], t_mhr[2]))
        return rot
    print(f"Loading skeleton from {skeleton_json_path}")
    with open(skeleton_json_path, "r") as f:
        data = json.load(f)
    joint_names = data["joint_names"]
    joint_parents = data["joint_parents"]
    rest_transforms = data["transforms"]
    all_weights = data["weights"]
    # Mesh: minimal OBJ parser ('v' and 'f' records only; faces are 1-based).
    mesh_data = bpy.data.meshes.new("MHR_Mesh")
    mesh_obj = bpy.data.objects.new("MHR_Mesh", mesh_data)
    bpy.context.collection.objects.link(mesh_obj)
    verts, faces = [], []
    with open(mesh_obj_path, 'r') as f:
        for line in f:
            if line.startswith('v '):
                v = [float(x) for x in line.split()[1:]]
                verts.append(fix_coords(v))
            elif line.startswith('f '):
                f_indices = [int(x.split('/')[0]) - 1 for x in line.split()[1:]]
                faces.append(f_indices)
    mesh_data.from_pydata(verts, [], faces)
    mesh_data.update()
    # Armature: edit mode required for edit_bones.
    arm_data = bpy.data.armatures.new("MHR_Armature")
    arm_obj = bpy.data.objects.new("MHR_Armature", arm_data)
    bpy.context.collection.objects.link(arm_obj)
    bpy.context.view_layer.objects.active = arm_obj
    bpy.ops.object.mode_set(mode='EDIT')
    bones = []
    for i, name in enumerate(joint_names):
        bone = arm_data.edit_bones.new(name)
        bone.head = fix_coords(rest_transforms[i][0:3])
        bones.append(bone)
    for i, p in enumerate(joint_parents):
        if p != -1: bones[i].parent = bones[p]
    # Tails: aim at the first child, or extend leaves along the parent direction.
    for i, bone in enumerate(bones):
        children = [j for j, p in enumerate(joint_parents) if p == i]
        if children:
            bone.tail = fix_coords(rest_transforms[children[0]][0:3])
            if (bone.tail - bone.head).length < 1e-4: bone.tail += mathutils.Vector((0, 0, 0.01))
        else:
            if bone.parent:
                direction = bone.head - bone.parent.head
                bone.tail = bone.head + (direction.normalized() * 0.05 if direction.length > 1e-4 else mathutils.Vector((0, 0, 0.05)))
            else: bone.tail = bone.head + mathutils.Vector((0, 0, 0.05))
    bpy.ops.object.mode_set(mode='OBJECT')
    # Weights: one vertex group per joint.
    for name in joint_names: mesh_obj.vertex_groups.new(name=name)
    for v_idx, v_weights in enumerate(all_weights):
        for j_idx, weight in v_weights:
            if weight > 0: mesh_obj.vertex_groups[joint_names[j_idx]].add([v_idx], weight, 'REPLACE')
    mesh_obj.parent = arm_obj
    modifier = mesh_obj.modifiers.new(name="Armature", type='ARMATURE')
    modifier.object = arm_obj
    # Apply Poses: one JSON file per animation frame, sorted by filename.
    pose_files = sorted([f for f in os.listdir(pose_dir) if f.endswith('.json')])
    bpy.ops.object.mode_set(mode='POSE')
    for frame_idx, pf in enumerate(pose_files):
        bpy.context.scene.frame_set(frame_idx + 1)
        with open(os.path.join(pose_dir, pf), "r") as f:
            pose_transforms = json.load(f)["transforms"]
        for i, name in enumerate(joint_names):
            bone = arm_obj.pose.bones.get(name)
            if not bone: continue
            # Global-space rest->pose delta, converted to Blender space
            # via the similarity transform F * delta * F^-1.
            m_rest = get_mhr_matrix(rest_transforms[i])
            m_pose = get_mhr_matrix(pose_transforms[i])
            delta_mhr = m_pose @ m_rest.inverted()
            delta_b = F @ delta_mhr @ F.inverted()
            target_world_matrix = delta_b @ bone.bone.matrix_local
            if joint_parents[i] == -1:
                # Root bone keeps its translation.
                bone.matrix = target_world_matrix
            else:
                # Non-root: flush parent pose first, then keep the bone's
                # current (hierarchy-driven) position and take only rotation.
                bpy.context.view_layer.update()
                new_mat = target_world_matrix.copy()
                new_mat.translation = bone.matrix.to_translation()
                bone.matrix = new_mat
            bpy.context.view_layer.update()
            bone.keyframe_insert(data_path="location")
            bone.keyframe_insert(data_path="rotation_quaternion")
    bpy.ops.object.mode_set(mode='OBJECT')
    bpy.ops.wm.save_as_mainfile(filepath=output_blend)
    print(f"Saved {output_blend}")
if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("--skeleton")
    parser.add_argument("--mesh")
    parser.add_argument("--poses")
    parser.add_argument("--out")
    # Blender passes arguments after '--' to the script
    # NOTE(review): raises ValueError if no '--' separator is present;
    # this script is only meant to be launched via `blender ... -- <args>`.
    argv = sys.argv[sys.argv.index("--") + 1:]
    args = parser.parse_args(argv)
    # Delete default cube (and everything else in the startup scene)
    bpy.ops.object.select_all(action='SELECT')
    bpy.ops.object.delete(use_global=False)
    import_and_pose_mhr(args.skeleton, args.mesh, args.poses, args.out)

CEBSam3d — Two Motion Capture Pipelines

Both pipelines run 100% on the 3090 workstation (192.168.1.143). Your Mac does zero heavy lifting — it only sends the command and collects the finished .mp4.


Option A: High-Fidelity 3D Pipeline (Headless Blender)

What it does: Extracts the raw mathematical skeleton from the video, builds a real 3D rig with a weighted mesh, and renders a studio-lit silver mannequin video using Blender's EEVEE engine — all headlessly on the 3090.

Output: A production-quality .mp4 with controllable camera, lighting, and materials. Also produces a .blend file if you need to tweak anything.

When to use it: When you need the cleanest, most professional motion reference for Kling 3.0, or when you want to change camera angles, lighting, or export the skeleton to Unreal Engine.

Trade-off: Takes a bit longer due to full 3D rendering (~8-10 minutes total).

Pipeline steps:

  1. Extract frames from video → sync to 3090
  2. SAM 3D Body inference on 3090 (DINOv3 + MHR pose extraction)
  3. Blender MCP Server builds the scene headlessly (armature, mesh, weights, 300 keyframes)
  4. EEVEE renders PNG frames → ffmpeg compiles to H.264 MP4
  5. Final .mp4 transferred back to Mac

Key scripts:

  • run_option_a_mocap.sh — orchestrator
  • remote_wrapper.py — runs on 3090, handles SAM3D inference + MHR extraction
  • build_mhr_scene.py — runs inside 3090's Blender, builds the rigged scene
  • remote_mcp_render_client.py — sends render commands to 3090's Blender MCP server

Option B: Lightning-Fast Pixel Pipeline (ComfyUI)

What it does: Runs the video through ComfyUI on the 3090, using AI to paint the grey mannequin directly over the original pixels frame-by-frame. Skips all 3D math entirely.

Output: A single isolated mesh video (grey mannequin on black background).

When to use it: When you need a quick motion reference video to throw into Kling 3.0 immediately and don't need camera control.

Trade-off: It's "flat" 2D — you can't rotate the camera or export the skeleton. But it's blazing fast.

Pipeline steps:

  1. Upload video to 3090
  2. ComfyUI API triggers SAM 3D Body node with render_mode=mesh_only
  3. Isolated mesh video rendered directly
  4. Final .mp4 transferred back to Mac

Key scripts:

  • run_kling_mocap.sh — orchestrator
  • sam3d_comfy_api.py — sends workflow to ComfyUI API with configurable render mode

Render modes available:

  • mesh_only — isolated grey mannequin (default, recommended)
  • side_by_side — 3-way split (original | mask | overlay) for debugging
  • mask_only — just the silhouette mask
  • overlay — mannequin overlaid on original video

Quick Reference

|                | Option A (Blender)     | Option B (ComfyUI) |
|----------------|------------------------|--------------------|
| Speed          | ~8-10 min              | ~3-5 min           |
| Output quality | Studio-lit 3D render   | AI pixel paint     |
| Camera control | ✅ Full 3D             | ❌ Fixed           |
| Exportable rig | ✅ .blend / Unreal     | ❌ No              |
| Compute        | 100% 3090              | 100% 3090          |
| Best for       | Final production ref   | Quick iteration    |
{"id":"0ee524cd-7bc8-46f2-8751-32399825f67b","revision":0,"last_node_id":39,"last_link_id":45,"nodes":[{"id":18,"type":"SAM3DBodyExportMultipleFBX","pos":[222.38422292904016,-227.90952348436795],"size":[472.8209914560281,82],"flags":{},"order":5,"mode":0,"inputs":[{"localized_name":"multi_mesh_data","name":"multi_mesh_data","type":"SAM3D_MULTI_OUTPUT","link":23},{"localized_name":"output_filename","name":"output_filename","type":"STRING","widget":{"name":"output_filename"},"link":null},{"localized_name":"combine","name":"combine","type":"BOOLEAN","widget":{"name":"combine"},"link":null}],"outputs":[{"localized_name":"fbx_path","name":"fbx_path","type":"STRING","links":[37]}],"properties":{"cnr_id":"ComfyUI-SAM3DBody","ver":"0cb5527a152b90d4cfa1d2389f47997ca701d141","Node name for S&R":"SAM3DBodyExportMultipleFBX"},"widgets_values":["sam3d_multi_rigged.fbx",true]},{"id":29,"type":"Preview3D","pos":[850.0933104762935,-260.723864792151],"size":[400,550],"flags":{},"order":7,"mode":0,"inputs":[{"localized_name":"camera_info","name":"camera_info","shape":7,"type":"LOAD3D_CAMERA","link":null},{"localized_name":"bg_image","name":"bg_image","shape":7,"type":"IMAGE","link":null},{"localized_name":"model_file","name":"model_file","type":"STRING,FILE_3D_GLB,FILE_3D_GLTF,FILE_3D_FBX,FILE_3D_OBJ,FILE_3D_STL,FILE_3D_USDZ,FILE_3D","widget":{"name":"model_file"},"link":37},{"localized_name":"image","name":"image","type":"PREVIEW_3D","widget":{"name":"image"},"link":null}],"outputs":[],"properties":{"cnr_id":"comfy-core","ver":"0.6.0","Node name for S&R":"Preview3D","Camera Config":{"cameraType":"perspective","fov":75,"state":{"position":{"x":-1.1032997283145334,"y":3.2156095975937475,"z":4.408151459126793},"target":{"x":0,"y":1.290530645611945,"z":0},"zoom":1,"cameraType":"perspective"}},"Last Time Model File":"sam3d_multi_rigged.fbx","Scene Config":{"showGrid":true,"backgroundColor":"#282828","backgroundImage":"","backgroundRenderMode":"tiled"},"Light 
Config":{"intensity":3}},"widgets_values":["",""]},{"id":36,"type":"MultibandToImage","pos":[-175.9559301211466,-114.33880857459438],"size":[270,58],"flags":{},"order":2,"mode":0,"inputs":[{"localized_name":"multiband","name":"multiband","type":"MULTIBAND_IMAGE","link":42},{"localized_name":"channels","name":"channels","shape":7,"type":"STRING","widget":{"name":"channels"},"link":null}],"outputs":[{"localized_name":"image","name":"image","type":"IMAGE","links":[43]}],"properties":{"cnr_id":"comfyui-multiband","ver":"9b51a21173c154aa9adae9e8b76db5e82f87f657","Node name for S&R":"MultibandToImage","aux_id":"PozzettiAndrea/ComfyUI-Multiband"},"widgets_values":["0,1,2"]},{"id":16,"type":"SAM3DBodyProcessMultiple","pos":[310.13806988037925,-51.40382894461709],"size":[294.4166015625,182],"flags":{},"order":4,"mode":0,"inputs":[{"localized_name":"model","name":"model","type":"SAM3D_MODEL","link":32},{"localized_name":"image","name":"image","type":"IMAGE","link":43},{"localized_name":"masks","name":"masks","type":"MASK","link":45},{"localized_name":"depth_map","name":"depth_map","shape":7,"type":"IMAGE","link":null},{"localized_name":"intrinsics","name":"intrinsics","shape":7,"type":"INTRINSICS","link":null},{"localized_name":"depth_confidence","name":"depth_confidence","shape":7,"type":"IMAGE","link":null},{"localized_name":"inference_type","name":"inference_type","shape":7,"type":"COMBO","widget":{"name":"inference_type"},"link":null},{"localized_name":"adjust_position_from_depth","name":"adjust_position_from_depth","shape":7,"type":"BOOLEAN","widget":{"name":"adjust_position_from_depth"},"link":null}],"outputs":[{"localized_name":"multi_mesh_data","name":"multi_mesh_data","type":"SAM3D_MULTI_OUTPUT","links":[23]},{"localized_name":"preview","name":"preview","type":"IMAGE","links":[21]}],"properties":{"cnr_id":"ComfyUI-SAM3DBody","ver":"0cb5527a152b90d4cfa1d2389f47997ca701d141","Node name for 
S&R":"SAM3DBodyProcessMultiple"},"widgets_values":["full",false]},{"id":39,"type":"MultibandToMasks","pos":[-171.71536834138104,57.60299660377296],"size":[272.62109375,82],"flags":{},"order":3,"mode":0,"inputs":[{"localized_name":"multiband","name":"multiband","type":"MULTIBAND_IMAGE","link":44},{"localized_name":"channels","name":"channels","shape":7,"type":"STRING","widget":{"name":"channels"},"link":null},{"localized_name":"auto_detect_masks","name":"auto_detect_masks","shape":7,"type":"BOOLEAN","widget":{"name":"auto_detect_masks"},"link":null}],"outputs":[{"localized_name":"masks","name":"masks","type":"MASK","links":[45]}],"properties":{"cnr_id":"comfyui-multiband","ver":"fc620d0d8eee86ab8dc75990df5f0a24e35dbdb3","Node name for S&R":"MultibandToMasks","aux_id":"PozzettiAndrea/ComfyUI-Multiband"},"widgets_values":["0",true]},{"id":35,"type":"MultibandLoad","pos":[-504.6900417936712,25.823169588315913],"size":[270,122],"flags":{},"order":0,"mode":0,"inputs":[{"localized_name":"image","name":"image","type":"COMBO","widget":{"name":"image"},"link":null},{"localized_name":"normalize","name":"normalize","shape":7,"type":"BOOLEAN","widget":{"name":"normalize"},"link":null}],"outputs":[{"localized_name":"multiband","name":"multiband","type":"MULTIBAND_IMAGE","links":[42,44]},{"localized_name":"num_channels","name":"num_channels","type":"INT","links":null},{"localized_name":"channel_names","name":"channel_names","type":"STRING","links":null}],"properties":{"cnr_id":"comfyui-multiband","ver":"9b51a21173c154aa9adae9e8b76db5e82f87f657","Node name for S&R":"MultibandLoad","aux_id":"PozzettiAndrea/ComfyUI-Multiband"},"widgets_values":["people.npz",true]},{"id":4,"type":"PreviewImage","pos":[534.5683873065636,151.11197305797026],"size":[216.20494921875002,289.44467187500004],"flags":{},"order":6,"mode":0,"inputs":[{"localized_name":"images","name":"images","type":"IMAGE","link":21}],"outputs":[],"properties":{"cnr_id":"comfy-core","ver":"0.3.70","Node name for 
S&R":"PreviewImage"},"widgets_values":[]},{"id":24,"type":"LoadSAM3DBodyModel","pos":[-181.73971384816508,-313.53856242252346],"size":[273.7994140625,106],"flags":{},"order":1,"mode":0,"inputs":[{"localized_name":"attn_backend","name":"attn_backend","type":"COMBO","widget":{"name":"attn_backend"},"link":null},{"localized_name":"precision","name":"precision","type":"COMBO","widget":{"name":"precision"},"link":null}],"outputs":[{"localized_name":"model","name":"model","type":"SAM3D_MODEL","links":[32]}],"properties":{"cnr_id":"ComfyUI-SAM3DBody","ver":"5fef822c291d4a440e8392acdc40fa3d93a0820d","Node name for S&R":"LoadSAM3DBodyModel"},"widgets_values":["auto","fp32"]}],"links":[[21,16,1,4,0,"IMAGE"],[23,16,0,18,0,"SAM3D_MULTI_OUTPUT"],[32,24,0,16,0,"SAM3D_MODEL"],[37,18,0,29,2,"STRING"],[42,35,0,36,0,"MULTIBAND_IMAGE"],[43,36,0,16,1,"IMAGE"],[44,35,0,39,0,"MULTIBAND_IMAGE"],[45,39,0,16,2,"MASK"]],"groups":[],"config":{},"extra":{"ds":{"scale":0.7620673308696438,"offset":[683.384247913206,617.7638838063901]},"frontendVersion":"1.35.9","workflowRendererVersion":"LG"},"version":0.4}
{"id":"0ee524cd-7bc8-46f2-8751-32399825f67b","revision":0,"last_node_id":60,"last_link_id":66,"nodes":[{"id":24,"type":"LoadSAM3DBodyModel","pos":[-181.73971384816508,-313.53856242252346],"size":[273.7994140625,58],"flags":{},"order":0,"mode":0,"inputs":[],"outputs":[{"localized_name":"model","name":"model","type":"SAM3D_MODEL","links":[32]}],"properties":{"cnr_id":"ComfyUI-SAM3DBody","ver":"5fef822c291d4a440e8392acdc40fa3d93a0820d","Node name for S&R":"LoadSAM3DBodyModel","aux_id":"PozzettiAndrea/ComfyUI-SAM3DBody"},"widgets_values":["auto","fp32"]},{"id":35,"type":"MultibandLoad","pos":[-504.6900417936712,25.823169588315913],"size":[270,122],"flags":{},"order":1,"mode":0,"inputs":[{"localized_name":"image","name":"image","type":"COMBO","widget":{"name":"image"},"link":null},{"localized_name":"normalize","name":"normalize","shape":7,"type":"BOOLEAN","widget":{"name":"normalize"},"link":null}],"outputs":[{"localized_name":"multiband","name":"multiband","type":"MULTIBAND_IMAGE","links":[42,44]},{"localized_name":"num_channels","name":"num_channels","type":"INT","links":null},{"localized_name":"channel_names","name":"channel_names","type":"STRING","links":null}],"properties":{"cnr_id":"comfyui-multiband","ver":"9b51a21173c154aa9adae9e8b76db5e82f87f657","Node name for 
S&R":"MultibandLoad","aux_id":"PozzettiAndrea/ComfyUI-Multiband"},"widgets_values":["fellowship.npz",true]},{"id":18,"type":"SAM3DBodyExportMultipleFBX","pos":[222.38422292904016,-227.90952348436795],"size":[472.8209914560281,82],"flags":{},"order":13,"mode":0,"inputs":[{"localized_name":"multi_mesh_data","name":"multi_mesh_data","type":"SAM3D_MULTI_OUTPUT","link":23},{"localized_name":"output_filename","name":"output_filename","type":"STRING","widget":{"name":"output_filename"},"link":null},{"localized_name":"combine","name":"combine","type":"BOOLEAN","widget":{"name":"combine"},"link":null}],"outputs":[{"localized_name":"fbx_path","name":"fbx_path","type":"STRING","links":[37]}],"properties":{"cnr_id":"ComfyUI-SAM3DBody","ver":"0cb5527a152b90d4cfa1d2389f47997ca701d141","Node name for S&R":"SAM3DBodyExportMultipleFBX","aux_id":"PozzettiAndrea/ComfyUI-SAM3DBody"},"widgets_values":["sam3d_multi_rigged.fbx",true]},{"id":39,"type":"MultibandToMasks","pos":[-171.71536834138104,57.60299660377296],"size":[272.62109375,82],"flags":{},"order":6,"mode":0,"inputs":[{"localized_name":"multiband","name":"multiband","type":"MULTIBAND_IMAGE","link":44},{"localized_name":"channels","name":"channels","shape":7,"type":"STRING","widget":{"name":"channels"},"link":null},{"localized_name":"auto_detect_masks","name":"auto_detect_masks","shape":7,"type":"BOOLEAN","widget":{"name":"auto_detect_masks"},"link":null}],"outputs":[{"localized_name":"masks","name":"masks","type":"MASK","links":[45,52]}],"properties":{"cnr_id":"comfyui-multiband","ver":"fc620d0d8eee86ab8dc75990df5f0a24e35dbdb3","Node name for 
S&R":"MultibandToMasks","aux_id":"PozzettiAndrea/ComfyUI-Multiband"},"widgets_values":["0",true]},{"id":36,"type":"MultibandToImage","pos":[-175.9559301211466,-114.33880857459438],"size":[270,58],"flags":{},"order":5,"mode":0,"inputs":[{"localized_name":"multiband","name":"multiband","type":"MULTIBAND_IMAGE","link":42},{"localized_name":"channels","name":"channels","shape":7,"type":"STRING","widget":{"name":"channels"},"link":null}],"outputs":[{"localized_name":"image","name":"image","type":"IMAGE","links":[43,46,53]}],"properties":{"cnr_id":"comfyui-multiband","ver":"9b51a21173c154aa9adae9e8b76db5e82f87f657","Node name for S&R":"MultibandToImage","aux_id":"PozzettiAndrea/ComfyUI-Multiband"},"widgets_values":["0,1,2"]},{"id":44,"type":"PreviewImage","pos":[174.37508842241317,-544.3421567130846],"size":[521.2849601956311,258],"flags":{},"order":8,"mode":0,"inputs":[{"localized_name":"images","name":"images","type":"IMAGE","link":53}],"outputs":[],"properties":{"cnr_id":"comfy-core","ver":"0.10.0","Node name for S&R":"PreviewImage"},"widgets_values":[]},{"id":41,"type":"DownloadAndLoadDepthAnythingV3Model","pos":[-635.6202459821125,266.5827061019216],"size":[353.103125,82],"flags":{},"order":2,"mode":0,"inputs":[{"localized_name":"model","name":"model","type":"COMBO","widget":{"name":"model"},"link":null},{"localized_name":"precision","name":"precision","shape":7,"type":"COMBO","widget":{"name":"precision"},"link":null}],"outputs":[{"localized_name":"da3_model","name":"da3_model","type":"DA3MODEL","links":[47]}],"properties":{"cnr_id":"comfyui-depthanythingv3","ver":"0.1.0","Node name for 
S&R":"DownloadAndLoadDepthAnythingV3Model","aux_id":"PozzettiAndrea/ComfyUI-DepthAnythingV3"},"widgets_values":["da3_giant.safetensors","auto"]},{"id":16,"type":"SAM3DBodyProcessMultiple","pos":[310.13806988037925,-51.40382894461709],"size":[294.4166015625,182],"flags":{},"order":11,"mode":0,"inputs":[{"localized_name":"model","name":"model","type":"SAM3D_MODEL","link":32},{"localized_name":"image","name":"image","type":"IMAGE","link":43},{"localized_name":"masks","name":"masks","type":"MASK","link":45},{"localized_name":"depth_map","name":"depth_map","shape":7,"type":"IMAGE","link":48},{"localized_name":"intrinsics","name":"intrinsics","shape":7,"type":"INTRINSICS","link":50},{"localized_name":"depth_confidence","name":"depth_confidence","shape":7,"type":"IMAGE","link":49},{"localized_name":"inference_type","name":"inference_type","shape":7,"type":"COMBO","widget":{"name":"inference_type"},"link":null},{"localized_name":"adjust_position_from_depth","name":"adjust_position_from_depth","shape":7,"type":"BOOLEAN","widget":{"name":"adjust_position_from_depth"},"link":null}],"outputs":[{"localized_name":"multi_mesh_data","name":"multi_mesh_data","type":"SAM3D_MULTI_OUTPUT","links":[23]},{"localized_name":"preview","name":"preview","type":"IMAGE","links":[21]}],"properties":{"cnr_id":"ComfyUI-SAM3DBody","ver":"0cb5527a152b90d4cfa1d2389f47997ca701d141","Node name for S&R":"SAM3DBodyProcessMultiple","aux_id":"PozzettiAndrea/ComfyUI-SAM3DBody"},"widgets_values":["full",true]},{"id":43,"type":"MaskPreview","pos":[117.97631015629814,241.15343269485032],"size":[281.5215170556271,258],"flags":{},"order":9,"mode":0,"inputs":[{"localized_name":"mask","name":"mask","type":"MASK","link":52}],"outputs":[],"properties":{"cnr_id":"comfy-core","ver":"0.10.0","Node name for 
S&R":"MaskPreview"},"widgets_values":[]},{"id":58,"type":"LoadImageDataSetFromFolder","pos":[558.8072020347307,177.2890386938821],"size":[312.1490234375,58],"flags":{},"order":3,"mode":0,"inputs":[{"localized_name":"folder","name":"folder","type":"COMBO","widget":{"name":"folder"},"link":null}],"outputs":[{"localized_name":"images","name":"images","shape":6,"type":"IMAGE","links":null}],"properties":{"cnr_id":"comfy-core","ver":"0.10.0","Node name for S&R":"LoadImageDataSetFromFolder"},"widgets_values":["3d"]},{"id":40,"type":"DepthAnything_V3","pos":[-239.70519251553551,249.89808418797932],"size":[302.296875,330],"flags":{},"order":7,"mode":0,"inputs":[{"localized_name":"da3_model","name":"da3_model","type":"DA3MODEL","link":47},{"localized_name":"images","name":"images","type":"IMAGE","link":46},{"localized_name":"camera_params","name":"camera_params","shape":7,"type":"CAMERA_PARAMS","link":null},{"localized_name":"normalization_mode","name":"normalization_mode","type":"COMBO","widget":{"name":"normalization_mode"},"link":null},{"localized_name":"resize_method","name":"resize_method","shape":7,"type":"COMBO","widget":{"name":"resize_method"},"link":null},{"localized_name":"invert_depth","name":"invert_depth","shape":7,"type":"BOOLEAN","widget":{"name":"invert_depth"},"link":null},{"localized_name":"keep_model_size","name":"keep_model_size","shape":7,"type":"BOOLEAN","widget":{"name":"keep_model_size"},"link":null}],"outputs":[{"localized_name":"depth","name":"depth","type":"IMAGE","links":[48]},{"localized_name":"confidence","name":"confidence","type":"IMAGE","links":[49]},{"localized_name":"resized_rgb_image","name":"resized_rgb_image","type":"IMAGE","links":null},{"localized_name":"ray_origin","name":"ray_origin","type":"IMAGE","links":null},{"localized_name":"ray_direction","name":"ray_direction","type":"IMAGE","links":null},{"localized_name":"extrinsics_json","name":"extrinsics_json","type":"STRING","links":null},{"localized_name":"intrinsics_json","name":"int
rinsics_json","type":"STRING","links":null},{"localized_name":"sky_mask","name":"sky_mask","type":"MASK","links":null},{"localized_name":"extrinsics","name":"extrinsics","type":"EXTRINSICS","links":[65]},{"localized_name":"intrinsics","name":"intrinsics","type":"INTRINSICS","links":[50,54]},{"localized_name":"gaussian_ply_path","name":"gaussian_ply_path","type":"STRING","links":null}],"properties":{"cnr_id":"comfyui-depthanythingv3","ver":"0.1.0","Node name for S&R":"DepthAnything_V3","aux_id":"PozzettiAndrea/ComfyUI-DepthAnythingV3"},"widgets_values":["Raw","resize",false,false]},{"id":59,"type":"PreviewAny","pos":[-177.21437602186086,698.0069656707296],"size":[210,166],"flags":{},"order":10,"mode":0,"inputs":[{"localized_name":"source","name":"source","type":"*","link":65}],"outputs":[],"properties":{"cnr_id":"comfy-core","ver":"0.10.0","Node name for S&R":"PreviewAny"},"widgets_values":[null,null,null]},{"id":46,"type":"PreviewAny","pos":[165.71577003435993,579.4866249672091],"size":[210,166],"flags":{},"order":12,"mode":0,"inputs":[{"localized_name":"source","name":"source","type":"*","link":54}],"outputs":[],"properties":{"cnr_id":"comfy-core","ver":"0.10.0","Node name for 
S&R":"PreviewAny"},"widgets_values":[null,null,null]},{"id":60,"type":"CreateLoad3DCamera","pos":[612.3345865759417,-151.8541041840194],"size":[270,226],"flags":{},"order":4,"mode":0,"inputs":[{"localized_name":"pos_x","name":"pos_x","type":"FLOAT","widget":{"name":"pos_x"},"link":null},{"localized_name":"pos_y","name":"pos_y","type":"FLOAT","widget":{"name":"pos_y"},"link":null},{"localized_name":"pos_z","name":"pos_z","type":"FLOAT","widget":{"name":"pos_z"},"link":null},{"localized_name":"target_x","name":"target_x","type":"FLOAT","widget":{"name":"target_x"},"link":null},{"localized_name":"target_y","name":"target_y","type":"FLOAT","widget":{"name":"target_y"},"link":null},{"localized_name":"target_z","name":"target_z","type":"FLOAT","widget":{"name":"target_z"},"link":null},{"localized_name":"zoom","name":"zoom","type":"INT","widget":{"name":"zoom"},"link":null},{"localized_name":"camera_type","name":"camera_type","type":"COMBO","widget":{"name":"camera_type"},"link":null}],"outputs":[{"localized_name":"camera_info","name":"camera_info","type":"LOAD3D_CAMERA","links":[66]}],"properties":{"cnr_id":"comfyui-camerapack","ver":"5dceede2dff0d3415ee5531f5f038bbe2f9854fa","Node name for S&R":"CreateLoad3DCamera"},"widgets_values":[3,5.1,8,-0.5,2.1,0.4,3,"perspective"]},{"id":29,"type":"Preview3D","pos":[913.3440846489831,-281.64493985574006],"size":[724.8980177011981,433.9362874144434],"flags":{},"order":15,"mode":0,"inputs":[{"localized_name":"camera_info","name":"camera_info","shape":7,"type":"LOAD3D_CAMERA","link":66},{"localized_name":"bg_image","name":"bg_image","shape":7,"type":"IMAGE","link":null},{"localized_name":"model_file","name":"model_file","type":"STRING","widget":{"name":"model_file"},"link":37},{"localized_name":"image","name":"image","type":"PREVIEW_3D","widget":{"name":"image"},"link":null}],"outputs":[],"properties":{"cnr_id":"comfy-core","ver":"0.6.0","Node name for S&R":"Preview3D","Camera 
Config":{"cameraType":"perspective","fov":73,"state":{"position":{"x":0,"y":0,"z":0},"target":{"x":0,"y":0,"z":0},"zoom":50,"cameraType":"perspective"}},"Last Time Model File":"sam3d_multi_rigged.fbx","Scene Config":{"showGrid":true,"backgroundColor":"#282828","backgroundImage":"","backgroundRenderMode":"tiled"},"Light Config":{"intensity":2}},"widgets_values":["",""]},{"id":4,"type":"PreviewImage","pos":[464.1940613863452,305.263977592208],"size":[475.647173301051,258],"flags":{},"order":14,"mode":0,"inputs":[{"localized_name":"images","name":"images","type":"IMAGE","link":21}],"outputs":[],"properties":{"cnr_id":"comfy-core","ver":"0.3.70","Node name for S&R":"PreviewImage"},"widgets_values":[]}],"links":[[21,16,1,4,0,"IMAGE"],[23,16,0,18,0,"SAM3D_MULTI_OUTPUT"],[32,24,0,16,0,"SAM3D_MODEL"],[37,18,0,29,2,"STRING"],[42,35,0,36,0,"MULTIBAND_IMAGE"],[43,36,0,16,1,"IMAGE"],[44,35,0,39,0,"MULTIBAND_IMAGE"],[45,39,0,16,2,"MASK"],[46,36,0,40,1,"IMAGE"],[47,41,0,40,0,"DA3MODEL"],[48,40,0,16,3,"IMAGE"],[49,40,1,16,5,"IMAGE"],[50,40,9,16,4,"INTRINSICS"],[52,39,0,43,0,"MASK"],[53,36,0,44,0,"IMAGE"],[54,40,9,46,0,"INTRINSICS"],[65,40,8,59,0,"EXTRINSICS"],[66,60,0,29,0,"LOAD3D_CAMERA"]],"groups":[],"config":{},"extra":{"ds":{"scale":0.6323843630627971,"offset":[646.8775660882231,647.470418399589]},"frontendVersion":"1.35.9","workflowRendererVersion":"LG"},"version":0.4}
import bpy
import mathutils
# Proof-of-life render: clear the scene (except the MHR character), add a
# camera and sun, and render a single PNG to confirm the pipeline works.
# 1. Clear everything except the MHR character objects.
# Iterate a snapshot: removing objects while iterating the live bpy
# collection can skip entries.
for o in list(bpy.data.objects):
    if o.type in ['CAMERA', 'LIGHT', 'MESH'] and o.name not in ['MHR_Mesh', 'MHR_Armature']:
        bpy.data.objects.remove(o, do_unlink=True)
mesh = bpy.data.objects.get("MHR_Mesh")
if mesh is None:
    # Fail loudly here instead of crashing later at mesh.location.
    raise SystemExit("MHR_Mesh not found in the scene")
bpy.context.view_layer.objects.active = mesh
# 2. Add Camera and Point it at the character
cam_data = bpy.data.cameras.new("ProofCam")
cam_obj = bpy.data.objects.new("ProofCam", cam_data)
bpy.context.collection.objects.link(cam_obj)
bpy.context.scene.camera = cam_obj
# Position it far away and high up
cam_obj.location = (0, -300, 100)
# Point at the mesh: aim the camera's -Z axis at the mesh origin, Y up.
direction = mesh.location - cam_obj.location
rot_quat = direction.to_track_quat('-Z', 'Y')
cam_obj.rotation_euler = rot_quat.to_euler()
# 3. Add light
light_data = bpy.data.lights.new(name="Sun", type='SUN')
light_obj = bpy.data.objects.new(name="Sun", object_data=light_data)
bpy.context.collection.objects.link(light_obj)
light_obj.location = (50, -50, 100)
# 4. Render ONE frame
bpy.context.scene.render.image_settings.file_format = 'PNG'
bpy.context.scene.render.filepath = "/Users/speed/CEBSam3d/PROOFOFLIFE.png"
bpy.ops.render.render(write_still=True)
print("PROOFOFLIFE RENDERED")
import socket
import json
import sys
def run_remote_mcp(host, port, payload_code):
    """Send Python *payload_code* to a Blender MCP server and print its reply.

    Wire protocol: a JSON request terminated by a NUL byte; the server
    replies with a NUL-terminated JSON document.

    Args:
        host: MCP server hostname or IP.
        port: MCP server TCP port.
        payload_code: Python source to execute inside Blender.

    Returns:
        The decoded response dict, or None when the connection failed or
        no complete response was received.
    """
    req = {
        "type": "execute",
        "code": payload_code,
        "strict_json": False
    }
    # NUL byte terminates the request on the wire.
    req_bytes = json.dumps(req).encode('utf-8') + b'\0'
    print(f"Connecting to Blender MCP on {host}:{port}...")
    try:
        # create_connection + context manager guarantees the socket is
        # closed on every path. (The original could hit a NameError in
        # its `finally: s.close()` if socket creation itself raised.)
        # 5 minutes timeout for rendering/building.
        with socket.create_connection((host, port), timeout=300) as s:
            print("Connected! Sending payload...")
            s.sendall(req_bytes)
            # Read until null byte (or EOF).
            buf = bytearray()
            while b'\0' not in buf:
                chunk = s.recv(4096)
                if not chunk:
                    break
                buf.extend(chunk)
        if b'\0' not in buf:
            print("Did not receive a complete response.")
            return None
        resp_data = buf[:buf.index(b'\0')]
        response = json.loads(resp_data.decode('utf-8'))
        print("\n--- MCP SERVER RESPONSE ---")
        print(f"Status: {response.get('status')}")
        if response.get('stdout'):
            print("\nSTDOUT:")
            print(response['stdout'])
        if response.get('stderr'):
            print("\nSTDERR:")
            print(response['stderr'])
        if response.get('status') == 'error':
            print("\nERROR MESSAGE:")
            print(response.get('message'))
        return response
    except Exception as e:
        print(f"Connection failed: {e}")
        return None
if __name__ == "__main__":
    # Payload executed inside the remote Blender instance: it fakes
    # Blender-style CLI args (everything after '--') and exec()s
    # build_mhr_scene.py as if Blender had been launched with
    # `--python build_mhr_scene.py -- ...`. Sent verbatim over the MCP
    # socket; do not reformat the string contents.
    code = """
import sys
sys.argv = [
    'blender', '--',
    '--skeleton', '/home/straughter/sam-3d-body/temp_output/mhr_skeleton.json',
    '--mesh', '/home/straughter/sam-3d-body/temp_output/mhr_mesh.obj',
    '--poses', '/home/straughter/sam-3d-body/temp_output/poses',
    '--out', '/home/straughter/sam-3d-body/temp_output/Option_A_Kpop.blend'
]
file_path = '/home/straughter/sam-3d-body/build_mhr_scene.py'
with open(file_path, 'r') as f:
    exec(f.read(), {'__name__': '__main__'})
result = {"message": "Scene built successfully"}
"""
    run_remote_mcp("192.168.1.143", 8199, code)
import socket
import json
import sys
def run_remote_mcp(host, port, payload_code):
    """Send Python *payload_code* to a Blender MCP server and print its reply.

    Wire protocol: a JSON request terminated by a NUL byte; the server
    replies with a NUL-terminated JSON document.

    Args:
        host: MCP server hostname or IP.
        port: MCP server TCP port.
        payload_code: Python source to execute inside Blender.

    Returns:
        The decoded response dict, or None when the connection failed or
        no complete response was received.
    """
    req = {
        "type": "execute",
        "code": payload_code,
        "strict_json": False
    }
    # NUL byte terminates the request on the wire.
    req_bytes = json.dumps(req).encode('utf-8') + b'\0'
    print(f"Connecting to Blender MCP on {host}:{port}...")
    try:
        # create_connection + context manager guarantees the socket is
        # closed on every path. (The original could hit a NameError in
        # its `finally: s.close()` if socket creation itself raised.)
        # 10 minutes timeout for rendering.
        with socket.create_connection((host, port), timeout=600) as s:
            print("Connected! Sending render payload...")
            s.sendall(req_bytes)
            # Read until null byte (or EOF).
            buf = bytearray()
            while b'\0' not in buf:
                chunk = s.recv(4096)
                if not chunk:
                    break
                buf.extend(chunk)
        if b'\0' not in buf:
            print("Did not receive a complete response.")
            return None
        resp_data = buf[:buf.index(b'\0')]
        response = json.loads(resp_data.decode('utf-8'))
        print("\n--- MCP SERVER RESPONSE ---")
        print(f"Status: {response.get('status')}")
        if response.get('stdout'):
            print("\nSTDOUT:")
            print(response['stdout'])
        if response.get('stderr'):
            print("\nSTDERR:")
            print(response['stderr'])
        if response.get('status') == 'error':
            print("\nERROR MESSAGE:")
            print(response.get('message'))
        return response
    except Exception as e:
        print(f"Connection failed: {e}")
        return None
if __name__ == "__main__":
    # Payload executed inside the remote Blender instance: opens the
    # pre-built .blend, applies a silver "mannequin" material, auto-frames
    # a camera on the character's bounding box, lights the scene, and
    # renders the full animation headlessly as a PNG sequence. Sent
    # verbatim over the MCP socket; do not reformat the string contents.
    code = """
import bpy
import math
import mathutils
# Load the file deterministically
bpy.ops.wm.open_mainfile(filepath="/home/straughter/sam-3d-body/temp_output/Option_A_Kpop.blend")
# 1. Clean up existing camera/lights/plane
for obj in bpy.data.objects:
    if obj.type in ['CAMERA', 'LIGHT', 'MESH'] and obj.name in ['RenderCamera', 'SunLight', 'Plane']:
        bpy.data.objects.remove(obj, do_unlink=True)
# 2. Find the character mesh
mesh_obj = bpy.data.objects.get("MHR_Mesh")
arm_obj = bpy.data.objects.get("MHR_Armature")
if not mesh_obj or not arm_obj:
    print("ERROR: MHR_Mesh or MHR_Armature not found!")
    result = {"status": "error", "message": "Mesh/Armature not found"}
else:
    # 3. Dynamic frame range based on animation data
    if arm_obj.animation_data and arm_obj.animation_data.action:
        action = arm_obj.animation_data.action
        frame_end = int(action.frame_range[1])
    else:
        frame_end = 300
    # 4. Materials (Silver mannequin)
    silver_mat = bpy.data.materials.new(name="Silver")
    silver_mat.use_nodes = True
    bsdf = silver_mat.node_tree.nodes["Principled BSDF"]
    bsdf.inputs['Base Color'].default_value = (0.8, 0.8, 0.8, 1)
    bsdf.inputs['Metallic'].default_value = 0.8
    bsdf.inputs['Roughness'].default_value = 0.3
    if len(mesh_obj.data.materials) == 0:
        mesh_obj.data.materials.append(silver_mat)
    else:
        mesh_obj.data.materials[0] = silver_mat
    # World background (Solid Dark Gray)
    bpy.data.worlds["World"].node_tree.nodes["Background"].inputs[0].default_value = (0.05, 0.05, 0.05, 1)
    # Calculate Bounding Box
    bpy.context.view_layer.update()
    bbox = [mesh_obj.matrix_world @ mathutils.Vector(corner) for corner in mesh_obj.bound_box]
    min_x = min(v.x for v in bbox)
    max_x = max(v.x for v in bbox)
    min_y = min(v.y for v in bbox)
    max_y = max(v.y for v in bbox)
    min_z = min(v.z for v in bbox)
    max_z = max(v.z for v in bbox)
    center = ((min_x + max_x) / 2, (min_y + max_y) / 2, (min_z + max_z) / 2)
    height = max_z - min_z
    width = max(max_x - min_x, max_y - min_y)
    # Position Camera
    dist = max(height, width) * 2.5
    camera_data = bpy.data.cameras.new("RenderCamera")
    camera_obj = bpy.data.objects.new("RenderCamera", camera_data)
    bpy.context.collection.objects.link(camera_obj)
    camera_obj.location = (center[0], center[1] - dist, center[2])
    camera_obj.rotation_euler = (math.radians(90), 0.0, 0.0)
    bpy.context.scene.camera = camera_obj
    # Add Lighting
    light_data = bpy.data.lights.new(name="SunLight", type='SUN')
    light_data.energy = 5.0
    light_obj = bpy.data.objects.new(name="SunLight", object_data=light_data)
    bpy.context.collection.objects.link(light_obj)
    light_obj.location = (center[0] + 5, center[1] - 5, center[2] + height + 2)
    light_data_fill = bpy.data.lights.new(name="FillLight", type='AREA')
    light_data_fill.energy = 100.0
    light_obj_fill = bpy.data.objects.new(name="FillLight", object_data=light_data_fill)
    bpy.context.collection.objects.link(light_obj_fill)
    light_obj_fill.location = (center[0] - 3, center[1] - dist/2, center[2])
    light_obj_fill.rotation_euler = (math.radians(90), 0.0, math.radians(-45))
    # Render Settings
    scene = bpy.context.scene
    scene.render.engine = 'BLENDER_EEVEE'
    scene.render.image_settings.file_format = 'PNG'
    scene.render.resolution_x = 1080
    scene.render.resolution_y = 1920
    scene.render.filepath = "/home/straughter/sam-3d-body/temp_output/frames/frame_"
    scene.frame_start = 1
    scene.frame_end = frame_end
    print("Starting Headless Render...")
    bpy.ops.render.render(animation=True)
    print("Render complete!")
    result = {"message": f"Rendered {frame_end} frames successfully", "output": scene.render.filepath}
"""
    run_remote_mcp("192.168.1.143", 8199, code)
import os, sys, subprocess, json, pickle, torch

# Remote wrapper (runs on the 3090 box inside sam-3d-body's venv): drives
# demo.py over the synced frames, then exports the MHR base mesh, skeleton
# and skin weights plus per-frame pose transforms as JSON for the Blender
# scene builder.
sam_dir = os.path.expanduser("~/sam-3d-body")
in_dir = f"{sam_dir}/temp_input"
out_dir = f"{sam_dir}/temp_output"
ckpt = "/home/straughter/ComfyUI/models/sam3dbody/model.ckpt"
mhr = "/home/straughter/ComfyUI/models/sam3dbody/assets/mhr_model.pt"
print(">> Running demo.py...")
# Run inference with the repo's own venv interpreter so its pinned deps
# are used; check=True aborts this wrapper if demo.py fails.
subprocess.run([f"{sam_dir}/venv/bin/python", f"{sam_dir}/demo.py",
                "--checkpoint_path", ckpt,
                "--image_folder", in_dir,
                "--output_folder", out_dir,
                "--mhr_path", mhr], check=True)
print(">> Extracting MHR Base...")
model = torch.jit.load(mhr)
# Zero identity/pose/extra vectors => the MHR rest-pose character.
identity, pose, extra = torch.zeros(1, 45), torch.zeros(1, 204), torch.zeros(1, 72)
with torch.no_grad(): verts, skel_state = model(identity, pose, extra)
skeleton = model.character_torch.skeleton
# Per-joint transforms; rows look like [tx,ty,tz, qx,qy,qz,qw, scale] —
# see the MHR pose exporter comment elsewhere in this gist; confirm.
transforms = skel_state[0].cpu().tolist()
lbs = model.character_torch.linear_blend_skinning
# Rebuild per-vertex (joint, weight) influence lists from the flattened
# sparse LBS arrays.
weights_per_vert = [[] for _ in range(verts.shape[1])]
for i in range(len(lbs.vert_indices_flattened)):
    v, j, w = lbs.vert_indices_flattened[i], lbs.skin_indices_flattened[i], lbs.skin_weights_flattened[i]
    weights_per_vert[v.item()].append((j.item(), w.item()))
skel_data = {"joint_names": list(skeleton.joint_names), "joint_parents": [int(p) for p in skeleton.joint_parents], "transforms": transforms, "weights": weights_per_vert}
with open(f"{out_dir}/mhr_skeleton.json", "w") as f: json.dump(skel_data, f)
verts_np, faces_np = verts[0].cpu().numpy(), model.character_torch.mesh.faces.cpu().numpy()
# OBJ face indices are 1-based, hence the +1.
with open(f"{out_dir}/mhr_mesh.obj", "w") as f:
    for v in verts_np: f.write(f"v {v[0]} {v[1]} {v[2]}\n")
    for face in faces_np: f.write(f"f {face[0]+1} {face[1]+1} {face[2]+1}\n")
print(">> Extracting MHR Poses...")
# demo.py writes one .pkl per frame; convert each into joint transforms.
pkl_files = sorted([f for f in os.listdir(out_dir) if f.endswith('.pkl')])
os.makedirs(f"{out_dir}/poses", exist_ok=True)
for pkl in pkl_files:
    with open(f"{out_dir}/{pkl}", 'rb') as f: data = pickle.load(f)
    # data[0] = first detected person in the frame.
    pose_tensor = torch.from_numpy(data[0]['mhr_model_params']).unsqueeze(0)
    with torch.no_grad(): _, skel = model(identity, pose_tensor, extra)
    with open(f"{out_dir}/poses/{pkl.replace('.pkl', '.json')}", "w") as f:
        json.dump({"transforms": skel[0].cpu().tolist()}, f)
print(">> Done.")
import bpy
import math
import mathutils
# Auto-framed render: frame a camera on the MHR character's bounding box,
# light it, add a ground plane, and render the animation to an MP4.
# 1. Clean up existing camera/lights. Iterate a snapshot: removing objects
# while iterating the live bpy collection can skip entries.
for obj in list(bpy.data.objects):
    if obj.type in ['CAMERA', 'LIGHT', 'MESH'] and obj.name in ['RenderCamera', 'SunLight', 'Plane']:
        bpy.data.objects.remove(obj, do_unlink=True)
# 2. Find the character mesh
mesh_obj = bpy.data.objects.get("MHR_Mesh")
if not mesh_obj:
    print("ERROR: MHR_Mesh not found!")
    # raise SystemExit instead of exit(): exit() is injected by the `site`
    # module and is not guaranteed to exist in embedded interpreters.
    raise SystemExit(1)
# 3. Calculate Bounding Box to find the character's center and size
bbox = [mesh_obj.matrix_world @ mathutils.Vector(corner) for corner in mesh_obj.bound_box]
min_x = min(v.x for v in bbox)
max_x = max(v.x for v in bbox)
min_y = min(v.y for v in bbox)
max_y = max(v.y for v in bbox)
min_z = min(v.z for v in bbox)
max_z = max(v.z for v in bbox)
center = ((min_x + max_x) / 2, (min_y + max_y) / 2, (min_z + max_z) / 2)
height = max_z - min_z
width = max(max_x - min_x, max_y - min_y)
print(f"Character found at {center} with height {height} and width {width}")
# 4. Position Camera based on character size (front view, -Y of center).
dist = max(height, width) * 2.5
camera_data = bpy.data.cameras.new("RenderCamera")
camera_obj = bpy.data.objects.new("RenderCamera", camera_data)
bpy.context.collection.objects.link(camera_obj)
camera_obj.location = (center[0], center[1] - dist, center[2])
camera_obj.rotation_euler = (math.radians(90), 0.0, 0.0)
bpy.context.scene.camera = camera_obj
# 5. Add Lighting
light_data = bpy.data.lights.new(name="SunLight", type='SUN')
light_data.energy = 10.0
light_obj = bpy.data.objects.new(name="SunLight", object_data=light_data)
bpy.context.collection.objects.link(light_obj)
light_obj.location = (center[0] + 10, center[1] - 10, center[2] + height)
# 6. Add Ground Plane at the character's feet.
bpy.ops.mesh.primitive_plane_add(size=dist*4, location=(center[0], center[1], min_z))
# 7. Render Settings
scene = bpy.context.scene
# NOTE(review): 'BLENDER_EEVEE_NEXT' is the Blender 4.2+ identifier; the
# sibling MCP payload in this gist uses 'BLENDER_EEVEE' — confirm the
# target Blender version.
scene.render.engine = 'BLENDER_EEVEE_NEXT'
scene.render.image_settings.file_format = 'FFMPEG'
scene.render.ffmpeg.format = 'MPEG4'
scene.render.ffmpeg.codec = 'H264'
# '//' prefix = path relative to the .blend file's directory.
scene.render.filepath = "//Option_A_Rendered.mp4"
scene.frame_start = 1
# NOTE(review): hard-coded 151-frame range; the MCP payload derives the
# range from the armature action — TODO unify.
scene.frame_end = 151
print("Starting Auto-Framed Render...")
bpy.ops.render.render(animation=True)
print("Render complete!")
{"id":"62ccaa08-0470-4297-b90a-653ea8045ec2","revision":0,"last_node_id":4,"last_link_id":2,"nodes":[{"id":2,"type":"SAM3DBodyPreviewRiggedMesh","pos":[423.20330913834636,-390.29866822590526],"size":[512,798],"flags":{},"order":1,"mode":0,"inputs":[{"localized_name":"fbx_output_path","name":"fbx_output_path","type":"STRING","widget":{"name":"fbx_output_path"},"link":2}],"outputs":[],"properties":{"aux_id":"PozzettiAndrea/ComfyUI-SAM3DBody","ver":"42fa5234c813bd69356499d27469f06fbdb8b0a0","Node name for S&R":"SAM3DBodyPreviewRiggedMesh","cnr_id":"ComfyUI-SAM3DBody"},"widgets_values":["",""]},{"id":4,"type":"SAM3DBodySelectMesh","pos":[-118.91450336165337,-347.0742541634052],"size":[308.683203125,82],"flags":{},"order":0,"mode":0,"inputs":[{"localized_name":"source_folder","name":"source_folder","type":"COMBO","widget":{"name":"source_folder"},"link":null},{"localized_name":"file_path","name":"file_path","type":"COMBO","widget":{"name":"file_path"},"link":null}],"outputs":[{"localized_name":"file_path","name":"file_path","type":"STRING","links":[2]}],"properties":{"aux_id":"PozzettiAndrea/ComfyUI-SAM3DBody","ver":"e803eec6cf27c47dea1cf4f6ef011c8a6011552b","Node name for S&R":"SAM3DBodySelectMesh","cnr_id":"ComfyUI-SAM3DBody"},"widgets_values":["input","sam3d_rigged.fbx"]}],"links":[[2,4,0,2,0,"STRING"]],"groups":[],"config":{},"extra":{"ds":{"scale":0.8264462809917354,"offset":[772.6595424241534,768.5796447884053]},"frontendVersion":"1.28.9","workflowRendererVersion":"LG"},"version":0.4}
{"id":"0ee524cd-7bc8-46f2-8751-32399825f67b","revision":0,"last_node_id":15,"last_link_id":19,"nodes":[{"id":3,"type":"LoadImage","pos":[-139.9653501897791,-168.41867158918637],"size":[274.080078125,314],"flags":{},"order":0,"mode":0,"inputs":[],"outputs":[{"name":"IMAGE","type":"IMAGE","links":[11]},{"name":"MASK","type":"MASK","links":[12]}],"properties":{"cnr_id":"comfy-core","ver":"0.3.70","Node name for S&R":"LoadImage"},"widgets_values":["masked_dancing.png","image"]},{"id":10,"type":"SAM3DBodyProcess","pos":[223.78491070084618,-388.3276916634053],"size":[288.9751953125,194],"flags":{},"order":2,"mode":0,"inputs":[{"name":"model","type":"SAM3D_MODEL","link":19},{"name":"image","type":"IMAGE","link":11},{"name":"mask","shape":7,"type":"MASK","link":12}],"outputs":[{"name":"mesh_data","type":"SAM3D_OUTPUT","links":[16]},{"name":"skeleton","type":"SKELETON","links":[]},{"name":"debug_image","type":"IMAGE","links":[14]}],"properties":{"cnr_id":"ComfyUI-SAM3DBody","ver":"e84b3445ce4879dbe66d5d60fe2be7a29f6f9518","Node name for S&R":"SAM3DBodyProcess","aux_id":"PozzettiAndrea/ComfyUI-SAM3DBody"},"widgets_values":[0.8,"full"]},{"id":4,"type":"PreviewImage","pos":[460.21531387272097,-87.02999971418635],"size":[140,246],"flags":{},"order":4,"mode":0,"inputs":[{"name":"images","type":"IMAGE","link":14}],"outputs":[],"properties":{"cnr_id":"comfy-core","ver":"0.3.70","Node name for S&R":"PreviewImage"},"widgets_values":[]},{"id":12,"type":"SAM3DBodyExportFBX","pos":[567.5667075758462,-397.3081604134053],"size":[270,58],"flags":{},"order":3,"mode":0,"inputs":[{"name":"mesh_data","type":"SAM3D_OUTPUT","link":16}],"outputs":[{"name":"fbx_path","type":"STRING","links":[17]}],"properties":{"cnr_id":"ComfyUI-SAM3DBody","ver":"e84b3445ce4879dbe66d5d60fe2be7a29f6f9518","Node name for 
S&R":"SAM3DBodyExportFBX","aux_id":"PozzettiAndrea/ComfyUI-SAM3DBody"},"widgets_values":["sam3d_rigged.fbx"]},{"id":13,"type":"SAM3DBodyPreviewRiggedMesh","pos":[894.9851450758457,-393.91921510090526],"size":[512,798],"flags":{},"order":5,"mode":0,"inputs":[{"name":"fbx_output_path","type":"STRING","widget":{"name":"fbx_output_path"},"link":17}],"outputs":[],"properties":{"cnr_id":"ComfyUI-SAM3DBody","ver":"e84b3445ce4879dbe66d5d60fe2be7a29f6f9518","Node name for S&R":"SAM3DBodyPreviewRiggedMesh","aux_id":"PozzettiAndrea/ComfyUI-SAM3DBody"},"widgets_values":["",""]},{"id":15,"type":"LoadSAM3DBodyModel","pos":[-145.44942523665372,-381.6774182259052],"size":[273.7994140625,58],"flags":{},"order":1,"mode":0,"inputs":[],"outputs":[{"name":"model","type":"SAM3D_MODEL","links":[19]}],"properties":{"cnr_id":"ComfyUI-SAM3DBody","ver":"5fef822c291d4a440e8392acdc40fa3d93a0820d","Node name for S&R":"LoadSAM3DBodyModel"},"widgets_values":["auto","fp32"]}],"links":[[11,3,0,10,1,"IMAGE"],[12,3,1,10,2,"MASK"],[14,10,2,4,0,"IMAGE"],[16,10,0,12,0,"SAM3D_OUTPUT"],[17,12,0,13,0,"STRING"],[19,15,0,10,0,"SAM3D_MODEL"]],"groups":[],"config":{},"extra":{"ds":{"scale":0.7513148009015777,"offset":[632.6570509474147,694.5531213151539]},"frontendVersion":"1.35.9","workflowRendererVersion":"LG"},"version":0.4}
import urllib.request
import json
import time
# ComfyUI server address (3090 machine)
SERVER_ADDRESS = "192.168.1.143:8188"
# Arbitrary client id used to tag prompts queued by this script.
CLIENT_ID = "kling_mocap_client"
def build_workflow(input_video_path, frame_load_cap=0, output_filename="kling_ref_video"):
prompt = {
"1": {
"class_type": "VHS_LoadVideo",
"inputs": {
"video": input_video_path,
"force_rate": 0,
"force_size": "Disabled",
"custom_width": 512,
"custom_height": 512,
"frame_load_cap": frame_load_cap,
"skip_first_frames": 0,
"select_every_nth": 1
}
},
"2": {
"class_type": "LoadSAM3DBodyModel",
"inputs": {
"attn_backend": "auto",
"precision": "fp32"
}
},
"3": {
"class_type": "SAM3DBodyProcessBatch",
"inputs": {
"model": ["2", 0],
"image": ["1", 0],
"bbox_threshold": 0.8,
"render_mode": "mesh_only"
}
},
"4": {
"class_type": "VHS_VideoCombine",
"inputs": {
"images": ["3", 0],
"frame_rate": ["1", 1],
"loop_count": 0,
"filename_prefix": output_filename,
"format": "video/h264-mp4",
"pix_fmt": "yuv420p",
"crf": 19,
"save_output": True,
"pingpong": False,
"save_metadata": True
}
}
}
return {"prompt": prompt, "client_id": CLIENT_ID}
def queue_prompt(prompt):
    """POST a built workflow to the ComfyUI /prompt endpoint.

    Args:
        prompt: Dict from build_workflow() with 'prompt' and 'client_id'.

    Returns:
        The decoded JSON response (contains 'prompt_id' on success), or
        None when the request failed.
    """
    p = {"prompt": prompt["prompt"], "client_id": prompt["client_id"]}
    data = json.dumps(p).encode('utf-8')
    req = urllib.request.Request(f"http://{SERVER_ADDRESS}/prompt", data=data)
    req.add_header("Content-Type", "application/json")
    try:
        # Context manager closes the HTTP response on every path
        # (the original leaked the response object).
        with urllib.request.urlopen(req) as response:
            return json.loads(response.read())
    except Exception as e:
        print(f"Error queuing prompt: {e}")
        return None
if __name__ == "__main__":
    import sys
    # CLI: <path_to_input_video_on_remote_server> [frame_cap]
    if len(sys.argv) < 2:
        print("Usage: python3 run_comfy_mocap.py <path_to_input_video_on_remote_server> [frame_cap]")
        sys.exit(1)
    input_video = sys.argv[1]
    # Optional cap on how many frames to process (0 = all frames).
    frame_cap = int(sys.argv[2]) if len(sys.argv) > 2 else 0
    print(f"Submitting Option B (ComfyUI) workflow for: {input_video} (cap: {frame_cap}) to {SERVER_ADDRESS}")
    workflow = build_workflow(input_video, frame_cap)
    res = queue_prompt(workflow)
    # queue_prompt returns None on connection errors; a successful queue
    # response carries a 'prompt_id'.
    if res and "prompt_id" in res:
        print(f"Successfully queued! Prompt ID: {res['prompt_id']}")
        print("You can view the progress on the ComfyUI web interface at http://192.168.1.143:8188")
        print("The finished motion reference video will be in the output folder of ComfyUI.")
    else:
        print("Failed to queue workflow. Check if ComfyUI is running and has the required nodes installed.")
#!/bin/bash
# Upload a local video to the 3090 box, run SAM 3D Body via the ComfyUI
# API, and download the resulting motion-reference video.
# Ensure script stops on first error
set -e
if [ "$#" -ne 1 ]; then
  echo "Usage: ./run_kling_mocap.sh <local_video_file.mp4>"
  exit 1
fi
LOCAL_VIDEO="$1"
FILENAME=$(basename "$LOCAL_VIDEO")
REMOTE_TEMP_PATH="/tmp/$FILENAME"
echo "================================================="
echo "🚀 Starting SOTA Kling 3.0 Mocap Workflow"
echo "================================================="
# Step 1: Upload video to 3090
echo "[1/3] Uploading video to 3090 GPU..."
scp "$LOCAL_VIDEO" straughter@192.168.1.143:"$REMOTE_TEMP_PATH"
# Step 2: Run ComfyUI API script
echo "[2/3] Running Meta SAM 3D Body inference via ComfyUI API..."
python3 sam3d_comfy_api.py "$REMOTE_TEMP_PATH" > /tmp/sam3d_comfy_out.txt
cat /tmp/sam3d_comfy_out.txt
# Extract the output filename from the script output.
# awk matches and splits in one pass (no useless `cat | grep | awk`).
OUTPUT_FILE=$(awk -F': ' '/output folder as:/ {print $2}' /tmp/sam3d_comfy_out.txt)
if [ -z "$OUTPUT_FILE" ]; then
  echo "❌ Error: Could not parse output filename from ComfyUI."
  exit 1
fi
# Step 3: Download the clean reference video
echo "[3/3] Downloading clean Kling reference video..."
scp "straughter@192.168.1.143:/home/straughter/ComfyUI/output/$OUTPUT_FILE" .
# Cleanup
ssh straughter@192.168.1.143 "rm '$REMOTE_TEMP_PATH'"
echo "================================================="
echo "✅ Done! Your Kling 3.0 reference video is ready: ./$OUTPUT_FILE"
echo "================================================="
#!/bin/bash
# Option A pipeline: local video -> frames -> remote SAM3D inference ->
# MHR JSON/OBJ export -> local headless Blender scene build.
set -e
if [ -z "$1" ]; then
  echo "Usage: ./run_option_a_mocap.sh <video_path>"
  exit 1
fi
VIDEO_PATH="$1"
REMOTE_HOST="straughter@192.168.1.143"
# NOTE: '~' stays unexpanded locally; the remote shell expands it.
REMOTE_SAM="~/sam-3d-body"
REMOTE_INPUT="$REMOTE_SAM/temp_input"
REMOTE_OUTPUT="$REMOTE_SAM/temp_output"
LOCAL_WORKSPACE="/Users/speed/CEBSam3d"
echo "[1/6] Extracting frames from video..."
rm -rf "$LOCAL_WORKSPACE/temp_input" "$LOCAL_WORKSPACE/temp_output"
mkdir -p "$LOCAL_WORKSPACE/temp_input"
# Extract at 30 fps
ffmpeg -y -i "$VIDEO_PATH" -r 30 -q:v 2 "$LOCAL_WORKSPACE/temp_input/%05d.jpg" -v warning
echo "[2/6] Syncing frames to 3090..."
ssh $REMOTE_HOST "mkdir -p $REMOTE_INPUT $REMOTE_OUTPUT && rm -rf $REMOTE_INPUT/* $REMOTE_OUTPUT/*"
rsync -a --delete "$LOCAL_WORKSPACE/temp_input/" "$REMOTE_HOST:$REMOTE_INPUT/"
echo "[3/6] Running SAM3D Inference on 3090..."
# We create a python wrapper on the remote to run demo.py AND extract all the MHR json data!
# Quoted 'EOF' delimiter => no local variable expansion inside the heredoc;
# the Python below is written to remote_wrapper.py verbatim.
cat << 'EOF' > "$LOCAL_WORKSPACE/remote_wrapper.py"
import os, sys, subprocess, json, pickle, torch
sam_dir = os.path.expanduser("~/sam-3d-body")
in_dir = f"{sam_dir}/temp_input"
out_dir = f"{sam_dir}/temp_output"
ckpt = "/home/straughter/ComfyUI/models/sam3dbody/model.ckpt"
mhr = "/home/straughter/ComfyUI/models/sam3dbody/assets/mhr_model.pt"
print(">> Running demo.py...")
subprocess.run([f"{sam_dir}/venv/bin/python", f"{sam_dir}/demo.py",
                "--checkpoint_path", ckpt,
                "--image_folder", in_dir,
                "--output_folder", out_dir,
                "--mhr_path", mhr], check=True)
print(">> Extracting MHR Base...")
model = torch.jit.load(mhr)
identity, pose, extra = torch.zeros(1, 45), torch.zeros(1, 204), torch.zeros(1, 72)
with torch.no_grad(): verts, skel_state = model(identity, pose, extra)
skeleton = model.character_torch.skeleton
transforms = skel_state[0].cpu().tolist()
lbs = model.character_torch.linear_blend_skinning
weights_per_vert = [[] for _ in range(verts.shape[1])]
for i in range(len(lbs.vert_indices_flattened)):
    v, j, w = lbs.vert_indices_flattened[i], lbs.skin_indices_flattened[i], lbs.skin_weights_flattened[i]
    weights_per_vert[v.item()].append((j.item(), w.item()))
skel_data = {"joint_names": list(skeleton.joint_names), "joint_parents": [int(p) for p in skeleton.joint_parents], "transforms": transforms, "weights": weights_per_vert}
with open(f"{out_dir}/mhr_skeleton.json", "w") as f: json.dump(skel_data, f)
verts_np, faces_np = verts[0].cpu().numpy(), model.character_torch.mesh.faces.cpu().numpy()
with open(f"{out_dir}/mhr_mesh.obj", "w") as f:
    for v in verts_np: f.write(f"v {v[0]} {v[1]} {v[2]}\n")
    for face in faces_np: f.write(f"f {face[0]+1} {face[1]+1} {face[2]+1}\n")
print(">> Extracting MHR Poses...")
pkl_files = sorted([f for f in os.listdir(out_dir) if f.endswith('.pkl')])
os.makedirs(f"{out_dir}/poses", exist_ok=True)
for pkl in pkl_files:
    with open(f"{out_dir}/{pkl}", 'rb') as f: data = pickle.load(f)
    pose_tensor = torch.from_numpy(data[0]['mhr_model_params']).unsqueeze(0)
    with torch.no_grad(): _, skel = model(identity, pose_tensor, extra)
    with open(f"{out_dir}/poses/{pkl.replace('.pkl', '.json')}", "w") as f:
        json.dump({"transforms": skel[0].cpu().tolist()}, f)
print(">> Done.")
EOF
scp "$LOCAL_WORKSPACE/remote_wrapper.py" "$REMOTE_HOST:$REMOTE_SAM/remote_wrapper.py"
ssh $REMOTE_HOST "cd $REMOTE_SAM && ./venv/bin/python remote_wrapper.py"
echo "[4/6] Syncing MHR Data back to Mac..."
mkdir -p "$LOCAL_WORKSPACE/temp_output"
rsync -a "$REMOTE_HOST:$REMOTE_OUTPUT/mhr_skeleton.json" "$LOCAL_WORKSPACE/temp_output/"
rsync -a "$REMOTE_HOST:$REMOTE_OUTPUT/mhr_mesh.obj" "$LOCAL_WORKSPACE/temp_output/"
rsync -a "$REMOTE_HOST:$REMOTE_OUTPUT/poses/" "$LOCAL_WORKSPACE/temp_output/poses/"
echo "[5/6] Building Blender Scene..."
# Headless Blender: everything after '--' is argv for build_mhr_scene.py.
/Applications/Blender.app/Contents/MacOS/Blender --background --python "$LOCAL_WORKSPACE/build_mhr_scene.py" -- \
  --skeleton "$LOCAL_WORKSPACE/temp_output/mhr_skeleton.json" \
  --mesh "$LOCAL_WORKSPACE/temp_output/mhr_mesh.obj" \
  --poses "$LOCAL_WORKSPACE/temp_output/poses" \
  --out "$LOCAL_WORKSPACE/Option_A_Mocap.blend"
echo "[6/6] Success! Output saved to $LOCAL_WORKSPACE/Option_A_Mocap.blend"
import urllib.request
import urllib.parse
import json
import time
import sys
import os
# Base URL of the remote ComfyUI HTTP API (3090 machine).
COMFY_API_URL = "http://192.168.1.143:8188"
def queue_prompt(prompt):
    """POST an API-format workflow dict to ComfyUI's /prompt endpoint.

    Args:
        prompt: ComfyUI API-format node graph (dict keyed by node id).

    Returns:
        The decoded JSON response (contains 'prompt_id').

    Exits:
        Terminates the process with status 1 when ComfyUI is unreachable.
    """
    p = {"prompt": prompt}
    data = json.dumps(p).encode('utf-8')
    req = urllib.request.Request(f"{COMFY_API_URL}/prompt", data=data)
    req.add_header('Content-Type', 'application/json')
    try:
        # Context manager closes the HTTP response on every path
        # (the original leaked the response object).
        with urllib.request.urlopen(req) as response:
            return json.loads(response.read())
    except urllib.error.URLError as e:
        print(f"Error connecting to ComfyUI at {COMFY_API_URL}: {e}")
        sys.exit(1)
def get_history(prompt_id):
    """Fetch the execution history for *prompt_id* from ComfyUI.

    Args:
        prompt_id: Id returned by queue_prompt().

    Returns:
        The /history JSON dict; it contains prompt_id as a key once the
        job has finished.
    """
    req = urllib.request.Request(f"{COMFY_API_URL}/history/{prompt_id}")
    # Close the HTTP response deterministically (the original leaked it).
    with urllib.request.urlopen(req) as response:
        return json.loads(response.read())
def generate_reference_video(video_path, render_mode="mesh_only"):
    """Queue a SAM 3D Body mocap job on the remote ComfyUI and await it.

    Builds a VHS_LoadVideoPath -> SAM3DBodyProcessBatch -> VHS_VideoCombine
    graph, queues it, then polls /history every 5 seconds until done.

    Args:
        video_path: Path to the input video on the ComfyUI machine.
        render_mode: SAM3DBodyProcessBatch render mode
            (side_by_side, mesh_only, mask_only, overlay).

    Returns:
        The output video filename (in ComfyUI's output folder) on
        success, otherwise None.
    """
    # Construct the ComfyUI API workflow dictionary
    prompt = {
        "1": {
            "class_type": "VHS_LoadVideoPath",
            "inputs": {
                "video": video_path,
                "force_rate": 0,
                "custom_width": 0,
                "custom_height": 0,
                "frame_load_cap": 0,
                "skip_first_frames": 0,
                "select_every_nth": 1,
            }
        },
        "2": {
            "class_type": "LoadSAM3DBodyModel",
            "inputs": {
                "attn_backend": "auto",
                "precision": "fp32"
            }
        },
        "3": {
            "class_type": "SAM3DBodyProcessBatch",
            "inputs": {
                "model": ["2", 0],
                "image": ["1", 0],
                "bbox_threshold": 0.8,
                "render_mode": render_mode
            }
        },
        "4": {
            "class_type": "VHS_VideoInfoSource",
            "inputs": {
                "video_info": ["1", 3]
            }
        },
        "5": {
            "class_type": "VHS_VideoCombine",
            "inputs": {
                "images": ["3", 0],
                # Reuse the source video's fps extracted by node 4.
                "frame_rate": ["4", 0],
                "loop_count": 0,
                "filename_prefix": "sam3d_kling_ref",
                "format": "video/h264-mp4",
                "pingpong": False,
                "save_output": True
            }
        }
    }
    print(f"Queueing SAM 3D Body job for video: {video_path}")
    response = queue_prompt(prompt)
    prompt_id = response['prompt_id']
    print(f"Job queued! Prompt ID: {prompt_id}")
    # Poll /history until the prompt id appears (job finished).
    while True:
        history = get_history(prompt_id)
        if prompt_id in history:
            print("Job completed!")
            outputs = history[prompt_id].get('outputs', {})
            # Look for the VHS_VideoCombine node output (Node 5)
            if "5" in outputs:
                gifs = outputs["5"].get("gifs", [])
                if gifs:
                    filename = gifs[0].get("filename")
                    # BUG FIX: `filename` was assigned but never
                    # interpolated — the messages printed a literal
                    # placeholder, which broke run_kling_mocap.sh's
                    # `grep "output folder as:"` parsing.
                    print(f"\nSuccess! Output video saved to 3090 ComfyUI output folder as: {filename}")
                    print(f"To view it, you can fetch it with: ")
                    print(f"scp straughter@192.168.1.143:/home/straughter/ComfyUI/output/{filename} .")
                    return filename
            print("Job completed but couldn't find output filename.")
            break
        print("Processing... (this may take a few minutes for a long video)", end="\r")
        time.sleep(5)
    return None
if __name__ == "__main__":
    # CLI: <remote_video_path> [render_mode]
    if len(sys.argv) < 2:
        print("Usage: python sam3d_comfy_api.py /path/to/video/on/3090.mp4 [render_mode]")
        print("Valid render_modes: side_by_side, mesh_only, mask_only, overlay")
        sys.exit(1)
    video_path = sys.argv[1]
    # Default to the clean mesh-only render used as a motion reference.
    render_mode = sys.argv[2] if len(sys.argv) > 2 else "mesh_only"
    generate_reference_video(video_path, render_mode)
# Blender add-on registration metadata (read by Blender's add-on manager).
bl_info = {
    "name": "CEB SAM 3D Body Loader",
    "author": "CEB Studios / Gemini CLI",
    "version": (1, 0),
    "blender": (4, 0, 0),  # minimum supported Blender version
    "location": "View3D > N-Panel > CEB SAM 3D",
    "description": "Loads SAM 3D Body animations into Blender using MHR character.",
    "category": "Animation",
}
import bpy
import os
import subprocess
import json
import pickle
import mathutils
# ------------------------------------------------------------------------
# Properties
# ------------------------------------------------------------------------
class SAM3D_Properties(bpy.types.PropertyGroup):
    """Add-on settings: local tool/model paths, I/O folders, and remote
    (SSH) inference configuration for the 3090 machine."""

    # --- Local inference paths ---
    portable_python: bpy.props.StringProperty(
        name="Portable Python",
        description="Path to the python executable in the portable SAM folder",
        default="",
        subtype='FILE_PATH'
    )
    sam_3d_body_path: bpy.props.StringProperty(
        name="SAM 3D Body Folder",
        description="Path to the sam-3d-body repository folder",
        default="",
        subtype='DIR_PATH'
    )
    checkpoint_path: bpy.props.StringProperty(
        name="SAM Checkpoint",
        description="Path to the Dino V3/V8 model (.ckpt or .pth)",
        default="",
        subtype='FILE_PATH'
    )
    mhr_model_path: bpy.props.StringProperty(
        name="MHR Model Path",
        description="Path to mhr_model.pt from MHR repository",
        default="",
        subtype='FILE_PATH'
    )
    # --- Input/output folders ---
    input_folder: bpy.props.StringProperty(
        name="Image Folder",
        description="Folder containing the rendered frames",
        default="",
        subtype='DIR_PATH'
    )
    output_folder: bpy.props.StringProperty(
        name="Results Folder",
        description="Folder where results (PKL) will be saved",
        default="",
        subtype='DIR_PATH'
    )
    character_prefix: bpy.props.StringProperty(
        name="File Prefix",
        description="Prefix of the PKL files to load",
        default=""
    )
    # 1-based index selecting which detected person to load.
    character_index: bpy.props.IntProperty(
        name="Character Index",
        description="Index of the character to load (if multiple detected)",
        default=1,
        min=1
    )
    # Remote Settings
    use_remote: bpy.props.BoolProperty(
        name="Use Remote (3090)",
        description="Run inference on a remote machine via SSH",
        default=True
    )
    remote_host: bpy.props.StringProperty(
        name="Remote Host",
        description="IP or hostname of the 3090 machine",
        default="192.168.1.143"
    )
    remote_user: bpy.props.StringProperty(
        name="Remote User",
        description="SSH username for the remote machine",
        default="straughter"
    )
    remote_sam_path: bpy.props.StringProperty(
        name="Remote SAM Path",
        description="Path to sam-3d-body repo on remote machine",
        default="~/sam-3d-body"
    )
    remote_ckpt_path: bpy.props.StringProperty(
        name="Remote Checkpoint Path",
        description="Path to the SAM 3D Body checkpoint on the remote machine",
        default="/home/straughter/ComfyUI/models/sam3dbody/model.ckpt"
    )
    remote_mhr_path: bpy.props.StringProperty(
        name="Remote MHR Model Path",
        description="Path to mhr_model.pt on the remote machine",
        default="/home/straughter/ComfyUI/models/sam3dbody/assets/mhr_model.pt"
    )
# ------------------------------------------------------------------------
# Helper Scripts (to be run in External Python)
# ------------------------------------------------------------------------
MHR_EXPORTER_SCRIPT = """
import torch
import json
import os
import numpy as np
# Writes mhr_skeleton.json and mhr_mesh.obj to the current working directory.
# Usage: python temp_exporter.py <path/to/mhr_model.pt>
def export_mhr_from_pt(pt_path="mhr_model.pt"):
if not os.path.exists(pt_path):
print(f"Error: {pt_path} not found.")
return
print(f"Loading MHR model from {pt_path}...")
model = torch.jit.load(pt_path)
identity = torch.zeros(1, 45)
pose = torch.zeros(1, 204)
extra = torch.zeros(1, 72)
print("Running forward pass...")
with torch.no_grad():
verts, skel_state = model(identity, pose, extra)
skeleton = model.character_torch.skeleton
joint_names = list(skeleton.joint_names)
joint_parents = [int(p) for p in skeleton.joint_parents]
transforms = skel_state[0].cpu().tolist()
lbs = model.character_torch.linear_blend_skinning
skin_indices = lbs.skin_indices_flattened.cpu().tolist()
skin_weights = lbs.skin_weights_flattened.cpu().tolist()
vert_indices = lbs.vert_indices_flattened.cpu().tolist()
num_verts = verts.shape[1]
weights_per_vert = [[] for _ in range(num_verts)]
for i in range(len(vert_indices)):
v_idx = vert_indices[i]
j_idx = skin_indices[i]
weight = skin_weights[i]
weights_per_vert[v_idx].append((j_idx, weight))
skeleton_data = {
"joint_names": joint_names,
"joint_parents": joint_parents,
"transforms": transforms,
"weights": weights_per_vert
}
with open("mhr_skeleton.json", "w") as f:
json.dump(skeleton_data, f, indent=4)
verts_np = verts[0].cpu().numpy()
faces_np = model.character_torch.mesh.faces.cpu().numpy()
with open("mhr_mesh.obj", "w") as f:
for v in verts_np:
f.write(f"v {v[0]} {v[1]} {v[2]}\\n")
for face in faces_np:
f.write(f"f {face[0]+1} {face[1]+1} {face[2]+1}\\n")
print("Exported mhr_skeleton.json and mhr_mesh.obj")
if __name__ == "__main__":
import sys
pt = sys.argv[1] if len(sys.argv) > 1 else "mhr_model.pt"
export_mhr_from_pt(pt)
"""
MHR_POSE_EXPORTER_SCRIPT = """
import torch
import json
import pickle
import os
import sys
# Writes mhr_pose.json to the current working directory.
# Usage: python temp_pose_exporter.py <path/to/pose.pkl> <path/to/mhr_model.pt>
def export_pose(pkl_path="sam3dbody_pose.pkl", pt_path="mhr_model.pt"):
if not os.path.exists(pkl_path) or not os.path.exists(pt_path):
print("Error: Files not found.")
return
with open(pkl_path, 'rb') as f:
data_list = pickle.load(f)
# Take the first detected person
pose_params = data_list[0]['mhr_model_params']
pose_tensor = torch.from_numpy(pose_params).unsqueeze(0) # [1, 204]
print(f"Loading MHR model to compute pose transforms...")
model = torch.jit.load(pt_path)
identity = torch.zeros(1, 45)
extra = torch.zeros(1, 72)
with torch.no_grad():
_, skel_state = model(identity, pose_tensor, extra)
# skel_state: [batch, num_joints, 8] — [tx,ty,tz, qx,qy,qz,qw, scale]
transforms = skel_state[0].cpu().tolist()
pose_data = {"transforms": transforms}
with open("mhr_pose.json", "w") as f:
json.dump(pose_data, f, indent=4)
print("Exported mhr_pose.json")
if __name__ == "__main__":
pkl = sys.argv[1] if len(sys.argv) > 1 else "sam3dbody_pose.pkl"
pt = sys.argv[2] if len(sys.argv) > 2 else "mhr_model.pt"
export_pose(pkl, pt)
"""
# ------------------------------------------------------------------------
# Operators
# ------------------------------------------------------------------------
class SAM3D_OT_RunInference(bpy.types.Operator):
    """Run SAM 3D Body inference on the input folder, locally or on a remote GPU host."""
    bl_idname = "sam3d.run_inference"
    bl_label = "Run SAM 3D Inference"
    bl_description = "Run the SAM 3D Body inference on the selected images"

    def execute(self, context):
        """Dispatch to the remote or local pipeline based on scene properties."""
        props = context.scene.sam3d_props
        if props.use_remote:
            return self.execute_remote(context, props)
        return self.execute_local(context, props)

    def execute_local(self, context, props):
        """Run demo.py with the configured portable Python on this machine."""
        if not props.portable_python or not props.sam_3d_body_path:
            self.report({'ERROR'}, "Please set Python and SAM 3D Body paths")
            return {'CANCELLED'}
        demo_path = os.path.join(props.sam_3d_body_path, "demo.py")
        if not os.path.exists(demo_path):
            self.report({'ERROR'}, f"demo.py not found in {props.sam_3d_body_path}")
            return {'CANCELLED'}
        # Local demo.py takes --checkpoint; the remote fork takes --checkpoint_path.
        cmd = [
            props.portable_python,
            demo_path,
            "--checkpoint", props.checkpoint_path,
            "--image_folder", props.input_folder,
            "--output_folder", props.output_folder,
        ]
        try:
            subprocess.run(cmd, check=True)
            self.report({'INFO'}, "Local Inference completed successfully")
        except (subprocess.CalledProcessError, OSError) as e:
            # OSError covers a missing/invalid interpreter path, which previously
            # escaped as an unhandled exception inside Blender.
            self.report({'ERROR'}, f"Local Inference failed: {e}")
            return {'CANCELLED'}
        return {'FINISHED'}

    def execute_remote(self, context, props):
        """rsync images to the remote host, run demo.py over SSH, sync results back."""
        host = props.remote_host
        user = props.remote_user
        # Keep the path exactly as entered: a leading "~" must be expanded by the
        # REMOTE shell, not this machine. (The old conditional applied
        # os.path.expanduser only to paths NOT starting with "~", which is a
        # no-op either way — dead logic removed.)
        remote_sam = props.remote_sam_path
        # Working directories on the remote host
        remote_input = f"{remote_sam}/temp_input"
        remote_output = f"{remote_sam}/temp_output"
        # 1. Sync images to remote (--delete keeps the remote folder an exact mirror)
        self.report({'INFO'}, "Syncing images to remote...")
        try:
            subprocess.run(["rsync", "-avz", "--delete", props.input_folder + "/", f"{user}@{host}:{remote_input}"], check=True)
        except Exception as e:
            self.report({'ERROR'}, f"Failed to sync images to remote: {e}")
            return {'CANCELLED'}
        # 2. Run inference on remote via SSH
        remote_ckpt = props.remote_ckpt_path
        remote_mhr = props.remote_mhr_path
        # Command to run on remote (note: arg is --checkpoint_path not --checkpoint)
        cmd_str = (
            f"cd {remote_sam} && "
            f"./venv/bin/python demo.py "
            f"--checkpoint_path {remote_ckpt} "
            f"--image_folder {remote_input} "
            f"--output_folder {remote_output} "
            f"--mhr_path {remote_mhr}"
        )
        self.report({'INFO'}, "Running remote inference on 3090...")
        try:
            subprocess.run(["ssh", f"{user}@{host}", cmd_str], check=True)
        except (subprocess.CalledProcessError, OSError) as e:
            self.report({'ERROR'}, f"Remote inference failed: {e}")
            return {'CANCELLED'}
        # 3. Sync results back into the local output folder
        self.report({'INFO'}, "Syncing results back...")
        os.makedirs(props.output_folder, exist_ok=True)
        try:
            subprocess.run(["rsync", "-avz", f"{user}@{host}:{remote_output}/", props.output_folder], check=True)
        except Exception as e:
            self.report({'ERROR'}, f"Failed to sync results back: {e}")
            return {'CANCELLED'}
        self.report({'INFO'}, "Remote Inference completed successfully")
        return {'FINISHED'}
class SAM3D_OT_LoadMHR(bpy.types.Operator):
    """Import the base MHR character (mesh + armature + skin weights).

    If the exported skeleton/mesh assets are missing, the external Python
    interpreter is invoked with MHR_EXPORTER_SCRIPT to generate them first.
    """
    bl_idname = "sam3d.load_mhr"
    bl_label = "Load MHR Character"
    bl_description = "Load the base MHR character (Mesh + Armature)"
    def execute(self, context):
        props = context.scene.sam3d_props
        if not props.mhr_model_path or not props.portable_python:
            self.report({'ERROR'}, "Please set MHR Model Path and Portable Python")
            return {'CANCELLED'}
        # Exported assets are cached under <output_folder>/assets so repeat
        # loads skip the (slow) external export step.
        assets_dir = os.path.join(props.output_folder, "assets")
        os.makedirs(assets_dir, exist_ok=True)
        skel_json = os.path.join(assets_dir, "mhr_skeleton.json")
        mesh_obj = os.path.join(assets_dir, "mhr_mesh.obj")
        if not os.path.exists(skel_json) or not os.path.exists(mesh_obj):
            # Run exporter script — it writes outputs to CWD, so run from assets_dir
            exporter_py = os.path.join(assets_dir, "temp_exporter.py")
            with open(exporter_py, "w") as f:
                f.write(MHR_EXPORTER_SCRIPT)
            cmd = [props.portable_python, exporter_py, props.mhr_model_path]
            try:
                subprocess.run(cmd, check=True, cwd=assets_dir)
            except Exception as e:
                self.report({'ERROR'}, f"Failed to export MHR base: {e}")
                return {'CANCELLED'}
        # Now import into Blender (logic from script 3)
        self.import_mhr(skel_json, mesh_obj)
        return {'FINISHED'}
    def import_mhr(self, skel_json, mesh_obj_path):
        """Build the Blender mesh, armature, and vertex groups from the exported assets."""
        with open(skel_json, "r") as f:
            data = json.load(f)
        joint_names = data["joint_names"]
        joint_parents = data["joint_parents"]
        rest_transforms = data["transforms"]
        all_weights = data["weights"]
        def fix_coords(vec):
            # Convert MHR coordinates to Blender's Z-up frame: (x, y, z) -> (x, -z, y).
            return mathutils.Vector((vec[0], -vec[2], vec[1]))
        # Create Mesh
        mesh_data = bpy.data.meshes.new("MHR_Mesh")
        mesh_obj = bpy.data.objects.new("MHR_Mesh", mesh_data)
        bpy.context.collection.objects.link(mesh_obj)
        verts = []
        faces = []
        # Minimal OBJ parser: only 'v' and 'f' records are consumed.
        with open(mesh_obj_path, 'r') as f:
            for line in f:
                if line.startswith('v '):
                    v = [float(x) for x in line.split()[1:]]
                    verts.append(fix_coords(v))
                elif line.startswith('f '):
                    # OBJ indices are 1-based; strip any /vt or //vn suffixes.
                    f_indices = [int(x.split('/')[0]) - 1 for x in line.split()[1:]]
                    faces.append(f_indices)
        mesh_data.from_pydata(verts, [], faces)
        mesh_data.update()
        # Create Armature
        arm_data = bpy.data.armatures.new("MHR_Armature")
        arm_obj = bpy.data.objects.new("MHR_Armature", arm_data)
        bpy.context.collection.objects.link(arm_obj)
        bpy.context.view_layer.objects.active = arm_obj
        bpy.ops.object.mode_set(mode='EDIT')
        bones = []
        # Bone heads come from the joint rest translations
        # (first 3 floats of each 8-float transform row).
        for i, name in enumerate(joint_names):
            bone = arm_data.edit_bones.new(name)
            bone.head = fix_coords(rest_transforms[i][0:3])
            bones.append(bone)
        for i, p in enumerate(joint_parents):
            if p != -1:
                bones[i].parent = bones[p]
        # Set tails: aim at the first child, or extrapolate for leaf/root bones.
        for i, bone in enumerate(bones):
            children = [j for j, p in enumerate(joint_parents) if p == i]
            if children:
                bone.tail = fix_coords(rest_transforms[children[0]][0:3])
                # Blender deletes zero-length bones on mode switch — nudge the tail.
                if (bone.tail - bone.head).length < 1e-4: bone.tail += mathutils.Vector((0, 0, 0.01))
            else:
                if bone.parent:
                    direction = bone.head - bone.parent.head
                    bone.tail = bone.head + (direction.normalized() * 0.05 if direction.length > 1e-4 else mathutils.Vector((0, 0, 0.05)))
                else:
                    bone.tail = bone.head + mathutils.Vector((0, 0, 0.05))
        bpy.ops.object.mode_set(mode='OBJECT')
        # Weight Painting: one vertex group per joint, weights from the LBS export.
        for name in joint_names:
            mesh_obj.vertex_groups.new(name=name)
        for v_idx, v_weights in enumerate(all_weights):
            for j_idx, weight in v_weights:
                if weight > 0:
                    mesh_obj.vertex_groups[joint_names[j_idx]].add([v_idx], weight, 'REPLACE')
        mesh_obj.parent = arm_obj
        modifier = mesh_obj.modifiers.new(name="Armature", type='ARMATURE')
        modifier.object = arm_obj
class SAM3D_OT_ScanNew(bpy.types.Operator):
    """Scan a SAM 3D Body result PKL and report how many people it contains.

    Also seeds the batch-load character prefix from the chosen filename.
    """
    bl_idname = "sam3d.scan_new"
    bl_label = "Scan PKL for Characters"
    bl_description = "Scan a PKL file to see how many characters it contains"
    filepath: bpy.props.StringProperty(subtype="FILE_PATH")
    def execute(self, context):
        if not self.filepath.endswith(".pkl"):
            self.report({'ERROR'}, "Please select a .pkl file")
            return {'CANCELLED'}
        # NOTE: pickle.load executes arbitrary code on untrusted files — only
        # open PKLs produced by your own inference runs.
        try:
            with open(self.filepath, 'rb') as f:
                data = pickle.load(f)
        except Exception as e:
            # A corrupt/foreign file used to raise uncaught inside Blender.
            self.report({'ERROR'}, f"Failed to read PKL: {e}")
            return {'CANCELLED'}
        count = len(data)
        self.report({'INFO'}, f"Detected {count} characters in file")
        # Update prefix based on filename. splitext (instead of split('.')[0])
        # keeps dots that are part of the name, e.g. "shot.01.pkl" -> "shot.01".
        context.scene.sam3d_props.character_prefix = os.path.splitext(os.path.basename(self.filepath))[0]
        return {'FINISHED'}
    def invoke(self, context, event):
        # Open the file browser; execute() runs after the user picks a file.
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}
class SAM3D_OT_BatchLoad(bpy.types.Operator):
    """Apply a folder of per-frame pose PKLs to the MHR armature as keyframes."""
    bl_idname = "sam3d.batch_load"
    bl_label = "Batch Load Poses"
    bl_description = "Batch load animations from PKL files in the results folder"

    def execute(self, context):
        props = context.scene.sam3d_props
        if not props.output_folder or not props.portable_python:
            self.report({'ERROR'}, "Output Folder or Python path not set")
            return {'CANCELLED'}
        arm_obj = context.active_object
        if not arm_obj or arm_obj.type != 'ARMATURE':
            self.report({'ERROR'}, "Please select the MHR Armature")
            return {'CANCELLED'}
        # Write the external pose-exporter helper next to the cached assets.
        assets_dir = os.path.join(props.output_folder, "assets")
        os.makedirs(assets_dir, exist_ok=True)
        pose_exporter_py = os.path.join(assets_dir, "temp_pose_exporter.py")
        with open(pose_exporter_py, "w") as f:
            f.write(MHR_POSE_EXPORTER_SCRIPT)
        # Rest-pose data comes from the JSON produced by "Load MHR Character".
        skel_json = os.path.join(assets_dir, "mhr_skeleton.json")
        if not os.path.exists(skel_json):
            self.report({'ERROR'}, "mhr_skeleton.json not found — run 'Load MHR Character' first")
            return {'CANCELLED'}
        with open(skel_json, "r") as f:
            skel_data = json.load(f)
        joint_names = skel_data["joint_names"]
        rest_transforms = skel_data["transforms"]
        joint_parents = skel_data["joint_parents"]
        # Collect the per-frame PKLs matching the character prefix, in sorted order.
        all_files = os.listdir(props.output_folder)
        files = sorted([f for f in all_files if f.startswith(props.character_prefix) and f.endswith(".pkl")])
        if not files:
            self.report({'ERROR'}, f"No PKL files with prefix '{props.character_prefix}' found in {props.output_folder}")
            return {'CANCELLED'}
        # Change-of-basis matrix from the MHR frame to Blender's Z-up frame.
        F = mathutils.Matrix(((1, 0, 0, 0), (0, 0, -1, 0), (0, 1, 0, 0), (0, 0, 0, 1)))
        for i, filename in enumerate(files):
            pkl_path = os.path.join(props.output_folder, filename)
            # Script writes mhr_pose.json to its CWD — run from assets_dir
            cmd = [props.portable_python, pose_exporter_py, pkl_path, props.mhr_model_path]
            try:
                subprocess.run(cmd, check=True, cwd=assets_dir)
            except (subprocess.CalledProcessError, OSError) as e:
                # check=True failures used to propagate as an unhandled
                # exception inside Blender; report them cleanly instead.
                self.report({'ERROR'}, f"Pose export failed for {filename}: {e}")
                return {'CANCELLED'}
            temp_json = os.path.join(assets_dir, "mhr_pose.json")
            # Load JSON and apply to the frame (one PKL per frame, starting at 1).
            with open(temp_json, "r") as f:
                pose_data = json.load(f)
            pose_transforms = pose_data["transforms"]
            context.scene.frame_set(i + 1)
            self.apply_pose(arm_obj, joint_names, rest_transforms, pose_transforms, joint_parents, F)
            # Keyframe all bones
            for bone in arm_obj.pose.bones:
                bone.keyframe_insert(data_path="location")
                bone.keyframe_insert(data_path="rotation_quaternion")
        self.report({'INFO'}, f"Loaded {len(files)} frames")
        return {'FINISHED'}

    def apply_pose(self, arm_obj, joint_names, rest_transforms, pose_transforms, joint_parents, F):
        """Pose each bone so it matches pose vs rest skeleton state.

        Each transform row is [tx, ty, tz, qx, qy, qz, qw, scale] in MHR
        coordinates; F conjugates the pose/rest delta into Blender's frame.
        """
        def get_mhr_matrix(t_mhr):
            # mathutils.Quaternion takes (w, x, y, z); MHR stores (x, y, z, w).
            rot = mathutils.Quaternion((t_mhr[6], t_mhr[3], t_mhr[4], t_mhr[5])).to_matrix().to_4x4()
            rot.translation = mathutils.Vector((t_mhr[0], t_mhr[1], t_mhr[2]))
            return rot
        F_inv = F.inverted()  # hoisted: F is constant across all joints
        for i, name in enumerate(joint_names):
            bone = arm_obj.pose.bones.get(name)
            if not bone:
                continue
            m_rest = get_mhr_matrix(rest_transforms[i])
            m_pose = get_mhr_matrix(pose_transforms[i])
            delta_mhr = m_pose @ m_rest.inverted()
            delta_b = F @ delta_mhr @ F_inv
            target_world_matrix = delta_b @ bone.bone.matrix_local
            if joint_parents[i] == -1:
                bone.matrix = target_world_matrix
            else:
                # Keep rigid distance (relative to parent): keep the bone's
                # current translation (already placed by parenting) and take
                # only the orientation from the target matrix.
                bpy.context.view_layer.update()
                new_mat = target_world_matrix.copy()
                new_mat.translation = bone.matrix.to_translation()
                bone.matrix = new_mat
            bpy.context.view_layer.update()
class SAM3D_OT_PatchRemoteDemo(bpy.types.Operator):
    """Inject a pickle-dump of the inference outputs into the remote demo.py."""
    bl_idname = "sam3d.patch_remote_demo"
    bl_label = "Patch Remote demo.py"
    bl_description = "SSH to 3090 and inject PKL export code into demo.py (matches CEB fork)"

    def execute(self, context):
        props = context.scene.sam3d_props
        host = props.remote_host
        user = props.remote_user
        remote_sam = props.remote_sam_path
        # Patch script executed ON the remote host. It inserts a
        # pickle.dump(outputs, ...) right after the cv2.imwrite(...) call in
        # demo.py. Idempotent: exits early if already patched. The old version
        # searched for a hard-coded ')\n' pattern and, when the pattern was
        # missing, inserted at a bogus offset and corrupted demo.py — this
        # version matches parentheses, detects indentation, and aborts cleanly.
        # NOTE(review): pkl_code assumes demo.py has output_folder, image_path
        # and outputs in scope at the insertion point — matches the CEB fork.
        patch_script = r"""
import sys
demo_path = sys.argv[1]
with open(demo_path, 'r') as f:
    src = f.read()
if "pickle.dump(outputs" in src:
    print("Already patched.")
    sys.exit(0)
# Add pickle import at top
if "import pickle" not in src:
    src = src.replace("import cv2", "import cv2\nimport pickle", 1)
# Locate the cv2.imwrite(...) call
idx = src.find('cv2.imwrite(')
if idx == -1:
    print("Error: cv2.imwrite( call not found; demo.py left unchanged.")
    sys.exit(1)
# Reuse the indentation of the imwrite line for the injected code
line_start = src.rfind('\n', 0, idx) + 1
prefix = src[line_start:idx]
indent = prefix[:len(prefix) - len(prefix.lstrip())]
pkl_code = (
    "\n"
    + indent + 'pickle_file = f"{output_folder}/{os.path.basename(image_path)[:-4]}.pkl"\n'
    + indent + 'with open(pickle_file, "wb") as f:\n'
    + indent + '    pickle.dump(outputs, f)\n'
)
# Walk to the matching close paren so the insert point does not depend on
# how the call is formatted
depth = 0
insert_at = -1
for i in range(idx, len(src)):
    ch = src[i]
    if ch == '(':
        depth += 1
    elif ch == ')':
        depth -= 1
        if depth == 0:
            insert_at = src.find('\n', i) + 1
            break
if insert_at <= 0:
    print("Error: could not locate end of cv2.imwrite call; demo.py left unchanged.")
    sys.exit(1)
src = src[:insert_at] + pkl_code + src[insert_at:]
with open(demo_path, 'w') as f:
    f.write(src)
print("Patched successfully.")
"""
        # Write patch script locally, scp it over, run it, clean up.
        # (os and subprocess are module-level imports; only tempfile is local.)
        import tempfile
        tmp = tempfile.NamedTemporaryFile(suffix=".py", delete=False, mode='w')
        tmp.write(patch_script)
        tmp.close()
        remote_tmp = f"/tmp/patch_demo_{user}.py"
        try:
            subprocess.run(["scp", tmp.name, f"{user}@{host}:{remote_tmp}"], check=True)
            # The remote tmp file is removed only on success (&&) so a failed
            # run can be inspected by hand.
            result = subprocess.run(
                ["ssh", f"{user}@{host}",
                 f"python3 {remote_tmp} {remote_sam}/demo.py && rm {remote_tmp}"],
                capture_output=True, text=True
            )
            if result.returncode == 0:
                self.report({'INFO'}, f"Remote patch: {result.stdout.strip()}")
            else:
                # The patch script reports its errors on stdout; include both.
                self.report({'ERROR'}, f"Patch failed: {result.stdout.strip()} {result.stderr.strip()}")
        except Exception as e:
            self.report({'ERROR'}, f"SSH error: {e}")
        finally:
            os.unlink(tmp.name)
        return {'FINISHED'}
# ------------------------------------------------------------------------
# Panel
# ------------------------------------------------------------------------
class SAM3D_PT_MainPanel(bpy.types.Panel):
    """Sidebar (N-panel) UI for the CEB SAM 3D Body loader."""
    bl_label = "CEB SAM 3D Body Loader"
    bl_idname = "SAM3D_PT_MainPanel"
    bl_space_type = 'VIEW_3D'
    bl_region_type = 'UI'
    bl_category = 'CEB SAM 3D'

    def draw(self, context):
        layout = self.layout
        props = context.scene.sam3d_props

        # Execution mode toggle + per-mode settings
        mode_col = layout.column(align=True)
        mode_col.prop(props, "use_remote")
        if props.use_remote:
            remote_box = layout.box()
            remote_box.label(text="Remote Settings (3090)", icon='NETWORK_DRIVE')
            for remote_prop in ("remote_host", "remote_user", "remote_sam_path",
                                "remote_ckpt_path", "remote_mhr_path"):
                remote_box.prop(props, remote_prop)
            remote_box.operator("sam3d.patch_remote_demo",
                                text="Patch Remote demo.py (add PKL export)",
                                icon='SCRIPTPLUGINS')
        else:
            for local_prop in ("portable_python", "sam_3d_body_path", "checkpoint_path"):
                mode_col.prop(props, local_prop)

        # Paths shared by both modes
        layout.separator()
        layout.label(text="Local Paths", icon='FILE_FOLDER')
        paths_col = layout.column(align=True)
        for path_prop in ("mhr_model_path", "input_folder", "output_folder"):
            paths_col.prop(props, path_prop)

        # Main actions
        layout.separator()
        layout.operator("sam3d.run_inference", icon='PLAY')
        layout.separator()
        layout.operator("sam3d.load_mhr", icon='USER')

        # Character selection
        layout.separator()
        selection_box = layout.box()
        selection_box.label(text="Character Selection", icon='OUTLINER_OB_ARMATURE')
        selection_box.operator("sam3d.scan_new", text="Scan New")
        selection_box.prop(props, "character_index")

        # Batch posing
        layout.separator()
        posing_box = layout.box()
        posing_box.label(text="Batch Posing", icon='ACTION')
        posing_box.prop(props, "character_prefix")
        posing_box.operator("sam3d.batch_load", text="Batch Load from Folder")
# ------------------------------------------------------------------------
# Registration
# ------------------------------------------------------------------------
# All add-on classes, registered in order. SAM3D_Properties comes first so the
# PointerProperty type exists before any operator/panel reads it.
classes = (
    SAM3D_Properties,
    SAM3D_OT_RunInference,
    SAM3D_OT_LoadMHR,
    SAM3D_OT_ScanNew,
    SAM3D_OT_BatchLoad,
    SAM3D_OT_PatchRemoteDemo,
    SAM3D_PT_MainPanel,
)
def register():
    """Register all classes and attach the add-on properties to the Scene."""
    for cls in classes:
        bpy.utils.register_class(cls)
    bpy.types.Scene.sam3d_props = bpy.props.PointerProperty(type=SAM3D_Properties)
def unregister():
    """Detach the Scene property, then unregister classes in reverse order."""
    del bpy.types.Scene.sam3d_props
    for cls in reversed(classes):
        bpy.utils.unregister_class(cls)
if __name__ == "__main__":
    register()
{"id":"0ee524cd-7bc8-46f2-8751-32399825f67b","revision":0,"last_node_id":15,"last_link_id":19,"nodes":[{"id":3,"type":"LoadImage","pos":[-139.9653501897791,-168.41867158918637],"size":[274.080078125,314],"flags":{},"order":0,"mode":0,"inputs":[{"localized_name":"image","name":"image","type":"COMBO","widget":{"name":"image"},"link":null},{"localized_name":"choose file to upload","name":"upload","type":"IMAGEUPLOAD","widget":{"name":"upload"},"link":null}],"outputs":[{"localized_name":"IMAGE","name":"IMAGE","type":"IMAGE","links":[14]},{"localized_name":"MASK","name":"MASK","type":"MASK","links":[15]}],"properties":{"cnr_id":"comfy-core","ver":"0.3.70","Node name for S&R":"LoadImage"},"widgets_values":["masked_dancing.png","image"]},{"id":4,"type":"PreviewImage","pos":[705.8783998102209,-199.28585908918637],"size":[243.07247747469546,259.74870734460893],"flags":{},"order":4,"mode":0,"inputs":[{"localized_name":"images","name":"images","type":"IMAGE","link":17}],"outputs":[],"properties":{"cnr_id":"comfy-core","ver":"0.3.70","Node name for 
S&R":"PreviewImage"},"widgets_values":[]},{"id":13,"type":"SAM3DBodyProcess","pos":[267.1573144481552,-348.8879498866859],"size":[297.73203125,122],"flags":{},"order":2,"mode":0,"inputs":[{"localized_name":"model","name":"model","type":"SAM3D_MODEL","link":13},{"localized_name":"image","name":"image","type":"IMAGE","link":14},{"localized_name":"mask","name":"mask","shape":7,"type":"MASK","link":15},{"localized_name":"bbox_threshold","name":"bbox_threshold","type":"FLOAT","widget":{"name":"bbox_threshold"},"link":null},{"localized_name":"inference_type","name":"inference_type","type":"COMBO","widget":{"name":"inference_type"},"link":null}],"outputs":[{"localized_name":"mesh_data","name":"mesh_data","type":"SAM3D_OUTPUT","links":[18]},{"localized_name":"skeleton","name":"skeleton","type":"SKELETON","links":null},{"localized_name":"debug_image","name":"debug_image","type":"IMAGE","links":[17]}],"properties":{"cnr_id":"ComfyUI-SAM3DBody","ver":"bcefa6d2943977abb5a194fef84e3bd8a5a4321b","Node name for S&R":"SAM3DBodyProcess"},"widgets_values":[0.8,"full"]},{"id":15,"type":"SAM3DBodyExportMesh","pos":[614.9907006843894,-537.7353359649674],"size":[283.3560546875,58],"flags":{},"order":3,"mode":0,"inputs":[{"localized_name":"mesh_data","name":"mesh_data","type":"SAM3D_OUTPUT","link":18},{"localized_name":"filename","name":"filename","type":"STRING","widget":{"name":"filename"},"link":null}],"outputs":[{"localized_name":"file_path","name":"file_path","type":"STRING","links":[19]}],"properties":{"cnr_id":"ComfyUI-SAM3DBody","ver":"bcefa6d2943977abb5a194fef84e3bd8a5a4321b","Node name for 
S&R":"SAM3DBodyExportMesh"},"widgets_values":["output_mesh.stl"]},{"id":10,"type":"Preview3D","pos":[1054.7903972406693,-443.20047541480005],"size":[400,550],"flags":{},"order":5,"mode":0,"inputs":[{"localized_name":"camera_info","name":"camera_info","shape":7,"type":"LOAD3D_CAMERA","link":null},{"localized_name":"bg_image","name":"bg_image","shape":7,"type":"IMAGE","link":null},{"localized_name":"model_file","name":"model_file","type":"STRING,FILE_3D_GLB,FILE_3D_GLTF,FILE_3D_FBX,FILE_3D_OBJ,FILE_3D_STL,FILE_3D_USDZ,FILE_3D","widget":{"name":"model_file"},"link":19},{"localized_name":"image","name":"image","type":"PREVIEW_3D","widget":{"name":"image"},"link":null}],"outputs":[],"properties":{"cnr_id":"comfy-core","ver":"0.3.75","Node name for S&R":"Preview3D","Last Time Model File":"output_mesh.stl","Scene Config":{"showGrid":true,"backgroundColor":"#282828","backgroundImage":"","backgroundRenderMode":"tiled"},"Camera Config":{"cameraType":"perspective","fov":35,"state":{"position":{"x":6.17623428216848,"y":10,"z":6.1762342821684815},"target":{"x":0,"y":2.5,"z":0},"zoom":1,"cameraType":"perspective"}},"Light Config":{"intensity":3}},"widgets_values":["",""]},{"id":12,"type":"LoadSAM3DBodyModel","pos":[-111.51691116659465,-471.03296285519843],"size":[273.7994140625,106],"flags":{},"order":1,"mode":0,"inputs":[{"localized_name":"attn_backend","name":"attn_backend","type":"COMBO","widget":{"name":"attn_backend"},"link":null},{"localized_name":"precision","name":"precision","type":"COMBO","widget":{"name":"precision"},"link":null}],"outputs":[{"localized_name":"model","name":"model","type":"SAM3D_MODEL","links":[13]}],"properties":{"cnr_id":"ComfyUI-SAM3DBody","ver":"bcefa6d2943977abb5a194fef84e3bd8a5a4321b","Node name for 
S&R":"LoadSAM3DBodyModel"},"widgets_values":["auto","fp32"]}],"links":[[13,12,0,13,0,"SAM3D_MODEL"],[14,3,0,13,1,"IMAGE"],[15,3,1,13,2,"MASK"],[17,13,2,4,0,"IMAGE"],[18,13,0,15,0,"SAM3D_OUTPUT"],[19,15,0,10,2,"STRING"]],"groups":[],"config":{},"extra":{"ds":{"scale":0.9810956222248073,"offset":[150.981718798901,734.9200209922046]},"frontendVersion":"1.35.9","workflowRendererVersion":"LG"},"version":0.4}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment