Now we generate clothes properly

This commit is contained in:
2026-02-26 04:31:05 +03:00
parent fe083f13da
commit 51dda7e79d
3 changed files with 319 additions and 120 deletions

View File

@@ -5,145 +5,344 @@ import os
import mathutils
from mathutils.bvhtree import BVHTree
def run_batch_combine():
try:
args = sys.argv[sys.argv.index("--") + 1:]
body_blend_path, clothes_blend_path, output_path = args[0], args[1], args[2]
except (ValueError, IndexError):
print("Usage: blender -b -P script.py -- <body.blend> <clothes.blend>")
return
bpy.ops.wm.read_homefile(use_empty=True)
# 1. Append Files
def load_blend_files(clothes_blend_path, body_blend_path):
    """Append every object from the given .blend files into the current scene.

    Args:
        clothes_blend_path: Path to the .blend file containing clothing meshes.
        body_blend_path: Path to the .blend file containing body meshes.

    Returns:
        list: The object datablocks that were loaded and linked.
    """
    loaded_objects = []
    for path in [clothes_blend_path, body_blend_path]:
        # libraries.load appends the requested datablocks; each object must
        # still be linked into a collection to become part of the scene.
        with bpy.data.libraries.load(path) as (data_from, data_to):
            data_to.objects = data_from.objects
        for obj in data_to.objects:
            if obj:  # entries can be None if a datablock failed to load
                # Link exactly once (the captured text linked twice — diff artifact).
                bpy.context.collection.objects.link(obj)
                loaded_objects.append(obj)
    return loaded_objects
all_objs = bpy.data.objects
clothing_meshes = [o for o in all_objs if o.type == 'MESH' and "ref_part" in o]
whitelist = set()
def setup_bvh_and_matrices(obj):
    """Build a BVH tree from the evaluated (modifier-applied) state of *obj*."""
    deps = bpy.context.evaluated_depsgraph_get()
    evaluated = obj.evaluated_get(deps)
    return BVHTree.FromObject(evaluated, deps)
for cloth in clothing_meshes:
target_name = cloth["ref_part"]
target_body = all_objs.get(target_name)
if not target_body: continue
def get_transformation_matrices(obj):
    """Return (world, world-inverse, normal) matrices for *obj*.

    The normal matrix is the inverse-transpose of the 3x3 part of the
    world matrix, suitable for transforming normals into world space.
    """
    world = obj.matrix_world
    world_inverse = world.inverted()
    normal_matrix = world.to_3x3().inverted().transposed()
    return world, world_inverse, normal_matrix
# --- STEP A: RAYCAST & INITIAL HIT LIST ---
depsgraph = bpy.context.evaluated_depsgraph_get()
cloth_eval = cloth.evaluated_get(depsgraph)
bvh_cloth = BVHTree.FromObject(cloth_eval, depsgraph)
def raycast_and_adjust_vertices(target_body, bvh_cloth):
    """Flag body vertices covered by the cloth and tuck them beneath it.

    For every vertex of *target_body*, a short ray is cast along the world
    normal (into the cloth) and a shorter one against it. A hit on either
    probe marks the vertex as covered and nudges it inward so it sits just
    below the cloth surface; shape-key data is kept in sync.

    Returns:
        list[int]: per-vertex flags, 1 where the cloth covers the vertex.
    """
    world, world_inv, normal_mat = get_transformation_matrices(target_body)
    mesh = target_body.data
    flags = [0] * len(mesh.vertices)
    shape_keys = mesh.shape_keys
    for idx, vert in enumerate(mesh.vertices):
        pos = world @ vert.co
        nrm = (normal_mat @ vert.normal).normalized()
        # Probe outward (into cloth) and slightly backward (from inside it).
        front_hit, _, _, _ = bvh_cloth.ray_cast(pos, nrm, 0.015)
        back_hit, _, _, _ = bvh_cloth.ray_cast(pos, -nrm, 0.005)
        if front_hit or back_hit:
            flags[idx] = 1
            # Push deeper (0.01) when only the backward probe hit, i.e. the
            # vertex is already poking through the cloth.
            shift = -nrm * (0.005 if front_hit else 0.01)
            adjusted = world_inv @ (pos + shift)
            vert.co = adjusted
            if shape_keys is not None:
                for block in shape_keys.key_blocks:
                    block.data[idx].co = adjusted
    return flags
m_body = target_body.matrix_world
m_body_inv = m_body.inverted()
m_body_normal = m_body.to_3x3().inverted().transposed()
num_verts = len(target_body.data.vertices)
hit_values = [0] * num_verts
has_shape_keys = target_body.data.shape_keys is not None
for i, v in enumerate(target_body.data.vertices):
v_world = m_body @ v.co
n_world = (m_body_normal @ v.normal).normalized()
hit_f, _, _, _ = bvh_cloth.ray_cast(v_world, n_world, 0.015)
hit_b, _, _, _ = bvh_cloth.ray_cast(v_world, -n_world, 0.005)
if hit_f or hit_b:
hit_values[i] = 1
offset = -n_world * (0.005 if hit_f else 0.01)
new_co = m_body_inv @ (v_world + offset)
v.co = new_co
if has_shape_keys:
for kb in target_body.data.shape_keys.key_blocks:
kb.data[i].co = new_co
# --- STEP B: MULTI-LAYER PROTECTION & DELETE ---
bm = bmesh.new()
bm.from_mesh(target_body.data)
bm.verts.ensure_lookup_table()
# Phase 1: Identify "Layer 1 Border" (Immediate neighbors of visible verts)
border_l1 = set()
for v in bm.verts:
if hit_values[v.index] == 0:
for edge in v.link_edges:
neighbor = edge.other_vert(v)
if hit_values[neighbor.index] == 1:
border_l1.add(neighbor.index)
# Phase 2: Identify "Layer 2 Buffer" (Neighbors of Layer 1)
border_l2 = set()
for idx in border_l1:
v = bm.verts[idx]
def protect_and_remove_hidden_geometry(target_body, hit_values, threshold=4.0):
    """Protect visible vertices and remove hidden geometry.

    Args:
        target_body: Mesh object whose covered vertices may be removed.
        hit_values: Per-vertex flags from raycast_and_adjust_vertices
            (1 = covered by cloth, 0 = visible).
        threshold: Minimum hit count (vertex plus its direct neighbors)
            for a vertex to count as deeply hidden and be deleted.
    """
    bm = bmesh.new()
    bm.from_mesh(target_body.data)
    bm.verts.ensure_lookup_table()
    # Phase 1: Identify "Layer 1 Border" (immediate neighbors of visible verts)
    border_l1 = set()
    for v in bm.verts:
        if hit_values[v.index] == 0:  # Visible vertex
            for edge in v.link_edges:
                neighbor = edge.other_vert(v)
                if hit_values[neighbor.index] == 1:
                    border_l1.add(neighbor.index)
    # Phase 2: Identify "Layer 2 Buffer" (neighbors of Layer 1)
    border_l2 = set()
    for idx in border_l1:
        v = bm.verts[idx]
        for edge in v.link_edges:
            neighbor = edge.other_vert(v)
            if hit_values[neighbor.index] == 1:
                border_l2.add(neighbor.index)
    # Merge all protected vertices (visible layer + both border layers)
    protected_indices = set(border_l1) | set(border_l2)
    for i, val in enumerate(hit_values):
        if val == 0:  # Visible vertices
            protected_indices.add(i)
    # Deletion logic: remove vertices whose whole neighborhood is hidden
    to_delete = []
    for v in bm.verts:
        if v.index in protected_indices:
            continue
        # Sum hits of the vertex and its direct neighbors
        neighbor_hit_sum = hit_values[v.index]
        for edge in v.link_edges:
            neighbor = edge.other_vert(v)
            neighbor_hit_sum += hit_values[neighbor.index]
        if neighbor_hit_sum >= threshold:
            to_delete.append(v)
        elif len(v.link_edges) == 1:  # Loose/dangling vertices
            to_delete.append(v)
    # Perform deletion and write the edited mesh back
    bmesh.ops.delete(bm, geom=to_delete, context='VERTS')
    bm.to_mesh(target_body.data)
    bm.free()
    target_body.data.update()
if cloth.parent and cloth.parent.type == 'ARMATURE':
old_arm = cloth.parent
cloth.matrix_world = cloth.matrix_world.copy()
cloth.parent = None
if old_arm not in whitelist: bpy.data.objects.remove(old_arm, do_unlink=True)
def process_clothing_pair(clothing_obj, target_obj, whitelist, is_clothing_copy=False, original_clothing_name=None):
    """Process a clothing-body pair

    Args:
        clothing_obj: The clothing object to process
        target_obj: The target object to combine with (body or combined object)
        whitelist: Set of objects to keep
        is_clothing_copy: Whether clothing_obj is a copy (for layer 2 processing)
        original_clothing_name: Original name of clothing if it's a copy (for layer 2 naming)

    Returns:
        The new combined object (a copy of *target_obj* joined with the clothing).
    """
    # Create a copy of the target object so the original stays intact
    new_target = target_obj.copy()
    new_target.data = target_obj.data.copy()
    bpy.context.collection.objects.link(new_target)
    # Copy custom properties
    for key in target_obj.keys():
        new_target[key] = target_obj[key]
    # Ensure the copy has the same transformations
    new_target.matrix_world = target_obj.matrix_world.copy()
    target_name = target_obj.name
    # Determine the name to use for the clothing in the final combined object
    if is_clothing_copy and original_clothing_name:
        clothing_name_for_final = original_clothing_name
    else:
        clothing_name_for_final = clothing_obj.name
    print(f"Processing: {clothing_name_for_final} -> {target_name} (using copy)")
    # Step A: Raycast & adjust vertices
    bvh_cloth = setup_bvh_and_matrices(clothing_obj)
    hit_values = raycast_and_adjust_vertices(new_target, bvh_cloth)
    # Step B: Remove hidden geometry
    protect_and_remove_hidden_geometry(new_target, hit_values)
    # Step C: Handle armature and join
    master_arm = new_target.parent if (new_target.parent and new_target.parent.type == 'ARMATURE') else None
    if master_arm:
        whitelist.add(master_arm)
    # Handle clothing armature (if it's not a copy for layer 2)
    if not is_clothing_copy and clothing_obj.parent and clothing_obj.parent.type == 'ARMATURE':
        old_arm = clothing_obj.parent
        clothing_obj.matrix_world = clothing_obj.matrix_world.copy()
        clothing_obj.parent = None
        if old_arm not in whitelist:
            bpy.data.objects.remove(old_arm, do_unlink=True)
    # Layer 2 clothing copies inherit their armature from the target instead.
    # Reparent to master armature if exists
    if master_arm:
        clothing_obj.parent = master_arm
        for mod in clothing_obj.modifiers:
            if mod.type == 'ARMATURE':
                mod.object = master_arm
    # Join clothing with target copy
    bpy.ops.object.select_all(action='DESELECT')
    clothing_obj.select_set(True)
    new_target.select_set(True)
    bpy.context.view_layer.objects.active = new_target
    bpy.ops.object.join()
    # Rename the combined object using the appropriate clothing name
    new_target.name = f"{target_name}_{clothing_name_for_final}"
    whitelist.add(new_target)
    return new_target
def cleanup_unused_objects(whitelist):
    """Delete every mesh/armature object absent from *whitelist*, then purge orphan data."""
    doomed = [o for o in list(bpy.data.objects)
              if o.type in {'MESH', 'ARMATURE'} and o not in whitelist]
    for o in doomed:
        bpy.data.objects.remove(o, do_unlink=True)
    bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=True, do_recursive=True)
def run_batch_combine():
    """Main function to run the batch combine process.

    Expects three CLI arguments after Blender's ``--`` separator: the body
    .blend path, the clothes .blend path, and the output .blend path.
    Combines layer-1 clothing with body parts, then layer-2 clothing both
    over the layer-1 results and directly over the bodies, and saves the
    resulting scene.
    """
    try:
        args = sys.argv[sys.argv.index("--") + 1:]
        body_blend_path, clothes_blend_path, output_path = args[0], args[1], args[2]
    except (ValueError, IndexError):
        print("Usage: blender -b -P script.py -- <body.blend> <clothes.blend> <output.blend>")
        return
    # Start fresh
    bpy.ops.wm.read_homefile(use_empty=True)
    # Load objects from blend files
    load_blend_files(clothes_blend_path, body_blend_path)
    # Categorize loaded objects
    all_objs = bpy.data.objects
    # Separate body objects (no ref_layer property)
    body_objects = [o for o in all_objs if o.type == 'MESH' and "ref_layer" not in o]
    # Separate clothing by layer
    clothing_layer1 = [o for o in all_objs if o.type == 'MESH' and "ref_layer" in o and o["ref_layer"] == 1]
    clothing_layer2 = [o for o in all_objs if o.type == 'MESH' and "ref_layer" in o and o["ref_layer"] == 2]
    print(f"Found {len(body_objects)} body objects")
    print(f"Found {len(clothing_layer1)} layer 1 clothing objects")
    print(f"Found {len(clothing_layer2)} layer 2 clothing objects")
    # Create dictionary of body objects for quick lookup
    body_objects_dict = {obj.name: obj for obj in body_objects}
    # Track objects to keep
    whitelist = set()
    # List to store combined objects from layer 1
    combined_objects = []
    # PROCESS LAYER 1: Combine with original body parts
    print("\n=== PROCESSING LAYER 1 CLOTHING ===")
    for clothing_obj in clothing_layer1:
        if "ref_part" not in clothing_obj:
            print(f"Warning: Layer 1 clothing '{clothing_obj.name}' missing ref_part property, skipping")
            continue
        target_name = clothing_obj["ref_part"]
        original_body = body_objects_dict.get(target_name)
        if not original_body or original_body.type != 'MESH':
            print(f"Warning: Target body '{target_name}' not found for clothing '{clothing_obj.name}', skipping")
            continue
        # Process the pair
        result = process_clothing_pair(clothing_obj, original_body, whitelist)
        if result:
            combined_objects.append(result)
            print(f"Added '{result.name}' to combined objects list")
    print(f"Layer 1 complete. Created {len(combined_objects)} combined objects")
    # PROCESS LAYER 2 (First Pass): Combine with layer 1 results
    print("\n=== PROCESSING LAYER 2 CLOTHING (First Pass - Over Layer 1) ===")
    layer2_results_over_l1 = []
    for clothing_obj in clothing_layer2:
        print(f"\nProcessing layer 2 clothing: {clothing_obj.name}")
        # Store the original clothing name for later use
        original_clothing_name = clothing_obj.name
        # For each combined object from layer 1
        for combined_obj in combined_objects:
            # Create a copy of the layer 2 clothing
            clothing_copy = clothing_obj.copy()
            clothing_copy.data = clothing_obj.data.copy()
            bpy.context.collection.objects.link(clothing_copy)
            # Copy custom properties
            for key in clothing_obj.keys():
                clothing_copy[key] = clothing_obj[key]
            # Set the ref_part to point to the combined object
            clothing_copy["ref_part"] = combined_obj.name
            print(f"  Combining with layer 1 result: {combined_obj.name}")
            # Process the pair
            result = process_clothing_pair(clothing_copy, combined_obj, whitelist,
                                           is_clothing_copy=True,
                                           original_clothing_name=original_clothing_name)
            if result:
                layer2_results_over_l1.append(result)
                print(f"  Created: {result.name}")
    print(f"Layer 2 first pass complete. Created {len(layer2_results_over_l1)} combined objects")
    # PROCESS LAYER 2 (Second Pass): Combine directly with body parts (like layer 1)
    print("\n=== PROCESSING LAYER 2 CLOTHING (Second Pass - Direct to Body) ===")
    layer2_results_direct = []
    for clothing_obj in clothing_layer2:
        if "ref_part" not in clothing_obj:
            print(f"Warning: Layer 2 clothing '{clothing_obj.name}' missing ref_part property, skipping")
            continue
        target_name = clothing_obj["ref_part"]
        original_body = body_objects_dict.get(target_name)
        if not original_body or original_body.type != 'MESH':
            print(f"Warning: Target body '{target_name}' not found for clothing '{clothing_obj.name}', skipping")
            continue
        print(f"\nProcessing layer 2 clothing directly with body: {clothing_obj.name} -> {target_name}")
        # Process directly with body part (no copy needed for the clothing itself)
        result = process_clothing_pair(clothing_obj, original_body, whitelist, is_clothing_copy=False)
        if result:
            layer2_results_direct.append(result)
            print(f"  Created: {result.name}")
    print(f"Layer 2 second pass complete. Created {len(layer2_results_direct)} combined objects")
    # Add all results to combined objects list
    all_results = combined_objects + layer2_results_over_l1 + layer2_results_direct
    print(f"\n=== SUMMARY ===")
    print(f"Layer 1 results: {len(combined_objects)}")
    print(f"Layer 2 over layer 1 results: {len(layer2_results_over_l1)}")
    print(f"Layer 2 direct to body results: {len(layer2_results_direct)}")
    print(f"Total combined objects: {len(all_results)}")
    # Final cleanup - keep all combined objects and their armatures
    for obj in all_results:
        whitelist.add(obj)
    cleanup_unused_objects(whitelist)
    # Save the result
    bpy.ops.wm.save_as_mainfile(filepath=output_path)
    print(f"\nSaved to: {output_path}")


if __name__ == "__main__":
    run_batch_combine()