Removed mBodyNode from CharacterBase
This commit is contained in:
@@ -229,7 +229,17 @@ function(weight_clothes SRC)
|
||||
${CMAKE_CURRENT_BINARY_DIR}/blender-addons-installed
|
||||
)
|
||||
endfunction()
|
||||
weight_clothes(${CMAKE_CURRENT_SOURCE_DIR}/clothes-male-bottom.blend)
|
||||
# Transfer shape keys from a body mesh onto a combined clothes mesh.
#   SRC   - source .blend containing the body with shape keys
#   CLOTH - combined clothes .blend to receive the keys
#   DST   - output .blend produced by the transfer script
# The output size is validated by cmake/check_file_size.cmake.
function(transfer_shape_keys SRC CLOTH DST)
  add_custom_command(
    OUTPUT ${DST}
    COMMAND ${BLENDER} -b -Y -P ${CMAKE_CURRENT_SOURCE_DIR}/transfer_shape_keys.py -- ${SRC} ${CLOTH} ${DST}
    COMMAND ${CMAKE_COMMAND} -D FILE=${DST}
            -P ${CMAKE_CURRENT_SOURCE_DIR}/cmake/check_file_size.cmake
    # The script itself is a dependency so edits to it trigger a re-run.
    DEPENDS ${SRC} ${CLOTH}
            ${CMAKE_CURRENT_BINARY_DIR}/blender-addons-installed
            ${CMAKE_CURRENT_SOURCE_DIR}/transfer_shape_keys.py
    # VERBATIM ensures portable, well-defined argument escaping.
    VERBATIM
  )
endfunction()
|
||||
|
||||
# Function to combine clothes into a blend file
|
||||
# Parameters:
|
||||
@@ -383,15 +393,21 @@ function(add_clothes_pipeline INPUT_BLEND WEIGHTED_BLEND FINAL_OUTPUT_BLEND)
|
||||
else()
|
||||
set(COMBINED_BLEND "${PIPELINE_INTERMEDIATE_DIR}/${TARGET_BASE}_combined.blend")
|
||||
endif()
|
||||
set(SHAPED_BLEND "${PIPELINE_INTERMEDIATE_DIR}/${TARGET_BASE}_shaped.blend")
|
||||
|
||||
|
||||
# Step 1: Combine clothes
|
||||
add_clothes_combination(
|
||||
"${INPUT_BLEND}"
|
||||
"${WEIGHTED_BLEND}"
|
||||
"${WEIGHTED_BLEND}"
|
||||
"${COMBINED_BLEND}"
|
||||
OUTPUT_DIR "${PIPELINE_INTERMEDIATE_DIR}"
|
||||
)
|
||||
|
||||
transfer_shape_keys(${INPUT_BLEND}
|
||||
${COMBINED_BLEND}
|
||||
${SHAPED_BLEND}
|
||||
)
|
||||
# Step 2: Consolidate
|
||||
add_blend_consolidation(
|
||||
"${INPUT_BLEND}"
|
||||
@@ -401,11 +417,13 @@ function(add_clothes_pipeline INPUT_BLEND WEIGHTED_BLEND FINAL_OUTPUT_BLEND)
|
||||
|
||||
# Create a custom target to drive the whole pipeline
|
||||
add_custom_target(${TARGET_BASE}_pipeline ALL
|
||||
DEPENDS ${FINAL_OUTPUT_BLEND}
|
||||
DEPENDS ${FINAL_OUTPUT_BLEND} ${SHAPED_BLEND}
|
||||
COMMENT "Running complete clothes pipeline for ${TARGET_BASE}"
|
||||
)
|
||||
endfunction()
|
||||
|
||||
weight_clothes(${CMAKE_CURRENT_SOURCE_DIR}/clothes-male-bottom.blend)
|
||||
|
||||
add_clothes_pipeline(
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/edited-normal-male.blend" # INPUT_BLEND
|
||||
"${CMAKE_CURRENT_BINARY_DIR}/clothes/clothes-male-bottom_weighted.blend" # WEIGHTED_BLEND
|
||||
|
||||
Binary file not shown.
Binary file not shown.
970
assets/blender/characters/transfer_shape_keys.py
Normal file
970
assets/blender/characters/transfer_shape_keys.py
Normal file
@@ -0,0 +1,970 @@
|
||||
"""
|
||||
Blender 3.6.20 Script: Transfer Shape Keys with Boundary Velocity Limiting
|
||||
Usage: blender -b --python transfer_shape_keys.py -- <source_file> <target_file> <output_file>
|
||||
"""
|
||||
|
||||
import bpy
|
||||
import sys
|
||||
import os
|
||||
import shutil
|
||||
import mathutils
|
||||
from mathutils.bvhtree import BVHTree
|
||||
import numpy as np
|
||||
from collections import defaultdict
|
||||
|
||||
def parse_args():
    """Parse command line arguments after '--'"""
    # Blender forwards everything after "--" untouched to the script.
    if '--' not in sys.argv:
        print("Error: No arguments provided")
        print("Usage: blender -b --python transfer_shape_keys.py -- <source_file> <target_file> <output_file>")
        sys.exit(1)

    script_args = sys.argv[sys.argv.index('--') + 1:]
    if len(script_args) < 3:
        print("Error: Please provide source, target, and output .blend files")
        print("Usage: blender -b --python transfer_shape_keys.py -- <source_file> <target_file> <output_file>")
        sys.exit(1)

    # Extra trailing arguments are ignored, exactly like the original behavior.
    return script_args[0], script_args[1], script_args[2]
|
||||
|
||||
def load_source_data(source_path):
    """Load source file and extract shape key information"""
    print(f"\nLoading source file: {source_path}")

    if not os.path.exists(source_path):
        print(f"Error: Source file not found: {source_path}")
        sys.exit(1)

    # Remember the currently open .blend so it can be restored afterwards.
    current_file = bpy.data.filepath if bpy.data.filepath else None

    bpy.ops.wm.open_mainfile(filepath=source_path)

    # Prefer the dedicated "Body_shapes" mesh; fall back to plain "Body".
    body_object = None
    for wanted_name in ("Body_shapes", "Body"):
        for obj in bpy.data.objects:
            if obj.name == wanted_name and obj.type == 'MESH':
                body_object = obj
                break
        if body_object:
            break

    if not body_object:
        print("Error: Could not find mesh object named 'Body' in source file")
        sys.exit(1)

    if not body_object.data.shape_keys:
        print("Error: Body object has no shape keys")
        sys.exit(1)

    shape_key_data = {
        'names': [],
        'vertex_positions': {},
        'polygons': [],
        'is_relative': body_object.data.shape_keys.use_relative
    }

    # Polygon vertex indices are kept so BVH trees can be rebuilt later.
    mesh = body_object.data
    shape_key_data['polygons'] = [list(poly.vertices) for poly in mesh.polygons]

    key_blocks = body_object.data.shape_keys.key_blocks
    print(f"Found Body object with {len(key_blocks)} shape keys")

    # Record every key's name and its full set of vertex coordinates.
    for sk in key_blocks:
        shape_key_data['names'].append(sk.name)
        print(f" - {sk.name}")
        shape_key_data['vertex_positions'][sk.name] = [
            (v.co.x, v.co.y, v.co.z) for v in sk.data
        ]

    # Restore whatever file was open before, or reset to the startup file.
    if current_file and os.path.exists(current_file):
        bpy.ops.wm.open_mainfile(filepath=current_file)
    elif current_file:
        bpy.ops.wm.read_homefile()

    return shape_key_data
|
||||
|
||||
def load_target_file(target_path):
    """Load the target .blend file and find objects with custom properties"""
    print(f"\nLoading target file: {target_path}")

    if not os.path.exists(target_path):
        print(f"Error: Target file not found: {target_path}")
        sys.exit(1)

    # Work on a copy so the original target .blend is never modified in place.
    temp_dir = os.path.join(os.path.dirname(target_path), "temp_blend_files")
    os.makedirs(temp_dir, exist_ok=True)
    temp_target = os.path.join(temp_dir, os.path.basename(target_path))
    shutil.copy2(target_path, temp_target)

    bpy.ops.wm.open_mainfile(filepath=temp_target)

    # Clothes meshes are identified by 'age'/'sex'/'slot' custom properties.
    target_objects = []
    for obj in bpy.data.objects:
        if obj.type == 'MESH' and obj.data:
            if all(prop in obj for prop in ['age', 'sex', 'slot']):
                target_objects.append(obj)
                print(f"Found target object: {obj.name}")
                print(f" - age: {obj['age']}")
                print(f" - sex: {obj['sex']}")
                print(f" - slot: {obj['slot']}")
                print(f" - vertices: {len(obj.data.vertices)}")

    return target_objects, temp_target
|
||||
|
||||
def delete_existing_shape_keys(target_obj):
    """Delete all existing shape keys from target object"""
    # Nothing to do if the object carries no shape keys at all.
    if not target_obj.data.shape_keys:
        return False

    num_keys = len(target_obj.data.shape_keys.key_blocks)
    print(f" Deleting {num_keys} existing shape keys...")

    # Operators require the object to be active, selected, and in object mode.
    bpy.context.view_layer.objects.active = target_obj
    target_obj.select_set(True)
    bpy.ops.object.mode_set(mode='OBJECT')

    # Repeatedly removing index 0 eventually drops Basis, after which
    # `shape_keys` becomes None and the loop ends.
    while target_obj.data.shape_keys:
        target_obj.active_shape_key_index = 0
        bpy.ops.object.shape_key_remove()

    target_obj.select_set(False)
    return True
|
||||
|
||||
def ensure_shape_keys_structure(shape_key_names, target_obj):
    """Create shape key structure on target object"""
    delete_existing_shape_keys(target_obj)

    bpy.context.view_layer.objects.active = target_obj
    target_obj.select_set(True)

    # The Basis key snapshots the mesh's current vertex positions.
    target_obj.shape_key_add(name='Basis', from_mix=True)
    print(f" Created Basis shape key")

    # Remaining keys are created empty; their vertex data is filled in later.
    for name in shape_key_names:
        if name == 'Basis':
            continue
        target_obj.shape_key_add(name=name, from_mix=False)
        print(f" Created shape key: {name}")

    target_obj.data.shape_keys.use_relative = True
    target_obj.select_set(False)
|
||||
|
||||
def create_bvh_for_source(source_data, shape_key_name='Basis'):
    """Create BVH tree for source mesh in specified pose"""
    # Rebuild Vector objects from the stored (x, y, z) tuples for this pose.
    verts = [mathutils.Vector(co) for co in source_data['vertex_positions'][shape_key_name]]

    # Polygons may be quads/ngons; the BVH triangulates them internally.
    bvh = BVHTree.FromPolygons(verts, source_data['polygons'], all_triangles=False)

    return bvh, verts
|
||||
|
||||
def compute_signed_distance_and_direction(bvh, point, reference_normal=None):
    """Compute signed distance and direction to surface"""
    # Closest surface point, its normal, the face index, and the distance.
    location, normal, index, distance = bvh.find_nearest(point)
    if location is None:
        return None, None, None, None

    if reference_normal is not None:
        # Sign relative to the caller-supplied normal: positive when the
        # surface lies along reference_normal as seen from `point`.
        to_surface = location - point
        if to_surface.length > 0:
            aligned = to_surface.normalized().dot(reference_normal) > 0
            signed_distance = distance if aligned else -distance
        else:
            signed_distance = 0
    else:
        # Fall back to the surface's own normal. NOTE(review): this branch
        # uses the opposite vector convention (point - location) — confirm
        # the asymmetry is intentional.
        to_surface = point - location
        if to_surface.length > 0 and normal.length > 0:
            aligned = to_surface.normalized().dot(normal) > 0
            signed_distance = distance if aligned else -distance
        else:
            signed_distance = distance

    return location, normal, index, signed_distance
|
||||
|
||||
def detect_boundary_vertices(target_obj, mapping, threshold=0.3):
    """Detect vertices that lie on the boundary between inner and outer surfaces"""
    print(f" Detecting boundary vertices...")

    num_verts = len(mapping['target_verts'])
    side_flags = mapping['side_flags']
    adjacency = mapping['adjacency']
    boundary_vertices = set()

    # A vertex counts as boundary if it sits on the surface itself (flag 0),
    # or if more than `threshold` of its classified neighbours lie on the
    # opposite side of the surface.
    for i in range(num_verts):
        if i >= len(side_flags):
            continue

        side_i = side_flags[i]
        if side_i == 0:
            boundary_vertices.add(i)
            continue

        opposite = 0
        classified = 0
        for n in adjacency[i]:
            if n < len(side_flags):
                classified += 1
                if side_flags[n] != 0 and side_flags[n] != side_i:
                    opposite += 1

        if classified > 0 and opposite / classified > threshold:
            boundary_vertices.add(i)

    # Vertices hugging the surface (tiny signed distance) are boundary too.
    for i in range(num_verts):
        if i < len(mapping['signed_distances']) and abs(mapping['signed_distances'][i]) < 0.05:
            boundary_vertices.add(i)

    print(f" Found {len(boundary_vertices)} boundary vertices")
    return boundary_vertices
|
||||
|
||||
def compute_robust_surface_mapping(target_obj, source_data):
    """Create robust surface mapping with signed distances"""
    print(f" Computing robust surface mapping...")

    # Snapshot target vertex positions and normals.
    target_verts = [v.co.copy() for v in target_obj.data.vertices]
    target_normals = [v.normal.copy() for v in target_obj.data.vertices]
    num_verts = len(target_verts)

    # BVH over the source body's Basis (rest) pose.
    bvh_basis, basis_verts = create_bvh_for_source(source_data, 'Basis')

    def _nearest_face_vertex(face, point):
        # Index of the face vertex closest to `point` in the basis pose.
        best_idx = face[0]
        best_dist = float('inf')
        for idx in face:
            d = (point - basis_verts[idx]).length
            if d < best_dist:
                best_dist = d
                best_idx = idx
        return best_idx

    # Edge-based vertex adjacency, reused later by the smoothing passes.
    adjacency = defaultdict(set)
    for edge in target_obj.data.edges:
        a, b = edge.vertices
        adjacency[a].add(b)
        adjacency[b].add(a)

    mapping = {
        'target_verts': target_verts,
        'source_indices': [],
        'surface_points': [],
        'signed_distances': [],
        'direction_vectors': [],
        'face_indices': [],
        'barycentric_coords': [],
        'side_flags': [],
        'confidence': [],
        'adjacency': adjacency,
        'deformation_magnitude': [0.0] * num_verts,
        'boundary_vertices': set()
    }

    # First pass: closest surface point and inside/outside classification.
    for i, (target_pos, target_normal) in enumerate(zip(target_verts, target_normals)):
        location, normal, index, signed_dist = compute_signed_distance_and_direction(
            bvh_basis, target_pos, target_normal
        )

        if location is None:
            # BVH query failed: brute-force nearest basis vertex instead.
            if i % 100 == 0:
                print(f" Warning: Vertex {i} using nearest vertex fallback")
            min_dist = float('inf')
            nearest_idx = 0
            for j, src_pos in enumerate(basis_verts):
                dist = (target_pos - src_pos).length
                if dist < min_dist:
                    min_dist = dist
                    nearest_idx = j

            mapping['source_indices'].append([nearest_idx])
            mapping['surface_points'].append(basis_verts[nearest_idx])
            mapping['signed_distances'].append(min_dist)
            mapping['direction_vectors'].append(target_pos - basis_verts[nearest_idx])
            mapping['face_indices'].append(-1)
            mapping['barycentric_coords'].append([1.0])
            mapping['side_flags'].append(1)
            mapping['confidence'].append(0.5)
            continue

        mapping['surface_points'].append(location)
        mapping['signed_distances'].append(signed_dist)

        abs_dist = abs(signed_dist)
        if abs_dist < 0.001:
            # Effectively on the surface.
            side_flag = 0
            confidence = 1.0
        else:
            # Probe with four rays; more hits = more confidence in the side.
            ray_hits = 0
            for ray_dir in [target_normal, -target_normal,
                            mathutils.Vector((1, 0, 0)), mathutils.Vector((-1, 0, 0))]:
                hit, _, _, _ = bvh_basis.ray_cast(target_pos + ray_dir * 0.1, -ray_dir)
                if hit is not None:
                    ray_hits += 1

            side_flag = 1 if signed_dist > 0 else -1
            confidence = min(1.0, ray_hits / 4.0 + 0.5)

        mapping['side_flags'].append(side_flag)
        mapping['confidence'].append(confidence)

        if index is not None and index < len(source_data['polygons']):
            face = source_data['polygons'][index]
            mapping['face_indices'].append(index)

            if len(face) == 3:
                face_verts = [basis_verts[idx] for idx in face]
                try:
                    # NOTE(review): barycentric_transform expects 7 arguments
                    # (a point plus two triangles); this 4-argument call looks
                    # like it raises TypeError, so the except fallback below
                    # likely always runs — confirm against mathutils docs.
                    coords = mathutils.geometry.barycentric_transform(
                        location, face_verts[0], face_verts[1], face_verts[2]
                    )
                    mapping['barycentric_coords'].append(coords)
                    mapping['source_indices'].append([_nearest_face_vertex(face, location)])
                except:
                    mapping['source_indices'].append([_nearest_face_vertex(face, location)])
                    mapping['barycentric_coords'].append([1.0])
            else:
                # Quads/ngons: just take the closest face vertex.
                mapping['source_indices'].append([_nearest_face_vertex(face, location)])
                mapping['barycentric_coords'].append([1.0])
        else:
            mapping['face_indices'].append(-1)
            mapping['source_indices'].append([0])
            mapping['barycentric_coords'].append([1.0])

        mapping['direction_vectors'].append(target_pos - location)

    mapping['boundary_vertices'] = detect_boundary_vertices(target_obj, mapping)

    print(f" Mapping complete. Side distribution: "
          f"Outside: {mapping['side_flags'].count(1)}, "
          f"Inside: {mapping['side_flags'].count(-1)}, "
          f"On surface: {mapping['side_flags'].count(0)}")

    return mapping
|
||||
|
||||
def interpolate_source_position_safe(source_data, sk_name, surface_point, face_idx, bary_coords, source_indices):
    """Interpolate source position with side preservation"""
    # Basis pose: the mapped surface point is already correct.
    if sk_name == 'Basis':
        return surface_point

    source_positions = source_data['vertex_positions'][sk_name]

    # Preferred path: barycentric interpolation across a triangle.
    if 0 <= face_idx < len(source_data['polygons']):
        face = source_data['polygons'][face_idx]
        if len(face) == 3 and len(bary_coords) == 3:
            pos = mathutils.Vector((0, 0, 0))
            for j, vert_idx in enumerate(face):
                if j < len(bary_coords) and vert_idx < len(source_positions):
                    pos += bary_coords[j] * mathutils.Vector(source_positions[vert_idx])
            return pos

    # Fallback: the single nearest mapped source vertex.
    if source_indices:
        idx = source_indices[0]
        if idx < len(source_positions):
            return mathutils.Vector(source_positions[idx])

    # Last resort: the unmoved surface point.
    return surface_point
|
||||
|
||||
def compute_deformation_magnitudes(source_data, mapping):
    """Compute how much each vertex deforms in the source"""
    print(f" Computing deformation magnitudes...")

    basis_positions = [mathutils.Vector(v) for v in source_data['vertex_positions']['Basis']]
    num_verts = len(mapping['target_verts'])
    magnitudes = [0.0] * num_verts

    # For each target vertex, track the largest displacement its mapped
    # source vertex undergoes across all non-Basis shape keys.
    for sk_name in source_data['names']:
        if sk_name == 'Basis':
            continue

        sk_positions = [mathutils.Vector(v) for v in source_data['vertex_positions'][sk_name]]

        for i in range(num_verts):
            if i >= len(mapping['source_indices']):
                continue
            src_indices = mapping['source_indices'][i]
            if not src_indices:
                continue
            src_idx = src_indices[0]
            if src_idx < len(basis_positions) and src_idx < len(sk_positions):
                displacement = (sk_positions[src_idx] - basis_positions[src_idx]).length
                magnitudes[i] = max(magnitudes[i], displacement)

    # Normalize magnitudes into [0, 1].
    max_mag = max(magnitudes) if magnitudes else 1.0
    if max_mag > 0:
        magnitudes = [m / max_mag for m in magnitudes]

    high_count = sum(1 for m in magnitudes if m > 0.7)
    print(f" High deformation vertices (>0.7): {high_count}/{num_verts}")

    return magnitudes
|
||||
|
||||
def enforce_side_constraint(pos, surface_point, reference_normal, target_side, confidence, is_boundary=False):
    """Enforce that vertex stays on correct side of surface"""
    # `confidence` is accepted for interface compatibility but unused here.
    to_surface = surface_point - pos
    if to_surface.length < 0.001:
        return pos

    # Sign convention matches the side flags from the surface mapping.
    current_side = 1 if to_surface.dot(reference_normal) < 0 else -1

    # Already on the requested side (or no side requested): leave untouched.
    if current_side == target_side or target_side == 0:
        return pos

    # Project `pos` onto the surface plane along the reference normal.
    proj_factor = to_surface.dot(reference_normal) / reference_normal.length_squared
    proj_point = pos + reference_normal * proj_factor

    # Boundary vertices get only a gentle nudge (20% of the distance) to avoid
    # pinching; interior vertices are pushed almost fully (95%) to their side.
    offset = (surface_point - pos).length * (0.2 if is_boundary else 0.95)
    if target_side > 0:
        return proj_point - reference_normal * offset
    return proj_point + reference_normal * offset
|
||||
|
||||
def limit_boundary_velocity(target_idx, target_pos, deformed_surface, t, mapping):
    """Limit how fast boundary vertices can move"""
    # Non-boundary vertices move at full speed toward the deformed target.
    if target_idx not in mapping['boundary_vertices']:
        return deformed_surface

    # Boundary vertices only progress 70% as fast along the blend parameter.
    safe_t = t * 0.7
    return (1 - safe_t) * target_pos + safe_t * deformed_surface
|
||||
|
||||
def adaptive_damping(pos, target_pos, deformed_surface, magnitude, t, threshold=0.55, is_boundary=False):
    """Apply adaptive damping based on deformation magnitude and t value"""
    # No damping below the threshold; boundary vertices are also exempt
    # because extra damping there causes visible pinching.
    if t <= threshold or is_boundary:
        return pos

    # Damping grows with how far t has passed the threshold and with the
    # vertex's normalized deformation magnitude.
    excess = (t - threshold) / (1.0 - threshold)
    damping = min(1.0, magnitude * 0.8 + 0.2)
    damped_weight = excess * damping * 0.5

    # Pull toward the midpoint of the rest position and the deformed surface.
    safe_pos = (target_pos + deformed_surface) * 0.5
    return (1 - damped_weight) * pos + damped_weight * safe_pos
|
||||
|
||||
def compute_side_preserving_position(target_idx, sk_name, mapping, source_data, t):
    """Compute position that preserves side and signed distance"""
    if target_idx >= len(mapping['target_verts']):
        return mathutils.Vector((0, 0, 0))

    target_pos = mapping['target_verts'][target_idx]

    if target_idx >= len(mapping['surface_points']):
        return target_pos

    is_boundary = target_idx in mapping.get('boundary_vertices', set())
    surface_point = mapping['surface_points'][target_idx]

    def _field(key, default):
        # Bounds-guarded per-vertex lookup with a fallback default.
        values = mapping[key]
        return values[target_idx] if target_idx < len(values) else default

    signed_dist = _field('signed_distances', 0)
    side_flag = _field('side_flags', 1)
    confidence = _field('confidence', 0.5)
    face_idx = _field('face_indices', -1)
    bary_coords = _field('barycentric_coords', [1.0])
    source_indices = _field('source_indices', [0])

    deformed_surface = interpolate_source_position_safe(
        source_data, sk_name, surface_point, face_idx, bary_coords, source_indices
    )

    # Boundary vertices advance toward the deformed target more slowly.
    if is_boundary:
        deformed_surface = limit_boundary_velocity(target_idx, target_pos, deformed_surface, t, mapping)

    # Interpolated surface point between rest pose and deformed pose.
    current_surface = (1 - t) * surface_point + t * deformed_surface

    if target_idx < len(mapping['direction_vectors']):
        reference_normal = mapping['direction_vectors'][target_idx].normalized()
    else:
        reference_normal = mathutils.Vector((0, 0, 1))
    if reference_normal.length < 0.1:
        reference_normal = mathutils.Vector((0, 0, 1))

    # Offset from the surface: boundary vertices keep only 20% of their
    # original distance (they should hug the surface); others are scaled
    # by mapping confidence.
    abs_dist = abs(signed_dist)
    abs_dist = abs_dist * 0.2 if is_boundary else abs_dist * confidence

    if side_flag > 0:
        base_pos = current_surface + reference_normal * abs_dist
    elif side_flag < 0:
        base_pos = current_surface - reference_normal * abs_dist
    else:
        base_pos = current_surface

    # Plain linear blend between rest position and deformed surface.
    direct_mapped = (1 - t) * target_pos + t * deformed_surface

    # Boundary vertices rely mostly on the direct mapping (10% side-preserving).
    blend_weight = 0.1 if is_boundary else confidence * (1 - t * 0.3)
    blended_pos = (1 - blend_weight) * direct_mapped + blend_weight * base_pos

    if side_flag != 0:
        final_pos = enforce_side_constraint(
            blended_pos, current_surface, reference_normal, side_flag, confidence, is_boundary
        )
    else:
        final_pos = blended_pos

    # Damp strongly-deforming vertices at high t values.
    if 'deformation_magnitude' in mapping and target_idx < len(mapping['deformation_magnitude']):
        magnitude = mapping['deformation_magnitude'][target_idx]
        final_pos = adaptive_damping(final_pos, target_pos, deformed_surface, magnitude, t, is_boundary=is_boundary)

    return final_pos
|
||||
|
||||
def smooth_boundary_areas(target_obj, sk_name, mapping, source_data):
    """Specifically smooth boundary vertices to prevent them from popping out"""
    print(f" Smoothing boundary areas for {sk_name}...")

    sk = target_obj.data.shape_keys.key_blocks[sk_name]
    current_positions = [v.co.copy() for v in sk.data]
    num_verts = len(current_positions)

    boundary_vertices = mapping.get('boundary_vertices', set())
    if not boundary_vertices:
        return

    adjacency = mapping['adjacency']

    # Three passes; each blends boundary vertices toward a weighted average
    # of their neighbours, with progressively stronger blending.
    for iteration in range(3):
        new_positions = current_positions.copy()

        for i in boundary_vertices:
            if i >= num_verts:
                continue

            neighbors = adjacency[i]
            if not neighbors:
                continue

            # Neighbours on the same side of the surface get double weight.
            weighted_sum = mathutils.Vector((0, 0, 0))
            total_weight = 0
            for n in neighbors:
                if n < num_verts:
                    both_classified = n < len(mapping['side_flags']) and i < len(mapping['side_flags'])
                    if both_classified and mapping['side_flags'][n] == mapping['side_flags'][i]:
                        weight = 2.0
                    else:
                        weight = 1.0
                    weighted_sum += current_positions[n] * weight
                    total_weight += weight

            if total_weight > 0:
                avg_pos = weighted_sum / total_weight
                blend = 0.2 + iteration * 0.1
                new_positions[i] = (1 - blend) * current_positions[i] + blend * avg_pos

        current_positions = new_positions

    # Write back the smoothed coordinates and refresh the dependency graph.
    for i, pos in enumerate(current_positions):
        if i < len(sk.data):
            sk.data[i].co = pos

    sk.data.update()
    target_obj.data.update_tag()
    bpy.context.view_layer.update()

    print(f" Boundary smoothing complete")
|
||||
|
||||
def smooth_penetration_areas(target_obj, sk_name, mapping, source_data, threshold_t=0.55):
    """Smooth areas where penetration occurs at high t values"""
    # `threshold_t` is kept for interface compatibility; it is not used here.
    print(f" Smoothing penetration areas for {sk_name}...")

    sk = target_obj.data.shape_keys.key_blocks[sk_name]
    current_positions = [v.co.copy() for v in sk.data]
    num_verts = len(current_positions)

    boundary_vertices = mapping.get('boundary_vertices', set())

    # Find vertices that ended up on the wrong side of the fully deformed
    # (t = 1.0) source surface.
    problem_vertices = set()
    bvh_deformed, _ = create_bvh_for_source(source_data, sk_name)

    for i, pos in enumerate(current_positions):
        if i < len(mapping['surface_points']) and i < len(mapping['side_flags']):
            target_side = mapping['side_flags'][i]
            if target_side != 0:
                location, _, _, _ = bvh_deformed.find_nearest(pos)
                if location and i < len(mapping['direction_vectors']):
                    to_surface = pos - location
                    current_side = 1 if to_surface.dot(mapping['direction_vectors'][i]) > 0 else -1
                    if current_side != target_side:
                        problem_vertices.add(i)

    if not problem_vertices:
        print(f" No penetration detected")
        return

    print(f" Found {len(problem_vertices)} penetrating vertices")

    boundary_problems = problem_vertices.intersection(boundary_vertices)
    interior_problems = problem_vertices - boundary_vertices

    if boundary_problems:
        print(f" {len(boundary_problems)} are boundary vertices - these need special care")

    adjacency = mapping['adjacency']

    # Boundary penetrations: two very gentle passes, averaging only with
    # neighbours that are not themselves penetrating boundary vertices.
    for iteration in range(2):
        new_positions = current_positions.copy()

        for i in boundary_problems:
            neighbors = adjacency[i]
            if not neighbors:
                continue

            weighted_sum = mathutils.Vector((0, 0, 0))
            total_weight = 0
            for n in neighbors:
                if n < num_verts and n not in boundary_problems:
                    weighted_sum += current_positions[n]
                    total_weight += 1

            if total_weight > 0:
                avg_pos = weighted_sum / total_weight
                blend = 0.15  # very gentle
                new_positions[i] = (1 - blend) * current_positions[i] + blend * avg_pos

        current_positions = new_positions

    # Interior penetrations matter most where the source deforms strongly.
    large_deformation = set()
    if 'deformation_magnitude' in mapping:
        for i, mag in enumerate(mapping['deformation_magnitude']):
            if i < num_verts and mag > 0.7:
                large_deformation.add(i)

    focus_vertices = interior_problems.intersection(large_deformation)
    if focus_vertices:
        print(f" {len(focus_vertices)} interior high-deformation vertices")

    # Three stronger passes over the interior high-deformation vertices,
    # giving boundary neighbours reduced influence.
    for iteration in range(3):
        new_positions = current_positions.copy()

        for i in focus_vertices:
            neighbors = adjacency[i]
            if not neighbors:
                continue

            weighted_sum = mathutils.Vector((0, 0, 0))
            total_weight = 0
            for n in neighbors:
                if n < num_verts:
                    weight = 0.3 if n in boundary_vertices else 1.0
                    weighted_sum += current_positions[n] * weight
                    total_weight += weight

            if total_weight > 0:
                avg_pos = weighted_sum / total_weight
                blend = 0.3 + iteration * 0.1
                new_positions[i] = (1 - blend) * current_positions[i] + blend * avg_pos

        current_positions = new_positions

    # Write back and refresh the dependency graph.
    for i, pos in enumerate(current_positions):
        if i < len(sk.data):
            sk.data[i].co = pos

    sk.data.update()
    target_obj.data.update_tag()
    bpy.context.view_layer.update()

    print(f" Smoothing complete")
|
||||
|
||||
def set_shape_key_with_side_preservation(target_obj, sk_name, mapping, source_data):
    """Write side-preserving positions into shape key `sk_name`, then smooth.

    The key is filled while the key set is in absolute mode (use_relative
    off) so raw per-vertex positions can be assigned directly; relative
    mode is restored before the smoothing passes run.
    """

    print(f" Setting {sk_name} with side preservation...")

    # (Re)build deformation magnitudes when missing or out of sync with
    # the mapped vertex list.
    if ('deformation_magnitude' not in mapping
            or len(mapping['deformation_magnitude']) != len(mapping['target_verts'])):
        mapping['deformation_magnitude'] = compute_deformation_magnitudes(source_data, mapping)

    shape_keys = target_obj.data.shape_keys
    shape_keys.use_relative = False
    sk = shape_keys.key_blocks[sk_name]

    # Assign the computed position of every mapped vertex; indices past
    # the mapping are left untouched, so we can stop at the first one.
    mapped_count = len(mapping['target_verts'])
    for idx, vert in enumerate(sk.data):
        if idx >= mapped_count:
            break
        vert.co = compute_side_preserving_position(idx, sk_name, mapping, source_data, 1.0)

    sk.data.update()
    target_obj.data.update_tag()
    bpy.context.view_layer.update()

    # Back to relative mode; force another refresh before smoothing.
    shape_keys.use_relative = True
    target_obj.data.update_tag()
    bpy.context.view_layer.update()

    # Smoothing passes: boundary areas first, then penetration areas.
    smooth_boundary_areas(target_obj, sk_name, mapping, source_data)
    smooth_penetration_areas(target_obj, sk_name, mapping, source_data)
def test_shape_key_quality(target_obj, sk_name, mapping, source_data):
    """Probe shape key `sk_name` at several weights and report motion stats.

    For each test weight the key is re-evaluated, per-vertex movement
    since the previous weight is measured, and vertices that flipped to
    the wrong side of the source surface are counted. The key is reset
    to 0.0 afterwards.
    """

    sk = target_obj.data.shape_keys.key_blocks[sk_name]

    print(f" Testing quality of '{sk_name}':")

    test_values = [0.0, 0.3, 0.45, 0.5, 0.55, 0.6, 0.65, 0.7, 0.75, 0.77, 0.8, 0.9, 1.0]
    prev_positions = None
    boundary_vertices = mapping.get('boundary_vertices', set())
    mapped_count = len(mapping['target_verts'])

    for val in test_values:
        # Re-evaluate the key at this weight (0.0 is just the rest pose).
        if val == 0.0:
            sk.value = 0.0
        else:
            target_obj.data.shape_keys.use_relative = False
            for i, vtx in enumerate(sk.data):
                if i < mapped_count:
                    vtx.co = compute_side_preserving_position(i, sk_name, mapping, source_data, val)
            target_obj.data.shape_keys.use_relative = True

        target_obj.data.update_tag()
        bpy.context.view_layer.update()

        if prev_positions is not None:
            movements = []
            side_changes = 0
            boundary_issues = 0

            for i, vtx in enumerate(target_obj.data.vertices):
                if i >= len(prev_positions) or i >= len(mapping['side_flags']):
                    continue
                delta = (vtx.co - prev_positions[i]).length
                if delta > 0.001:
                    movements.append(delta)

                if i < len(mapping['surface_points']) and i < len(mapping['direction_vectors']):
                    to_surface = vtx.co - mapping['surface_points'][i]
                    current_side = 1 if to_surface.dot(mapping['direction_vectors'][i]) > 0 else -1
                    # A non-zero recorded side that disagrees with the
                    # current side means the vertex crossed the surface.
                    if current_side != mapping['side_flags'][i] and mapping['side_flags'][i] != 0:
                        side_changes += 1
                        if i in boundary_vertices:
                            boundary_issues += 1

            if movements:
                avg_movement = sum(movements) / len(movements)
                max_movement = max(movements)
                print(f" Value {val:.2f}: avg Δ {avg_movement:.4f}, max Δ {max_movement:.4f}")
            if side_changes > 0:
                print(f" ⚠ {side_changes} vertices changed side ({boundary_issues} on boundary)")

        prev_positions = [v.co.copy() for v in target_obj.data.vertices]

    # Reset the key and refresh the scene.
    sk.value = 0.0
    target_obj.data.update_tag()
    bpy.context.view_layer.update()
def transfer_shape_keys(source_data, target_obj):
    """Transfer every non-Basis shape key from the source onto `target_obj`.

    The surface mapping is computed once and reused for all keys.
    Returns the mapping so callers can run further diagnostics on it.
    """
    print(f" Transferring shape keys with side preservation...")

    mapping = compute_robust_surface_mapping(target_obj, source_data)

    # The Basis key is the rest pose; everything else is transferred.
    for sk_name in (name for name in source_data['names'] if name != 'Basis'):
        print(f" Processing: {sk_name}")
        set_shape_key_with_side_preservation(target_obj, sk_name, mapping, source_data)
        print(f" ✓ Transferred {sk_name}")

    return mapping
def verify_transfer(source_names, target_obj):
    """Verify shape keys were transferred correctly.

    Compares the source key names against the key blocks present on
    `target_obj` and prints a summary. Previously a mismatch printed
    nothing at all; now the missing/extra keys are reported explicitly.

    Args:
        source_names: list of shape key names expected on the target.
        target_obj: Blender object whose mesh carries the shape keys.
    """
    target_names = [sk.name for sk in target_obj.data.shape_keys.key_blocks]

    print(f" Verification:")
    print(f" Source keys: {len(source_names)}")
    print(f" Target keys: {len(target_names)}")

    source_set = set(source_names)
    target_set = set(target_names)
    if source_set == target_set:
        print(f" ✓ All shape keys present")
    else:
        # Report the discrepancy instead of failing silently.
        missing = sorted(source_set - target_set)
        extra = sorted(target_set - source_set)
        if missing:
            print(f" ⚠ Missing keys: {', '.join(missing)}")
        if extra:
            print(f" ⚠ Extra keys: {', '.join(extra)}")
def save_output_file(output_path, temp_target):
    """Save the current .blend to `output_path` and clean up the temp copy.

    Args:
        output_path: destination .blend path; parent dirs are created.
        temp_target: temporary file created earlier; removed best-effort,
            along with its directory if that leaves it empty.
    """
    print(f"\nSaving output file: {output_path}")

    output_dir = os.path.dirname(output_path)
    if output_dir:
        # exist_ok avoids the check-then-create race of the old
        # `if not os.path.exists(...): os.makedirs(...)` pattern.
        os.makedirs(output_dir, exist_ok=True)

    bpy.ops.wm.save_as_mainfile(filepath=output_path)

    # Best-effort cleanup: only swallow filesystem errors, not
    # KeyboardInterrupt/SystemExit as the old bare `except:` did.
    try:
        if os.path.exists(temp_target):
            os.remove(temp_target)
        temp_dir = os.path.dirname(temp_target)
        if os.path.exists(temp_dir) and not os.listdir(temp_dir):
            os.rmdir(temp_dir)
    except OSError:
        pass

    print(f"File saved successfully")
def main():
    """Entry point: load source/target blends, transfer keys, save output."""
    banner = "=" * 60
    print(banner)
    print("Blender Shape Key Transfer Script - Boundary Velocity Limiting")
    print(banner)

    source_file, target_file, output_file = parse_args()
    print(f"Source: {source_file}")
    print(f"Target: {target_file}")
    print(f"Output: {output_file}")

    try:
        source_data = load_source_data(source_file)
        target_objects, temp_target = load_target_file(target_file)

        if not target_objects:
            print("\nNo target objects found with required properties")
        else:
            for idx, target_obj in enumerate(target_objects, 1):
                print(f"\n[{idx}/{len(target_objects)}] Processing: {target_obj.name}")
                print(f" {'=' * 40}")

                ensure_shape_keys_structure(source_data['names'], target_obj)
                mapping = transfer_shape_keys(source_data, target_obj)
                verify_transfer(source_data['names'], target_obj)

                # Spot-check quality on the 'fat' key when present.
                for sk_name in source_data['names']:
                    if sk_name == 'fat':
                        test_shape_key_quality(target_obj, sk_name, mapping, source_data)
                        break

                print(f" {'=' * 40}")
                print(f" ✓ Completed")

            save_output_file(output_file, temp_target)

        print("\n" + "=" * 60)
        print("Script completed successfully!")
        print("=" * 60)

    except Exception as e:
        print(f"\nError: {str(e)}")
        import traceback
        traceback.print_exc()
        sys.exit(1)
if __name__ == "__main__":
    # Run directly (e.g. via `blender -b -P script.py`), not on import.
    main()
Reference in New Issue
Block a user