Now we have complete clothes pipeline except shape keys

This commit is contained in:
2026-02-22 17:58:20 +03:00
parent ccf451336d
commit ef4c675d98
12 changed files with 817 additions and 6 deletions

View File

@@ -48,12 +48,12 @@ set(VRM_IMPORTED_BLENDS
# DEPENDS ${CMAKE_SOURCE_DIR}/assets/blender/scripts/export_models.py ${VRM_IMPORTED_BLENDS} ${EDITED_BLEND_TARGETS}
# WORKING_DIRECTORY ${CMAKE_BINARY_DIR})
set(FEMALE_OBJECTS "BodyTopRobe;BodyTop;BodyBottom;BodyFeet;Hair;Face;BackHair;Accessoty")
set(MALE_OBJECTS "BodyTopRobe;BodyTop;BodyBottomPants;BodyBottom;BodyFeetPants;BodyFeetPantsShoes;BodyFeet;Hair;Face;BackHair;Accessory")
set(MALE_OBJECTS "BodyTopRobe;BodyTop;BodyBottomPants;BodyBottom_Panties001;BodyBottom;BodyFeetPants;BodyFeetPantsShoes;BodyFeet;Hair;Face;BackHair;Accessory")
add_custom_command(
OUTPUT ${CMAKE_BINARY_DIR}/characters/male/normal-male.glb
COMMAND ${CMAKE_COMMAND} -E make_directory ${CREATE_DIRECTORIES}
COMMAND ${BLENDER} -b -Y -P ${CMAKE_SOURCE_DIR}/assets/blender/scripts/export_models2.py --
${CMAKE_CURRENT_BINARY_DIR}/edited-normal-male.blend
${CMAKE_CURRENT_BINARY_DIR}/edited-normal-male-consolidated.blend
${CMAKE_BINARY_DIR}/characters/male/normal-male.glb
"${MALE_OBJECTS}"
"male"
@@ -64,7 +64,7 @@ add_custom_command(
-P ${CMAKE_CURRENT_SOURCE_DIR}/cmake/check_file_size.cmake
DEPENDS ${CMAKE_SOURCE_DIR}/assets/blender/scripts/export_models2.py
${VRM_IMPORTED_BLENDS}
${CMAKE_CURRENT_BINARY_DIR}/edited-normal-male.blend
${CMAKE_CURRENT_BINARY_DIR}/edited-normal-male-consolidated.blend
${CMAKE_CURRENT_BINARY_DIR}/blender-addons-installed
WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
VERBATIM
@@ -215,3 +215,200 @@ add_custom_target(edited-blends ALL DEPENDS ${EDITED_BLEND_TARGETS})
add_custom_target(import_vrm DEPENDS ${CHARACTER_GLBS})
# Weight-transfer a clothes .blend against its reference body mesh.
#
#   weight_clothes(<SRC>)
#
# SRC - path to the source clothes blend file.
#
# Produces ${CMAKE_CURRENT_BINARY_DIR}/clothes/<name>_weighted.blend plus a
# .stamp marking successful completion, then sanity-checks the output size.
function(weight_clothes SRC)
  get_filename_component(TARGET_NAME ${SRC} NAME_WE)
  add_custom_command(
    OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/clothes/${TARGET_NAME}_weighted.stamp
           ${CMAKE_CURRENT_BINARY_DIR}/clothes/${TARGET_NAME}_weighted.blend
    COMMAND ${CMAKE_COMMAND} -E make_directory ${CMAKE_CURRENT_BINARY_DIR}/clothes
    # process_clothes.py args: <source_blend> <lib_dir> <out_dir>.  The library
    # dir is the current binary dir (where the edited-normal-*.blend files
    # live); previously "./" relied on the default WORKING_DIRECTORY resolving
    # there, and the source path was rebuilt from TARGET_NAME instead of using
    # SRC directly (which DEPENDS already referenced).
    COMMAND ${BLENDER} -b -Y -P ${CMAKE_CURRENT_SOURCE_DIR}/process_clothes.py --
            ${SRC} ${CMAKE_CURRENT_BINARY_DIR} ${CMAKE_CURRENT_BINARY_DIR}/clothes
    COMMAND ${CMAKE_COMMAND} -E touch ${CMAKE_CURRENT_BINARY_DIR}/clothes/${TARGET_NAME}_weighted.stamp
    COMMAND ${CMAKE_COMMAND} -D FILE=${CMAKE_CURRENT_BINARY_DIR}/clothes/${TARGET_NAME}_weighted.blend
            -P ${CMAKE_CURRENT_SOURCE_DIR}/cmake/check_file_size.cmake
    DEPENDS ${SRC}
            ${CMAKE_CURRENT_SOURCE_DIR}/process_clothes.py
            ${CMAKE_CURRENT_SOURCE_DIR}/cmake/check_file_size.cmake
            ${CMAKE_CURRENT_BINARY_DIR}/blender-addons-installed
    COMMENT "Weighting clothes from ${SRC}"
    VERBATIM
  )
endfunction()
# Generate the weighted variant for each clothes source blend.
weight_clothes(${CMAKE_CURRENT_SOURCE_DIR}/clothes-male-bottom.blend)
# Function to combine clothes into a blend file
# Parameters:
# INPUT_BLEND - Input blend file (required)
# WEIGHTED_BLEND - Weighted clothes blend file to combine (required)
# COMBINED_BLEND - Output combined blend file (required)
# Combine a weighted clothes blend into a body blend.
#
#   add_clothes_combination(<INPUT_BLEND> <WEIGHTED_BLEND> <COMBINED_BLEND>
#                           [OUTPUT_DIR <dir>])
#
# INPUT_BLEND    - body blend file to merge the clothes into (required)
# WEIGHTED_BLEND - weighted clothes blend from weight_clothes() (required)
# COMBINED_BLEND - output combined blend file (required)
# OUTPUT_DIR     - directory for the completion stamp
#                  (default: ${CMAKE_CURRENT_BINARY_DIR}/clothes)
function(add_clothes_combination INPUT_BLEND WEIGHTED_BLEND COMBINED_BLEND)
  # Parse optional keyword arguments
  set(options "")
  set(oneValueArgs OUTPUT_DIR)
  set(multiValueArgs "")
  cmake_parse_arguments(COMBINE "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
  # Validate required arguments (positional args may legally arrive empty)
  if(NOT INPUT_BLEND)
    message(FATAL_ERROR "INPUT_BLEND is required for add_clothes_combination")
  endif()
  if(NOT WEIGHTED_BLEND)
    message(FATAL_ERROR "WEIGHTED_BLEND is required for add_clothes_combination")
  endif()
  if(NOT COMBINED_BLEND)
    message(FATAL_ERROR "COMBINED_BLEND output path is required for add_clothes_combination")
  endif()
  # Base name (minus any "_weighted" suffix) used for the stamp file
  get_filename_component(WEIGHTED_BLEND_NAME "${WEIGHTED_BLEND}" NAME_WE)
  string(REGEX REPLACE "_weighted$" "" TARGET_BASE "${WEIGHTED_BLEND_NAME}")
  if(NOT COMBINE_OUTPUT_DIR)
    set(COMBINE_OUTPUT_DIR "${CMAKE_CURRENT_BINARY_DIR}/clothes")
  endif()
  set(STAMP_FILE "${COMBINE_OUTPUT_DIR}/${TARGET_BASE}-combined.stamp")
  # Derive the stamp produced by weight_clothes() for this weighted blend
  get_filename_component(WEIGHTED_DIR "${WEIGHTED_BLEND}" DIRECTORY)
  get_filename_component(WEIGHTED_BASE "${WEIGHTED_BLEND}" NAME_WE)
  set(WEIGHTED_STAMP "${WEIGHTED_DIR}/${WEIGHTED_BASE}.stamp")
  # Directory the combined blend is written to (created at build time)
  get_filename_component(COMBINED_DIR "${COMBINED_BLEND}" DIRECTORY)
  add_custom_command(
    OUTPUT ${STAMP_FILE} ${COMBINED_BLEND}
    COMMAND ${CMAKE_COMMAND} -E make_directory ${COMBINED_DIR}
    # combine_clothes.py args: <body.blend> <clothes.blend> <output.blend>
    COMMAND ${BLENDER} -b -Y -P ${CMAKE_CURRENT_SOURCE_DIR}/combine_clothes.py --
            ${INPUT_BLEND}
            ${WEIGHTED_BLEND}
            ${COMBINED_BLEND}
    COMMAND ${CMAKE_COMMAND} -E touch ${STAMP_FILE}
    COMMAND ${CMAKE_COMMAND} -D FILE=${COMBINED_BLEND}
            -P ${CMAKE_CURRENT_SOURCE_DIR}/cmake/check_file_size.cmake
    # NOTE: INPUT_BLEND was previously missing from DEPENDS, so editing the
    # body blend did not trigger a rebuild of the combination.
    DEPENDS ${WEIGHTED_STAMP}
            ${WEIGHTED_BLEND}
            ${INPUT_BLEND}
            ${CMAKE_CURRENT_BINARY_DIR}/blender-addons-installed
            ${CMAKE_CURRENT_SOURCE_DIR}/combine_clothes.py
    COMMENT "Combining clothes from ${WEIGHTED_BLEND} into ${COMBINED_BLEND}"
    VERBATIM
  )
endfunction()
# Function to consolidate blend files
# Parameters:
# INPUT_BLEND - Input blend file to consolidate into (required)
# COMBINED_BLEND - Combined blend file to consolidate (required)
# OUTPUT_BLEND - Output consolidated blend file (required)
# Consolidate a combined blend file into the final character blend.
#
#   add_blend_consolidation(<INPUT_BLEND> <COMBINED_BLEND> <OUTPUT_BLEND>)
#
# INPUT_BLEND    - blend file Blender opens as the consolidation base (required)
# COMBINED_BLEND - combined blend from add_clothes_combination() (required)
# OUTPUT_BLEND   - output consolidated blend file (required)
function(add_blend_consolidation INPUT_BLEND COMBINED_BLEND OUTPUT_BLEND)
  # (The previous no-op cmake_parse_arguments call with three empty keyword
  # lists was removed; extra arguments are ignored either way.)
  # Validate required arguments (positional args may legally arrive empty)
  if(NOT INPUT_BLEND)
    message(FATAL_ERROR "INPUT_BLEND is required for add_blend_consolidation")
  endif()
  if(NOT COMBINED_BLEND)
    message(FATAL_ERROR "COMBINED_BLEND is required for add_blend_consolidation")
  endif()
  if(NOT OUTPUT_BLEND)
    message(FATAL_ERROR "OUTPUT_BLEND output path is required for add_blend_consolidation")
  endif()
  # Derive the stamp written by add_clothes_combination() for this blend:
  # strip "_combined" from "<base>_combined.blend" to get "<base>-combined.stamp".
  get_filename_component(COMBINED_NAME "${COMBINED_BLEND}" NAME_WE)
  string(REGEX REPLACE "_combined$" "" TARGET_BASE "${COMBINED_NAME}")
  get_filename_component(COMBINED_DIR "${COMBINED_BLEND}" DIRECTORY)
  set(COMBINE_STAMP "${COMBINED_DIR}/${TARGET_BASE}-combined.stamp")
  # Directory the consolidated blend is written to (created at build time)
  get_filename_component(OUTPUT_DIR "${OUTPUT_BLEND}" DIRECTORY)
  add_custom_command(
    OUTPUT ${OUTPUT_BLEND}
    COMMAND ${CMAKE_COMMAND} -E make_directory ${OUTPUT_DIR}
    # consolidate.py runs inside INPUT_BLEND and merges COMBINED_BLEND into it
    COMMAND ${BLENDER} -b -Y ${INPUT_BLEND}
            -P ${CMAKE_CURRENT_SOURCE_DIR}/consolidate.py --
            ${COMBINED_BLEND}
            ${OUTPUT_BLEND}
    COMMAND ${CMAKE_COMMAND} -D FILE=${OUTPUT_BLEND}
            -P ${CMAKE_CURRENT_SOURCE_DIR}/cmake/check_file_size.cmake
    DEPENDS ${COMBINED_BLEND}
            ${CMAKE_CURRENT_SOURCE_DIR}/consolidate.py
            ${COMBINE_STAMP}
            ${INPUT_BLEND}
    COMMENT "Consolidating ${COMBINED_BLEND} into ${OUTPUT_BLEND}"
    VERBATIM
  )
endfunction()
# Combined pipeline function
# Parameters:
# INPUT_BLEND - Input blend file (required)
# WEIGHTED_BLEND - Weighted clothes blend file (required)
# FINAL_OUTPUT_BLEND - Final consolidated output (required)
# Drive the full clothes pipeline: merge the weighted clothes into the body
# blend, then consolidate the result into the final output blend, and expose
# the chain behind an always-built named target.
#
#   add_clothes_pipeline(<INPUT_BLEND> <WEIGHTED_BLEND> <FINAL_OUTPUT_BLEND>
#                        [COMBINED_BLEND <file>] [INTERMEDIATE_DIR <dir>])
#
# INPUT_BLEND        - body blend file (required)
# WEIGHTED_BLEND     - weighted clothes blend (required)
# FINAL_OUTPUT_BLEND - final consolidated output (required)
# COMBINED_BLEND     - override for the intermediate combined blend path
# INTERMEDIATE_DIR   - directory for intermediates
#                      (default: ${CMAKE_CURRENT_BINARY_DIR}/clothes)
function(add_clothes_pipeline INPUT_BLEND WEIGHTED_BLEND FINAL_OUTPUT_BLEND)
  cmake_parse_arguments(PIPELINE "" "COMBINED_BLEND;INTERMEDIATE_DIR" "" ${ARGN})
  # All three positional arguments must be non-empty.
  foreach(required_arg INPUT_BLEND WEIGHTED_BLEND FINAL_OUTPUT_BLEND)
    if(NOT ${required_arg})
      message(FATAL_ERROR "${required_arg} is required for add_clothes_pipeline")
    endif()
  endforeach()
  # Base name (weighted blend minus "_weighted") names intermediates and the
  # driver target.
  get_filename_component(WEIGHTED_NAME "${WEIGHTED_BLEND}" NAME_WE)
  string(REGEX REPLACE "_weighted$" "" TARGET_BASE "${WEIGHTED_NAME}")
  # Fill in defaults for the optional keyword arguments.
  if(NOT PIPELINE_INTERMEDIATE_DIR)
    set(PIPELINE_INTERMEDIATE_DIR "${CMAKE_CURRENT_BINARY_DIR}/clothes")
  endif()
  if(NOT PIPELINE_COMBINED_BLEND)
    set(PIPELINE_COMBINED_BLEND "${PIPELINE_INTERMEDIATE_DIR}/${TARGET_BASE}_combined.blend")
  endif()
  # Step 1: merge the weighted clothes into the body blend.
  add_clothes_combination(
    "${INPUT_BLEND}"
    "${WEIGHTED_BLEND}"
    "${PIPELINE_COMBINED_BLEND}"
    OUTPUT_DIR "${PIPELINE_INTERMEDIATE_DIR}"
  )
  # Step 2: consolidate the combined blend into the final output.
  add_blend_consolidation(
    "${INPUT_BLEND}"
    "${PIPELINE_COMBINED_BLEND}"
    "${FINAL_OUTPUT_BLEND}"
  )
  # Named target so the whole chain is built by default.
  add_custom_target(${TARGET_BASE}_pipeline ALL
    DEPENDS ${FINAL_OUTPUT_BLEND}
    COMMENT "Running complete clothes pipeline for ${TARGET_BASE}"
  )
endfunction()
# Build the male bottom clothes into the consolidated male character blend.
add_clothes_pipeline(
  "${CMAKE_CURRENT_SOURCE_DIR}/edited-normal-male.blend" # INPUT_BLEND
  "${CMAKE_CURRENT_BINARY_DIR}/clothes/clothes-male-bottom_weighted.blend" # WEIGHTED_BLEND
  "${CMAKE_CURRENT_BINARY_DIR}/edited-normal-male-consolidated.blend" # FINAL_OUTPUT_BLEND
)

Binary file not shown.

Binary file not shown.

View File

@@ -0,0 +1,150 @@
import bpy
import bmesh
import sys
import os
import mathutils
from mathutils.bvhtree import BVHTree
def run_batch_combine():
    """Merge weighted clothing meshes into their reference body meshes.

    Invocation:
        blender -b -P script.py -- <body.blend> <clothes.blend> <output.blend>

    Every clothing mesh carries a "ref_part" custom property naming its body
    mesh.  Body vertices covered by the cloth are pushed inward (so they do
    not poke through), deeply hidden vertices are deleted, and cloth + body
    are joined into one object parented to the body's armature.
    """
    try:
        args = sys.argv[sys.argv.index("--") + 1:]
        body_blend_path, clothes_blend_path, output_path = args[0], args[1], args[2]
    except (ValueError, IndexError):
        # BUGFIX: the usage line previously omitted the third (output) argument.
        print("Usage: blender -b -P script.py -- <body.blend> <clothes.blend> <output.blend>")
        return
    bpy.ops.wm.read_homefile(use_empty=True)
    # 1. Append every object from both blend files into the empty scene.
    for path in [clothes_blend_path, body_blend_path]:
        with bpy.data.libraries.load(path) as (data_from, data_to):
            data_to.objects = data_from.objects
        for obj in data_to.objects:
            if obj: bpy.context.collection.objects.link(obj)
    all_objs = bpy.data.objects
    # Clothing meshes are tagged with a "ref_part" custom property.
    clothing_meshes = [o for o in all_objs if o.type == 'MESH' and "ref_part" in o]
    whitelist = set()  # objects that survive the final cleanup
    for cloth in clothing_meshes:
        target_name = cloth["ref_part"]
        target_body = all_objs.get(target_name)
        if not target_body: continue
        # --- STEP A: RAYCAST & INITIAL HIT LIST ---
        # Cast short rays from each body vertex along its normal (forward and
        # backward); any hit on the cloth BVH marks the vertex as covered.
        depsgraph = bpy.context.evaluated_depsgraph_get()
        cloth_eval = cloth.evaluated_get(depsgraph)
        bvh_cloth = BVHTree.FromObject(cloth_eval, depsgraph)
        m_body = target_body.matrix_world
        m_body_inv = m_body.inverted()
        # Inverse-transpose of the 3x3 part: correct transform for normals.
        m_body_normal = m_body.to_3x3().inverted().transposed()
        num_verts = len(target_body.data.vertices)
        hit_values = [0] * num_verts
        has_shape_keys = target_body.data.shape_keys is not None
        for i, v in enumerate(target_body.data.vertices):
            v_world = m_body @ v.co
            n_world = (m_body_normal @ v.normal).normalized()
            hit_f, _, _, _ = bvh_cloth.ray_cast(v_world, n_world, 0.015)
            hit_b, _, _, _ = bvh_cloth.ray_cast(v_world, -n_world, 0.005)
            if hit_f or hit_b:
                hit_values[i] = 1
                # Push covered vertices slightly under the cloth surface.
                offset = -n_world * (0.005 if hit_f else 0.01)
                new_co = m_body_inv @ (v_world + offset)
                v.co = new_co
                if has_shape_keys:
                    # Keep every shape key in sync with the moved basis vertex.
                    for kb in target_body.data.shape_keys.key_blocks:
                        kb.data[i].co = new_co
        # --- STEP B: MULTI-LAYER PROTECTION & DELETE ---
        bm = bmesh.new()
        bm.from_mesh(target_body.data)
        bm.verts.ensure_lookup_table()
        # Phase 1: "Layer 1 border" — hidden verts adjacent to a visible vert.
        border_l1 = set()
        for v in bm.verts:
            if hit_values[v.index] == 0:
                for edge in v.link_edges:
                    neighbor = edge.other_vert(v)
                    if hit_values[neighbor.index] == 1:
                        border_l1.add(neighbor.index)
        # Phase 2: "Layer 2 buffer" — hidden verts adjacent to layer 1.
        border_l2 = set()
        for idx in border_l1:
            v = bm.verts[idx]
            for edge in v.link_edges:
                neighbor = edge.other_vert(v)
                if hit_values[neighbor.index] == 1:
                    border_l2.add(neighbor.index)
        # Protect visible verts (layer 0) plus both border layers.
        protected_indices = set(border_l1) | set(border_l2)
        for i, val in enumerate(hit_values):
            if val == 0: protected_indices.add(i)
        # Delete deeply hidden vertices: own hit + neighbor hits >= THRESHOLD,
        # plus any dangling vertex left with a single edge.
        to_delete = []
        THRESHOLD = 4.0  # Higher threshold for deep cleaning
        for v in bm.verts:
            if v.index in protected_indices:
                continue
            neighbor_hit_sum = hit_values[v.index]
            for edge in v.link_edges:
                neighbor = edge.other_vert(v)
                neighbor_hit_sum += hit_values[neighbor.index]
            if neighbor_hit_sum >= THRESHOLD:
                to_delete.append(v)
            elif len(v.link_edges) == 1:
                to_delete.append(v)
        bmesh.ops.delete(bm, geom=to_delete, context='VERTS')
        bm.to_mesh(target_body.data)
        bm.free()
        target_body.data.update()
        # --- STEP C: ARMATURE & JOIN ---
        master_arm = target_body.parent if (target_body.parent and target_body.parent.type == 'ARMATURE') else None
        if master_arm: whitelist.add(master_arm)
        cloth_label, body_label = cloth.name, target_body.name
        if cloth.parent and cloth.parent.type == 'ARMATURE':
            old_arm = cloth.parent
            # BUGFIX: the world matrix must be captured BEFORE clearing the
            # parent and re-applied AFTER.  The old code assigned the copy to
            # itself (a no-op) and then cleared the parent, which shifts the
            # cloth by the inverse of the old parent transform.
            world_mtx = cloth.matrix_world.copy()
            cloth.parent = None
            cloth.matrix_world = world_mtx
            if old_arm not in whitelist: bpy.data.objects.remove(old_arm, do_unlink=True)
        if master_arm:
            cloth.parent = master_arm
            for mod in cloth.modifiers:
                if mod.type == 'ARMATURE': mod.object = master_arm
        bpy.ops.object.select_all(action='DESELECT')
        cloth.select_set(True)
        target_body.select_set(True)
        bpy.context.view_layer.objects.active = target_body
        bpy.ops.object.join()
        target_body.name = f"{body_label}_{cloth_label}"
        whitelist.add(target_body)
    # 4. Final Cleanup: drop everything that is not a surviving body/armature.
    for obj in bpy.data.objects[:]:
        if obj.type in {'MESH', 'ARMATURE'} and obj not in whitelist:
            bpy.data.objects.remove(obj, do_unlink=True)
    bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=True, do_recursive=True)
    bpy.ops.wm.save_as_mainfile(filepath=output_path)


if __name__ == "__main__":
    run_batch_combine()

View File

@@ -0,0 +1,71 @@
import bpy
import sys
import os
def process_append(source_files, output_path):
    """Append tagged clothing meshes from source blends and rig them.

    A mesh qualifies only if it carries all of the custom properties "age",
    "sex" and "slot"; the "sex" value doubles as the name of the armature
    object looked up in the current file.  Qualifying meshes are moved into
    the armature's collections, parented to it and given an Armature
    modifier; everything else appended is deleted again.  The result is
    saved to output_path.
    """
    required_props = {"age", "sex", "slot"}
    for file_path in source_files:
        if not os.path.exists(file_path):
            continue  # silently skip missing source files
        with bpy.data.libraries.load(file_path) as (data_from, data_to):
            data_to.objects = data_from.objects
        for obj in data_to.objects:
            if obj is None: continue
            # Check criteria: mesh carrying all required custom properties
            has_props = all(p in obj.keys() for p in required_props)
            if obj.type == 'MESH' and has_props:
                # 1. Link to the scene root temporarily
                bpy.context.collection.objects.link(obj)
                # 2. Synchronize Names (mesh datablock named after the object)
                obj.data.name = obj.name
                # 3. Find Target Armature — the "sex" property names the rig
                arm_name = obj.get("sex")
                arm_obj = bpy.data.objects.get(arm_name)
                if arm_obj and arm_obj.type == 'ARMATURE':
                    # A. Handle Collections: Move mesh to armature's collections
                    # Remove from all current collections first
                    for col in obj.users_collection:
                        col.objects.unlink(obj)
                    # Link to every collection the armature belongs to
                    for col in arm_obj.users_collection:
                        col.objects.link(obj)
                    # B. Parent to Armature
                    obj.parent = arm_obj
                    # C. Handle Armature Modifier (reuse an existing one if present)
                    arm_mod = next((m for m in obj.modifiers if m.type == 'ARMATURE'), None)
                    if not arm_mod:
                        arm_mod = obj.modifiers.new(name="Armature", type='ARMATURE')
                    arm_mod.object = arm_obj
                    print(f"Processed {obj.name}: Parented and Modset to {arm_name}")
                else:
                    print(f"Warning: Armature '{arm_name}' not found for {obj.name}")
            else:
                # Clean up data not meeting criteria
                bpy.data.objects.remove(obj, do_unlink=True)
    # 4. Recursive Purge of all unlinked data (Materials, Textures, Meshes)
    bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=True, do_recursive=True)
    # Save
    bpy.ops.wm.save_as_mainfile(filepath=output_path)
if __name__ == "__main__":
    # CLI: blender -b -P script.py -- <source.blend>... <output.blend>
    try:
        args = sys.argv[sys.argv.index("--") + 1:]
        if len(args) >= 2:
            # All leading args are source blends; the last one is the output.
            *sources, output = args
            process_append(sources, output)
        else:
            # BUGFIX: previously fell through silently when given fewer than
            # two arguments, producing no output and no diagnostic.
            print("Usage: blender -b -P script.py -- <source.blend>... <output.blend>")
    except ValueError:
        print("Error: Use '--' to separate Blender args from script args.")

View File

@@ -0,0 +1,53 @@
import bpy
def transfer_body_data():
source_name = "Body"
target_names = ["BodyTop", "bodyBottom", "BodyFeet"]
source_obj = bpy.data.objects.get(source_name)
if not source_obj:
print(f"Source object '{source_name}' not found!")
return
for name in target_names:
target_obj = bpy.data.objects.get(name)
if not target_obj:
print(f"Target '{name}' not found, skipping...")
continue
# 1. FIX NORMALS & WEIGHTS (Data Transfer)
# Required for Blender 4.1+: Enable Auto Smooth (Smooth by Angle)
target_obj.data.use_auto_smooth = True
dt_mod = target_obj.modifiers.new(name="TR_DATA", type='DATA_TRANSFER')
dt_mod.object = source_obj
# Transfer Vertex Groups (Weights)
dt_mod.use_vert_data = True
dt_mod.data_types_verts = {'VGROUP_WEIGHTS'}
dt_mod.vert_mapping = 'NEAREST'
# Transfer Normals (Fixes Seams)
dt_mod.use_loop_data = True
dt_mod.data_types_loops = {'CUSTOM_NORMAL'}
dt_mod.loop_mapping = 'NEAREST_POLYNOR'
# Apply to bake the weights and normals
bpy.context.view_layer.objects.active = target_obj
bpy.ops.object.modifier_apply(modifier=dt_mod.name)
# 2. TRANSFER SHAPE KEYS
if source_obj.data.shape_keys:
# Deselect all, then select source then target
bpy.ops.object.select_all(action='DESELECT')
source_obj.select_set(True)
target_obj.select_set(True)
bpy.context.view_layer.objects.active = target_obj
# Joins shape keys from source to target
bpy.ops.object.shape_key_transfer()
print("Transfer complete: Normals, Weights, and Shape Keys synced.")
transfer_body_data()

View File

@@ -0,0 +1,53 @@
import bpy
def fix_seams_and_transfer_data():
source_name = "Body"
target_names = ["BodyTop", "bodyBottom", "BodyFeet"]
source_obj = bpy.data.objects.get(source_name)
if not source_obj:
print(f"Error: '{source_name}' not found.")
return
for t_name in target_names:
target_obj = bpy.data.objects.get(t_name)
if not target_obj:
continue
# 1. PREP TARGET
# In 3.6, Auto Smooth must be True to see custom normals
target_obj.data.use_auto_smooth = True
bpy.context.view_layer.objects.active = target_obj
# 2. TRANSFER WEIGHTS & NORMALS (Data Transfer Mod)
dt_mod = target_obj.modifiers.new(name="SeamFix", type='DATA_TRANSFER')
dt_mod.object = source_obj
# Vertex Data (Weights)
dt_mod.use_vert_data = True
dt_mod.data_types_verts = {'VGROUP_WEIGHTS'}
# Face Corner Data (Normals)
dt_mod.use_loop_data = True
dt_mod.data_types_loops = {'CUSTOM_NORMAL'}
dt_mod.loop_mapping = 'NEAREST_POLYNOR'
# Apply the modifier to bake the data
bpy.ops.object.modifier_apply(modifier=dt_mod.name)
# 3. TRANSFER SHAPE KEYS
if source_obj.data.shape_keys:
# Clear selection and set up: Source must be Active, Target Selected
bpy.ops.object.select_all(action='DESELECT')
target_obj.select_set(True)
source_obj.select_set(True)
bpy.context.view_layer.objects.active = source_obj
# Transfer shape keys based on vertex position
# Note: This creates keys on the Target object
bpy.ops.object.shape_key_transfer()
print("Process complete for Blender 3.6.")
fix_seams_and_transfer_data()

View File

@@ -0,0 +1,54 @@
import bpy
def optimize_mesh_for_ogre(obj):
if obj.type != 'MESH':
return
print(obj.type)
print(f"Starting with {obj.name}")
# Set as active and enter Weight Paint mode to use ops
bpy.context.view_layer.objects.active = obj
bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
# 1. Clean zero weights (below 0.001)
bpy.ops.object.vertex_group_clean(group_select_mode='ALL', limit=0.001)
# 2. Limit influences to 4 per vertex (Ogre/GPU standard)
bpy.ops.object.vertex_group_limit_total(limit=4)
# 3. Normalize all weights (Sum of all bone influences = 1.0)
# This prevents "multiplied" or "weak" movements in engine
bpy.ops.object.vertex_group_normalize_all()
bpy.ops.object.mode_set(mode='OBJECT')
# 4. Remove empty Vertex Groups (groups with no assigned vertices)
# This keeps the bone index list identical across modular parts
vgroup_indices_to_remove = []
for i, group in enumerate(obj.vertex_groups):
has_vertices = False
for v in obj.data.vertices:
for g in v.groups:
if g.group == i and g.weight > 0.001:
has_vertices = True
break
if has_vertices: break
if not has_vertices:
print("removed group:" + group.name)
vgroup_indices_to_remove.append(group.name)
for name in vgroup_indices_to_remove:
print("removing group: " + name)
obj.vertex_groups.remove(obj.vertex_groups.get(name))
print(f"Finished {obj.name}: 4-weight limit applied, Normalized, {len(vgroup_indices_to_remove)} empty groups removed.")
# Execute on all selected mesh objects
selected_meshes = [o for o in bpy.data.objects if o.type == 'MESH' and not o.name.startswith("cs_")]
if not selected_meshes:
print("No mesh objects selected.")
else:
for mesh_obj in selected_meshes:
optimize_mesh_for_ogre(mesh_obj)

View File

@@ -0,0 +1,146 @@
import bpy
import os
import sys
def clean_scene():
    """Delete every object in the scene, then purge all orphaned datablocks
    (materials, textures, meshes) so the file starts truly empty."""
    bpy.ops.object.select_all(action='SELECT')
    bpy.ops.object.delete()
    bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=True, do_recursive=True)
def remove_empty_vertex_groups(obj, threshold=0.001):
    """Drop every vertex group whose strongest weight is below ``threshold``.

    Only acts on mesh objects.  Prints a summary of how many groups were
    removed from the object.
    """
    if obj.type != 'MESH':
        return
    # Make the object active before touching its vertex groups.
    bpy.context.view_layer.objects.active = obj
    # Track the strongest weight observed for each group index.
    peak_weight = dict.fromkeys((vg.index for vg in obj.vertex_groups), 0.0)
    for vertex in obj.data.vertices:
        for assignment in vertex.groups:
            idx = assignment.group
            if idx in peak_weight and assignment.weight > peak_weight[idx]:
                peak_weight[idx] = assignment.weight
    # Collect first, remove second — removal invalidates group indices.
    doomed = [obj.vertex_groups[idx] for idx, top in peak_weight.items() if top < threshold]
    for vg in doomed:
        obj.vertex_groups.remove(vg)
    print(f"Cleaned {len(doomed)} empty/low-weight groups from {obj.name}")
def process_batch():
    """Weight-transfer every clothing mesh found in a source blend.

    CLI (after "--"): <source_blend> <lib_dir> <out_dir>

    Each clothing object must carry "ref_sex", "ref_age" and "ref_clothing"
    custom properties; these select the reference library blend, the rig
    object (named after the sex value) and the body mesh whose weights are
    copied onto the clothing.  All processed items are saved together as
    <source>_weighted.blend in out_dir.
    """
    try:
        args = sys.argv[sys.argv.index("--") + 1:]
        clothing_path, lib_directory, out_dir = args[0], args[1], args[2]
    except (IndexError, ValueError):
        print("Usage: blender -b -P script.py -- <source_blend> <lib_dir> <out_dir>")
        return
    if not os.path.exists(out_dir): os.makedirs(out_dir)
    # 1. Identify all clothing in the source file (reads names only)
    with bpy.data.libraries.load(clothing_path) as (data_from, data_to):
        all_clothing_names = data_from.objects
    # Start with a fresh scene
    clean_scene()
    for name in all_clothing_names:
        # 2. Append the clothing item
        with bpy.data.libraries.load(clothing_path) as (data_from, data_to):
            data_to.objects = [name]
        for obj in data_to.objects:
            if obj: bpy.context.collection.objects.link(obj)
        clothing = bpy.data.objects.get(name)
        if not clothing or clothing.type != 'MESH': continue
        # Get properties that point at the reference body/rig
        sex = clothing.get("ref_sex")
        age = clothing.get("ref_age")
        ref_mesh_name = clothing.get("ref_clothing")
        if not all([sex, age, ref_mesh_name]):
            print(f"Skipping {name}: Missing properties")
            continue
        # 3. Locate Reference Library
        target_lib_name = f"normal_{age}_{sex}.blend"
        target_lib_path = os.path.join(lib_directory, target_lib_name)
        rig_name = str(sex)  # the rig object is named after the sex value
        if not os.path.exists(target_lib_path):
            # Fall back to the edited adult blends shipped under other names
            if target_lib_name == "normal_adult_male.blend":
                target_lib_name = "edited-normal-male.blend"
            elif target_lib_name == "normal_adult_female.blend":
                target_lib_name = "edited-normal-female.blend"
            target_lib_path = os.path.join(lib_directory, target_lib_name)
            if not os.path.exists(target_lib_path):
                print(f"Error: Library {target_lib_path} not found")
                continue
        # 4. Append Weights Source and Rig
        with bpy.data.libraries.load(target_lib_path) as (data_from, data_to):
            data_to.objects = [ref_mesh_name, rig_name]
        for obj in data_to.objects:
            if obj: bpy.context.collection.objects.link(obj)
        source_mesh = bpy.data.objects.get(ref_mesh_name)
        rig = bpy.data.objects.get(rig_name)
        # 5. Prep Objects (Apply Scale & Clear Animation)
        bpy.context.view_layer.objects.active = clothing
        bpy.ops.object.transform_apply(location=False, rotation=True, scale=True)
        for item in [clothing, rig]:
            if item.animation_data: item.animation_data_clear()
            if item.type == 'ARMATURE':
                # Clear the pose so the transfer happens in rest position
                bpy.context.view_layer.objects.active = item
                bpy.ops.object.mode_set(mode='POSE')
                bpy.ops.pose.transforms_clear()
                bpy.ops.object.mode_set(mode='OBJECT')
        # 6. Weight Transfer from the reference body mesh
        clothing.vertex_groups.clear()
        dt = clothing.modifiers.new(name="WT", type='DATA_TRANSFER')
        dt.object = source_mesh
        dt.use_vert_data = True
        dt.data_types_verts = {'VGROUP_WEIGHTS'}
        dt.layers_vgroup_select_src = 'ALL'
        dt.vert_mapping = 'POLYINTERP_NEAREST'
        bpy.context.view_layer.objects.active = clothing
        # "Generate Data Layers" step — creates the matching vertex groups
        bpy.ops.object.datalayout_transfer(modifier=dt.name)
        bpy.ops.object.modifier_apply(modifier=dt.name)
        remove_empty_vertex_groups(clothing, threshold=0.001)
        # 7. Final Parenting
        clothing.parent = rig
        arm_mod = clothing.modifiers.new(name="Armature", type='ARMATURE')
        arm_mod.object = rig
        # 8. Cleanup Reference Mesh (keep the Rig!)
        bpy.data.objects.remove(source_mesh, do_unlink=True)
        print(f"Processed: {name}")
    # 9. Save as single file named after source + _weighted
    source_filename = os.path.splitext(os.path.basename(clothing_path))[0]
    final_save_path = os.path.join(out_dir, f"{source_filename}_weighted.blend")
    # Purge any remaining junk before final save
    bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=True, do_recursive=True)
    bpy.ops.wm.save_as_mainfile(filepath=final_save_path)
    print(f"\n--- ALL DONE ---")
    print(f"Saved to: {final_save_path}")


if __name__ == "__main__":
    process_batch()

View File

@@ -0,0 +1,79 @@
#!/usr/bin/env python
"""Export the opened blend file to an Ogre .scene via blender2ogre.

CLI (after "--"): <output .glb/.gltf path>; the Ogre scene is written next to
it with a .scene extension.
"""
import os, sys, time
import bpy
from math import pi
import glob
import shutil
from mathutils import Vector, Matrix
from math import radians, pi

argv = sys.argv
argv = argv[argv.index("--") + 1:]

# Make the bundled blender2ogre addon importable from next to this script.
incpath = os.path.dirname(__file__)
sys.path.insert(0, incpath)
sys.path.insert(1, incpath + "/blender2ogre")
print(sys.path)
import io_ogre
io_ogre.register()

gltf_file = argv[0]
print("Exporting to " + gltf_file)
basepath = os.getcwd()

# (A large commented-out bpy.ops.export_scene.gltf(...) reference call was
# removed here; the Ogre export below is the active path.)

# Drop "-col" collision helper objects before export.
# BUGFIX: iterate over a snapshot — removing objects while iterating
# bpy.data.objects directly can skip entries; the old code also read
# obj.rigid_body AFTER removing the object.
for obj in list(bpy.data.objects):
    if obj.name.endswith("-col"):
        bpy.data.objects.remove(obj)
    elif obj.rigid_body:
        print(obj.rigid_body.collision_shape)

scene_file = gltf_file.replace(".glb", "").replace(".gltf", "") + ".scene"
bpy.ops.ogre.export(filepath=scene_file,
    EX_SWAP_AXIS='xz-y',
    EX_V2_MESH_TOOL_VERSION='v2',
    EX_EXPORT_XML_DELETE=True,
    EX_SCENE=True,
    EX_SELECTED_ONLY=False,
    EX_EXPORT_HIDDEN=False,
    EX_FORCE_CAMERA=False,
    EX_FORCE_LIGHTS=False,
    EX_NODE_ANIMATION=True,
    EX_MATERIALS=True,
    EX_SEPARATE_MATERIALS=True,
    EX_COPY_SHADER_PROGRAMS=True,
    EX_MESH=True,
    EX_LOD_LEVELS=3,
    EX_LOD_DISTANCE=100,
    EX_LOD_PERCENT=40
)
bpy.ops.wm.read_homefile(use_empty=True)
time.sleep(2)  # give the exporter's background work a moment before quitting
bpy.ops.wm.quit_blender()