Compare commits
10 Commits
884a310033
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
| 51dda7e79d | |||
| fe083f13da | |||
| cb2ce23009 | |||
| ef4c675d98 | |||
| ccf451336d | |||
| eab5ed0794 | |||
| 8fddcb4cd9 | |||
| be10abda16 | |||
| 01c1210b1b | |||
| 5d5d04b690 |
53
Game.cpp
53
Game.cpp
@@ -21,6 +21,7 @@
|
||||
#include "physics.h"
|
||||
#include "sound.h"
|
||||
#include <tracy/Tracy.hpp>
|
||||
#include <tracy/TracyC.h>
|
||||
class App;
|
||||
class SkyRenderer : public Ogre::SceneManager::Listener {
|
||||
protected:
|
||||
@@ -171,6 +172,32 @@ public:
|
||||
mSkyBoxGenParameters.skyBoxDistance = distance;
|
||||
}
|
||||
};
|
||||
class FrameListenerTrace : public Ogre::FrameListener {
|
||||
public:
|
||||
TracyCZoneCtx mzone;
|
||||
FrameListenerTrace()
|
||||
: initialized(false)
|
||||
{
|
||||
}
|
||||
bool frameStarted(const Ogre::FrameEvent &evt) override
|
||||
{
|
||||
TracyCZoneN(ctx, "OgreFrame", true);
|
||||
mzone = ctx;
|
||||
return true;
|
||||
}
|
||||
bool frameRenderingQueued(const Ogre::FrameEvent &evt) override
|
||||
{
|
||||
TracyCZoneEnd(mzone);
|
||||
return true;
|
||||
}
|
||||
bool frameEnded(const Ogre::FrameEvent &evt) override
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
private:
|
||||
bool initialized;
|
||||
};
|
||||
class App;
|
||||
class KeyboardListener : public OgreBites::InputListener {
|
||||
App *mApp;
|
||||
@@ -333,6 +360,8 @@ public:
|
||||
mScnMgr->addRenderQueueListener(pOverlaySystem);
|
||||
mScnMgrInterior->addRenderQueueListener(pOverlaySystem);
|
||||
mScnMgrInventory->addRenderQueueListener(pOverlaySystem);
|
||||
FrameListenerTrace *trace = OGRE_NEW FrameListenerTrace;
|
||||
root->addFrameListener(trace);
|
||||
// mTrayMgr = new OgreBites::TrayManager("AppTrays",
|
||||
// getRenderWindow());
|
||||
}
|
||||
@@ -342,14 +371,22 @@ public:
|
||||
}
|
||||
void locateResources() override
|
||||
{
|
||||
Ogre::ResourceGroupManager::getSingleton().createResourceGroup(
|
||||
Ogre::ResourceGroupManager::getSingleton().createResourceGroup(
|
||||
"Characters", true);
|
||||
Ogre::ResourceGroupManager::getSingleton().createResourceGroup(
|
||||
"Water", true);
|
||||
Ogre::ResourceGroupManager::getSingleton().createResourceGroup(
|
||||
"LuaScripts", false);
|
||||
Ogre::ResourceGroupManager::getSingleton().addResourceLocation(
|
||||
"./lua-scripts", "FileSystem", "LuaScripts", true,
|
||||
true);
|
||||
OgreBites::ApplicationContext::locateResources();
|
||||
Ogre::ResourceGroupManager::getSingleton().addResourceLocation(
|
||||
"./characters/male", "FileSystem", "Characters", false,
|
||||
true);
|
||||
Ogre::ResourceGroupManager::getSingleton().addResourceLocation(
|
||||
"./characters/female", "FileSystem", "Characters",
|
||||
false, true);
|
||||
OgreBites::ApplicationContext::locateResources();
|
||||
}
|
||||
void loadResources() override
|
||||
{
|
||||
@@ -432,6 +469,7 @@ public:
|
||||
Ogre::RTShader::ShaderGenerator *shadergen =
|
||||
Ogre::RTShader::ShaderGenerator::getSingletonPtr();
|
||||
shadergen->addSceneManager(scnMgr);
|
||||
scnMgr->setShadowTechnique(Ogre::SHADOWTYPE_NONE);
|
||||
setWindowGrab(true);
|
||||
std::cout << "Init camera"
|
||||
<< "\n";
|
||||
@@ -487,8 +525,8 @@ public:
|
||||
setWindowGrab(gui.grab);
|
||||
gui.grabChanged = false;
|
||||
ECS::get().modified<ECS::GUI>();
|
||||
std::cout << "updateWorld " << gui.grabChanged
|
||||
<< " " << gui.grab << std::endl;
|
||||
// std::cout << "updateWorld " << gui.grabChanged
|
||||
// << " " << gui.grab << std::endl;
|
||||
}
|
||||
}
|
||||
end:
|
||||
@@ -633,7 +671,7 @@ end:
|
||||
void createContent()
|
||||
{
|
||||
int i;
|
||||
mJolt = new JoltPhysicsWrapper(mScnMgr, mCameraNode);
|
||||
mJolt = new JoltPhysicsWrapper(mScnMgr, mCameraNode);
|
||||
|
||||
sky = new SkyBoxRenderer(getSceneManager());
|
||||
bool drawFirst = true;
|
||||
@@ -687,9 +725,8 @@ end:
|
||||
.each([this](ECS::GUI &gui) {
|
||||
if (gui.grabChanged)
|
||||
setWindowGrab(gui.grab);
|
||||
std::cout << "grab: " << gui.grab << "\n";
|
||||
std::cout << "GUI enabled: " << gui.enabled
|
||||
<< "\n";
|
||||
// std::cout << "grab: " << gui.grab << "\n";
|
||||
// std::cout << "GUI enabled: " << gui.enabled << "\n";
|
||||
});
|
||||
ECS::get_mut<ECS::GUI>().grab = false;
|
||||
ECS::get_mut<ECS::GUI>().grabChanged = true;
|
||||
|
||||
@@ -13,6 +13,7 @@ add_custom_command(
|
||||
${CMAKE_BINARY_DIR}/assets/blender/vrm-vroid-normal-${EDITED_BLEND}.blend
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/copy_animations.py
|
||||
${CMAKE_BINARY_DIR}/assets/blender/mixamo
|
||||
${CMAKE_CURRENT_BINARY_DIR}/blender-addons-installed
|
||||
COMMAND ${CMAKE_COMMAND}
|
||||
-E copy ${CMAKE_CURRENT_SOURCE_DIR}/edited-normal-${EDITED_BLEND}.blend
|
||||
${CMAKE_CURRENT_BINARY_DIR}/edited-normal-${EDITED_BLEND}.blend
|
||||
@@ -25,6 +26,7 @@ add_custom_command(
|
||||
list(APPEND EDITED_BLEND_TARGETS ${CMAKE_BINARY_DIR}/assets/blender/characters/edited-normal-${EDITED_BLEND}.blend)
|
||||
list(APPEND CHARACTER_GLBS ${CMAKE_BINARY_DIR}/characters/${EDITED_BLEND}/normal-${EDITED_BLEND}.glb)
|
||||
endforeach()
|
||||
list(APPEND CHARACTER_GLBS ${CMAKE_BINARY_DIR}/characters/male/male-clothes-toprobe.glb)
|
||||
add_custom_command(OUTPUT ${CMAKE_BINARY_DIR}/assets/blender/mixamo
|
||||
COMMAND ${CMAKE_COMMAND} -E copy_directory ${CMAKE_SOURCE_DIR}/assets/blender/mixamo ${CMAKE_BINARY_DIR}/assets/blender/mixamo
|
||||
DEPENDS ${CMAKE_SOURCE_DIR}/assets/blender/mixamo
|
||||
@@ -45,20 +47,25 @@ set(VRM_IMPORTED_BLENDS
|
||||
# COMMAND ${CMAKE_COMMAND} -E touch_nocreate ${CHARACTER_GLBS}
|
||||
# DEPENDS ${CMAKE_SOURCE_DIR}/assets/blender/scripts/export_models.py ${VRM_IMPORTED_BLENDS} ${EDITED_BLEND_TARGETS}
|
||||
# WORKING_DIRECTORY ${CMAKE_BINARY_DIR})
|
||||
set(FEMALE_OBJECTS "Body;Hair;Face;BackHair;Tops;Bottoms;Shoes;Accessory")
|
||||
set(MALE_OBJECTS "Body;Hair;Face;BackHair;Tops;Bottoms;Shoes;Accessory")
|
||||
set(FEMALE_OBJECTS "BodyTopRobe;BodyTop;BodyBottom;BodyFeet;Hair;Face;BackHair;Accessoty")
|
||||
set(MALE_OBJECTS "BodyTopRobe;BodyTop;BodyBottomPants;BodyBottom_Panties001;BodyBottom;BodyFeetPants;BodyFeetPantsShoes;BodyFeet;Hair;Face;BackHair;Accessory")
|
||||
add_custom_command(
|
||||
OUTPUT ${CMAKE_BINARY_DIR}/characters/male/normal-male.glb
|
||||
COMMAND ${CMAKE_COMMAND} -E make_directory ${CREATE_DIRECTORIES}
|
||||
COMMAND ${BLENDER} -b -Y -P ${CMAKE_SOURCE_DIR}/assets/blender/scripts/export_models2.py --
|
||||
${CMAKE_CURRENT_BINARY_DIR}/edited-normal-male.blend
|
||||
${CMAKE_CURRENT_BINARY_DIR}/edited-normal-male-consolidated.blend
|
||||
${CMAKE_BINARY_DIR}/characters/male/normal-male.glb
|
||||
"${MALE_OBJECTS}"
|
||||
"male"
|
||||
tmp-edited-male.blend
|
||||
COMMAND ${CMAKE_COMMAND} -D FILE=${CMAKE_BINARY_DIR}/characters/male/normal-male.glb
|
||||
-P ${CMAKE_CURRENT_SOURCE_DIR}/cmake/check_file_size.cmake
|
||||
COMMAND ${CMAKE_COMMAND} -D FILE=${CMAKE_BINARY_DIR}/characters/male/normal-male.scene
|
||||
-P ${CMAKE_CURRENT_SOURCE_DIR}/cmake/check_file_size.cmake
|
||||
DEPENDS ${CMAKE_SOURCE_DIR}/assets/blender/scripts/export_models2.py
|
||||
${VRM_IMPORTED_BLENDS}
|
||||
${CMAKE_CURRENT_BINARY_DIR}/edited-normal-male.blend
|
||||
${CMAKE_CURRENT_BINARY_DIR}/edited-normal-male-consolidated.blend
|
||||
${CMAKE_CURRENT_BINARY_DIR}/blender-addons-installed
|
||||
WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
|
||||
VERBATIM
|
||||
)
|
||||
@@ -72,12 +79,28 @@ add_custom_command(
|
||||
"${FEMALE_OBJECTS}"
|
||||
"female"
|
||||
tmp-edited-female.blend
|
||||
COMMAND ${CMAKE_COMMAND} -D FILE=${CMAKE_BINARY_DIR}/characters/female/normal-female.glb
|
||||
-P ${CMAKE_CURRENT_SOURCE_DIR}/cmake/check_file_size.cmake
|
||||
COMMAND ${CMAKE_COMMAND} -D FILE=${CMAKE_BINARY_DIR}/characters/female/normal-female.scene
|
||||
-P ${CMAKE_CURRENT_SOURCE_DIR}/cmake/check_file_size.cmake
|
||||
DEPENDS ${CMAKE_SOURCE_DIR}/assets/blender/scripts/export_models2.py
|
||||
${VRM_IMPORTED_BLENDS}
|
||||
${CMAKE_CURRENT_BINARY_DIR}/edited-normal-female.blend
|
||||
${CMAKE_CURRENT_BINARY_DIR}/blender-addons-installed
|
||||
WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
|
||||
VERBATIM
|
||||
)
|
||||
add_custom_command(
|
||||
OUTPUT ${CMAKE_BINARY_DIR}/characters/male/male-clothes-toprobe.glb
|
||||
COMMAND ${BLENDER} -b -Y ${CMAKE_CURRENT_SOURCE_DIR}/edited-normal-male.blend
|
||||
-P ${CMAKE_CURRENT_SOURCE_DIR}/export_clothes.py
|
||||
-- ${CMAKE_BINARY_DIR}/characters/male/male-clothes-toprobe.glb
|
||||
BodyTopRobe
|
||||
COMMAND ${CMAKE_COMMAND} -D FILE=${CMAKE_BINARY_DIR}/characters/male/male-clothes-toprobe.glb
|
||||
-P ${CMAKE_CURRENT_SOURCE_DIR}/cmake/check_file_size.cmake
|
||||
DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/edited-normal-male.blend
|
||||
${CMAKE_CURRENT_BINARY_DIR}/blender-addons-installed
|
||||
)
|
||||
|
||||
set(VRM_SOURCE)
|
||||
|
||||
@@ -100,13 +123,29 @@ foreach(MIXAMO_FILE ${MIXAMO_FILES})
|
||||
list(APPEND VRM_SOURCE "${OUTPUT_FILE}")
|
||||
endforeach()
|
||||
add_custom_command(OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/blender-addons-installed
|
||||
COMMAND ${CMAKE_COMMAND} -E copy_directory ${CMAKE_SOURCE_DIR}/assets/blender/scripts/addons ${CMAKE_BINARY_DIR}/assets/blender/scripts/addons
|
||||
COMMAND ${BLENDER} -b -Y -P ${CMAKE_SOURCE_DIR}/assets/blender/scripts/install_addons.py
|
||||
COMMAND ${CMAKE_COMMAND} -E touch ${CMAKE_CURRENT_BINARY_DIR}/blender-addons-installed
|
||||
DEPENDS ${CMAKE_SOURCE_DIR}/assets/blender/scripts/addons/3.6/VRM_Addon_for_Blender-release.zip
|
||||
${CMAKE_SOURCE_DIR}/assets/blender/scripts/addons/3.6/io_ogre.zip
|
||||
DEPENDS ${CMAKE_BINARY_DIR}/assets/blender/scripts/addons/3.6/VRM_Addon_for_Blender-release.zip
|
||||
${CMAKE_BINARY_DIR}/assets/blender/scripts/addons/3.6/io_ogre.zip
|
||||
${CMAKE_SOURCE_DIR}/assets/blender/scripts/install_addons.py
|
||||
WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
|
||||
)
|
||||
add_custom_command(OUTPUT ${CMAKE_BINARY_DIR}/assets/blender/scripts/addons/3.6/VRM_Addon_for_Blender-release.zip
|
||||
COMMAND ${CMAKE_COMMAND} -E copy
|
||||
${CMAKE_SOURCE_DIR}/assets/blender/scripts/addons/3.6/VRM_Addon_for_Blender-release.zip
|
||||
${CMAKE_BINARY_DIR}/assets/blender/scripts/addons/3.6/VRM_Addon_for_Blender-release.zip
|
||||
DEPENDS ${CMAKE_SOURCE_DIR}/assets/blender/scripts/addons/3.6/VRM_Addon_for_Blender-release.zip
|
||||
WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
|
||||
)
|
||||
add_custom_command(OUTPUT ${CMAKE_BINARY_DIR}/assets/blender/scripts/addons/3.6/io_ogre.zip
|
||||
COMMAND zip -r ${CMAKE_BINARY_DIR}/assets/blender/scripts/addons/3.6/io_ogre.zip io_ogre
|
||||
DEPENDS ${CMAKE_SOURCE_DIR}/assets/blender/scripts/blender2ogre/io_ogre
|
||||
${CMAKE_SOURCE_DIR}/assets/blender/scripts/blender2ogre/io_ogre/ui/export.py
|
||||
${CMAKE_SOURCE_DIR}/assets/blender/scripts/blender2ogre/io_ogre/ogre/skeleton.py
|
||||
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/assets/blender/scripts/blender2ogre
|
||||
)
|
||||
add_custom_target(install_addons ALL DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/blender-addons-installed)
|
||||
|
||||
|
||||
#add_custom_command(OUTPUT ${VRM_IMPORTED_BLENDS}
|
||||
# COMMAND ${CMAKE_COMMAND} -E make_directory ${CREATE_DIRECTORIES}
|
||||
@@ -176,3 +215,218 @@ add_custom_target(edited-blends ALL DEPENDS ${EDITED_BLEND_TARGETS})
|
||||
|
||||
add_custom_target(import_vrm DEPENDS ${CHARACTER_GLBS})
|
||||
|
||||
function(weight_clothes SRC)
|
||||
get_filename_component(TARGET_NAME ${SRC} NAME_WE)
|
||||
add_custom_command(
|
||||
OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/clothes/${TARGET_NAME}_weighted.stamp
|
||||
${CMAKE_CURRENT_BINARY_DIR}/clothes/${TARGET_NAME}_weighted.blend
|
||||
COMMAND ${CMAKE_COMMAND} -E make_directory ${CMAKE_CURRENT_BINARY_DIR}/clothes
|
||||
COMMAND ${BLENDER} -b -Y -P ${CMAKE_CURRENT_SOURCE_DIR}/process_clothes.py -- ${CMAKE_CURRENT_SOURCE_DIR}/${TARGET_NAME}.blend ./ ${CMAKE_CURRENT_BINARY_DIR}/clothes
|
||||
COMMAND ${CMAKE_COMMAND} -E touch ${CMAKE_CURRENT_BINARY_DIR}/clothes/${TARGET_NAME}_weighted.stamp
|
||||
COMMAND ${CMAKE_COMMAND} -D FILE=${CMAKE_CURRENT_BINARY_DIR}/clothes/${TARGET_NAME}_weighted.blend
|
||||
-P ${CMAKE_CURRENT_SOURCE_DIR}/cmake/check_file_size.cmake
|
||||
DEPENDS ${SRC}
|
||||
${CMAKE_CURRENT_BINARY_DIR}/blender-addons-installed
|
||||
)
|
||||
endfunction()
|
||||
function(transfer_shape_keys SRC CLOTH DST)
|
||||
add_custom_command(
|
||||
OUTPUT ${DST}
|
||||
COMMAND ${BLENDER} -b -Y -P ${CMAKE_CURRENT_SOURCE_DIR}/transfer_shape_keys.py -- ${SRC} ${CLOTH} ${DST}
|
||||
COMMAND ${CMAKE_COMMAND} -D FILE=${DST}
|
||||
-P ${CMAKE_CURRENT_SOURCE_DIR}/cmake/check_file_size.cmake
|
||||
DEPENDS ${SRC} ${CLOTH}
|
||||
${CMAKE_CURRENT_BINARY_DIR}/blender-addons-installed
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/transfer_shape_keys.py
|
||||
)
|
||||
endfunction()
|
||||
|
||||
# Function to combine clothes into a blend file
|
||||
# Parameters:
|
||||
# INPUT_BLEND - Input blend file (required)
|
||||
# WEIGHTED_BLEND - Weighted clothes blend file to combine (required)
|
||||
# COMBINED_BLEND - Output combined blend file (required)
|
||||
function(add_clothes_combination INPUT_BLEND WEIGHTED_BLEND COMBINED_BLEND)
|
||||
# Parse optional arguments
|
||||
set(options "")
|
||||
set(oneValueArgs OUTPUT_DIR)
|
||||
set(multiValueArgs "")
|
||||
cmake_parse_arguments(COMBINE "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
|
||||
|
||||
# Validate required arguments
|
||||
if(NOT INPUT_BLEND)
|
||||
message(FATAL_ERROR "INPUT_BLEND is required for add_clothes_combination")
|
||||
endif()
|
||||
if(NOT WEIGHTED_BLEND)
|
||||
message(FATAL_ERROR "WEIGHTED_BLEND is required for add_clothes_combination")
|
||||
endif()
|
||||
if(NOT COMBINED_BLEND)
|
||||
message(FATAL_ERROR "COMBINED_BLEND output path is required for add_clothes_combination")
|
||||
endif()
|
||||
|
||||
# Get the base name from the weighted blend file for stamp file
|
||||
get_filename_component(WEIGHTED_BLEND_NAME "${WEIGHTED_BLEND}" NAME_WE)
|
||||
# Remove "_weighted" suffix if present
|
||||
string(REGEX REPLACE "_weighted$" "" TARGET_BASE "${WEIGHTED_BLEND_NAME}")
|
||||
|
||||
# Set default output directory for stamp if not provided
|
||||
if(NOT COMBINE_OUTPUT_DIR)
|
||||
set(COMBINE_OUTPUT_DIR "${CMAKE_CURRENT_BINARY_DIR}/clothes")
|
||||
endif()
|
||||
|
||||
# Define stamp file using the derived base name
|
||||
set(STAMP_FILE "${COMBINE_OUTPUT_DIR}/${TARGET_BASE}-combined.stamp")
|
||||
|
||||
# Derive the weighted stamp dependency
|
||||
get_filename_component(WEIGHTED_DIR "${WEIGHTED_BLEND}" DIRECTORY)
|
||||
get_filename_component(WEIGHTED_BASE "${WEIGHTED_BLEND}" NAME_WE)
|
||||
set(WEIGHTED_STAMP "${WEIGHTED_DIR}/${WEIGHTED_BASE}.stamp")
|
||||
|
||||
# Ensure the output directory for COMBINED_BLEND exists
|
||||
get_filename_component(COMBINED_DIR "${COMBINED_BLEND}" DIRECTORY)
|
||||
|
||||
add_custom_command(
|
||||
OUTPUT ${STAMP_FILE} ${COMBINED_BLEND}
|
||||
COMMAND ${CMAKE_COMMAND} -E make_directory ${COMBINED_DIR}
|
||||
COMMAND ${BLENDER} -b -Y -P ${CMAKE_CURRENT_SOURCE_DIR}/combine_clothes.py --
|
||||
${INPUT_BLEND}
|
||||
${WEIGHTED_BLEND}
|
||||
${COMBINED_BLEND}
|
||||
COMMAND ${CMAKE_COMMAND} -E touch ${STAMP_FILE}
|
||||
COMMAND ${CMAKE_COMMAND} -D FILE=${COMBINED_BLEND}
|
||||
-P ${CMAKE_CURRENT_SOURCE_DIR}/cmake/check_file_size.cmake
|
||||
DEPENDS ${WEIGHTED_STAMP}
|
||||
${WEIGHTED_BLEND}
|
||||
${CMAKE_CURRENT_BINARY_DIR}/blender-addons-installed
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/combine_clothes.py
|
||||
COMMENT "Combining clothes from ${WEIGHTED_BLEND} into ${COMBINED_BLEND}"
|
||||
)
|
||||
endfunction()
|
||||
|
||||
# Function to consolidate blend files
|
||||
# Parameters:
|
||||
# INPUT_BLEND - Input blend file to consolidate into (required)
|
||||
# COMBINED_BLEND - Combined blend file to consolidate (required)
|
||||
# OUTPUT_BLEND - Output consolidated blend file (required)
|
||||
function(add_blend_consolidation INPUT_BLEND COMBINED_BLEND OUTPUT_BLEND)
|
||||
# Parse optional arguments
|
||||
set(options "")
|
||||
set(oneValueArgs "")
|
||||
set(multiValueArgs "")
|
||||
cmake_parse_arguments(CONSOLIDATE "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
|
||||
|
||||
# Validate required arguments
|
||||
if(NOT INPUT_BLEND)
|
||||
message(FATAL_ERROR "INPUT_BLEND is required for add_blend_consolidation")
|
||||
endif()
|
||||
if(NOT COMBINED_BLEND)
|
||||
message(FATAL_ERROR "COMBINED_BLEND is required for add_blend_consolidation")
|
||||
endif()
|
||||
if(NOT OUTPUT_BLEND)
|
||||
message(FATAL_ERROR "OUTPUT_BLEND output path is required for add_blend_consolidation")
|
||||
endif()
|
||||
|
||||
# Get the base name from the combined blend file for stamp derivation
|
||||
get_filename_component(COMBINED_NAME "${COMBINED_BLEND}" NAME_WE)
|
||||
# Remove "_combined" suffix if present
|
||||
string(REGEX REPLACE "_combined$" "" TARGET_BASE "${COMBINED_NAME}")
|
||||
|
||||
# Derive stamp dependency
|
||||
get_filename_component(COMBINED_DIR "${COMBINED_BLEND}" DIRECTORY)
|
||||
set(COMBINE_STAMP "${COMBINED_DIR}/${TARGET_BASE}-combined.stamp")
|
||||
|
||||
# Ensure output directory exists
|
||||
get_filename_component(OUTPUT_DIR "${OUTPUT_BLEND}" DIRECTORY)
|
||||
|
||||
add_custom_command(
|
||||
OUTPUT ${OUTPUT_BLEND}
|
||||
DEPENDS ${COMBINED_BLEND}
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/consolidate.py
|
||||
${COMBINE_STAMP}
|
||||
${INPUT_BLEND}
|
||||
COMMAND ${CMAKE_COMMAND} -E make_directory ${OUTPUT_DIR}
|
||||
COMMAND ${BLENDER} -b -Y ${INPUT_BLEND}
|
||||
-P ${CMAKE_CURRENT_SOURCE_DIR}/consolidate.py --
|
||||
${COMBINED_BLEND}
|
||||
${OUTPUT_BLEND}
|
||||
COMMAND ${CMAKE_COMMAND} -D FILE=${OUTPUT_BLEND}
|
||||
-P ${CMAKE_CURRENT_SOURCE_DIR}/cmake/check_file_size.cmake
|
||||
COMMENT "Consolidating ${COMBINED_BLEND} into ${OUTPUT_BLEND}"
|
||||
)
|
||||
endfunction()
|
||||
|
||||
# Combined pipeline function
|
||||
# Parameters:
|
||||
# INPUT_BLEND - Input blend file (required)
|
||||
# WEIGHTED_BLEND - Weighted clothes blend file (required)
|
||||
# FINAL_OUTPUT_BLEND - Final consolidated output (required)
|
||||
function(add_clothes_pipeline INPUT_BLEND WEIGHTED_BLEND FINAL_OUTPUT_BLEND)
|
||||
# Parse optional arguments
|
||||
set(options "")
|
||||
set(oneValueArgs COMBINED_BLEND INTERMEDIATE_DIR)
|
||||
set(multiValueArgs "")
|
||||
cmake_parse_arguments(PIPELINE "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
|
||||
|
||||
# Validate required arguments
|
||||
if(NOT INPUT_BLEND)
|
||||
message(FATAL_ERROR "INPUT_BLEND is required for add_clothes_pipeline")
|
||||
endif()
|
||||
if(NOT WEIGHTED_BLEND)
|
||||
message(FATAL_ERROR "WEIGHTED_BLEND is required for add_clothes_pipeline")
|
||||
endif()
|
||||
if(NOT FINAL_OUTPUT_BLEND)
|
||||
message(FATAL_ERROR "FINAL_OUTPUT_BLEND is required for add_clothes_pipeline")
|
||||
endif()
|
||||
|
||||
# Get the base name for deriving intermediate filenames
|
||||
get_filename_component(WEIGHTED_NAME "${WEIGHTED_BLEND}" NAME_WE)
|
||||
string(REGEX REPLACE "_weighted$" "" TARGET_BASE "${WEIGHTED_NAME}")
|
||||
|
||||
# Set intermediate directory
|
||||
if(NOT PIPELINE_INTERMEDIATE_DIR)
|
||||
set(PIPELINE_INTERMEDIATE_DIR "${CMAKE_CURRENT_BINARY_DIR}/clothes")
|
||||
endif()
|
||||
|
||||
# Define intermediate combined blend file
|
||||
if(PIPELINE_COMBINED_BLEND)
|
||||
set(COMBINED_BLEND "${PIPELINE_COMBINED_BLEND}")
|
||||
else()
|
||||
set(COMBINED_BLEND "${PIPELINE_INTERMEDIATE_DIR}/${TARGET_BASE}_combined.blend")
|
||||
endif()
|
||||
set(SHAPED_BLEND "${PIPELINE_INTERMEDIATE_DIR}/${TARGET_BASE}_shaped.blend")
|
||||
|
||||
|
||||
# Step 1: Combine clothes
|
||||
add_clothes_combination(
|
||||
"${INPUT_BLEND}"
|
||||
"${WEIGHTED_BLEND}"
|
||||
"${COMBINED_BLEND}"
|
||||
OUTPUT_DIR "${PIPELINE_INTERMEDIATE_DIR}"
|
||||
)
|
||||
|
||||
transfer_shape_keys(${INPUT_BLEND}
|
||||
${COMBINED_BLEND}
|
||||
${SHAPED_BLEND}
|
||||
)
|
||||
# Step 2: Consolidate
|
||||
add_blend_consolidation(
|
||||
"${INPUT_BLEND}"
|
||||
"${COMBINED_BLEND}"
|
||||
"${FINAL_OUTPUT_BLEND}"
|
||||
)
|
||||
|
||||
# Create a custom target to drive the whole pipeline
|
||||
add_custom_target(${TARGET_BASE}_pipeline ALL
|
||||
DEPENDS ${FINAL_OUTPUT_BLEND} ${SHAPED_BLEND}
|
||||
COMMENT "Running complete clothes pipeline for ${TARGET_BASE}"
|
||||
)
|
||||
endfunction()
|
||||
|
||||
weight_clothes(${CMAKE_CURRENT_SOURCE_DIR}/clothes-male-bottom.blend)
|
||||
|
||||
add_clothes_pipeline(
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/edited-normal-male.blend" # INPUT_BLEND
|
||||
"${CMAKE_CURRENT_BINARY_DIR}/clothes/clothes-male-bottom_weighted.blend" # WEIGHTED_BLEND
|
||||
"${CMAKE_CURRENT_BINARY_DIR}/edited-normal-male-consolidated.blend" # FINAL_OUTPUT_BLEND
|
||||
)
|
||||
|
||||
|
||||
BIN
assets/blender/characters/clothes-male-bottom.blend
LFS
Normal file
BIN
assets/blender/characters/clothes-male-bottom.blend
LFS
Normal file
Binary file not shown.
BIN
assets/blender/characters/clothes-top.blend
LFS
Normal file
BIN
assets/blender/characters/clothes-top.blend
LFS
Normal file
Binary file not shown.
349
assets/blender/characters/combine_clothes.py
Normal file
349
assets/blender/characters/combine_clothes.py
Normal file
@@ -0,0 +1,349 @@
|
||||
import bpy
|
||||
import bmesh
|
||||
import sys
|
||||
import os
|
||||
import mathutils
|
||||
from mathutils.bvhtree import BVHTree
|
||||
|
||||
def load_blend_files(clothes_blend_path, body_blend_path):
|
||||
"""Load objects from blend files and return all loaded objects"""
|
||||
loaded_objects = []
|
||||
|
||||
for path in [clothes_blend_path, body_blend_path]:
|
||||
with bpy.data.libraries.load(path) as (data_from, data_to):
|
||||
data_to.objects = data_from.objects
|
||||
for obj in data_to.objects:
|
||||
if obj:
|
||||
bpy.context.collection.objects.link(obj)
|
||||
loaded_objects.append(obj)
|
||||
|
||||
return loaded_objects
|
||||
|
||||
def setup_bvh_and_matrices(obj):
|
||||
"""Setup BVH tree and transformation matrices for an object"""
|
||||
depsgraph = bpy.context.evaluated_depsgraph_get()
|
||||
obj_eval = obj.evaluated_get(depsgraph)
|
||||
bvh = BVHTree.FromObject(obj_eval, depsgraph)
|
||||
|
||||
return bvh
|
||||
|
||||
def get_transformation_matrices(obj):
|
||||
"""Get transformation matrices for an object"""
|
||||
m = obj.matrix_world
|
||||
m_inv = m.inverted()
|
||||
m_normal = m.to_3x3().inverted().transposed()
|
||||
|
||||
return m, m_inv, m_normal
|
||||
|
||||
def raycast_and_adjust_vertices(target_body, bvh_cloth):
|
||||
"""Raycast from body to cloth and adjust vertices that intersect"""
|
||||
m_body, m_body_inv, m_body_normal = get_transformation_matrices(target_body)
|
||||
|
||||
num_verts = len(target_body.data.vertices)
|
||||
hit_values = [0] * num_verts
|
||||
has_shape_keys = target_body.data.shape_keys is not None
|
||||
|
||||
# Forward raycast (into cloth)
|
||||
for i, v in enumerate(target_body.data.vertices):
|
||||
v_world = m_body @ v.co
|
||||
n_world = (m_body_normal @ v.normal).normalized()
|
||||
|
||||
# Raycast forward (into cloth) and backward (from inside cloth)
|
||||
hit_f, _, _, _ = bvh_cloth.ray_cast(v_world, n_world, 0.015)
|
||||
hit_b, _, _, _ = bvh_cloth.ray_cast(v_world, -n_world, 0.005)
|
||||
|
||||
if hit_f or hit_b:
|
||||
hit_values[i] = 1
|
||||
# Adjust vertex position to be slightly outside cloth
|
||||
offset = -n_world * (0.005 if hit_f else 0.01)
|
||||
new_co = m_body_inv @ (v_world + offset)
|
||||
v.co = new_co
|
||||
|
||||
# Update shape keys if they exist
|
||||
if has_shape_keys:
|
||||
for kb in target_body.data.shape_keys.key_blocks:
|
||||
kb.data[i].co = new_co
|
||||
|
||||
return hit_values
|
||||
|
||||
def protect_and_remove_hidden_geometry(target_body, hit_values, threshold=4.0):
|
||||
"""Protect visible vertices and remove hidden geometry"""
|
||||
bm = bmesh.new()
|
||||
bm.from_mesh(target_body.data)
|
||||
bm.verts.ensure_lookup_table()
|
||||
|
||||
# Phase 1: Identify "Layer 1 Border" (Immediate neighbors of visible verts)
|
||||
border_l1 = set()
|
||||
for v in bm.verts:
|
||||
if hit_values[v.index] == 0: # Visible vertex
|
||||
for edge in v.link_edges:
|
||||
neighbor = edge.other_vert(v)
|
||||
if hit_values[neighbor.index] == 1:
|
||||
border_l1.add(neighbor.index)
|
||||
|
||||
# Phase 2: Identify "Layer 2 Buffer" (Neighbors of Layer 1)
|
||||
border_l2 = set()
|
||||
for idx in border_l1:
|
||||
v = bm.verts[idx]
|
||||
for edge in v.link_edges:
|
||||
neighbor = edge.other_vert(v)
|
||||
if hit_values[neighbor.index] == 1:
|
||||
border_l2.add(neighbor.index)
|
||||
|
||||
# Merge all protected vertices
|
||||
protected_indices = set(border_l1) | set(border_l2)
|
||||
for i, val in enumerate(hit_values):
|
||||
if val == 0: # Visible vertices
|
||||
protected_indices.add(i)
|
||||
|
||||
# Deletion logic
|
||||
to_delete = []
|
||||
for v in bm.verts:
|
||||
if v.index in protected_indices:
|
||||
continue
|
||||
|
||||
# Sum hits of neighbors
|
||||
neighbor_hit_sum = hit_values[v.index]
|
||||
for edge in v.link_edges:
|
||||
neighbor = edge.other_vert(v)
|
||||
neighbor_hit_sum += hit_values[neighbor.index]
|
||||
|
||||
if neighbor_hit_sum >= threshold:
|
||||
to_delete.append(v)
|
||||
elif len(v.link_edges) == 1: # Loose vertices
|
||||
to_delete.append(v)
|
||||
|
||||
# Perform deletion
|
||||
bmesh.ops.delete(bm, geom=to_delete, context='VERTS')
|
||||
bm.to_mesh(target_body.data)
|
||||
bm.free()
|
||||
target_body.data.update()
|
||||
|
||||
def process_clothing_pair(clothing_obj, target_obj, whitelist, is_clothing_copy=False, original_clothing_name=None):
|
||||
"""Process a clothing-body pair
|
||||
|
||||
Args:
|
||||
clothing_obj: The clothing object to process
|
||||
target_obj: The target object to combine with (body or combined object)
|
||||
whitelist: Set of objects to keep
|
||||
is_clothing_copy: Whether clothing_obj is a copy (for layer 2 processing)
|
||||
original_clothing_name: Original name of clothing if it's a copy (for layer 2 naming)
|
||||
"""
|
||||
# Create a copy of the target object
|
||||
new_target = target_obj.copy()
|
||||
new_target.data = target_obj.data.copy()
|
||||
bpy.context.collection.objects.link(new_target)
|
||||
|
||||
# Copy custom properties
|
||||
for key in target_obj.keys():
|
||||
new_target[key] = target_obj[key]
|
||||
|
||||
# Ensure the copy has the same transformations
|
||||
new_target.matrix_world = target_obj.matrix_world.copy()
|
||||
|
||||
target_name = target_obj.name
|
||||
|
||||
# Determine the name to use for the clothing in the final combined object
|
||||
if is_clothing_copy and original_clothing_name:
|
||||
clothing_name_for_final = original_clothing_name
|
||||
else:
|
||||
clothing_name_for_final = clothing_obj.name
|
||||
|
||||
print(f"Processing: {clothing_name_for_final} -> {target_name} (using copy)")
|
||||
|
||||
# Step A: Raycast & adjust vertices
|
||||
bvh_cloth = setup_bvh_and_matrices(clothing_obj)
|
||||
hit_values = raycast_and_adjust_vertices(new_target, bvh_cloth)
|
||||
|
||||
# Step B: Remove hidden geometry
|
||||
protect_and_remove_hidden_geometry(new_target, hit_values)
|
||||
|
||||
# Step C: Handle armature and join
|
||||
master_arm = new_target.parent if (new_target.parent and new_target.parent.type == 'ARMATURE') else None
|
||||
if master_arm:
|
||||
whitelist.add(master_arm)
|
||||
|
||||
# Handle clothing armature (if it's not a copy for layer 2)
|
||||
if not is_clothing_copy and clothing_obj.parent and clothing_obj.parent.type == 'ARMATURE':
|
||||
old_arm = clothing_obj.parent
|
||||
clothing_obj.matrix_world = clothing_obj.matrix_world.copy()
|
||||
clothing_obj.parent = None
|
||||
if old_arm not in whitelist:
|
||||
bpy.data.objects.remove(old_arm, do_unlink=True)
|
||||
|
||||
# For layer 2 clothing copies, we don't need to handle armature separately
|
||||
# as they'll inherit from the target
|
||||
|
||||
# Reparent to master armature if exists
|
||||
if master_arm:
|
||||
clothing_obj.parent = master_arm
|
||||
for mod in clothing_obj.modifiers:
|
||||
if mod.type == 'ARMATURE':
|
||||
mod.object = master_arm
|
||||
|
||||
# Join clothing with target copy
|
||||
bpy.ops.object.select_all(action='DESELECT')
|
||||
clothing_obj.select_set(True)
|
||||
new_target.select_set(True)
|
||||
bpy.context.view_layer.objects.active = new_target
|
||||
bpy.ops.object.join()
|
||||
|
||||
# Rename the combined object using the appropriate clothing name
|
||||
new_target.name = f"{target_name}_{clothing_name_for_final}"
|
||||
whitelist.add(new_target)
|
||||
|
||||
return new_target
|
||||
|
||||
def cleanup_unused_objects(whitelist):
|
||||
"""Remove all objects not in whitelist"""
|
||||
for obj in bpy.data.objects[:]:
|
||||
if obj.type in {'MESH', 'ARMATURE'} and obj not in whitelist:
|
||||
bpy.data.objects.remove(obj, do_unlink=True)
|
||||
|
||||
bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=True, do_recursive=True)
|
||||
|
||||
def run_batch_combine():
|
||||
"""Main function to run the batch combine process"""
|
||||
try:
|
||||
args = sys.argv[sys.argv.index("--") + 1:]
|
||||
body_blend_path, clothes_blend_path, output_path = args[0], args[1], args[2]
|
||||
except (ValueError, IndexError):
|
||||
print("Usage: blender -b -P script.py -- <body.blend> <clothes.blend> <output.blend>")
|
||||
return
|
||||
|
||||
# Start fresh
|
||||
bpy.ops.wm.read_homefile(use_empty=True)
|
||||
|
||||
# Load objects from blend files
|
||||
loaded_objects = load_blend_files(clothes_blend_path, body_blend_path)
|
||||
|
||||
# Categorize loaded objects
|
||||
all_objs = bpy.data.objects
|
||||
|
||||
# Separate body objects (no ref_layer property)
|
||||
body_objects = [o for o in all_objs if o.type == 'MESH' and "ref_layer" not in o]
|
||||
|
||||
# Separate clothing by layer
|
||||
clothing_layer1 = [o for o in all_objs if o.type == 'MESH' and "ref_layer" in o and o["ref_layer"] == 1]
|
||||
clothing_layer2 = [o for o in all_objs if o.type == 'MESH' and "ref_layer" in o and o["ref_layer"] == 2]
|
||||
|
||||
print(f"Found {len(body_objects)} body objects")
|
||||
print(f"Found {len(clothing_layer1)} layer 1 clothing objects")
|
||||
print(f"Found {len(clothing_layer2)} layer 2 clothing objects")
|
||||
|
||||
# Create dictionary of body objects for quick lookup
|
||||
body_objects_dict = {obj.name: obj for obj in body_objects}
|
||||
|
||||
# Track objects to keep
|
||||
whitelist = set()
|
||||
|
||||
# List to store combined objects from layer 1
|
||||
combined_objects = []
|
||||
|
||||
# PROCESS LAYER 1: Combine with original body parts
|
||||
print("\n=== PROCESSING LAYER 1 CLOTHING ===")
|
||||
for clothing_obj in clothing_layer1:
|
||||
if "ref_part" not in clothing_obj:
|
||||
print(f"Warning: Layer 1 clothing '{clothing_obj.name}' missing ref_part property, skipping")
|
||||
continue
|
||||
|
||||
target_name = clothing_obj["ref_part"]
|
||||
original_body = body_objects_dict.get(target_name)
|
||||
|
||||
if not original_body or original_body.type != 'MESH':
|
||||
print(f"Warning: Target body '{target_name}' not found for clothing '{clothing_obj.name}', skipping")
|
||||
continue
|
||||
|
||||
# Process the pair
|
||||
result = process_clothing_pair(clothing_obj, original_body, whitelist)
|
||||
if result:
|
||||
combined_objects.append(result)
|
||||
print(f"Added '{result.name}' to combined objects list")
|
||||
|
||||
print(f"Layer 1 complete. Created {len(combined_objects)} combined objects")
|
||||
|
||||
# PROCESS LAYER 2 (First Pass): Combine with layer 1 results
|
||||
print("\n=== PROCESSING LAYER 2 CLOTHING (First Pass - Over Layer 1) ===")
|
||||
layer2_results_over_l1 = []
|
||||
|
||||
for clothing_obj in clothing_layer2:
|
||||
print(f"\nProcessing layer 2 clothing: {clothing_obj.name}")
|
||||
|
||||
# Store the original clothing name for later use
|
||||
original_clothing_name = clothing_obj.name
|
||||
|
||||
# For each combined object from layer 1
|
||||
for combined_obj in combined_objects:
|
||||
# Create a copy of the layer 2 clothing
|
||||
clothing_copy = clothing_obj.copy()
|
||||
clothing_copy.data = clothing_obj.data.copy()
|
||||
bpy.context.collection.objects.link(clothing_copy)
|
||||
|
||||
# Copy custom properties
|
||||
for key in clothing_obj.keys():
|
||||
clothing_copy[key] = clothing_obj[key]
|
||||
|
||||
# Set the ref_part to point to the combined object
|
||||
clothing_copy["ref_part"] = combined_obj.name
|
||||
|
||||
print(f" Combining with layer 1 result: {combined_obj.name}")
|
||||
|
||||
# Process the pair
|
||||
result = process_clothing_pair(clothing_copy, combined_obj, whitelist,
|
||||
is_clothing_copy=True,
|
||||
original_clothing_name=original_clothing_name)
|
||||
if result:
|
||||
layer2_results_over_l1.append(result)
|
||||
print(f" Created: {result.name}")
|
||||
|
||||
print(f"Layer 2 first pass complete. Created {len(layer2_results_over_l1)} combined objects")
|
||||
|
||||
# PROCESS LAYER 2 (Second Pass): Combine directly with body parts (like layer 1)
|
||||
print("\n=== PROCESSING LAYER 2 CLOTHING (Second Pass - Direct to Body) ===")
|
||||
layer2_results_direct = []
|
||||
|
||||
for clothing_obj in clothing_layer2:
|
||||
if "ref_part" not in clothing_obj:
|
||||
print(f"Warning: Layer 2 clothing '{clothing_obj.name}' missing ref_part property, skipping")
|
||||
continue
|
||||
|
||||
target_name = clothing_obj["ref_part"]
|
||||
original_body = body_objects_dict.get(target_name)
|
||||
|
||||
if not original_body or original_body.type != 'MESH':
|
||||
print(f"Warning: Target body '{target_name}' not found for clothing '{clothing_obj.name}', skipping")
|
||||
continue
|
||||
|
||||
print(f"\nProcessing layer 2 clothing directly with body: {clothing_obj.name} -> {target_name}")
|
||||
|
||||
# Process directly with body part (no copy needed for the clothing itself)
|
||||
result = process_clothing_pair(clothing_obj, original_body, whitelist, is_clothing_copy=False)
|
||||
if result:
|
||||
layer2_results_direct.append(result)
|
||||
print(f" Created: {result.name}")
|
||||
|
||||
print(f"Layer 2 second pass complete. Created {len(layer2_results_direct)} combined objects")
|
||||
|
||||
# Add all results to combined objects list
|
||||
all_results = combined_objects + layer2_results_over_l1 + layer2_results_direct
|
||||
|
||||
print(f"\n=== SUMMARY ===")
|
||||
print(f"Layer 1 results: {len(combined_objects)}")
|
||||
print(f"Layer 2 over layer 1 results: {len(layer2_results_over_l1)}")
|
||||
print(f"Layer 2 direct to body results: {len(layer2_results_direct)}")
|
||||
print(f"Total combined objects: {len(all_results)}")
|
||||
|
||||
# Final cleanup - keep all combined objects and their armatures
|
||||
for obj in all_results:
|
||||
whitelist.add(obj)
|
||||
|
||||
cleanup_unused_objects(whitelist)
|
||||
|
||||
# Save the result
|
||||
bpy.ops.wm.save_as_mainfile(filepath=output_path)
|
||||
print(f"\nSaved to: {output_path}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
run_batch_combine()
|
||||
|
||||
71
assets/blender/characters/consolidate.py
Normal file
71
assets/blender/characters/consolidate.py
Normal file
@@ -0,0 +1,71 @@
|
||||
import bpy
|
||||
import sys
|
||||
import os
|
||||
|
||||
def process_append(source_files, output_path):
|
||||
required_props = {"age", "sex", "slot"}
|
||||
|
||||
for file_path in source_files:
|
||||
if not os.path.exists(file_path):
|
||||
continue
|
||||
|
||||
with bpy.data.libraries.load(file_path) as (data_from, data_to):
|
||||
data_to.objects = data_from.objects
|
||||
|
||||
for obj in data_to.objects:
|
||||
if obj is None: continue
|
||||
|
||||
# Check criteria
|
||||
has_props = all(p in obj.keys() for p in required_props)
|
||||
if obj.type == 'MESH' and has_props:
|
||||
# 1. Link to the scene root temporarily
|
||||
bpy.context.collection.objects.link(obj)
|
||||
|
||||
# 2. Synchronize Names
|
||||
obj.data.name = obj.name
|
||||
|
||||
# 3. Find Target Armature
|
||||
arm_name = obj.get("sex")
|
||||
arm_obj = bpy.data.objects.get(arm_name)
|
||||
|
||||
if arm_obj and arm_obj.type == 'ARMATURE':
|
||||
# A. Handle Collections: Move mesh to armature's collections
|
||||
# Remove from all current collections first
|
||||
for col in obj.users_collection:
|
||||
col.objects.unlink(obj)
|
||||
|
||||
# Link to every collection the armature belongs to
|
||||
for col in arm_obj.users_collection:
|
||||
col.objects.link(obj)
|
||||
|
||||
# B. Parent to Armature
|
||||
obj.parent = arm_obj
|
||||
|
||||
# C. Handle Armature Modifier
|
||||
arm_mod = next((m for m in obj.modifiers if m.type == 'ARMATURE'), None)
|
||||
if not arm_mod:
|
||||
arm_mod = obj.modifiers.new(name="Armature", type='ARMATURE')
|
||||
|
||||
arm_mod.object = arm_obj
|
||||
print(f"Processed {obj.name}: Parented and Modset to {arm_name}")
|
||||
else:
|
||||
print(f"Warning: Armature '{arm_name}' not found for {obj.name}")
|
||||
else:
|
||||
# Clean up data not meeting criteria
|
||||
bpy.data.objects.remove(obj, do_unlink=True)
|
||||
|
||||
# 4. Recursive Purge of all unlinked data (Materials, Textures, Meshes)
|
||||
bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=True, do_recursive=True)
|
||||
|
||||
# Save
|
||||
bpy.ops.wm.save_as_mainfile(filepath=output_path)
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
args = sys.argv[sys.argv.index("--") + 1:]
|
||||
if len(args) >= 2:
|
||||
*sources, output = args
|
||||
process_append(sources, output)
|
||||
except ValueError:
|
||||
print("Error: Use '--' to separate Blender args from script args.")
|
||||
|
||||
Binary file not shown.
Binary file not shown.
78
assets/blender/characters/export_clothes.py
Normal file
78
assets/blender/characters/export_clothes.py
Normal file
@@ -0,0 +1,78 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import os, sys, time
|
||||
import bpy
|
||||
from math import pi
|
||||
import glob
|
||||
import shutil
|
||||
from mathutils import Vector, Matrix
|
||||
from math import radians, pi
|
||||
|
||||
argv = sys.argv
|
||||
argv = argv[argv.index("--") + 1:]
|
||||
|
||||
incpath = os.path.dirname(__file__)
|
||||
|
||||
sys.path.insert(0, incpath)
|
||||
sys.path.insert(1, incpath + "/blender2ogre")
|
||||
|
||||
gltf_file = argv[0]
|
||||
target_name = argv[1]
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
bpy.ops.object.select_all(action='DESELECT')
|
||||
|
||||
print("Target: " + target_name)
|
||||
for o in bpy.data.objects:
|
||||
print(o.name)
|
||||
|
||||
obj = bpy.data.objects.get(target_name)
|
||||
obj.select_set(True)
|
||||
|
||||
armature = obj.parent if (obj.parent and obj.parent.type == 'ARMATURE') else None
|
||||
if not armature:
|
||||
for mod in obj.modifiers:
|
||||
if mod.type == 'ARMATURE' and mod.object:
|
||||
armature = mod.object
|
||||
break
|
||||
if armature:
|
||||
armature.select_set(True)
|
||||
bpy.context.view_layer.objects.active = armature
|
||||
else:
|
||||
raise Exception("bad armature")
|
||||
print("Exporting to " + gltf_file)
|
||||
basepath = incpath
|
||||
|
||||
bpy.ops.export_scene.gltf(filepath=gltf_file,
|
||||
use_selection=True,
|
||||
check_existing=False,
|
||||
export_format='GLB',
|
||||
export_texture_dir='textures', export_texcoords=True,
|
||||
export_animation_mode='NLA_TRACKS',
|
||||
export_normals=True,
|
||||
export_tangents=True,
|
||||
export_materials='EXPORT',
|
||||
export_colors=True,
|
||||
use_mesh_edges=False,
|
||||
use_mesh_vertices=False,
|
||||
export_cameras=False,
|
||||
use_visible=False,
|
||||
use_renderable=False,
|
||||
export_yup=True,
|
||||
export_apply=True,
|
||||
export_animations=True,
|
||||
export_force_sampling=True,
|
||||
export_def_bones=False,
|
||||
export_current_frame=False,
|
||||
export_morph=True,
|
||||
export_morph_animation=False,
|
||||
export_morph_normal=True,
|
||||
export_morph_tangent=True,
|
||||
export_lights=False,
|
||||
export_skins=True)
|
||||
|
||||
bpy.ops.wm.read_homefile(use_empty=True)
|
||||
time.sleep(2)
|
||||
bpy.ops.wm.quit_blender()
|
||||
|
||||
53
assets/blender/characters/fix_parts.py
Normal file
53
assets/blender/characters/fix_parts.py
Normal file
@@ -0,0 +1,53 @@
|
||||
import bpy
|
||||
|
||||
def transfer_body_data():
|
||||
source_name = "Body"
|
||||
target_names = ["BodyTop", "bodyBottom", "BodyFeet"]
|
||||
|
||||
source_obj = bpy.data.objects.get(source_name)
|
||||
if not source_obj:
|
||||
print(f"Source object '{source_name}' not found!")
|
||||
return
|
||||
|
||||
for name in target_names:
|
||||
target_obj = bpy.data.objects.get(name)
|
||||
if not target_obj:
|
||||
print(f"Target '{name}' not found, skipping...")
|
||||
continue
|
||||
|
||||
# 1. FIX NORMALS & WEIGHTS (Data Transfer)
|
||||
# Required for Blender 4.1+: Enable Auto Smooth (Smooth by Angle)
|
||||
target_obj.data.use_auto_smooth = True
|
||||
|
||||
dt_mod = target_obj.modifiers.new(name="TR_DATA", type='DATA_TRANSFER')
|
||||
dt_mod.object = source_obj
|
||||
|
||||
# Transfer Vertex Groups (Weights)
|
||||
dt_mod.use_vert_data = True
|
||||
dt_mod.data_types_verts = {'VGROUP_WEIGHTS'}
|
||||
dt_mod.vert_mapping = 'NEAREST'
|
||||
|
||||
# Transfer Normals (Fixes Seams)
|
||||
dt_mod.use_loop_data = True
|
||||
dt_mod.data_types_loops = {'CUSTOM_NORMAL'}
|
||||
dt_mod.loop_mapping = 'NEAREST_POLYNOR'
|
||||
|
||||
# Apply to bake the weights and normals
|
||||
bpy.context.view_layer.objects.active = target_obj
|
||||
bpy.ops.object.modifier_apply(modifier=dt_mod.name)
|
||||
|
||||
# 2. TRANSFER SHAPE KEYS
|
||||
if source_obj.data.shape_keys:
|
||||
# Deselect all, then select source then target
|
||||
bpy.ops.object.select_all(action='DESELECT')
|
||||
source_obj.select_set(True)
|
||||
target_obj.select_set(True)
|
||||
bpy.context.view_layer.objects.active = target_obj
|
||||
|
||||
# Joins shape keys from source to target
|
||||
bpy.ops.object.shape_key_transfer()
|
||||
|
||||
print("Transfer complete: Normals, Weights, and Shape Keys synced.")
|
||||
|
||||
transfer_body_data()
|
||||
|
||||
53
assets/blender/characters/fix_parts2.py
Normal file
53
assets/blender/characters/fix_parts2.py
Normal file
@@ -0,0 +1,53 @@
|
||||
import bpy
|
||||
|
||||
def fix_seams_and_transfer_data():
|
||||
source_name = "Body"
|
||||
target_names = ["BodyTop", "bodyBottom", "BodyFeet"]
|
||||
|
||||
source_obj = bpy.data.objects.get(source_name)
|
||||
if not source_obj:
|
||||
print(f"Error: '{source_name}' not found.")
|
||||
return
|
||||
|
||||
for t_name in target_names:
|
||||
target_obj = bpy.data.objects.get(t_name)
|
||||
if not target_obj:
|
||||
continue
|
||||
|
||||
# 1. PREP TARGET
|
||||
# In 3.6, Auto Smooth must be True to see custom normals
|
||||
target_obj.data.use_auto_smooth = True
|
||||
bpy.context.view_layer.objects.active = target_obj
|
||||
|
||||
# 2. TRANSFER WEIGHTS & NORMALS (Data Transfer Mod)
|
||||
dt_mod = target_obj.modifiers.new(name="SeamFix", type='DATA_TRANSFER')
|
||||
dt_mod.object = source_obj
|
||||
|
||||
# Vertex Data (Weights)
|
||||
dt_mod.use_vert_data = True
|
||||
dt_mod.data_types_verts = {'VGROUP_WEIGHTS'}
|
||||
|
||||
# Face Corner Data (Normals)
|
||||
dt_mod.use_loop_data = True
|
||||
dt_mod.data_types_loops = {'CUSTOM_NORMAL'}
|
||||
dt_mod.loop_mapping = 'NEAREST_POLYNOR'
|
||||
|
||||
# Apply the modifier to bake the data
|
||||
bpy.ops.object.modifier_apply(modifier=dt_mod.name)
|
||||
|
||||
# 3. TRANSFER SHAPE KEYS
|
||||
if source_obj.data.shape_keys:
|
||||
# Clear selection and set up: Source must be Active, Target Selected
|
||||
bpy.ops.object.select_all(action='DESELECT')
|
||||
target_obj.select_set(True)
|
||||
source_obj.select_set(True)
|
||||
bpy.context.view_layer.objects.active = source_obj
|
||||
|
||||
# Transfer shape keys based on vertex position
|
||||
# Note: This creates keys on the Target object
|
||||
bpy.ops.object.shape_key_transfer()
|
||||
|
||||
print("Process complete for Blender 3.6.")
|
||||
|
||||
fix_seams_and_transfer_data()
|
||||
|
||||
54
assets/blender/characters/optimize_meshes.py
Normal file
54
assets/blender/characters/optimize_meshes.py
Normal file
@@ -0,0 +1,54 @@
|
||||
import bpy
|
||||
|
||||
def optimize_mesh_for_ogre(obj):
|
||||
if obj.type != 'MESH':
|
||||
return
|
||||
print(obj.type)
|
||||
|
||||
print(f"Starting with {obj.name}")
|
||||
# Set as active and enter Weight Paint mode to use ops
|
||||
bpy.context.view_layer.objects.active = obj
|
||||
bpy.ops.object.mode_set(mode='WEIGHT_PAINT')
|
||||
|
||||
# 1. Clean zero weights (below 0.001)
|
||||
bpy.ops.object.vertex_group_clean(group_select_mode='ALL', limit=0.001)
|
||||
|
||||
# 2. Limit influences to 4 per vertex (Ogre/GPU standard)
|
||||
bpy.ops.object.vertex_group_limit_total(limit=4)
|
||||
|
||||
# 3. Normalize all weights (Sum of all bone influences = 1.0)
|
||||
# This prevents "multiplied" or "weak" movements in engine
|
||||
bpy.ops.object.vertex_group_normalize_all()
|
||||
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
# 4. Remove empty Vertex Groups (groups with no assigned vertices)
|
||||
# This keeps the bone index list identical across modular parts
|
||||
vgroup_indices_to_remove = []
|
||||
for i, group in enumerate(obj.vertex_groups):
|
||||
has_vertices = False
|
||||
for v in obj.data.vertices:
|
||||
for g in v.groups:
|
||||
if g.group == i and g.weight > 0.001:
|
||||
has_vertices = True
|
||||
break
|
||||
if has_vertices: break
|
||||
|
||||
if not has_vertices:
|
||||
print("removed group:" + group.name)
|
||||
vgroup_indices_to_remove.append(group.name)
|
||||
|
||||
for name in vgroup_indices_to_remove:
|
||||
print("removing group: " + name)
|
||||
obj.vertex_groups.remove(obj.vertex_groups.get(name))
|
||||
|
||||
print(f"Finished {obj.name}: 4-weight limit applied, Normalized, {len(vgroup_indices_to_remove)} empty groups removed.")
|
||||
|
||||
# Execute on all selected mesh objects
|
||||
selected_meshes = [o for o in bpy.data.objects if o.type == 'MESH' and not o.name.startswith("cs_")]
|
||||
if not selected_meshes:
|
||||
print("No mesh objects selected.")
|
||||
else:
|
||||
for mesh_obj in selected_meshes:
|
||||
optimize_mesh_for_ogre(mesh_obj)
|
||||
|
||||
146
assets/blender/characters/process_clothes.py
Normal file
146
assets/blender/characters/process_clothes.py
Normal file
@@ -0,0 +1,146 @@
|
||||
import bpy
|
||||
import os
|
||||
import sys
|
||||
|
||||
def clean_scene():
|
||||
bpy.ops.object.select_all(action='SELECT')
|
||||
bpy.ops.object.delete()
|
||||
bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=True, do_recursive=True)
|
||||
|
||||
def remove_empty_vertex_groups(obj, threshold=0.001):
|
||||
"""Removes vertex groups with no weights or weights below threshold."""
|
||||
if obj.type != 'MESH':
|
||||
return
|
||||
|
||||
# Ensure we are in object mode
|
||||
bpy.context.view_layer.objects.active = obj
|
||||
|
||||
# Dictionary to track max weight per group
|
||||
group_max_weights = {g.index: 0.0 for g in obj.vertex_groups}
|
||||
|
||||
# Iterate over vertices to find actual max weights
|
||||
for v in obj.data.vertices:
|
||||
for g in v.groups:
|
||||
if g.group in group_max_weights:
|
||||
group_max_weights[g.group] = max(group_max_weights[g.group], g.weight)
|
||||
|
||||
# Remove groups that don't meet the threshold
|
||||
groups_to_remove = [obj.vertex_groups[idx] for idx, max_w in group_max_weights.items() if max_w < threshold]
|
||||
|
||||
for g in groups_to_remove:
|
||||
obj.vertex_groups.remove(g)
|
||||
|
||||
print(f"Cleaned {len(groups_to_remove)} empty/low-weight groups from {obj.name}")
|
||||
|
||||
def process_batch():
|
||||
try:
|
||||
args = sys.argv[sys.argv.index("--") + 1:]
|
||||
clothing_path, lib_directory, out_dir = args[0], args[1], args[2]
|
||||
except (IndexError, ValueError):
|
||||
print("Usage: blender -b -P script.py -- <source_blend> <lib_dir> <out_dir>")
|
||||
return
|
||||
|
||||
if not os.path.exists(out_dir): os.makedirs(out_dir)
|
||||
|
||||
# 1. Identify all clothing in the source file
|
||||
with bpy.data.libraries.load(clothing_path) as (data_from, data_to):
|
||||
all_clothing_names = data_from.objects
|
||||
|
||||
# Start with a fresh scene
|
||||
clean_scene()
|
||||
|
||||
for name in all_clothing_names:
|
||||
# 2. Append the clothing item
|
||||
with bpy.data.libraries.load(clothing_path) as (data_from, data_to):
|
||||
data_to.objects = [name]
|
||||
|
||||
for obj in data_to.objects:
|
||||
if obj: bpy.context.collection.objects.link(obj)
|
||||
|
||||
clothing = bpy.data.objects.get(name)
|
||||
if not clothing or clothing.type != 'MESH': continue
|
||||
|
||||
# Get properties
|
||||
sex = clothing.get("ref_sex")
|
||||
age = clothing.get("ref_age")
|
||||
ref_mesh_name = clothing.get("ref_clothing")
|
||||
|
||||
if not all([sex, age, ref_mesh_name]):
|
||||
print(f"Skipping {name}: Missing properties")
|
||||
continue
|
||||
|
||||
# 3. Locate Reference Library
|
||||
target_lib_name = f"normal_{age}_{sex}.blend"
|
||||
target_lib_path = os.path.join(lib_directory, target_lib_name)
|
||||
rig_name = str(sex)
|
||||
|
||||
if not os.path.exists(target_lib_path):
|
||||
if target_lib_name == "normal_adult_male.blend":
|
||||
target_lib_name = "edited-normal-male.blend"
|
||||
elif target_lib_name == "normal_adult_female.blend":
|
||||
target_lib_name = "edited-normal-female.blend"
|
||||
target_lib_path = os.path.join(lib_directory, target_lib_name)
|
||||
if not os.path.exists(target_lib_path):
|
||||
print(f"Error: Library {target_lib_path} not found")
|
||||
continue
|
||||
|
||||
# 4. Append Weights Source and Rig
|
||||
with bpy.data.libraries.load(target_lib_path) as (data_from, data_to):
|
||||
data_to.objects = [ref_mesh_name, rig_name]
|
||||
|
||||
for obj in data_to.objects:
|
||||
if obj: bpy.context.collection.objects.link(obj)
|
||||
|
||||
source_mesh = bpy.data.objects.get(ref_mesh_name)
|
||||
rig = bpy.data.objects.get(rig_name)
|
||||
|
||||
# 5. Prep Objects (Apply Scale & Clear Animation)
|
||||
bpy.context.view_layer.objects.active = clothing
|
||||
bpy.ops.object.transform_apply(location=False, rotation=True, scale=True)
|
||||
|
||||
for item in [clothing, rig]:
|
||||
if item.animation_data: item.animation_data_clear()
|
||||
if item.type == 'ARMATURE':
|
||||
bpy.context.view_layer.objects.active = item
|
||||
bpy.ops.object.mode_set(mode='POSE')
|
||||
bpy.ops.pose.transforms_clear()
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
# 6. Weight Transfer
|
||||
clothing.vertex_groups.clear()
|
||||
dt = clothing.modifiers.new(name="WT", type='DATA_TRANSFER')
|
||||
dt.object = source_mesh
|
||||
dt.use_vert_data = True
|
||||
dt.data_types_verts = {'VGROUP_WEIGHTS'}
|
||||
dt.layers_vgroup_select_src = 'ALL'
|
||||
dt.vert_mapping = 'POLYINTERP_NEAREST'
|
||||
|
||||
bpy.context.view_layer.objects.active = clothing
|
||||
# "Generate Data Layers" step
|
||||
bpy.ops.object.datalayout_transfer(modifier=dt.name)
|
||||
bpy.ops.object.modifier_apply(modifier=dt.name)
|
||||
|
||||
remove_empty_vertex_groups(clothing, threshold=0.001)
|
||||
|
||||
# 7. Final Parenting
|
||||
clothing.parent = rig
|
||||
arm_mod = clothing.modifiers.new(name="Armature", type='ARMATURE')
|
||||
arm_mod.object = rig
|
||||
|
||||
# 8. Cleanup Reference Mesh (keep the Rig!)
|
||||
bpy.data.objects.remove(source_mesh, do_unlink=True)
|
||||
print(f"Processed: {name}")
|
||||
|
||||
# 9. Save as single file named after source + _weighted
|
||||
source_filename = os.path.splitext(os.path.basename(clothing_path))[0]
|
||||
final_save_path = os.path.join(out_dir, f"{source_filename}_weighted.blend")
|
||||
|
||||
# Purge any remaining junk before final save
|
||||
bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=True, do_recursive=True)
|
||||
bpy.ops.wm.save_as_mainfile(filepath=final_save_path)
|
||||
print(f"\n--- ALL DONE ---")
|
||||
print(f"Saved to: {final_save_path}")
|
||||
|
||||
if __name__ == "__main__":
|
||||
process_batch()
|
||||
|
||||
970
assets/blender/characters/transfer_shape_keys.py
Normal file
970
assets/blender/characters/transfer_shape_keys.py
Normal file
@@ -0,0 +1,970 @@
|
||||
"""
|
||||
Blender 3.6.20 Script: Transfer Shape Keys with Boundary Velocity Limiting
|
||||
Usage: blender -b --python transfer_shape_keys.py -- <source_file> <target_file> <output_file>
|
||||
"""
|
||||
|
||||
import bpy
|
||||
import sys
|
||||
import os
|
||||
import shutil
|
||||
import mathutils
|
||||
from mathutils.bvhtree import BVHTree
|
||||
import numpy as np
|
||||
from collections import defaultdict
|
||||
|
||||
def parse_args():
|
||||
"""Parse command line arguments after '--'"""
|
||||
if '--' in sys.argv:
|
||||
args_start = sys.argv.index('--') + 1
|
||||
args = sys.argv[args_start:]
|
||||
|
||||
if len(args) >= 3:
|
||||
return args[0], args[1], args[2]
|
||||
else:
|
||||
print("Error: Please provide source, target, and output .blend files")
|
||||
print("Usage: blender -b --python transfer_shape_keys.py -- <source_file> <target_file> <output_file>")
|
||||
sys.exit(1)
|
||||
else:
|
||||
print("Error: No arguments provided")
|
||||
print("Usage: blender -b --python transfer_shape_keys.py -- <source_file> <target_file> <output_file>")
|
||||
sys.exit(1)
|
||||
|
||||
def load_source_data(source_path):
|
||||
"""Load source file and extract shape key information"""
|
||||
print(f"\nLoading source file: {source_path}")
|
||||
|
||||
if not os.path.exists(source_path):
|
||||
print(f"Error: Source file not found: {source_path}")
|
||||
sys.exit(1)
|
||||
|
||||
current_file = bpy.data.filepath if bpy.data.filepath else None
|
||||
|
||||
bpy.ops.wm.open_mainfile(filepath=source_path)
|
||||
|
||||
body_object = None
|
||||
for obj in bpy.data.objects:
|
||||
if obj.name == "Body_shapes" and obj.type == 'MESH':
|
||||
body_object = obj
|
||||
break
|
||||
|
||||
if not body_object:
|
||||
for obj in bpy.data.objects:
|
||||
if obj.name == "Body" and obj.type == 'MESH':
|
||||
body_object = obj
|
||||
break
|
||||
if not body_object:
|
||||
print("Error: Could not find mesh object named 'Body' in source file")
|
||||
sys.exit(1)
|
||||
|
||||
if not body_object.data.shape_keys:
|
||||
print("Error: Body object has no shape keys")
|
||||
sys.exit(1)
|
||||
|
||||
shape_key_data = {
|
||||
'names': [],
|
||||
'vertex_positions': {},
|
||||
'polygons': [],
|
||||
'is_relative': body_object.data.shape_keys.use_relative
|
||||
}
|
||||
|
||||
# Store polygon data for BVH
|
||||
mesh = body_object.data
|
||||
for poly in mesh.polygons:
|
||||
shape_key_data['polygons'].append([v for v in poly.vertices])
|
||||
|
||||
source_shape_keys = body_object.data.shape_keys.key_blocks
|
||||
print(f"Found Body object with {len(source_shape_keys)} shape keys")
|
||||
|
||||
for sk in source_shape_keys:
|
||||
shape_key_data['names'].append(sk.name)
|
||||
print(f" - {sk.name}")
|
||||
|
||||
vertex_positions = []
|
||||
for v in sk.data:
|
||||
vertex_positions.append((v.co.x, v.co.y, v.co.z))
|
||||
|
||||
shape_key_data['vertex_positions'][sk.name] = vertex_positions
|
||||
|
||||
if current_file and os.path.exists(current_file):
|
||||
bpy.ops.wm.open_mainfile(filepath=current_file)
|
||||
elif current_file:
|
||||
bpy.ops.wm.read_homefile()
|
||||
|
||||
return shape_key_data
|
||||
|
||||
def load_target_file(target_path):
|
||||
"""Load the target .blend file and find objects with custom properties"""
|
||||
print(f"\nLoading target file: {target_path}")
|
||||
|
||||
if not os.path.exists(target_path):
|
||||
print(f"Error: Target file not found: {target_path}")
|
||||
sys.exit(1)
|
||||
|
||||
temp_dir = os.path.join(os.path.dirname(target_path), "temp_blend_files")
|
||||
os.makedirs(temp_dir, exist_ok=True)
|
||||
|
||||
temp_target = os.path.join(temp_dir, os.path.basename(target_path))
|
||||
shutil.copy2(target_path, temp_target)
|
||||
|
||||
bpy.ops.wm.open_mainfile(filepath=temp_target)
|
||||
|
||||
target_objects = []
|
||||
for obj in bpy.data.objects:
|
||||
if obj.type == 'MESH' and obj.data:
|
||||
if all(prop in obj for prop in ['age', 'sex', 'slot']):
|
||||
target_objects.append(obj)
|
||||
print(f"Found target object: {obj.name}")
|
||||
print(f" - age: {obj['age']}")
|
||||
print(f" - sex: {obj['sex']}")
|
||||
print(f" - slot: {obj['slot']}")
|
||||
print(f" - vertices: {len(obj.data.vertices)}")
|
||||
|
||||
return target_objects, temp_target
|
||||
|
||||
def delete_existing_shape_keys(target_obj):
|
||||
"""Delete all existing shape keys from target object"""
|
||||
if not target_obj.data.shape_keys:
|
||||
return False
|
||||
|
||||
num_keys = len(target_obj.data.shape_keys.key_blocks)
|
||||
print(f" Deleting {num_keys} existing shape keys...")
|
||||
|
||||
bpy.context.view_layer.objects.active = target_obj
|
||||
target_obj.select_set(True)
|
||||
bpy.ops.object.mode_set(mode='OBJECT')
|
||||
|
||||
while target_obj.data.shape_keys:
|
||||
target_obj.active_shape_key_index = 0
|
||||
bpy.ops.object.shape_key_remove()
|
||||
|
||||
target_obj.select_set(False)
|
||||
return True
|
||||
|
||||
def ensure_shape_keys_structure(shape_key_names, target_obj):
|
||||
"""Create shape key structure on target object"""
|
||||
delete_existing_shape_keys(target_obj)
|
||||
|
||||
bpy.context.view_layer.objects.active = target_obj
|
||||
target_obj.select_set(True)
|
||||
|
||||
# Create Basis with current mesh positions
|
||||
target_obj.shape_key_add(name='Basis', from_mix=True)
|
||||
print(f" Created Basis shape key")
|
||||
|
||||
# Create other shape keys
|
||||
for sk_name in shape_key_names:
|
||||
if sk_name != 'Basis':
|
||||
target_obj.shape_key_add(name=sk_name, from_mix=False)
|
||||
print(f" Created shape key: {sk_name}")
|
||||
|
||||
target_obj.data.shape_keys.use_relative = True
|
||||
target_obj.select_set(False)
|
||||
|
||||
def create_bvh_for_source(source_data, shape_key_name='Basis'):
|
||||
"""Create BVH tree for source mesh in specified pose"""
|
||||
|
||||
positions = source_data['vertex_positions'][shape_key_name]
|
||||
verts = [mathutils.Vector(v) for v in positions]
|
||||
|
||||
# Create BVH tree
|
||||
bvh = BVHTree.FromPolygons(verts, source_data['polygons'], all_triangles=False)
|
||||
|
||||
return bvh, verts
|
||||
|
||||
def compute_signed_distance_and_direction(bvh, point, reference_normal=None):
|
||||
"""Compute signed distance and direction to surface"""
|
||||
|
||||
# Find closest point on surface
|
||||
location, normal, index, distance = bvh.find_nearest(point)
|
||||
|
||||
if location is None:
|
||||
return None, None, None, None
|
||||
|
||||
# Determine sign using reference normal
|
||||
if reference_normal is not None:
|
||||
to_surface = location - point
|
||||
if to_surface.length > 0:
|
||||
dot = to_surface.normalized().dot(reference_normal)
|
||||
signed_distance = distance if dot > 0 else -distance
|
||||
else:
|
||||
signed_distance = 0
|
||||
else:
|
||||
to_surface = point - location
|
||||
if to_surface.length > 0 and normal.length > 0:
|
||||
dot = to_surface.normalized().dot(normal)
|
||||
signed_distance = distance if dot > 0 else -distance
|
||||
else:
|
||||
signed_distance = distance
|
||||
|
||||
return location, normal, index, signed_distance
|
||||
|
||||
def detect_boundary_vertices(target_obj, mapping, threshold=0.3):
|
||||
"""Detect vertices that lie on the boundary between inner and outer surfaces"""
|
||||
|
||||
print(f" Detecting boundary vertices...")
|
||||
|
||||
num_verts = len(mapping['target_verts'])
|
||||
adjacency = mapping['adjacency']
|
||||
boundary_vertices = set()
|
||||
|
||||
# For each vertex, check if its neighbors have different side flags
|
||||
for i in range(num_verts):
|
||||
if i >= len(mapping['side_flags']):
|
||||
continue
|
||||
|
||||
side_i = mapping['side_flags'][i]
|
||||
if side_i == 0: # On surface, definitely boundary
|
||||
boundary_vertices.add(i)
|
||||
continue
|
||||
|
||||
neighbors = adjacency[i]
|
||||
opposite_side_count = 0
|
||||
total_neighbors = 0
|
||||
|
||||
for n in neighbors:
|
||||
if n < len(mapping['side_flags']):
|
||||
side_n = mapping['side_flags'][n]
|
||||
if side_n != 0 and side_n != side_i:
|
||||
opposite_side_count += 1
|
||||
total_neighbors += 1
|
||||
|
||||
# If significant portion of neighbors are on opposite side, this is a boundary
|
||||
if total_neighbors > 0 and opposite_side_count / total_neighbors > threshold:
|
||||
boundary_vertices.add(i)
|
||||
|
||||
# Also check vertices that are close to the surface (small signed distance)
|
||||
for i in range(num_verts):
|
||||
if i < len(mapping['signed_distances']):
|
||||
if abs(mapping['signed_distances'][i]) < 0.05: # Very close to surface
|
||||
boundary_vertices.add(i)
|
||||
|
||||
print(f" Found {len(boundary_vertices)} boundary vertices")
|
||||
return boundary_vertices
|
||||
|
||||
def compute_robust_surface_mapping(target_obj, source_data):
|
||||
"""Create robust surface mapping with signed distances"""
|
||||
|
||||
print(f" Computing robust surface mapping...")
|
||||
|
||||
# Get target vertices and normals
|
||||
target_verts = [v.co.copy() for v in target_obj.data.vertices]
|
||||
target_normals = [v.normal.copy() for v in target_obj.data.vertices]
|
||||
num_verts = len(target_verts)
|
||||
|
||||
# Create BVH for source basis
|
||||
bvh_basis, basis_verts = create_bvh_for_source(source_data, 'Basis')
|
||||
|
||||
# Build adjacency for later smoothing
|
||||
adjacency = defaultdict(set)
|
||||
mesh = target_obj.data
|
||||
for edge in mesh.edges:
|
||||
v1, v2 = edge.vertices
|
||||
adjacency[v1].add(v2)
|
||||
adjacency[v2].add(v1)
|
||||
|
||||
mapping = {
|
||||
'target_verts': target_verts,
|
||||
'source_indices': [],
|
||||
'surface_points': [],
|
||||
'signed_distances': [],
|
||||
'direction_vectors': [],
|
||||
'face_indices': [],
|
||||
'barycentric_coords': [],
|
||||
'side_flags': [],
|
||||
'confidence': [],
|
||||
'adjacency': adjacency,
|
||||
'deformation_magnitude': [0.0] * num_verts,
|
||||
'boundary_vertices': set()
|
||||
}
|
||||
|
||||
# First pass: find closest points and determine side
|
||||
for i, (target_pos, target_normal) in enumerate(zip(target_verts, target_normals)):
|
||||
location, normal, index, signed_dist = compute_signed_distance_and_direction(
|
||||
bvh_basis, target_pos, target_normal
|
||||
)
|
||||
|
||||
if location is None:
|
||||
# Fallback to nearest vertex
|
||||
if i % 100 == 0:
|
||||
print(f" Warning: Vertex {i} using nearest vertex fallback")
|
||||
min_dist = float('inf')
|
||||
nearest_idx = 0
|
||||
for j, src_pos in enumerate(basis_verts):
|
||||
dist = (target_pos - src_pos).length
|
||||
if dist < min_dist:
|
||||
min_dist = dist
|
||||
nearest_idx = j
|
||||
|
||||
mapping['source_indices'].append([nearest_idx])
|
||||
mapping['surface_points'].append(basis_verts[nearest_idx])
|
||||
mapping['signed_distances'].append(min_dist)
|
||||
mapping['direction_vectors'].append(target_pos - basis_verts[nearest_idx])
|
||||
mapping['face_indices'].append(-1)
|
||||
mapping['barycentric_coords'].append([1.0])
|
||||
mapping['side_flags'].append(1)
|
||||
mapping['confidence'].append(0.5)
|
||||
else:
|
||||
mapping['surface_points'].append(location)
|
||||
mapping['signed_distances'].append(signed_dist)
|
||||
|
||||
abs_dist = abs(signed_dist)
|
||||
if abs_dist < 0.001:
|
||||
side_flag = 0
|
||||
confidence = 1.0
|
||||
else:
|
||||
ray_hits = 0
|
||||
for ray_dir in [target_normal, -target_normal, mathutils.Vector((1,0,0)), mathutils.Vector((-1,0,0))]:
|
||||
hit, _, _, _ = bvh_basis.ray_cast(target_pos + ray_dir * 0.1, -ray_dir)
|
||||
if hit is not None:
|
||||
ray_hits += 1
|
||||
|
||||
side_flag = 1 if signed_dist > 0 else -1
|
||||
confidence = min(1.0, ray_hits / 4.0 + 0.5)
|
||||
|
||||
mapping['side_flags'].append(side_flag)
|
||||
mapping['confidence'].append(confidence)
|
||||
|
||||
if index is not None and index < len(source_data['polygons']):
|
||||
face = source_data['polygons'][index]
|
||||
mapping['face_indices'].append(index)
|
||||
|
||||
if len(face) == 3:
|
||||
face_verts = [basis_verts[idx] for idx in face]
|
||||
try:
|
||||
coords = mathutils.geometry.barycentric_transform(
|
||||
location, face_verts[0], face_verts[1], face_verts[2]
|
||||
)
|
||||
mapping['barycentric_coords'].append(coords)
|
||||
|
||||
min_dist = float('inf')
|
||||
nearest_idx = face[0]
|
||||
for idx in face:
|
||||
dist = (location - basis_verts[idx]).length
|
||||
if dist < min_dist:
|
||||
min_dist = dist
|
||||
nearest_idx = idx
|
||||
mapping['source_indices'].append([nearest_idx])
|
||||
except:
|
||||
min_dist = float('inf')
|
||||
nearest_idx = face[0]
|
||||
for idx in face:
|
||||
dist = (location - basis_verts[idx]).length
|
||||
if dist < min_dist:
|
||||
min_dist = dist
|
||||
nearest_idx = idx
|
||||
mapping['source_indices'].append([nearest_idx])
|
||||
mapping['barycentric_coords'].append([1.0])
|
||||
else:
|
||||
min_dist = float('inf')
|
||||
nearest_idx = face[0]
|
||||
for idx in face:
|
||||
dist = (location - basis_verts[idx]).length
|
||||
if dist < min_dist:
|
||||
min_dist = dist
|
||||
nearest_idx = idx
|
||||
mapping['source_indices'].append([nearest_idx])
|
||||
mapping['barycentric_coords'].append([1.0])
|
||||
else:
|
||||
mapping['face_indices'].append(-1)
|
||||
mapping['source_indices'].append([0])
|
||||
mapping['barycentric_coords'].append([1.0])
|
||||
|
||||
direction = target_pos - location
|
||||
mapping['direction_vectors'].append(direction)
|
||||
|
||||
# Detect boundary vertices
|
||||
mapping['boundary_vertices'] = detect_boundary_vertices(target_obj, mapping)
|
||||
|
||||
print(f" Mapping complete. Side distribution: "
|
||||
f"Outside: {mapping['side_flags'].count(1)}, "
|
||||
f"Inside: {mapping['side_flags'].count(-1)}, "
|
||||
f"On surface: {mapping['side_flags'].count(0)}")
|
||||
|
||||
return mapping
|
||||
|
||||
def interpolate_source_position_safe(source_data, sk_name, surface_point, face_idx, bary_coords, source_indices):
|
||||
"""Interpolate source position with side preservation"""
|
||||
|
||||
if sk_name == 'Basis':
|
||||
return surface_point
|
||||
|
||||
source_positions = source_data['vertex_positions'][sk_name]
|
||||
|
||||
if face_idx >= 0 and face_idx < len(source_data['polygons']):
|
||||
face = source_data['polygons'][face_idx]
|
||||
|
||||
if len(face) == 3 and len(bary_coords) == 3:
|
||||
pos = mathutils.Vector((0, 0, 0))
|
||||
for j, vert_idx in enumerate(face):
|
||||
if j < len(bary_coords) and vert_idx < len(source_positions):
|
||||
pos += bary_coords[j] * mathutils.Vector(source_positions[vert_idx])
|
||||
return pos
|
||||
|
||||
if source_indices and len(source_indices) > 0:
|
||||
idx = source_indices[0]
|
||||
if idx < len(source_positions):
|
||||
return mathutils.Vector(source_positions[idx])
|
||||
|
||||
return surface_point
|
||||
|
||||
def compute_deformation_magnitudes(source_data, mapping):
    """Return per-target-vertex deformation strength, normalized to [0, 1].

    For every non-basis shape key, measures how far each vertex's mapped
    source vertex travels from the basis and keeps the maximum over all
    keys. The result is divided by the global maximum so callers can use
    it as a relative weight.
    """
    print(f" Computing deformation magnitudes...")

    basis = [mathutils.Vector(v) for v in source_data['vertex_positions']['Basis']]
    vert_count = len(mapping['target_verts'])
    magnitudes = [0.0] * vert_count
    source_index_table = mapping['source_indices']

    for key_name in source_data['names']:
        # The basis deforms nothing by definition.
        if key_name == 'Basis':
            continue

        deformed = [mathutils.Vector(v) for v in source_data['vertex_positions'][key_name]]

        for vert in range(vert_count):
            if vert >= len(source_index_table):
                continue
            indices = source_index_table[vert]
            if not indices:
                continue
            # Only the primary mapped source vertex contributes.
            src = indices[0]
            if src < len(basis) and src < len(deformed):
                travel = (deformed[src] - basis[src]).length
                magnitudes[vert] = max(magnitudes[vert], travel)

    # Normalize so the strongest deformation maps to 1.0.
    peak = max(magnitudes) if magnitudes else 1.0
    if peak > 0:
        magnitudes = [value / peak for value in magnitudes]

    strongly_deformed = sum(1 for value in magnitudes if value > 0.7)
    print(f" High deformation vertices (>0.7): {strongly_deformed}/{vert_count}")

    return magnitudes
def enforce_side_constraint(pos, surface_point, reference_normal, target_side, confidence, is_boundary=False):
    """Enforce that vertex stays on correct side of surface.

    Projects a vertex that ended up on the wrong side of the reference
    surface back through the surface plane (along `reference_normal`) and
    offsets it toward the intended side. Boundary vertices get a much
    smaller offset (20% of the surface distance vs 95%) to avoid pinching
    open edges.

    NOTE(review): the side convention here is `current_side = 1` when
    `to_surface.dot(reference_normal) < 0` (vertex lies along +normal),
    yet the correction for `target_side > 0` subtracts the normal, which
    appears to land on the -normal side under the same convention — the
    sign may be intentionally compensated by how `direction_vectors` are
    built upstream; confirm before changing.

    Returns the (possibly corrected) position; `confidence` is accepted
    but not used in this function.
    """
    to_surface = surface_point - pos
    # Vertex is effectively on the surface: nothing to enforce.
    if to_surface.length < 0.001:
        return pos

    current_side = 1 if to_surface.dot(reference_normal) < 0 else -1

    # For boundary vertices, use very gentle enforcement
    if is_boundary:
        if current_side != target_side and target_side != 0:
            # Just pull back slightly toward surface
            # (project pos onto the surface plane along the normal).
            proj_factor = to_surface.dot(reference_normal) / reference_normal.length_squared
            proj_point = pos + reference_normal * proj_factor

            # Very small offset to stay near surface
            # (abs() is redundant here — .length is already non-negative).
            offset = abs((surface_point - pos).length) * 0.2
            if target_side > 0:
                return proj_point - reference_normal * offset
            else:
                return proj_point + reference_normal * offset
        return pos

    # Regular vertices - stronger enforcement
    if current_side != target_side and target_side != 0:
        proj_factor = to_surface.dot(reference_normal) / reference_normal.length_squared
        proj_point = pos + reference_normal * proj_factor

        # Re-offset at 95% of the original surface distance.
        offset = abs((surface_point - pos).length) * 0.95
        if target_side > 0:
            corrected_pos = proj_point - reference_normal * offset
        else:
            corrected_pos = proj_point + reference_normal * offset

        return corrected_pos

    return pos
def limit_boundary_velocity(target_idx, target_pos, deformed_surface, t, mapping):
    """Slow down boundary vertices so they lag behind the main deformation.

    Non-boundary vertices pass through unchanged. A boundary vertex is
    re-interpolated between its rest position and the deformation target
    at 70% of the requested blend factor, which keeps open-edge geometry
    from popping ahead of its neighbours.
    """
    if target_idx not in mapping['boundary_vertices']:
        return deformed_surface

    # Boundary vertices advance at only 70% of the global blend speed.
    throttled_t = t * 0.7
    return (1 - throttled_t) * target_pos + throttled_t * deformed_surface
def adaptive_damping(pos, target_pos, deformed_surface, magnitude, t, threshold=0.55, is_boundary=False):
    """Pull strongly-deformed vertices toward a safe midpoint at high blend values.

    Below `threshold` (and always for boundary vertices) the position is
    returned untouched. Above it, the vertex is blended toward the midpoint
    of its rest position and the deformed surface, with a weight that grows
    with both the blend factor `t` and the vertex's deformation `magnitude`.
    """
    # No damping early in the blend, and none for boundary vertices
    # (damping there tends to pinch open edges).
    if t <= threshold or is_boundary:
        return pos

    # How far past the threshold we are, remapped to [0, 1].
    overshoot = (t - threshold) / (1.0 - threshold)
    # Deformation-driven strength, clamped so the weight stays bounded.
    strength = min(1.0, magnitude * 0.8 + 0.2)
    weight = overshoot * strength * 0.5

    midpoint = (target_pos + deformed_surface) * 0.5
    return (1 - weight) * pos + weight * midpoint
def compute_side_preserving_position(target_idx, sk_name, mapping, source_data, t):
    """Compute position that preserves side and signed distance.

    Blends the vertex between its rest position and the deformed source
    surface at factor `t`, then re-applies the vertex's original signed
    offset from the surface so it stays on the same side it started on.
    Boundary vertices get throttled velocity, a reduced offset, and a much
    weaker side-preserving blend. Finally applies side enforcement and
    adaptive damping.

    Returns a mathutils.Vector; falls back to the rest position (or the
    zero vector) when the mapping arrays are too short for `target_idx`.
    """
    if target_idx >= len(mapping['target_verts']):
        return mathutils.Vector((0, 0, 0))

    target_pos = mapping['target_verts'][target_idx]

    if target_idx >= len(mapping['surface_points']):
        return target_pos

    # Check if this is a boundary vertex
    is_boundary = target_idx in mapping.get('boundary_vertices', set())

    # Per-vertex mapping data, each with a defensive fallback in case the
    # arrays are shorter than target_verts.
    surface_point = mapping['surface_points'][target_idx]
    signed_dist = mapping['signed_distances'][target_idx] if target_idx < len(mapping['signed_distances']) else 0
    side_flag = mapping['side_flags'][target_idx] if target_idx < len(mapping['side_flags']) else 1
    confidence = mapping['confidence'][target_idx] if target_idx < len(mapping['confidence']) else 0.5

    face_idx = mapping['face_indices'][target_idx] if target_idx < len(mapping['face_indices']) else -1
    bary_coords = mapping['barycentric_coords'][target_idx] if target_idx < len(mapping['barycentric_coords']) else [1.0]
    source_indices = mapping['source_indices'][target_idx] if target_idx < len(mapping['source_indices']) else [0]

    # Where the mapped surface point lands under shape key `sk_name`.
    deformed_surface = interpolate_source_position_safe(
        source_data, sk_name, surface_point, face_idx, bary_coords, source_indices
    )

    # For boundary vertices, limit their velocity
    if is_boundary:
        deformed_surface = limit_boundary_velocity(target_idx, target_pos, deformed_surface, t, mapping)

    # Surface point interpolated between rest and deformed at factor t.
    basis_surface = surface_point
    current_surface = (1 - t) * basis_surface + t * deformed_surface

    # Direction from surface toward the original vertex, used as the
    # offset axis; defaults to +Z when missing or degenerate.
    if target_idx < len(mapping['direction_vectors']):
        reference_normal = mapping['direction_vectors'][target_idx].normalized()
    else:
        reference_normal = mathutils.Vector((0, 0, 1))

    if reference_normal.length < 0.1:
        reference_normal = mathutils.Vector((0, 0, 1))

    abs_dist = abs(signed_dist)

    # For boundary vertices, use a much smaller offset to keep them near the surface
    if is_boundary:
        # Boundary vertices should stay very close to the surface
        # Use a fraction of their original distance
        abs_dist = abs_dist * 0.2  # Only 20% of original offset
    else:
        # Regular vertices use full offset but with confidence weighting
        abs_dist = abs_dist * confidence

    # Re-apply the offset on the recorded side of the surface.
    if side_flag > 0:
        base_pos = current_surface + reference_normal * abs_dist
    elif side_flag < 0:
        base_pos = current_surface - reference_normal * abs_dist
    else:
        base_pos = current_surface

    # Plain rest-to-deformed interpolation, ignoring side preservation.
    direct_mapped = (1 - t) * target_pos + t * deformed_surface

    # For boundary vertices, blend more heavily with direct mapping
    if is_boundary:
        blend_weight = 0.1  # Only 10% side-preserving for boundaries
    else:
        blend_weight = confidence * (1 - t * 0.3)

    blended_pos = (1 - blend_weight) * direct_mapped + blend_weight * base_pos

    # Push the vertex back if the blend left it on the wrong side.
    if side_flag != 0:
        final_pos = enforce_side_constraint(
            blended_pos, current_surface, reference_normal, side_flag, confidence, is_boundary
        )
    else:
        final_pos = blended_pos

    # Damp strongly-deforming vertices at high t (no-op for boundaries).
    if 'deformation_magnitude' in mapping:
        if target_idx < len(mapping['deformation_magnitude']):
            magnitude = mapping['deformation_magnitude'][target_idx]
            final_pos = adaptive_damping(final_pos, target_pos, deformed_surface, magnitude, t, is_boundary=is_boundary)

    return final_pos
def smooth_boundary_areas(target_obj, sk_name, mapping, source_data):
    """Specifically smooth boundary vertices to prevent them from popping out.

    Runs three Laplacian-style relaxation passes over only the boundary
    vertices of shape key `sk_name`, averaging each with its adjacency
    neighbours (same-side neighbours weighted 2x) and writing the result
    back into the shape key data. `source_data` is accepted but not read
    here.
    """
    print(f" Smoothing boundary areas for {sk_name}...")

    sk = target_obj.data.shape_keys.key_blocks[sk_name]
    current_positions = [v.co.copy() for v in sk.data]
    num_verts = len(current_positions)

    boundary_vertices = mapping.get('boundary_vertices', set())
    if not boundary_vertices:
        return

    adjacency = mapping['adjacency']

    # Multiple smoothing passes focused on boundaries
    for iteration in range(3):  # More iterations for boundaries
        # Double-buffer so each pass reads a consistent snapshot.
        new_positions = current_positions.copy()

        for i in boundary_vertices:
            if i >= num_verts:
                continue

            neighbors = adjacency[i]
            if not neighbors:
                continue

            # Average with neighbors, weighting by side similarity
            weighted_sum = mathutils.Vector((0, 0, 0))
            total_weight = 0

            for n in neighbors:
                if n < num_verts:
                    # Higher weight for neighbors on same side
                    if n < len(mapping['side_flags']) and i < len(mapping['side_flags']):
                        if mapping['side_flags'][n] == mapping['side_flags'][i]:
                            weight = 2.0
                        else:
                            weight = 1.0
                    else:
                        weight = 1.0

                    weighted_sum += current_positions[n] * weight
                    total_weight += weight

            if total_weight > 0:
                avg_pos = weighted_sum / total_weight

                # Progressive smoothing: later passes blend more strongly
                # (0.2, 0.3, 0.4).
                blend = 0.2 + iteration * 0.1
                new_positions[i] = (1 - blend) * current_positions[i] + blend * avg_pos

        current_positions = new_positions

    # Apply smoothed positions
    for i, pos in enumerate(current_positions):
        if i < len(sk.data):
            sk.data[i].co = pos

    # Force Blender to pick up the edited shape key data.
    sk.data.update()
    target_obj.data.update_tag()
    bpy.context.view_layer.update()

    print(f" Boundary smoothing complete")
def smooth_penetration_areas(target_obj, sk_name, mapping, source_data, threshold_t=0.55):
    """Smooth areas where penetration occurs at high t values.

    Detects vertices of shape key `sk_name` that ended up on the wrong
    side of the deformed source surface (via a BVH nearest-point query),
    then relaxes them: boundary offenders get two very gentle passes
    averaging only non-offending neighbours; interior offenders get three
    stronger passes with boundary neighbours down-weighted.

    NOTE(review): `threshold_t` is never read in this function — TODO
    confirm whether it was meant to gate detection.
    """
    print(f" Smoothing penetration areas for {sk_name}...")

    sk = target_obj.data.shape_keys.key_blocks[sk_name]
    current_positions = [v.co.copy() for v in sk.data]
    num_verts = len(current_positions)

    boundary_vertices = mapping.get('boundary_vertices', set())

    # Detect penetration at t=1.0
    problem_vertices = set()
    bvh_deformed, _ = create_bvh_for_source(source_data, sk_name)

    for i, pos in enumerate(current_positions):
        if i < len(mapping['surface_points']) and i < len(mapping['side_flags']):
            surface_point = mapping['surface_points'][i]
            target_side = mapping['side_flags'][i]

            if target_side != 0:
                # Compare the vertex's current side (w.r.t. the deformed
                # surface) against the side recorded at rest.
                location, _, _, _ = bvh_deformed.find_nearest(pos)
                if location and i < len(mapping['direction_vectors']):
                    to_surface = pos - location
                    current_side = 1 if to_surface.dot(mapping['direction_vectors'][i]) > 0 else -1

                    if current_side != target_side:
                        problem_vertices.add(i)

    if not problem_vertices:
        print(f" No penetration detected")
        return

    print(f" Found {len(problem_vertices)} penetrating vertices")

    # Separate boundary and interior problem vertices
    boundary_problems = problem_vertices.intersection(boundary_vertices)
    interior_problems = problem_vertices - boundary_vertices

    if boundary_problems:
        print(f" {len(boundary_problems)} are boundary vertices - these need special care")

        # For boundary vertices, use very gentle smoothing
        adjacency = mapping['adjacency']
        for iteration in range(2):
            new_positions = current_positions.copy()

            for i in boundary_problems:
                neighbors = adjacency[i]
                if not neighbors:
                    continue

                # Only average with non-problem neighbors
                weighted_sum = mathutils.Vector((0, 0, 0))
                total_weight = 0

                for n in neighbors:
                    if n < num_verts and n not in boundary_problems:
                        weighted_sum += current_positions[n]
                        total_weight += 1

                if total_weight > 0:
                    avg_pos = weighted_sum / total_weight
                    blend = 0.15  # Very gentle
                    new_positions[i] = (1 - blend) * current_positions[i] + blend * avg_pos

            current_positions = new_positions

    # Get vertices with large deformation
    large_deformation = set()
    if 'deformation_magnitude' in mapping:
        for i, mag in enumerate(mapping['deformation_magnitude']):
            if i < num_verts and mag > 0.7:
                large_deformation.add(i)

    # Focus on interior problems with large deformation
    focus_vertices = interior_problems.intersection(large_deformation)
    if focus_vertices:
        print(f" {len(focus_vertices)} interior high-deformation vertices")

    # Smooth interior problems (no-op when focus_vertices is empty).
    adjacency = mapping['adjacency']
    for iteration in range(3):
        new_positions = current_positions.copy()

        for i in focus_vertices:
            neighbors = adjacency[i]
            if not neighbors:
                continue

            weighted_sum = mathutils.Vector((0, 0, 0))
            total_weight = 0

            for n in neighbors:
                if n < num_verts:
                    weight = 1.0
                    if n in boundary_vertices:
                        weight = 0.3  # Less influence from boundaries

                    weighted_sum += current_positions[n] * weight
                    total_weight += weight

            if total_weight > 0:
                avg_pos = weighted_sum / total_weight
                # Progressive smoothing: 0.3, 0.4, 0.5 across iterations.
                blend = 0.3 + iteration * 0.1
                new_positions[i] = (1 - blend) * current_positions[i] + blend * avg_pos

        current_positions = new_positions

    # Apply smoothed positions
    for i, pos in enumerate(current_positions):
        if i < len(sk.data):
            sk.data[i].co = pos

    sk.data.update()
    target_obj.data.update_tag()
    bpy.context.view_layer.update()

    print(f" Smoothing complete")
def set_shape_key_with_side_preservation(target_obj, sk_name, mapping, source_data):
    """Set shape key with side preservation.

    Writes the fully-deformed (t=1.0) side-preserving position into every
    vertex of shape key `sk_name`, then runs the boundary and penetration
    smoothing passes. Lazily computes `mapping['deformation_magnitude']`
    on first use (or when the vertex count changed).
    """
    print(f" Setting {sk_name} with side preservation...")

    if 'deformation_magnitude' not in mapping or len(mapping['deformation_magnitude']) != len(mapping['target_verts']):
        mapping['deformation_magnitude'] = compute_deformation_magnitudes(source_data, mapping)

    # Temporarily switch to absolute mode while writing raw positions.
    target_obj.data.shape_keys.use_relative = False
    sk = target_obj.data.shape_keys.key_blocks[sk_name]

    for i, v in enumerate(sk.data):
        if i < len(mapping['target_verts']):
            pos = compute_side_preserving_position(i, sk_name, mapping, source_data, 1.0)
            v.co = pos

    sk.data.update()
    target_obj.data.update_tag()
    bpy.context.view_layer.update()

    # Restore relative mode before the smoothing passes run.
    target_obj.data.shape_keys.use_relative = True
    target_obj.data.update_tag()
    bpy.context.view_layer.update()

    # First smooth boundary areas
    smooth_boundary_areas(target_obj, sk_name, mapping, source_data)

    # Then smooth penetration areas
    smooth_penetration_areas(target_obj, sk_name, mapping, source_data)
def test_shape_key_quality(target_obj, sk_name, mapping, source_data):
    """Test shape key quality with side tracking.

    Steps the blend factor through a fixed ladder of values, re-evaluating
    the side-preserving position at each step, and prints per-step average
    and maximum vertex movement plus the number of vertices that flipped to
    the wrong side of the surface (and how many of those are boundary
    vertices). Resets the shape key value to 0 when done.
    """
    sk = target_obj.data.shape_keys.key_blocks[sk_name]

    print(f" Testing quality of '{sk_name}':")

    # Denser sampling around the 0.55-0.8 range where damping kicks in.
    test_values = [0.0, 0.3, 0.45, 0.5, 0.55, 0.6, 0.65, 0.7, 0.75, 0.77, 0.8, 0.9, 1.0]
    prev_positions = None
    boundary_vertices = mapping.get('boundary_vertices', set())

    for val in test_values:
        if val == 0.0:
            sk.value = 0.0
        else:
            # Write absolute positions for this t value directly into the
            # shape key data, bypassing Blender's own interpolation.
            target_obj.data.shape_keys.use_relative = False
            for i, v in enumerate(sk.data):
                if i < len(mapping['target_verts']):
                    pos = compute_side_preserving_position(i, sk_name, mapping, source_data, val)
                    v.co = pos
            target_obj.data.shape_keys.use_relative = True

        target_obj.data.update_tag()
        bpy.context.view_layer.update()

        if prev_positions is not None:
            movements = []
            side_changes = 0
            boundary_issues = 0

            for i, v in enumerate(target_obj.data.vertices):
                if i < len(prev_positions) and i < len(mapping['side_flags']):
                    movement = (v.co - prev_positions[i]).length
                    # Ignore sub-millimeter jitter.
                    if movement > 0.001:
                        movements.append(movement)

                    if i < len(mapping['surface_points']) and i < len(mapping['direction_vectors']):
                        current_to_surface = v.co - mapping['surface_points'][i]
                        current_side = 1 if current_to_surface.dot(mapping['direction_vectors'][i]) > 0 else -1

                        if current_side != mapping['side_flags'][i] and mapping['side_flags'][i] != 0:
                            side_changes += 1
                            if i in boundary_vertices:
                                boundary_issues += 1

            if movements:
                avg_movement = sum(movements) / len(movements)
                max_movement = max(movements)
                print(f" Value {val:.2f}: avg Δ {avg_movement:.4f}, max Δ {max_movement:.4f}")
                if side_changes > 0:
                    print(f" ⚠ {side_changes} vertices changed side ({boundary_issues} on boundary)")

        prev_positions = [v.co.copy() for v in target_obj.data.vertices]

    # Leave the shape key disabled after testing.
    sk.value = 0.0
    target_obj.data.update_tag()
    bpy.context.view_layer.update()
def transfer_shape_keys(source_data, target_obj):
    """Transfer every non-basis shape key onto `target_obj`.

    Builds the surface mapping once, then applies each shape key from
    `source_data` with side preservation. Returns the mapping so callers
    can reuse it (e.g. for quality testing).
    """
    print(f" Transferring shape keys with side preservation...")

    mapping = compute_robust_surface_mapping(target_obj, source_data)

    for key_name in (name for name in source_data['names'] if name != 'Basis'):
        print(f" Processing: {key_name}")
        set_shape_key_with_side_preservation(target_obj, key_name, mapping, source_data)
        print(f" ✓ Transferred {key_name}")

    return mapping
def verify_transfer(source_names, target_obj):
    """Report whether the target now carries the same shape keys as the source."""
    transferred = [block.name for block in target_obj.data.shape_keys.key_blocks]

    print(f" Verification:")
    print(f" Source keys: {len(source_names)}")
    print(f" Target keys: {len(transferred)}")

    # Order does not matter, only that the same set of keys exists.
    if set(transferred) == set(source_names):
        print(f" ✓ All shape keys present")
def save_output_file(output_path, temp_target):
    """Save the modified .blend file and clean up the temporary target copy.

    Args:
        output_path: destination path passed to bpy's save_as_mainfile;
            its parent directory is created if missing.
        temp_target: temporary file created while loading the target;
            removed (best effort) after the save, along with its directory
            if that directory is then empty.
    """
    print(f"\nSaving output file: {output_path}")

    output_dir = os.path.dirname(output_path)
    if output_dir:
        # exist_ok avoids the race between checking existence and creating,
        # and is a no-op when the directory is already there.
        os.makedirs(output_dir, exist_ok=True)

    bpy.ops.wm.save_as_mainfile(filepath=output_path)

    # Cleanup is best effort: a leftover temp file must not fail the save,
    # but only filesystem errors are swallowed (not e.g. KeyboardInterrupt).
    try:
        if os.path.exists(temp_target):
            os.remove(temp_target)
        temp_dir = os.path.dirname(temp_target)
        if os.path.exists(temp_dir) and not os.listdir(temp_dir):
            os.rmdir(temp_dir)
    except OSError:
        pass

    print(f"File saved successfully")
def main():
    """Entry point: load source shape-key data, transfer onto each target
    object, optionally quality-test one key, and save the output file.

    Exits with status 1 (after printing a traceback) on any exception.
    """
    print("=" * 60)
    print("Blender Shape Key Transfer Script - Boundary Velocity Limiting")
    print("=" * 60)

    source_file, target_file, output_file = parse_args()
    print(f"Source: {source_file}")
    print(f"Target: {target_file}")
    print(f"Output: {output_file}")

    try:
        source_data = load_source_data(source_file)
        target_objects, temp_target = load_target_file(target_file)

        if target_objects:
            for idx, target_obj in enumerate(target_objects, 1):
                print(f"\n[{idx}/{len(target_objects)}] Processing: {target_obj.name}")
                print(f" {'=' * 40}")

                ensure_shape_keys_structure(source_data['names'], target_obj)
                mapping = transfer_shape_keys(source_data, target_obj)
                verify_transfer(source_data['names'], target_obj)

                # Quality-test only the 'fat' key (presumably a known
                # heavy-deformation key used as a representative sample).
                for sk_name in source_data['names']:
                    if sk_name == 'fat':
                        test_shape_key_quality(target_obj, sk_name, mapping, source_data)
                        break

                print(f" {'=' * 40}")
                print(f" ✓ Completed")

            save_output_file(output_file, temp_target)
        else:
            print("\nNo target objects found with required properties")

        print("\n" + "=" * 60)
        print("Script completed successfully!")
        print("=" * 60)

    except Exception as e:
        # Report and exit non-zero so headless Blender invocations fail loudly.
        print(f"\nError: {str(e)}")
        import traceback
        traceback.print_exc()
        sys.exit(1)
# Standard entry-point guard: run only when executed directly, not on import.
if __name__ == "__main__":
    main()
|
||||
Binary file not shown.
@@ -32,6 +32,8 @@ def dot_skeleton(obj, path, **kwargs):
|
||||
name = kwargs.get('force_name') or obj.data.name
|
||||
if config.get('SHARED_ARMATURE') is True:
|
||||
name = kwargs.get('force_name') or arm.data.name
|
||||
if name == "Armature":
|
||||
name = arm.name
|
||||
name = util.clean_object_name(name)
|
||||
|
||||
# Lets export the Armature only once
|
||||
|
||||
79
assets/blender/scripts/export_buildings2.py
Normal file
79
assets/blender/scripts/export_buildings2.py
Normal file
@@ -0,0 +1,79 @@
|
||||
#!/usr/bin/env python
"""Headless Blender script: export the current scene to Ogre .scene/.mesh.

Invoked as: blender <file.blend> -b -P export_buildings2.py -- <gltf_file>
The single argument after "--" names the output path; its .glb/.gltf
extension is replaced with ".scene" for the Ogre export.
"""

import os, sys, time
import bpy
from math import pi
import glob
import shutil
from mathutils import Vector, Matrix
from math import radians, pi

# Arguments after "--" belong to this script, not to Blender itself.
argv = sys.argv
argv = argv[argv.index("--") + 1:]

incpath = os.path.dirname(__file__)

# Make this script's directory and the bundled blender2ogre addon importable.
sys.path.insert(0, incpath)
sys.path.insert(1, incpath + "/blender2ogre")
print(sys.path)

import io_ogre
io_ogre.register()

gltf_file = argv[0]
print("Exporting to " + gltf_file)
basepath = os.getcwd()

# (A large commented-out bpy.ops.export_scene.gltf(...) call was removed
# here; see version control history for the full parameter list.)

# Drop collision helper objects ("-col" suffix) before exporting.
# Iterate over a snapshot: removing from bpy.data.objects while iterating
# the live collection is unsafe, and a removed object must not be
# dereferenced afterwards (the original code read obj.rigid_body after
# remove(), which crashes on freed data).
for obj in list(bpy.data.objects):
    if obj.name.endswith("-col"):
        bpy.data.objects.remove(obj)
        continue
    if (obj.rigid_body):
        print(obj.rigid_body.collision_shape)

scene_file = gltf_file.replace(".glb", "").replace(".gltf", "") + ".scene"
bpy.ops.ogre.export(filepath=scene_file,
                    EX_SWAP_AXIS='xz-y',
                    EX_V2_MESH_TOOL_VERSION='v2',
                    EX_EXPORT_XML_DELETE=True,
                    EX_SCENE=True,
                    EX_SELECTED_ONLY=False,
                    EX_EXPORT_HIDDEN=False,
                    EX_FORCE_CAMERA=False,
                    EX_FORCE_LIGHTS=False,
                    EX_NODE_ANIMATION=True,
                    EX_MATERIALS=True,
                    EX_SEPARATE_MATERIALS=True,
                    EX_COPY_SHADER_PROGRAMS=True,
                    EX_MESH=True,
                    EX_LOD_LEVELS=3,
                    EX_LOD_DISTANCE=100,
                    EX_LOD_PERCENT=40
                    )

# Reset to an empty file and quit so the headless process exits cleanly.
bpy.ops.wm.read_homefile(use_empty=True)
time.sleep(2)
bpy.ops.wm.quit_blender()
|
||||
@@ -7,6 +7,7 @@ import glob
|
||||
import shutil
|
||||
from mathutils import Vector, Matrix
|
||||
from math import radians, pi
|
||||
import json
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
|
||||
#from settings import ExportMappingFemale, ExportMappingMale, ExportMappingMaleBabyShape, ExportMappingMaleEdited, ExportMappingFemaleEdited, ExportMappingMaleTestShapeEdited, ExportMappingMaleBaseShapeEdited
|
||||
@@ -290,6 +291,42 @@ for mapping in[CommandLineMapping()]:
|
||||
export_lights=False,
|
||||
export_skins=True)
|
||||
print("exported to: " + mapping.gltf_path)
|
||||
obj_names = mapping.objs
|
||||
prefix = mapping.armature_name + "_"
|
||||
|
||||
for name in obj_names:
|
||||
obj = bpy.data.objects.get(name)
|
||||
|
||||
if obj and obj.type == 'MESH':
|
||||
# 1. Rename Mesh Data
|
||||
if not obj.data.name.startswith(prefix):
|
||||
obj.data.name = prefix + obj.data.name
|
||||
|
||||
# 2. Iterate through all Material Slots on the object
|
||||
for slot in obj.material_slots:
|
||||
if slot.material:
|
||||
mat = slot.material
|
||||
# 3. Check if material already has the prefix
|
||||
if not mat.name.startswith(prefix):
|
||||
mat.name = prefix + mat.name
|
||||
print(f"Renamed material '{mat.name}' on object '{name}'")
|
||||
# 3. Export custom properties to json
|
||||
save_data = {}
|
||||
for key in obj.keys():
|
||||
if key in ["age", "sex", "slot"]:
|
||||
save_data[key] = obj[key]
|
||||
if key.startswith("body_"):
|
||||
save_data[key.replace("body_", "", 1)] = obj[key]
|
||||
save_data["mesh"] = obj.data.name + ".mesh"
|
||||
json_dir = os.path.dirname(mapping.gltf_path)
|
||||
save_file = json_dir + "/body_part_" + obj.data.name + ".json"
|
||||
json_filepath = os.path.join(json_dir, save_file)
|
||||
with open(json_filepath, 'w') as f:
|
||||
json.dump(save_data, f)
|
||||
|
||||
armobj = bpy.data.objects.get(mapping.armature_name)
|
||||
armobj.data.name = armobj.name
|
||||
bpy.ops.ogre.export(filepath=mapping.gltf_path.replace(".glb", ".scene"), EX_SELECTED_ONLY=False, EX_SHARED_ARMATURE=True, EX_LOD_GENERATION='0', EX_LOD_DISTANCE=20, EX_LOD_LEVELS=4, EX_GENERATE_TANGENTS='4')
|
||||
|
||||
bpy.ops.wm.read_homefile(use_empty=True)
|
||||
time.sleep(2)
|
||||
|
||||
@@ -73,9 +73,8 @@ FileSystem=resources/fonts
|
||||
[LuaScripts]
|
||||
FileSystem=lua-scripts
|
||||
|
||||
[Characters]
|
||||
FileSystem=./characters/male
|
||||
FileSystem=./characters/female
|
||||
#[Characters]
|
||||
#FileSystem=./characters
|
||||
[Audio]
|
||||
FileSystem=./audio/gui
|
||||
|
||||
|
||||
@@ -387,14 +387,22 @@ public:
|
||||
}
|
||||
void locateResources() override
|
||||
{
|
||||
Ogre::ResourceGroupManager::getSingleton().createResourceGroup(
|
||||
Ogre::ResourceGroupManager::getSingleton().createResourceGroup(
|
||||
"Characters", true);
|
||||
Ogre::ResourceGroupManager::getSingleton().createResourceGroup(
|
||||
"Water", true);
|
||||
Ogre::ResourceGroupManager::getSingleton().createResourceGroup(
|
||||
"LuaScripts", false);
|
||||
Ogre::ResourceGroupManager::getSingleton().addResourceLocation(
|
||||
"./lua-scripts", "FileSystem", "LuaScripts", true,
|
||||
true);
|
||||
OgreBites::ApplicationContext::locateResources();
|
||||
Ogre::ResourceGroupManager::getSingleton().addResourceLocation(
|
||||
"./characters/male", "FileSystem", "Characters", false,
|
||||
true);
|
||||
Ogre::ResourceGroupManager::getSingleton().addResourceLocation(
|
||||
"./characters/female", "FileSystem", "Characters",
|
||||
false, true);
|
||||
OgreBites::ApplicationContext::locateResources();
|
||||
}
|
||||
void loadResources() override
|
||||
{
|
||||
|
||||
@@ -201,16 +201,15 @@ BoatModule::BoatModule(flecs::world &ecs)
|
||||
->_getDerivedOrientation();
|
||||
if (ev.e2.has<
|
||||
CharacterBase>()) {
|
||||
ev.e2.get_mut<
|
||||
CharacterBase>()
|
||||
.mBodyNode
|
||||
->_setDerivedPosition(
|
||||
position);
|
||||
ev.e2.get_mut<
|
||||
CharacterBase>()
|
||||
.mBodyNode
|
||||
->_setDerivedOrientation(
|
||||
orientation);
|
||||
Ogre::SceneNode *n =
|
||||
ECS::get<
|
||||
CharacterModule>()
|
||||
.characterNodes
|
||||
.at(ev.e2);
|
||||
n->_setDerivedPosition(
|
||||
position);
|
||||
n->_setDerivedOrientation(
|
||||
orientation);
|
||||
}
|
||||
}
|
||||
e.set<BoatCurrentActuator>(
|
||||
@@ -282,14 +281,14 @@ BoatModule::BoatModule(flecs::world &ecs)
|
||||
captainSeat
|
||||
->_getDerivedOrientation();
|
||||
if (ev.e2.has<CharacterBase>()) {
|
||||
ev.e2.get_mut<CharacterBase>()
|
||||
.mBodyNode
|
||||
->_setDerivedPosition(
|
||||
position);
|
||||
ev.e2.get_mut<CharacterBase>()
|
||||
.mBodyNode
|
||||
->_setDerivedOrientation(
|
||||
orientation);
|
||||
Ogre::SceneNode *n =
|
||||
ECS::get<CharacterModule>()
|
||||
.characterNodes
|
||||
.at(ev.e2);
|
||||
n->_setDerivedPosition(
|
||||
position);
|
||||
n->_setDerivedOrientation(
|
||||
orientation);
|
||||
}
|
||||
} else if (ev.event == "boat_control_exit") {
|
||||
} else if (ev.event == "actuator_exit") {
|
||||
|
||||
@@ -188,17 +188,16 @@ public:
|
||||
int update(float delta) override
|
||||
{
|
||||
if (npc.e.is_valid()) {
|
||||
Ogre::SceneNode *n =
|
||||
ECS::get<CharacterModule>()
|
||||
.characterNodes.at(npc.e);
|
||||
Ogre::Vector3 position =
|
||||
npc.e.get<CharacterBase>()
|
||||
.mBodyNode
|
||||
->_getDerivedPosition();
|
||||
n->_getDerivedPosition();
|
||||
if (position.squaredDistance(targetPosition) >=
|
||||
radius * radius) {
|
||||
if (npc.e.is_valid())
|
||||
npc.e.get<CharacterBase>()
|
||||
.mBodyNode
|
||||
->_setDerivedPosition(
|
||||
targetPosition);
|
||||
n->_setDerivedPosition(
|
||||
targetPosition);
|
||||
npc.position = targetPosition;
|
||||
return BUSY;
|
||||
}
|
||||
@@ -271,6 +270,7 @@ public:
|
||||
};
|
||||
ActionNodeActions(int node, const Blackboard &prereq, int cost)
|
||||
{
|
||||
ZoneScoped;
|
||||
OgreAssert(
|
||||
node < ECS::get<ActionNodeList>().dynamicNodes.size(),
|
||||
"bad node " + Ogre::StringConverter::toString(node));
|
||||
@@ -347,6 +347,7 @@ public:
|
||||
have_bits = true;
|
||||
}
|
||||
if (!have_bits) {
|
||||
ZoneScopedN("Use");
|
||||
std::cout << "use: " << props.dump(4)
|
||||
<< std::endl;
|
||||
// OgreAssert(false, "props: " + props.dump(4));
|
||||
@@ -396,8 +397,8 @@ private:
|
||||
}
|
||||
void activate() override
|
||||
{
|
||||
std::cout << action->get_name();
|
||||
std::cout << "!";
|
||||
ZoneScoped;
|
||||
ZoneTextF("%s", action->get_name().c_str());
|
||||
delay = 1.0f;
|
||||
}
|
||||
|
||||
@@ -414,6 +415,7 @@ CharacterAIModule::CharacterAIModule(flecs::world &ecs)
|
||||
static std::mutex ecs_mutex;
|
||||
ecs.module<CharacterAIModule>();
|
||||
ecs.import <CharacterManagerModule>();
|
||||
ecs.import <PlayerActionModule>();
|
||||
ecs.component<Blackboard>();
|
||||
ecs.component<TownAI>().on_add([](flecs::entity e, TownAI &ai) {
|
||||
std::lock_guard<std::mutex> lock(ecs_mutex);
|
||||
@@ -512,6 +514,7 @@ CharacterAIModule::CharacterAIModule(flecs::world &ecs)
|
||||
const TownNPCs &npcs) {
|
||||
ZoneScopedN("CreateBlackboards");
|
||||
std::lock_guard<std::mutex> lock(ecs_mutex);
|
||||
OgreAssert(npcs.npcs.size() > 0, "npcs not crated");
|
||||
createBlackboards(town, npcs, ai);
|
||||
});
|
||||
ecs.system<ActionNodeList, TownAI, TownNPCs>("UpdateDynamicActions")
|
||||
@@ -520,11 +523,15 @@ CharacterAIModule::CharacterAIModule(flecs::world &ecs)
|
||||
TownNPCs &npcs) {
|
||||
ZoneScopedN("UpdateDynamicActions");
|
||||
std::lock_guard<std::mutex> lock(ecs_mutex);
|
||||
OgreAssert(npcs.npcs.size() > 0, "npcs not crated");
|
||||
if (ai.nodeActions.size() > 0)
|
||||
return;
|
||||
if (alist.dynamicNodes.size() == 0)
|
||||
ECS::get_mut<ActionNodeList>()
|
||||
.updateDynamicNodes();
|
||||
OgreAssert(alist.nodes.size() > 0, "bad nodes");
|
||||
if (alist.dynamicNodes.size() == 0)
|
||||
return;
|
||||
OgreAssert(alist.dynamicNodes.size() > 0,
|
||||
"bad dynamic nodes");
|
||||
int nodeIndex;
|
||||
@@ -550,6 +557,7 @@ CharacterAIModule::CharacterAIModule(flecs::world &ecs)
|
||||
.each([this](flecs::entity town, ActionNodeList &alist,
|
||||
TownAI &ai, TownNPCs &npcs) {
|
||||
ZoneScopedN("UpdateDynamicNodes");
|
||||
OgreAssert(npcs.npcs.size() > 0, "npcs not crated");
|
||||
std::lock_guard<std::mutex> lock(ecs_mutex);
|
||||
ECS::get_mut<ActionNodeList>().updateDynamicNodes();
|
||||
});
|
||||
@@ -574,15 +582,18 @@ CharacterAIModule::CharacterAIModule(flecs::world &ecs)
|
||||
.kind(flecs::OnUpdate)
|
||||
.each([](flecs::entity e, TownNPCs &npcs) {
|
||||
ZoneScopedN("UpdateNPCPositions");
|
||||
OgreAssert(npcs.npcs.size() > 0, "npcs not crated");
|
||||
for (auto it = npcs.npcs.begin(); it != npcs.npcs.end();
|
||||
it++) {
|
||||
auto &npc = npcs.npcs.at(it->first);
|
||||
if (npc.e.is_valid() &&
|
||||
npc.e.has<CharacterBase>())
|
||||
npc.position =
|
||||
npc.e.get<CharacterBase>()
|
||||
.mBodyNode
|
||||
->_getDerivedPosition();
|
||||
npc.e.has<CharacterBase>()) {
|
||||
Ogre::SceneNode *n =
|
||||
ECS::get<CharacterModule>()
|
||||
.characterNodes.at(
|
||||
npc.e);
|
||||
npc.position = n->_getDerivedPosition();
|
||||
}
|
||||
}
|
||||
});
|
||||
ecs.system<ActionNodeList, TownAI, TownNPCs>("UpdateBlackboards")
|
||||
@@ -591,6 +602,7 @@ CharacterAIModule::CharacterAIModule(flecs::world &ecs)
|
||||
.each([this](flecs::entity town, ActionNodeList &alist,
|
||||
TownAI &ai, const TownNPCs &npcs) {
|
||||
ZoneScopedN("UpdateBlackboards");
|
||||
OgreAssert(npcs.npcs.size() > 0, "npcs not crated");
|
||||
|
||||
Ogre::Root::getSingleton().getWorkQueue()->addTask([this,
|
||||
town,
|
||||
@@ -624,6 +636,10 @@ CharacterAIModule::CharacterAIModule(flecs::world &ecs)
|
||||
.each([&](flecs::entity town, TownAI &ai,
|
||||
const TownNPCs &npcs) {
|
||||
ZoneScopedN("PlanAI");
|
||||
OgreAssert(npcs.npcs.size() > 0, "npcs not crated");
|
||||
OgreAssert(ai.blackboards.size() > 0,
|
||||
"blackboards not crated");
|
||||
OgreAssert(ai.memory.size() > 0, "memory not crated");
|
||||
Ogre::Root::getSingleton().getWorkQueue()->addTask([this,
|
||||
town,
|
||||
npcs,
|
||||
@@ -646,6 +662,10 @@ CharacterAIModule::CharacterAIModule(flecs::world &ecs)
|
||||
.each([&](flecs::entity town, const EngineData &eng,
|
||||
TownNPCs &npcs, TownAI &ai) {
|
||||
ZoneScopedN("RunPLAN");
|
||||
OgreAssert(npcs.npcs.size() > 0, "npcs not crated");
|
||||
OgreAssert(ai.blackboards.size() > 0,
|
||||
"blackboards not crated");
|
||||
OgreAssert(ai.memory.size() > 0, "memory not crated");
|
||||
for (const auto &plans : ai.plans) {
|
||||
if (plan_exec.find(plans.first) !=
|
||||
plan_exec.end()) {
|
||||
@@ -668,13 +688,19 @@ CharacterAIModule::CharacterAIModule(flecs::world &ecs)
|
||||
// std::cout << " Goal: ";
|
||||
plan.goal->goal.dump_bits();
|
||||
for (const auto &action : plan.plan) {
|
||||
ActionExec::PlanExecData data({
|
||||
TownNPCs::NPCData &npc =
|
||||
npcs.npcs.at(
|
||||
plans.first),
|
||||
plans.first);
|
||||
Blackboard &bb =
|
||||
ai.blackboards.at(
|
||||
plans.first),
|
||||
plans.first);
|
||||
nlohmann::json &mem =
|
||||
ai.memory.at(
|
||||
plans.first),
|
||||
plans.first);
|
||||
ActionExec::PlanExecData data({
|
||||
npc,
|
||||
bb,
|
||||
mem,
|
||||
|
||||
});
|
||||
// TODO: executor factory is needed
|
||||
@@ -773,19 +799,26 @@ void CharacterAIModule::buildPlans(flecs::entity town, const TownNPCs &npcs,
|
||||
if (plan_tasks.size() > 0) {
|
||||
bool created = (plan_tasks.front())();
|
||||
if (created) {
|
||||
std::cout << plan_tasks.front().blackboard.index << " ";
|
||||
std::cout << "Goal: "
|
||||
<< plan_tasks.front().goal.get_name();
|
||||
plan_tasks.front().goal.goal.dump_bits();
|
||||
std::cout << std::endl;
|
||||
std::cout << "Path: ";
|
||||
for (auto &action : plan_tasks.front().plan.plan) {
|
||||
OgreAssert(action, "No action");
|
||||
std::cout << action->get_name() + " ";
|
||||
ZoneTextF("%d: Goal: %s",
|
||||
plan_tasks.front().blackboard.index,
|
||||
plan_tasks.front().goal.get_name().c_str());
|
||||
{
|
||||
std::cout << plan_tasks.front().blackboard.index
|
||||
<< " ";
|
||||
std::cout << "Goal: "
|
||||
<< plan_tasks.front().goal.get_name();
|
||||
plan_tasks.front().goal.goal.dump_bits();
|
||||
std::cout << std::endl;
|
||||
std::cout << "Path: ";
|
||||
for (auto &action :
|
||||
plan_tasks.front().plan.plan) {
|
||||
OgreAssert(action, "No action");
|
||||
std::cout << action->get_name() + " ";
|
||||
}
|
||||
std::cout << " size: "
|
||||
<< plan_tasks.front().plan.plan.size()
|
||||
<< std::endl;
|
||||
}
|
||||
std::cout << " size: "
|
||||
<< plan_tasks.front().plan.plan.size()
|
||||
<< std::endl;
|
||||
ai.plans[plan_tasks.front().blackboard.index].push_back(
|
||||
plan_tasks.front().plan);
|
||||
}
|
||||
@@ -1182,11 +1215,12 @@ void Blackboard::query_ai()
|
||||
const float distance = 10000.0f;
|
||||
Ogre::Vector3 position(0, 0, 0);
|
||||
if (npcs.npcs.at(index).e.is_valid() &&
|
||||
npcs.npcs.at(index).e.has<CharacterBase>())
|
||||
position = npcs.npcs.at(index)
|
||||
.e.get<CharacterBase>()
|
||||
.mBodyNode->_getDerivedPosition();
|
||||
else
|
||||
npcs.npcs.at(index).e.has<CharacterBase>()) {
|
||||
Ogre::SceneNode *n =
|
||||
ECS::get<CharacterModule>().characterNodes.at(
|
||||
npcs.npcs.at(index).e);
|
||||
position = n->_getDerivedPosition();
|
||||
} else
|
||||
from_json(npcs.npcs.at(index).props["position"], position);
|
||||
this->position = position;
|
||||
ActionNodeList &alist = ECS::get_mut<ActionNodeList>();
|
||||
|
||||
@@ -29,27 +29,30 @@ CharacterAnimationModule::CharacterAnimationModule(flecs::world &ecs)
|
||||
if (!anim.configured) {
|
||||
int i, j;
|
||||
e.set<EventData>({});
|
||||
ch.mBodyEnt->getSkeleton()->setBlendMode(
|
||||
Ogre::ANIMBLEND_CUMULATIVE);
|
||||
Ogre::SceneNode *n =
|
||||
ECS::get<CharacterModule>()
|
||||
.characterNodes.at(e);
|
||||
Ogre::Entity *ent = static_cast<Ogre::Entity *>(
|
||||
n->getAttachedObject(0));
|
||||
ent->getSkeleton()->setBlendMode(
|
||||
Ogre::ANIMBLEND_CUMULATIVE);
|
||||
Ogre::AnimationStateSet *animStateSet =
|
||||
ch.mBodyEnt->getAllAnimationStates();
|
||||
ent->getAllAnimationStates();
|
||||
const Ogre::AnimationStateMap &animMap =
|
||||
animStateSet->getAnimationStates();
|
||||
anim.mAnimationSystem =
|
||||
new AnimationSystem::AnimationSystem(
|
||||
false);
|
||||
ch.mBodyEnt->getSkeleton()
|
||||
ent->getSkeleton()
|
||||
->getBone("Root")
|
||||
->removeAllChildren();
|
||||
for (auto it = animMap.begin();
|
||||
it != animMap.end(); it++) {
|
||||
AnimationSystem::Animation *animation =
|
||||
new AnimationSystem::Animation(
|
||||
ch.mBodyEnt
|
||||
->getSkeleton(),
|
||||
ent->getSkeleton(),
|
||||
it->second,
|
||||
ch.mBodyEnt
|
||||
->getSkeleton()
|
||||
ent->getSkeleton()
|
||||
->getAnimation(
|
||||
it->first));
|
||||
#ifdef VDEBUG
|
||||
@@ -197,14 +200,21 @@ CharacterAnimationModule::CharacterAnimationModule(flecs::world &ecs)
|
||||
ZoneScopedN("HandleRootMotionVelocity");
|
||||
if (eng.delta < 0.0000001f)
|
||||
return;
|
||||
if (!ch.mBodyNode)
|
||||
if (ECS::get<CharacterModule>().characterNodes.find(
|
||||
e) ==
|
||||
ECS::get<CharacterModule>().characterNodes.end())
|
||||
return;
|
||||
Ogre::Quaternion rot = ch.mBodyNode->getOrientation();
|
||||
Ogre::Vector3 pos = ch.mBodyNode->getPosition();
|
||||
const Ogre::SceneNode *n =
|
||||
ECS::get<CharacterModule>().characterNodes.at(
|
||||
e);
|
||||
Ogre::Quaternion rot = n->getOrientation();
|
||||
Ogre::Vector3 pos = n->getPosition();
|
||||
Ogre::Vector3 boneMotion = ch.mBoneMotion;
|
||||
v.velocity = Ogre::Vector3::ZERO;
|
||||
if (eng.delta <= 0.005)
|
||||
return;
|
||||
float safeDelta =
|
||||
Ogre::Math::Clamp(eng.delta, 0.001f, 0.99f);
|
||||
Ogre::Math::Clamp(eng.delta, 0.005f, 0.99f);
|
||||
#if 0
|
||||
if (!e.has<CharacterInActuator>()) {
|
||||
v.velocity = Ogre::Math::lerp(
|
||||
@@ -233,7 +243,7 @@ CharacterAnimationModule::CharacterAnimationModule(flecs::world &ecs)
|
||||
v.velocity.y = 0.0f;
|
||||
#endif
|
||||
OgreAssert(v.velocity.squaredLength() < 1000.0f,
|
||||
"Bad velocity setting");
|
||||
"Bad velocity setting " + Ogre::StringConverter::toString(safeDelta) + " " + Ogre::StringConverter::toString(boneMotion));
|
||||
});
|
||||
ecs.system<const EngineData, CharacterBase, AnimationControl,
|
||||
CharacterVelocity>("HandleRootMotion")
|
||||
@@ -242,8 +252,10 @@ CharacterAnimationModule::CharacterAnimationModule(flecs::world &ecs)
|
||||
CharacterBase &ch, AnimationControl &anim,
|
||||
CharacterVelocity &v) {
|
||||
ZoneScopedN("HandleRootMotion");
|
||||
#if 0
|
||||
if (!ch.mBodyNode)
|
||||
return;
|
||||
#endif
|
||||
if (eng.delta < 0.0000001f)
|
||||
return;
|
||||
OgreAssert(eng.delta > 0.0f, "Zero delta");
|
||||
|
||||
@@ -21,8 +21,9 @@ void createNPCActionNodes(flecs::entity town, int index)
|
||||
flecs::entity e = npc.e;
|
||||
nlohmann::json npcprops = npc.props;
|
||||
const CharacterBase &ch = e.get<CharacterBase>();
|
||||
Ogre::Vector3 characterPos = ch.mBodyNode->_getDerivedPosition();
|
||||
Ogre::Quaternion characterRot = ch.mBodyNode->_getDerivedOrientation();
|
||||
Ogre::SceneNode *n = ECS::get<CharacterModule>().characterNodes.at(e);
|
||||
Ogre::Vector3 characterPos = n->_getDerivedPosition();
|
||||
Ogre::Quaternion characterRot = n->_getDerivedOrientation();
|
||||
if (npc.actionNodes.size() > 0) {
|
||||
int i;
|
||||
for (i = 0; i < npc.actionNodes.size(); i++) {
|
||||
@@ -88,12 +89,12 @@ void createNPCActionNodes(flecs::entity town, int index)
|
||||
CharacterManagerModule::CharacterManagerModule(flecs::world &ecs)
|
||||
{
|
||||
ecs.module<CharacterManagerModule>();
|
||||
ecs.import <CharacterModule>();
|
||||
ecs.component<TownNPCs>();
|
||||
ecs.import <CharacterModule>();
|
||||
ecs.import <CharacterAnimationModule>();
|
||||
ecs.import <PhysicsModule>();
|
||||
ecs.import <PlayerActionModule>();
|
||||
ecs.component<TownCharacterHolder>();
|
||||
ecs.component<TownNPCs>();
|
||||
ecs.component<LivesIn>();
|
||||
ecs.system<TerrainItem, TownNPCs>("UpdateCharacters")
|
||||
.kind(flecs::OnUpdate)
|
||||
@@ -117,8 +118,8 @@ CharacterManagerModule::CharacterManagerModule(flecs::world &ecs)
|
||||
if (!player.has<CharacterBase>())
|
||||
return;
|
||||
Ogre::Vector3 cameraPos =
|
||||
player.get<CharacterBase>()
|
||||
.mBodyNode->_getDerivedPosition();
|
||||
ECS::get<Camera>()
|
||||
.mCameraNode->_getDerivedPosition();
|
||||
for (auto &npc : npcs.npcs) {
|
||||
int index = npc.first;
|
||||
TownNPCs::NPCData &data = npc.second;
|
||||
@@ -129,13 +130,18 @@ CharacterManagerModule::CharacterManagerModule(flecs::world &ecs)
|
||||
10000.0f) {
|
||||
if (!data.e.is_valid()) {
|
||||
data.e = createCharacterData(
|
||||
data.model,
|
||||
data.modelFace,
|
||||
data.modelHair,
|
||||
data.modelTop,
|
||||
data.modelBottom,
|
||||
data.modelFeet,
|
||||
data.position,
|
||||
data.orientation);
|
||||
data.e.add<LivesIn>(town);
|
||||
break;
|
||||
}
|
||||
}
|
||||
#if 0
|
||||
if (cameraPos.squaredDistance(npcPosition) >
|
||||
22500.0f) {
|
||||
if (data.e.is_valid()) {
|
||||
@@ -144,6 +150,7 @@ CharacterManagerModule::CharacterManagerModule(flecs::world &ecs)
|
||||
break;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
}
|
||||
for (auto &npc : npcs.npcs) {
|
||||
int index = npc.first;
|
||||
@@ -174,24 +181,32 @@ CharacterManagerModule::createPlayer(const Ogre::Vector3 &position,
|
||||
OgreAssert(!player.is_valid(), "Player already created");
|
||||
player = ECS::get().entity("player");
|
||||
OgreAssert(player.is_valid(), "Can't create player");
|
||||
std::cout << "Begin player create" << std::endl;
|
||||
player.add<Player>();
|
||||
ECS::get_mut<CharacterModule>().createCharacter(
|
||||
player, position, rotation, "normal-male.glb");
|
||||
ECS::modified<CharacterModule>();
|
||||
std::cout << "End player create" << std::endl;
|
||||
count++;
|
||||
return player;
|
||||
{
|
||||
ZoneScopedN("PlayerCreate");
|
||||
|
||||
player.add<Player>();
|
||||
ECS::get_mut<CharacterModule>().createCharacter(
|
||||
player, position, rotation, "male_Face.mesh",
|
||||
"male_Hair001.mesh", "male_BodyTop.mesh",
|
||||
"male_BodyBottom.mesh", "male_BodyFeet.mesh");
|
||||
ECS::modified<CharacterModule>();
|
||||
count++;
|
||||
}
|
||||
return player;
|
||||
}
|
||||
flecs::entity
|
||||
CharacterManagerModule::createCharacterData(const Ogre::String model,
|
||||
const Ogre::Vector3 &position,
|
||||
const Ogre::Quaternion &rotation)
|
||||
|
||||
flecs::entity CharacterManagerModule::createCharacterData(
|
||||
const Ogre::String &modelFace, const Ogre::String &modelHair,
|
||||
const Ogre::String &modelTop, const Ogre::String &modelBottom,
|
||||
const Ogre::String &modelFeet, const Ogre::Vector3 &position,
|
||||
const Ogre::Quaternion &rotation)
|
||||
{
|
||||
ZoneScoped;
|
||||
flecs::entity e = ECS::get().entity();
|
||||
ECS::get_mut<CharacterModule>().createCharacter(e, position, rotation,
|
||||
model);
|
||||
modelFace, modelHair,
|
||||
modelTop, modelBottom,
|
||||
modelFeet);
|
||||
ECS::modified<CharacterModule>();
|
||||
return e;
|
||||
}
|
||||
@@ -199,36 +214,45 @@ CharacterManagerModule::createCharacterData(const Ogre::String model,
|
||||
void CharacterManagerModule::registerTownCharacters(flecs::entity town)
|
||||
{
|
||||
ZoneScoped;
|
||||
Ogre::MeshManager::getSingleton().load("normal-male.glb", "General");
|
||||
Ogre::MeshManager::getSingleton().load("normal-female.glb", "General");
|
||||
{
|
||||
Ogre::MeshPtr maleMesh = Ogre::MeshManager::getSingleton().load(
|
||||
"normal-male.glb", "Characters");
|
||||
Ogre::MeshPtr femaleMesh =
|
||||
Ogre::MeshManager::getSingleton().load(
|
||||
"normal-female.glb", "Characters");
|
||||
}
|
||||
Ogre::String props = StaticGeometryModule::getItemProperties(town);
|
||||
nlohmann::json j = nlohmann::json::parse(props);
|
||||
nlohmann::json npcs = nlohmann::json::array();
|
||||
if (town.has<TownNPCs>())
|
||||
return;
|
||||
if (j.find("npcs") != j.end())
|
||||
npcs = j["npcs"];
|
||||
if (j.find("npcs") == j.end())
|
||||
return;
|
||||
npcs = j["npcs"];
|
||||
std::cout << npcs.dump(4) << std::endl;
|
||||
if (npcs.size() == 0)
|
||||
return;
|
||||
int index = 0;
|
||||
std::map<int, TownNPCs::NPCData> npcMap;
|
||||
for (auto &npc : npcs) {
|
||||
const char *models[] = { "normal-male.glb",
|
||||
"normal-female.glb" };
|
||||
int sex = npc["sex"].get<int>();
|
||||
Ogre::Vector3 npcPosition;
|
||||
Ogre::Quaternion npcOrientation;
|
||||
from_json(npc["position"], npcPosition);
|
||||
from_json(npc["orientation"], npcOrientation);
|
||||
Ogre::String model = models[sex];
|
||||
TownNPCs::NPCData npcData;
|
||||
npcData.e = flecs::entity();
|
||||
npcData.model = model;
|
||||
npcData.modelFace = npc["slot_face"].get<Ogre::String>();
|
||||
npcData.modelHair = npc["slot_hair"].get<Ogre::String>();
|
||||
npcData.modelTop = npc["slot_top"].get<Ogre::String>();
|
||||
npcData.modelBottom = npc["slot_bottom"].get<Ogre::String>();
|
||||
npcData.modelFeet = npc["slot_feet"].get<Ogre::String>();
|
||||
npcData.orientation = npcOrientation;
|
||||
npcData.position = npcPosition;
|
||||
npcData.props = npc;
|
||||
npcMap[index] = npcData;
|
||||
index++;
|
||||
}
|
||||
OgreAssert(npcMap.size() > 0, "no npcs registered");
|
||||
town.set<TownNPCs>({ npcMap });
|
||||
}
|
||||
|
||||
|
||||
@@ -12,7 +12,11 @@ struct TownNPCs {
|
||||
nlohmann::json props;
|
||||
Ogre::Vector3 position;
|
||||
Ogre::Quaternion orientation;
|
||||
Ogre::String model;
|
||||
Ogre::String modelFace;
|
||||
Ogre::String modelHair;
|
||||
Ogre::String modelTop;
|
||||
Ogre::String modelBottom;
|
||||
Ogre::String modelFeet;
|
||||
std::vector<ActionNodeList::ActionNode> actionNodes;
|
||||
};
|
||||
|
||||
@@ -25,8 +29,12 @@ struct CharacterManagerModule {
|
||||
CharacterManagerModule(flecs::world &ecs);
|
||||
flecs::entity createPlayer(const Ogre::Vector3 &position,
|
||||
const Ogre::Quaternion &rotation);
|
||||
flecs::entity createCharacterData(const Ogre::String model,
|
||||
const Ogre::Vector3 &position,
|
||||
flecs::entity createCharacterData(const Ogre::String &modelFace,
|
||||
const Ogre::String &modelHair,
|
||||
const Ogre::String &modelTop,
|
||||
const Ogre::String &modelBottom,
|
||||
const Ogre::String &modelFeet,
|
||||
const Ogre::Vector3 &position,
|
||||
const Ogre::Quaternion &rotation);
|
||||
void removeCharacterData(int id);
|
||||
flecs::entity getPlayer() const
|
||||
|
||||
@@ -16,17 +16,67 @@ CharacterModule::CharacterModule(flecs::world &ecs)
|
||||
{
|
||||
ZoneScoped;
|
||||
struct TriggerPhysicsChange {};
|
||||
ecs.module<CharacterModule>();
|
||||
static std::vector<Ogre::String> part_names;
|
||||
const std::vector<Ogre::String> &groups =
|
||||
Ogre::ResourceGroupManager::getSingleton().getResourceGroups();
|
||||
if (part_names.size() == 0) {
|
||||
int i;
|
||||
for (i = 0; i < groups.size(); i++) {
|
||||
std::vector<Ogre::String> names =
|
||||
*Ogre::ResourceGroupManager::getSingleton()
|
||||
.findResourceNames(groups[i],
|
||||
"body_part_*.json");
|
||||
part_names.insert(part_names.end(), names.begin(),
|
||||
names.end());
|
||||
}
|
||||
}
|
||||
body_parts = nlohmann::json::object();
|
||||
for (auto &g : part_names) {
|
||||
Ogre::String group = Ogre::ResourceGroupManager::getSingleton()
|
||||
.findGroupContainingResource(g);
|
||||
Ogre::DataStreamPtr stream =
|
||||
Ogre::ResourceGroupManager::getSingleton().openResource(
|
||||
g, group);
|
||||
Ogre::String json = stream->getAsString();
|
||||
nlohmann::json jdata = nlohmann::json::parse(json);
|
||||
if (jdata.find("age") == jdata.end())
|
||||
continue;
|
||||
if (jdata.find("sex") == jdata.end())
|
||||
continue;
|
||||
if (jdata.find("slot") == jdata.end())
|
||||
continue;
|
||||
if (jdata.find("mesh") == jdata.end())
|
||||
continue;
|
||||
Ogre::String age = jdata["age"].get<Ogre::String>();
|
||||
Ogre::String sex = jdata["sex"].get<Ogre::String>();
|
||||
Ogre::String slot = jdata["slot"].get<Ogre::String>();
|
||||
Ogre::String mesh = jdata["mesh"].get<Ogre::String>();
|
||||
if (body_parts.find(age) == body_parts.end())
|
||||
body_parts[age] = nlohmann::json::object();
|
||||
if (body_parts[age].find(sex) == body_parts[age].end())
|
||||
body_parts[age][sex] = nlohmann::json::object();
|
||||
if (body_parts[age][sex].find(slot) ==
|
||||
body_parts[age][sex].end())
|
||||
body_parts[age][sex][slot] = nlohmann::json::array();
|
||||
body_parts[age][sex][slot].push_back(mesh);
|
||||
mesh_names.insert(mesh);
|
||||
Ogre::MeshManager::getSingleton().load(mesh, "Characters");
|
||||
}
|
||||
std::cout << body_parts.dump(4) << std::endl;
|
||||
ecs.module<CharacterModule>();
|
||||
ecs.component<Character>();
|
||||
ecs.component<Player>();
|
||||
ecs.component<CharacterBase>()
|
||||
.on_remove([this](flecs::entity e, CharacterBase &ch) {
|
||||
ZoneScoped;
|
||||
// FIXME: clean up data
|
||||
if (characterEntities.find(e) !=
|
||||
characterEntities.end() ||
|
||||
if (characterEntitiesFace.find(e) !=
|
||||
characterEntitiesFace.end() ||
|
||||
characterNodes.find(e) != characterNodes.end()) {
|
||||
characterEntities.erase(e);
|
||||
// FIXME: clean up data
|
||||
characterEntitiesFace.erase(e);
|
||||
characterEntitiesTop.erase(e);
|
||||
characterEntitiesBottom.erase(e);
|
||||
characterEntitiesFeet.erase(e);
|
||||
characterNodes.erase(e);
|
||||
ECS::modified<CharacterModule>();
|
||||
}
|
||||
@@ -34,30 +84,92 @@ CharacterModule::CharacterModule(flecs::world &ecs)
|
||||
.on_add([this](flecs::entity e, CharacterBase &ch) {
|
||||
if (characterNodes.find(e) == characterNodes.end()) {
|
||||
ZoneScoped;
|
||||
OgreAssert(characterModels.find(e) !=
|
||||
characterModels.end(),
|
||||
OgreAssert(characterModelsFace.find(e) !=
|
||||
characterModelsFace.end(),
|
||||
"no model set");
|
||||
const EngineData &eng = ECS::get<EngineData>();
|
||||
Ogre::SceneNode *bodyNode =
|
||||
eng.mScnMgr->getRootSceneNode()
|
||||
->createChildSceneNode();
|
||||
Ogre::Entity *bodyEnt =
|
||||
Ogre::Entity *faceEnt =
|
||||
eng.mScnMgr->createEntity(
|
||||
characterModels[e]);
|
||||
characterModelsFace[e]);
|
||||
bodyNode->attachObject(faceEnt);
|
||||
characterNodes[e] = bodyNode;
|
||||
characterEntities[e] = bodyEnt;
|
||||
characterEntitiesFace[e] = faceEnt;
|
||||
Ogre::Entity *hairEnt =
|
||||
eng.mScnMgr->createEntity(
|
||||
characterModelsHair[e]);
|
||||
hairEnt->shareSkeletonInstanceWith(faceEnt);
|
||||
bodyNode->attachObject(hairEnt);
|
||||
characterEntitiesHair[e] = hairEnt;
|
||||
Ogre::Entity *topEnt =
|
||||
eng.mScnMgr->createEntity(
|
||||
characterModelsTop[e]);
|
||||
topEnt->shareSkeletonInstanceWith(faceEnt);
|
||||
bodyNode->attachObject(topEnt);
|
||||
characterEntitiesTop[e] = topEnt;
|
||||
Ogre::Entity *bottomEnt =
|
||||
eng.mScnMgr->createEntity(
|
||||
characterModelsBottom[e]);
|
||||
bottomEnt->shareSkeletonInstanceWith(faceEnt);
|
||||
bodyNode->attachObject(bottomEnt);
|
||||
characterEntitiesBottom[e] = bottomEnt;
|
||||
Ogre::Entity *feetEnt =
|
||||
eng.mScnMgr->createEntity(
|
||||
characterModelsFeet[e]);
|
||||
feetEnt->shareSkeletonInstanceWith(faceEnt);
|
||||
bodyNode->attachObject(feetEnt);
|
||||
characterEntitiesFeet[e] = feetEnt;
|
||||
#if 0
|
||||
if (characterModelsTop.find(e) !=
|
||||
characterModelsTop.end()) {
|
||||
Ogre::String skeletonName =
|
||||
bodyEnt->getMesh()
|
||||
->getSkeletonName();
|
||||
Ogre::MeshPtr mesh =
|
||||
Ogre::MeshManager::getSingleton()
|
||||
.load(characterModelsTop
|
||||
[e],
|
||||
"General");
|
||||
Ogre::String mname = mesh->getName();
|
||||
mesh = mesh->clone(mname + "_clone");
|
||||
OgreAssert(
|
||||
mesh,
|
||||
"No mesh " +
|
||||
characterModelsTop[e]);
|
||||
Ogre::String clothSkeleton =
|
||||
mesh->getSkeletonName();
|
||||
if (clothSkeleton != skeletonName) {
|
||||
mesh->setSkeletonName(
|
||||
skeletonName);
|
||||
mesh->load();
|
||||
if (Ogre::SkeletonManager::getSingleton()
|
||||
.resourceExists(
|
||||
clothSkeleton))
|
||||
Ogre::SkeletonManager::
|
||||
getSingleton()
|
||||
.remove(clothSkeleton);
|
||||
}
|
||||
Ogre::Entity *characterTop =
|
||||
eng.mScnMgr->createEntity(mesh);
|
||||
characterTop->shareSkeletonInstanceWith(
|
||||
bodyEnt);
|
||||
bodyNode->attachObject(characterTop);
|
||||
}
|
||||
#endif
|
||||
ECS::modified<CharacterModule>();
|
||||
}
|
||||
OgreAssert(characterOrientations.find(e) !=
|
||||
characterOrientations.end(),
|
||||
"Bad orientation/position");
|
||||
ch.mBodyEnt = characterEntities[e];
|
||||
ch.mBodyNode = characterNodes[e];
|
||||
ch.mBodyNode->setOrientation(characterOrientations[e]);
|
||||
ch.mBodyNode->setPosition(characterPositions[e]);
|
||||
ch.mBodyNode->attachObject(ch.mBodyEnt);
|
||||
OgreAssert(ch.mBodyEnt->getSkeleton()->hasBone("Root"),
|
||||
"No root bone");
|
||||
Ogre::SceneNode *bodyNode = characterNodes[e];
|
||||
bodyNode->setOrientation(characterOrientations[e]);
|
||||
bodyNode->setPosition(characterPositions[e]);
|
||||
OgreAssert(
|
||||
characterEntitiesFace[e]->getSkeleton()->hasBone(
|
||||
"Root"),
|
||||
"No root bone");
|
||||
ch.mBoneMotion = Ogre::Vector3::ZERO;
|
||||
ch.mBonePrevMotion = Ogre::Vector3::ZERO;
|
||||
});
|
||||
@@ -193,7 +305,9 @@ CharacterModule::CharacterModule(flecs::world &ecs)
|
||||
ch.mGoalDirection.normalise();
|
||||
|
||||
Ogre::Quaternion toGoal =
|
||||
ch.mBodyNode->getOrientation()
|
||||
ECS::get<CharacterModule>()
|
||||
.characterNodes.at(e)
|
||||
->getOrientation()
|
||||
.zAxis()
|
||||
.getRotationTo(
|
||||
ch.mGoalDirection);
|
||||
@@ -218,7 +332,9 @@ CharacterModule::CharacterModule(flecs::world &ecs)
|
||||
std::min<Ogre::Real>(
|
||||
yawToGoal,
|
||||
yawAtSpeed)); //yawToGoal = Math::Clamp<Real>(yawToGoal, 0, yawAtSpeed);
|
||||
ch.mBodyNode->yaw(Ogre::Degree(yawToGoal));
|
||||
ECS::get<CharacterModule>()
|
||||
.characterNodes.at(e)
|
||||
->yaw(Ogre::Degree(yawToGoal));
|
||||
}
|
||||
});
|
||||
#if 0
|
||||
@@ -252,7 +368,7 @@ CharacterModule::CharacterModule(flecs::world &ecs)
|
||||
.kind(flecs::OnUpdate)
|
||||
.with<Player>()
|
||||
.with<GroundCheckReady>()
|
||||
.each([](const EngineData &eng, Camera &camera,
|
||||
.each([](flecs::entity e, const EngineData &eng, Camera &camera,
|
||||
const CharacterBase &ch) {
|
||||
ZoneScopedN("UpdateCamera");
|
||||
float delta = eng.delta;
|
||||
@@ -280,7 +396,9 @@ CharacterModule::CharacterModule(flecs::world &ecs)
|
||||
} else {
|
||||
// place the camera pivot roughly at the character's shoulder
|
||||
camera.mCameraPivot->setPosition(
|
||||
ch.mBodyNode->getPosition() +
|
||||
ECS::get<CharacterModule>()
|
||||
.characterNodes.at(e)
|
||||
->getPosition() +
|
||||
Ogre::Vector3::UNIT_Y * CAM_HEIGHT);
|
||||
// move the camera smoothly to the goal
|
||||
Ogre::Vector3 goalOffset =
|
||||
@@ -462,6 +580,34 @@ void CharacterModule::updateCameraGoal(Camera &camera, Ogre::Real deltaYaw,
|
||||
}
|
||||
}
|
||||
|
||||
void CharacterModule::createCharacter(
|
||||
flecs::entity e, const Ogre::Vector3 &position,
|
||||
const Ogre::Quaternion &rotation, const Ogre::String &faceModel,
|
||||
const Ogre::String &hairModel, const Ogre::String &topModel,
|
||||
const Ogre::String &bottomModel, const Ogre::String &feetModel)
|
||||
{
|
||||
ZoneScoped;
|
||||
if (e.has<CharacterBase>() || e.has<AnimationControl>())
|
||||
return;
|
||||
if (characterNodes.find(e) != characterNodes.end())
|
||||
return;
|
||||
e.set<CharacterLocation>({ rotation, position });
|
||||
characterOrientations[e] = rotation;
|
||||
characterPositions[e] = position;
|
||||
characterModelsFace[e] = faceModel;
|
||||
characterModelsHair[e] = hairModel;
|
||||
characterModelsTop[e] = topModel;
|
||||
characterModelsBottom[e] = bottomModel;
|
||||
characterModelsFeet[e] = feetModel;
|
||||
e.set<CharacterVelocity>(
|
||||
{ { 0.0f, 0.0f, 0.0f }, { 0.0f, 0.0f, 0.0f } });
|
||||
e.add<CharacterGravity>();
|
||||
e.add<CharacterBuoyancy>();
|
||||
e.add<Character>();
|
||||
e.add<CharacterBase>();
|
||||
e.add<AnimationControl>();
|
||||
}
|
||||
|
||||
void applyWeightBasedScale(Ogre::Entity *ent,
|
||||
const Ogre::String &targetBoneName,
|
||||
const Ogre::Vector3 &scale)
|
||||
@@ -549,26 +695,87 @@ void applyWeightBasedScale(Ogre::Entity *ent,
|
||||
}
|
||||
}
|
||||
|
||||
void CharacterModule::createCharacter(flecs::entity e,
|
||||
const Ogre::Vector3 &position,
|
||||
const Ogre::Quaternion &rotation,
|
||||
const Ogre::String model)
|
||||
void CharacterModule::remapMeshToMasterSkeleton(Ogre::MeshPtr clothMesh,
|
||||
Ogre::MeshPtr masterMesh)
|
||||
{
|
||||
ZoneScoped;
|
||||
if (e.has<CharacterBase>() || e.has<AnimationControl>())
|
||||
Ogre::SkeletonPtr masterSkel = masterMesh->getSkeleton();
|
||||
Ogre::SkeletonPtr clothSkel = clothMesh->getSkeleton();
|
||||
|
||||
if (!masterSkel || !clothSkel)
|
||||
return;
|
||||
if (characterNodes.find(e) != characterNodes.end())
|
||||
return;
|
||||
e.set<CharacterLocation>({ rotation, position });
|
||||
characterOrientations[e] = rotation;
|
||||
characterPositions[e] = position;
|
||||
characterModels[e] = model;
|
||||
e.set<CharacterVelocity>(
|
||||
{ { 0.0f, 0.0f, 0.0f }, { 0.0f, 0.0f, 0.0f } });
|
||||
e.add<CharacterGravity>();
|
||||
e.add<CharacterBuoyancy>();
|
||||
e.add<Character>();
|
||||
e.add<CharacterBase>();
|
||||
e.add<AnimationControl>();
|
||||
|
||||
// 1. Create a Lookup Table: ClothIndex -> MasterIndex
|
||||
std::map<unsigned short, unsigned short> indexMap;
|
||||
for (unsigned short i = 0; i < clothSkel->getNumBones(); ++i) {
|
||||
Ogre::String boneName = clothSkel->getBone(i)->getName();
|
||||
if (masterSkel->hasBone(boneName)) {
|
||||
indexMap[i] =
|
||||
masterSkel->getBone(boneName)->getHandle();
|
||||
} else {
|
||||
indexMap[i] = 0; // Fallback to root
|
||||
}
|
||||
}
|
||||
|
||||
// 2. Update the Hardware Buffers for each SubMesh
|
||||
for (unsigned short i = 0; i < clothMesh->getNumSubMeshes(); ++i) {
|
||||
Ogre::SubMesh *sub = clothMesh->getSubMesh(i);
|
||||
Ogre::VertexData *vdata = sub->useSharedVertices ?
|
||||
clothMesh->sharedVertexData :
|
||||
sub->vertexData;
|
||||
|
||||
// Find the element containing bone indices (VES_BLEND_INDICES)
|
||||
const Ogre::VertexElement *idxElem =
|
||||
vdata->vertexDeclaration->findElementBySemantic(
|
||||
Ogre::VES_BLEND_INDICES);
|
||||
if (!idxElem)
|
||||
continue;
|
||||
|
||||
Ogre::HardwareVertexBufferSharedPtr vbuf =
|
||||
vdata->vertexBufferBinding->getBuffer(
|
||||
idxElem->getSource());
|
||||
unsigned char *vertex = static_cast<unsigned char *>(
|
||||
vbuf->lock(Ogre::HardwareBuffer::HBL_NORMAL));
|
||||
|
||||
for (size_t j = 0; j < vdata->vertexCount; ++j) {
|
||||
unsigned char *pIndices;
|
||||
idxElem->baseVertexPointerToElement(vertex, &pIndices);
|
||||
|
||||
// Remap the 4 indices (Ogre hardware skinning usually uses 4 bytes)
|
||||
for (int k = 0; k < 4; ++k) {
|
||||
pIndices[k] = static_cast<unsigned char>(
|
||||
indexMap[pIndices[k]]);
|
||||
}
|
||||
vertex += vbuf->getVertexSize();
|
||||
}
|
||||
vbuf->unlock();
|
||||
}
|
||||
|
||||
// 3. Link to Master Skeleton and rebuild
|
||||
clothMesh->setSkeletonName(masterSkel->getName());
|
||||
clothMesh->_compileBoneAssignments();
|
||||
}
|
||||
|
||||
void CharacterModule::getSlotMeshes(const Ogre::String &age,
|
||||
const Ogre::String &sex,
|
||||
const Ogre::String &slotName,
|
||||
std::vector<Ogre::String> &meshes)
|
||||
{
|
||||
OgreAssert(body_parts.find(age) != body_parts.end(), "bad age: " + age);
|
||||
OgreAssert(body_parts[age].find(sex) != body_parts[age].end(),
|
||||
"bad sex: " + sex);
|
||||
OgreAssert(body_parts[age][sex].find(slotName) !=
|
||||
body_parts[age][sex].end(),
|
||||
"bad slot: " + slotName);
|
||||
for (auto &slots : body_parts[age][sex][slotName])
|
||||
meshes.push_back(slots.get<Ogre::String>());
|
||||
}
|
||||
|
||||
void CharacterModule::preloadMeshes()
|
||||
{
|
||||
for (const auto &mesh : mesh_names) {
|
||||
Ogre::Entity *ent =
|
||||
ECS::get<EngineData>().mScnMgr->createEntity(mesh);
|
||||
ECS::get<EngineData>().mScnMgr->destroyEntity(ent);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
#ifndef CHARACTER_MODULE_H_
|
||||
#define CHARACTER_MODULE_H_
|
||||
#include <flecs.h>
|
||||
#include <nlohmann/json.hpp>
|
||||
#include <Ogre.h>
|
||||
#include "Components.h"
|
||||
namespace ECS
|
||||
@@ -18,12 +19,10 @@ struct Female {};
|
||||
|
||||
struct CharacterBase {
|
||||
Ogre::String type;
|
||||
float mTimer;
|
||||
float mTimer;
|
||||
Ogre::Vector3 mBoneMotion;
|
||||
Ogre::Vector3 mBonePrevMotion;
|
||||
Ogre::Vector3 mGoalDirection; // actual intended direction in world-space
|
||||
Ogre::SceneNode *mBodyNode;
|
||||
Ogre::Entity *mBodyEnt;
|
||||
bool is_submerged;
|
||||
};
|
||||
struct CharacterLocation {
|
||||
@@ -41,12 +40,37 @@ struct CharacterModule {
|
||||
Ogre::Real deltaPitch, Ogre::Real deltaZoom);
|
||||
void createCharacter(flecs::entity e, const Ogre::Vector3 &position,
|
||||
const Ogre::Quaternion &rotation,
|
||||
const Ogre::String model);
|
||||
const Ogre::String &faceModel,
|
||||
const Ogre::String &hairModel,
|
||||
const Ogre::String &topModel,
|
||||
const Ogre::String &bottomModel,
|
||||
const Ogre::String &feetModel);
|
||||
std::unordered_map<flecs::entity_t, Ogre::SceneNode *> characterNodes;
|
||||
std::unordered_map<flecs::entity_t, Ogre::Entity *> characterEntities;
|
||||
std::unordered_map<flecs::entity_t, Ogre::String> characterModels;
|
||||
std::unordered_map<flecs::entity_t, Ogre::Entity *>
|
||||
characterEntitiesFace;
|
||||
std::unordered_map<flecs::entity_t, Ogre::Entity *>
|
||||
characterEntitiesHair;
|
||||
std::unordered_map<flecs::entity_t, Ogre::Entity *> characterEntitiesTop;
|
||||
std::unordered_map<flecs::entity_t, Ogre::Entity *>
|
||||
characterEntitiesBottom;
|
||||
std::unordered_map<flecs::entity_t, Ogre::Entity *>
|
||||
characterEntitiesFeet;
|
||||
std::unordered_map<flecs::entity_t, Ogre::String> characterModelsFace;
|
||||
std::unordered_map<flecs::entity_t, Ogre::String> characterModelsHair;
|
||||
std::unordered_map<flecs::entity_t, Ogre::String> characterModelsTop;
|
||||
std::unordered_map<flecs::entity_t, Ogre::String> characterModelsBottom;
|
||||
std::unordered_map<flecs::entity_t, Ogre::String> characterModelsFeet;
|
||||
std::unordered_map<flecs::entity_t, Ogre::Vector3> characterPositions;
|
||||
std::unordered_map<flecs::entity_t, Ogre::Quaternion> characterOrientations;
|
||||
std::unordered_map<flecs::entity_t, Ogre::Quaternion>
|
||||
characterOrientations;
|
||||
nlohmann::json body_parts;
|
||||
std::set<Ogre::String> mesh_names;
|
||||
void remapMeshToMasterSkeleton(Ogre::MeshPtr clothMesh,
|
||||
Ogre::MeshPtr masterMesh);
|
||||
void getSlotMeshes(const Ogre::String &age, const Ogre::String &sex,
|
||||
const Ogre::String &slotName,
|
||||
std::vector<Ogre::String> &meshes);
|
||||
void preloadMeshes();
|
||||
};
|
||||
}
|
||||
#endif
|
||||
|
||||
@@ -200,6 +200,8 @@ void setupExteriorScene(Ogre::SceneManager *scnMgr, Ogre::SceneNode *cameraNode,
|
||||
new_game_run.destruct();
|
||||
}
|
||||
});
|
||||
ecs.get_mut<CharacterModule>().preloadMeshes();
|
||||
ecs.modified<CharacterModule>();
|
||||
std::cout << "scene setup done" << std::endl;
|
||||
}
|
||||
void setupInteriorScene(Ogre::SceneManager *scnMgr, Ogre::SceneNode *cameraNode,
|
||||
|
||||
@@ -457,139 +457,151 @@ LuaData::LuaData()
|
||||
Ogre::Vector3 position = target_node->_getDerivedPosition();
|
||||
Ogre::Quaternion orientation =
|
||||
target_node->_getDerivedOrientation();
|
||||
if (object_e.has<CharacterBase>()) {
|
||||
object_e.get_mut<CharacterBase>()
|
||||
.mBodyNode->_setDerivedPosition(position);
|
||||
object_e.get_mut<CharacterBase>()
|
||||
.mBodyNode->_setDerivedOrientation(orientation);
|
||||
}
|
||||
return 0;
|
||||
});
|
||||
lua_setglobal(L, "ecs_trigger_set_position");
|
||||
lua_pushcfunction(L, [](lua_State *L) -> int {
|
||||
luaL_checktype(L, 1, LUA_TNUMBER); // trigger
|
||||
int trigger = lua_tointeger(L, 1);
|
||||
flecs::entity trigger_e = idmap.get_entity(trigger);
|
||||
Ogre::SceneNode *node = trigger_e.get<EventTrigger>().node;
|
||||
Ogre::Any animationAny =
|
||||
node->getUserObjectBindings().getUserAny(
|
||||
"trigger_animation");
|
||||
if (animationAny.has_value()) {
|
||||
Ogre::String animation =
|
||||
Ogre::any_cast<Ogre::String>(animationAny);
|
||||
lua_pushstring(L, animation.c_str());
|
||||
if (object_e.has<CharacterBase>()) {
|
||||
Ogre::SceneNode *bodyNode =
|
||||
ECS::get<CharacterModule>().characterNodes.at(
|
||||
object_e);
|
||||
bodyNode->_setDerivedPosition(position);
|
||||
bodyNode->_setDerivedOrientation(orientation);
|
||||
}
|
||||
return 0;
|
||||
});
|
||||
lua_setglobal(L, "ecs_trigger_set_position");
|
||||
lua_pushcfunction(L, [](lua_State *L) -> int {
|
||||
luaL_checktype(L, 1, LUA_TNUMBER); // trigger
|
||||
int trigger = lua_tointeger(L, 1);
|
||||
flecs::entity trigger_e = idmap.get_entity(trigger);
|
||||
Ogre::SceneNode *node = trigger_e.get<EventTrigger>().node;
|
||||
Ogre::Any animationAny =
|
||||
node->getUserObjectBindings().getUserAny(
|
||||
"trigger_animation");
|
||||
if (animationAny.has_value()) {
|
||||
Ogre::String animation =
|
||||
Ogre::any_cast<Ogre::String>(animationAny);
|
||||
lua_pushstring(L, animation.c_str());
|
||||
return 1;
|
||||
}
|
||||
lua_pushnil(L);
|
||||
return 1;
|
||||
});
|
||||
lua_setglobal(L, "ecs_trigger_get_animation");
|
||||
lua_pushcfunction(L, [](lua_State *L) -> int {
|
||||
OgreAssert(lua_gettop(L) == 5, "Invalid parameters");
|
||||
luaL_checktype(L, 1, LUA_TSTRING); // type
|
||||
luaL_checktype(L, 2, LUA_TNUMBER);
|
||||
luaL_checktype(L, 3, LUA_TNUMBER);
|
||||
luaL_checktype(L, 4, LUA_TNUMBER);
|
||||
luaL_checktype(L, 5, LUA_TNUMBER);
|
||||
Ogre::String type = lua_tostring(L, 1);
|
||||
float yaw = lua_tonumber(L, 5);
|
||||
float x = lua_tonumber(L, 2);
|
||||
float y = lua_tonumber(L, 3);
|
||||
float z = lua_tonumber(L, 4);
|
||||
Ogre::Quaternion orientation(Ogre::Radian(yaw),
|
||||
Ogre::Vector3::UNIT_Y);
|
||||
Ogre::Vector3 npcPos(x, y, z);
|
||||
flecs::entity e =
|
||||
ECS::get_mut<CharacterManagerModule>()
|
||||
.createCharacterData(type, npcPos, orientation);
|
||||
ECS::modified<CharacterManagerModule>();
|
||||
lua_pushinteger(L, idmap.add_entity(e));
|
||||
return 1;
|
||||
});
|
||||
lua_setglobal(L, "ecs_npc_set");
|
||||
lua_pushcfunction(L, [](lua_State *L) -> int {
|
||||
OgreAssert(lua_gettop(L) == 1, "Bad parameters");
|
||||
luaL_checktype(L, 1, LUA_TSTRING); // type
|
||||
const char *fileName = lua_tostring(L, 1);
|
||||
ECS::get<EngineData>().mScnMgr->getRootSceneNode()->saveChildren(
|
||||
fileName);
|
||||
return 0;
|
||||
});
|
||||
lua_setglobal(L, "ecs_save_scene_debug");
|
||||
lua_pushcfunction(L, [](lua_State *L) -> int {
|
||||
OgreAssert(lua_gettop(L) == 2, "Bad parameters");
|
||||
luaL_checktype(L, 1, LUA_TNUMBER); // object
|
||||
luaL_checktype(L, 2, LUA_TSTRING); // name
|
||||
int object = lua_tointeger(L, 1);
|
||||
flecs::entity object_e = idmap.get_entity(object);
|
||||
const char *fileName = lua_tostring(L, 2);
|
||||
Ogre::SceneNode *node = nullptr;
|
||||
if (object_e.has<CharacterBase>())
|
||||
node = object_e.get<CharacterBase>().mBodyNode;
|
||||
else if (object_e.has<BoatBase>())
|
||||
node = object_e.get<BoatBase>().mNode;
|
||||
if (node)
|
||||
node->saveChildren(fileName);
|
||||
return 0;
|
||||
});
|
||||
lua_setglobal(L, "ecs_save_object_debug");
|
||||
lua_pushcfunction(L, [](lua_State *L) -> int {
|
||||
luaL_checktype(L, 1, LUA_TBOOLEAN); // object
|
||||
ECS::get_mut<EngineData>().enableDbgDraw = lua_toboolean(L, 1);
|
||||
ECS::modified<EngineData>();
|
||||
return 0;
|
||||
});
|
||||
lua_setglobal(L, "ecs_set_debug_drawing");
|
||||
lua_pushcfunction(L, [](lua_State *L) -> int {
|
||||
OgreAssert(lua_gettop(L) >= 1, "Bad parameters");
|
||||
luaL_checktype(L, 1, LUA_TSTRING);
|
||||
Ogre::String command = lua_tostring(L, 1);
|
||||
if (command == "physics-control") {
|
||||
OgreAssert(lua_gettop(L) == 3, "Bad parameters");
|
||||
luaL_checktype(L, 2, LUA_TNUMBER); // object
|
||||
luaL_checktype(L, 3, LUA_TBOOLEAN);
|
||||
int object = lua_tointeger(L, 2);
|
||||
flecs::entity object_e = idmap.get_entity(object);
|
||||
bool enable = lua_toboolean(L, 3);
|
||||
OgreAssert(object_e.has<CharacterBase>(),
|
||||
"Not a character");
|
||||
PhysicsModule::controlPhysics(object_e, enable);
|
||||
object_e.add<CharacterUpdatePhysicsState>();
|
||||
}
|
||||
lua_pushnil(L);
|
||||
return 1;
|
||||
});
|
||||
lua_setglobal(L, "ecs_trigger_get_animation");
|
||||
lua_pushcfunction(L, [](lua_State *L) -> int {
|
||||
OgreAssert(lua_gettop(L) == 8, "Invalid parameters");
|
||||
luaL_checktype(L, 1, LUA_TSTRING); // face
|
||||
luaL_checktype(L, 2, LUA_TSTRING); // hair
|
||||
luaL_checktype(L, 3, LUA_TSTRING); // top
|
||||
luaL_checktype(L, 4, LUA_TSTRING); // bottom
|
||||
luaL_checktype(L, 5, LUA_TSTRING); // feet
|
||||
luaL_checktype(L, 6, LUA_TNUMBER);
|
||||
luaL_checktype(L, 7, LUA_TNUMBER);
|
||||
luaL_checktype(L, 8, LUA_TNUMBER);
|
||||
luaL_checktype(L, 9, LUA_TNUMBER);
|
||||
Ogre::String face = lua_tostring(L, 1);
|
||||
Ogre::String hair = lua_tostring(L, 2);
|
||||
Ogre::String top = lua_tostring(L, 3);
|
||||
Ogre::String bottom = lua_tostring(L, 4);
|
||||
Ogre::String feet = lua_tostring(L, 5);
|
||||
float yaw = lua_tonumber(L, 8);
|
||||
float x = lua_tonumber(L, 5);
|
||||
float y = lua_tonumber(L, 6);
|
||||
float z = lua_tonumber(L, 7);
|
||||
Ogre::Quaternion orientation(Ogre::Radian(yaw),
|
||||
Ogre::Vector3::UNIT_Y);
|
||||
Ogre::Vector3 npcPos(x, y, z);
|
||||
flecs::entity e =
|
||||
ECS::get_mut<CharacterManagerModule>()
|
||||
.createCharacterData(face, hair, top, bottom,
|
||||
feet, npcPos, orientation);
|
||||
ECS::modified<CharacterManagerModule>();
|
||||
lua_pushinteger(L, idmap.add_entity(e));
|
||||
return 1;
|
||||
});
|
||||
lua_setglobal(L, "ecs_npc_set");
|
||||
lua_pushcfunction(L, [](lua_State *L) -> int {
|
||||
OgreAssert(lua_gettop(L) == 1, "Bad parameters");
|
||||
luaL_checktype(L, 1, LUA_TSTRING); // type
|
||||
const char *fileName = lua_tostring(L, 1);
|
||||
ECS::get<EngineData>().mScnMgr->getRootSceneNode()->saveChildren(
|
||||
fileName);
|
||||
return 0;
|
||||
});
|
||||
lua_setglobal(L, "ecs_save_scene_debug");
|
||||
lua_pushcfunction(L, [](lua_State *L) -> int {
|
||||
OgreAssert(lua_gettop(L) == 2, "Bad parameters");
|
||||
luaL_checktype(L, 1, LUA_TNUMBER); // object
|
||||
luaL_checktype(L, 2, LUA_TSTRING); // name
|
||||
int object = lua_tointeger(L, 1);
|
||||
flecs::entity object_e = idmap.get_entity(object);
|
||||
const char *fileName = lua_tostring(L, 2);
|
||||
Ogre::SceneNode *node = nullptr;
|
||||
if (object_e.has<CharacterBase>()) {
|
||||
node = ECS::get<CharacterModule>().characterNodes.at(
|
||||
object_e);
|
||||
} else if (object_e.has<BoatBase>())
|
||||
node = object_e.get<BoatBase>().mNode;
|
||||
if (node)
|
||||
node->saveChildren(fileName);
|
||||
return 0;
|
||||
});
|
||||
lua_setglobal(L, "ecs_save_object_debug");
|
||||
lua_pushcfunction(L, [](lua_State *L) -> int {
|
||||
luaL_checktype(L, 1, LUA_TBOOLEAN); // object
|
||||
ECS::get_mut<EngineData>().enableDbgDraw = lua_toboolean(L, 1);
|
||||
ECS::modified<EngineData>();
|
||||
return 0;
|
||||
});
|
||||
lua_setglobal(L, "ecs_set_debug_drawing");
|
||||
lua_pushcfunction(L, [](lua_State *L) -> int {
|
||||
OgreAssert(lua_gettop(L) >= 1, "Bad parameters");
|
||||
luaL_checktype(L, 1, LUA_TSTRING);
|
||||
Ogre::String command = lua_tostring(L, 1);
|
||||
if (command == "physics-control") {
|
||||
OgreAssert(lua_gettop(L) == 3, "Bad parameters");
|
||||
luaL_checktype(L, 2, LUA_TNUMBER); // object
|
||||
luaL_checktype(L, 3, LUA_TBOOLEAN);
|
||||
int object = lua_tointeger(L, 2);
|
||||
flecs::entity object_e = idmap.get_entity(object);
|
||||
bool enable = lua_toboolean(L, 3);
|
||||
OgreAssert(object_e.has<CharacterBase>(),
|
||||
"Not a character");
|
||||
PhysicsModule::controlPhysics(object_e, enable);
|
||||
object_e.add<CharacterUpdatePhysicsState>();
|
||||
return 0;
|
||||
} else if (command == "is-player") {
|
||||
} else if (command == "is-player") {
|
||||
OgreAssert(lua_gettop(L) == 2, "Bad parameters");
|
||||
luaL_checktype(L, 2, LUA_TNUMBER); // object
|
||||
int object = lua_tointeger(L, 2);
|
||||
luaL_checktype(L, 2, LUA_TNUMBER); // object
|
||||
int object = lua_tointeger(L, 2);
|
||||
flecs::entity object_e = idmap.get_entity(object);
|
||||
lua_pushboolean(L, object_e.has<Player>());
|
||||
return 1;
|
||||
} else if (command == "set-actuator") {
|
||||
OgreAssert(lua_gettop(L) == 3, "Bad parameters");
|
||||
luaL_checktype(L, 2, LUA_TNUMBER); // object
|
||||
luaL_checktype(L, 3, LUA_TSTRING); // animation
|
||||
Ogre::String animation = lua_tostring(L, 3);
|
||||
lua_pushboolean(L, object_e.has<Player>());
|
||||
return 1;
|
||||
} else if (command == "set-actuator") {
|
||||
OgreAssert(lua_gettop(L) == 3, "Bad parameters");
|
||||
luaL_checktype(L, 2, LUA_TNUMBER); // object
|
||||
luaL_checktype(L, 3, LUA_TSTRING); // animation
|
||||
Ogre::String animation = lua_tostring(L, 3);
|
||||
|
||||
int object = lua_tointeger(L, 2);
|
||||
flecs::entity object_e = idmap.get_entity(object);
|
||||
object_e.set<CharacterVelocity>(
|
||||
{ { 0, 0, 0 }, { 0, 0, 0 } });
|
||||
if (animation.length() > 0)
|
||||
object_e.set<CharacterInActuator>(
|
||||
{ animation, { 0, 0, 0 } });
|
||||
else
|
||||
object_e.remove<CharacterInActuator>();
|
||||
int object = lua_tointeger(L, 2);
|
||||
flecs::entity object_e = idmap.get_entity(object);
|
||||
object_e.set<CharacterVelocity>(
|
||||
{ { 0, 0, 0 }, { 0, 0, 0 } });
|
||||
if (animation.length() > 0)
|
||||
object_e.set<CharacterInActuator>(
|
||||
{ animation, { 0, 0, 0 } });
|
||||
else
|
||||
object_e.remove<CharacterInActuator>();
|
||||
return 0;
|
||||
} else if (command == "animation-state") {
|
||||
OgreAssert(lua_gettop(L) >= 4, "Bad parameters");
|
||||
luaL_checktype(L, 2, LUA_TNUMBER); // object
|
||||
luaL_checktype(L, 3, LUA_TSTRING); // node
|
||||
luaL_checktype(L, 4, LUA_TSTRING); // state
|
||||
if (lua_gettop(L) == 5)
|
||||
luaL_checktype(L, 5, LUA_TBOOLEAN); // reset
|
||||
int object = lua_tointeger(L, 2);
|
||||
flecs::entity object_e = idmap.get_entity(object);
|
||||
Ogre::String nodeName = lua_tostring(L, 3);
|
||||
Ogre::String stateName = lua_tostring(L, 4);
|
||||
} else if (command == "animation-state") {
|
||||
OgreAssert(lua_gettop(L) >= 4, "Bad parameters");
|
||||
luaL_checktype(L, 2, LUA_TNUMBER); // object
|
||||
luaL_checktype(L, 3, LUA_TSTRING); // node
|
||||
luaL_checktype(L, 4, LUA_TSTRING); // state
|
||||
if (lua_gettop(L) == 5)
|
||||
luaL_checktype(L, 5,
|
||||
LUA_TBOOLEAN); // reset
|
||||
int object = lua_tointeger(L, 2);
|
||||
flecs::entity object_e = idmap.get_entity(object);
|
||||
Ogre::String nodeName = lua_tostring(L, 3);
|
||||
Ogre::String stateName = lua_tostring(L, 4);
|
||||
#if 0
|
||||
bool reset = false;
|
||||
if (lua_gettop(L) == 5)
|
||||
@@ -613,183 +625,183 @@ LuaData::LuaData()
|
||||
#endif
|
||||
OgreAssert(false, "Not implemented");
|
||||
|
||||
return 0;
|
||||
} else if (command == "params-set") {
|
||||
OgreAssert(lua_gettop(L) == 4, "Invalid parameters");
|
||||
luaL_checktype(L, 2, LUA_TNUMBER);
|
||||
luaL_checktype(L, 3, LUA_TSTRING);
|
||||
luaL_checktype(L, 4, LUA_TBOOLEAN);
|
||||
bool enable = lua_toboolean(L, 4);
|
||||
flecs::entity e = idmap.get_entity(lua_tointeger(L, 2));
|
||||
Ogre::String what = lua_tostring(L, 3);
|
||||
OgreAssert(e.is_valid(), "Invalid character");
|
||||
OgreAssert(e.has<Character>(), "Not a character");
|
||||
if (what == "gravity") {
|
||||
/* clear momentum */
|
||||
if (e.has<CharacterVelocity>()) {
|
||||
e.get_mut<CharacterVelocity>()
|
||||
.gvelocity.y = 0.0f;
|
||||
e.get_mut<CharacterVelocity>()
|
||||
.velocity.y = 0.0f;
|
||||
e.modified<CharacterVelocity>();
|
||||
}
|
||||
if (enable)
|
||||
e.add<CharacterGravity>();
|
||||
else
|
||||
e.remove<CharacterGravity>();
|
||||
} else if (what == "buoyancy") {
|
||||
if (enable)
|
||||
e.add<CharacterBuoyancy>();
|
||||
else
|
||||
e.remove<CharacterBuoyancy>();
|
||||
} else
|
||||
OgreAssert(false, "Bad parameter " + what);
|
||||
return 0;
|
||||
} else {
|
||||
OgreAssert(false, "bad argument " + command);
|
||||
return 0;
|
||||
}
|
||||
});
|
||||
lua_setglobal(L, "ecs_character");
|
||||
lua_pushcfunction(L, [](lua_State *L) -> int {
|
||||
OgreAssert(lua_gettop(L) == 3, "Bad parameters");
|
||||
luaL_checktype(L, 1, LUA_TNUMBER); // parent
|
||||
luaL_checktype(L, 2, LUA_TNUMBER); // object
|
||||
luaL_checktype(L, 3, LUA_TSTRING); // slot
|
||||
int parent = lua_tointeger(L, 1);
|
||||
int object = lua_tointeger(L, 2);
|
||||
Ogre::String slot = lua_tostring(L, 3);
|
||||
flecs::entity parent_e = idmap.get_entity(parent);
|
||||
flecs::entity object_e = idmap.get_entity(object);
|
||||
PhysicsModule::controlPhysics(object_e, false);
|
||||
object_e.set<ParentSlot>({ parent_e, slot });
|
||||
return 0;
|
||||
});
|
||||
lua_setglobal(L, "ecs_set_slot");
|
||||
lua_pushcfunction(L, [](lua_State *L) -> int {
|
||||
flecs::entity e = ECS::get().lookup("player");
|
||||
int result = idmap.add_entity(e);
|
||||
lua_pushinteger(L, result);
|
||||
return result;
|
||||
});
|
||||
lua_setglobal(L, "ecs_get_player_entity");
|
||||
lua_pushcfunction(L, [](lua_State *L) -> int {
|
||||
luaL_checktype(L, 1, LUA_TNUMBER); // entity id
|
||||
int id = lua_tointeger(L, 1);
|
||||
flecs::entity e = idmap.get_entity(id);
|
||||
LuaEcsEntity *edata = static_cast<LuaEcsEntity *>(
|
||||
lua_newuserdata(L, sizeof(LuaEcsEntity)));
|
||||
new (edata) LuaEcsEntity();
|
||||
edata->e = e;
|
||||
edata->id = e;
|
||||
luaL_getmetatable(L, "FlecsEntityMetaTable");
|
||||
lua_setmetatable(L, -2);
|
||||
return 1;
|
||||
});
|
||||
lua_setglobal(L, "ecs_get_entity");
|
||||
luaL_newmetatable(L, "FlecsEntityMetaTable");
|
||||
lua_pushstring(L, "__index");
|
||||
lua_pushcfunction(L, [](lua_State *L) -> int {
|
||||
luaL_checktype(L, 1, LUA_TUSERDATA); //metatable
|
||||
luaL_checktype(L, 2, LUA_TSTRING); //function
|
||||
Ogre::String component = lua_tostring(L, 2);
|
||||
if (component == "components") {
|
||||
lua_pushvalue(L, 1);
|
||||
lua_pushcclosure(
|
||||
L,
|
||||
[](lua_State *L) -> int {
|
||||
luaL_checktype(L, lua_upvalueindex(1),
|
||||
LUA_TUSERDATA);
|
||||
LuaEcsEntity *ent = static_cast<
|
||||
LuaEcsEntity *>(lua_touserdata(
|
||||
L, lua_upvalueindex(1)));
|
||||
std::cout << ent->id << " called!!!\n";
|
||||
ent->e.each([&](flecs::id id) {
|
||||
flecs::entity cmp =
|
||||
ECS::get().entity(id);
|
||||
if (cmp.is_alive()) {
|
||||
const char *name =
|
||||
cmp.name();
|
||||
if (name)
|
||||
std::cout
|
||||
<< "component: "
|
||||
<< name
|
||||
<< std::endl;
|
||||
}
|
||||
});
|
||||
return 0;
|
||||
},
|
||||
1);
|
||||
} else if (component == "is_trigger") {
|
||||
lua_pushvalue(L, 1);
|
||||
lua_pushcclosure(
|
||||
L,
|
||||
[](lua_State *L) -> int {
|
||||
luaL_checktype(L, lua_upvalueindex(1),
|
||||
LUA_TUSERDATA);
|
||||
LuaEcsEntity *ent = static_cast<
|
||||
LuaEcsEntity *>(lua_touserdata(
|
||||
L, lua_upvalueindex(1)));
|
||||
lua_pushboolean(
|
||||
L, ent->e.has<EventTrigger>());
|
||||
return 1;
|
||||
},
|
||||
1);
|
||||
} else if (component == "is_character") {
|
||||
lua_pushvalue(L, 1);
|
||||
lua_pushcclosure(
|
||||
L,
|
||||
[](lua_State *L) -> int {
|
||||
luaL_checktype(L, lua_upvalueindex(1),
|
||||
LUA_TUSERDATA);
|
||||
LuaEcsEntity *ent = static_cast<
|
||||
LuaEcsEntity *>(lua_touserdata(
|
||||
L, lua_upvalueindex(1)));
|
||||
lua_pushboolean(
|
||||
L, ent->e.has<Character>());
|
||||
return 1;
|
||||
},
|
||||
1);
|
||||
} else if (component == "is_boat") {
|
||||
lua_pushvalue(L, 1);
|
||||
lua_pushcclosure(
|
||||
L,
|
||||
[](lua_State *L) -> int {
|
||||
luaL_checktype(L, lua_upvalueindex(1),
|
||||
LUA_TUSERDATA);
|
||||
LuaEcsEntity *ent = static_cast<
|
||||
LuaEcsEntity *>(lua_touserdata(
|
||||
L, lua_upvalueindex(1)));
|
||||
lua_pushboolean(L,
|
||||
return 0;
|
||||
} else if (command == "params-set") {
|
||||
OgreAssert(lua_gettop(L) == 4, "Invalid parameters");
|
||||
luaL_checktype(L, 2, LUA_TNUMBER);
|
||||
luaL_checktype(L, 3, LUA_TSTRING);
|
||||
luaL_checktype(L, 4, LUA_TBOOLEAN);
|
||||
bool enable = lua_toboolean(L, 4);
|
||||
flecs::entity e = idmap.get_entity(lua_tointeger(L, 2));
|
||||
Ogre::String what = lua_tostring(L, 3);
|
||||
OgreAssert(e.is_valid(), "Invalid character");
|
||||
OgreAssert(e.has<Character>(), "Not a character");
|
||||
if (what == "gravity") {
|
||||
/* clear momentum */
|
||||
if (e.has<CharacterVelocity>()) {
|
||||
e.get_mut<CharacterVelocity>()
|
||||
.gvelocity.y = 0.0f;
|
||||
e.get_mut<CharacterVelocity>()
|
||||
.velocity.y = 0.0f;
|
||||
e.modified<CharacterVelocity>();
|
||||
}
|
||||
if (enable)
|
||||
e.add<CharacterGravity>();
|
||||
else
|
||||
e.remove<CharacterGravity>();
|
||||
} else if (what == "buoyancy") {
|
||||
if (enable)
|
||||
e.add<CharacterBuoyancy>();
|
||||
else
|
||||
e.remove<CharacterBuoyancy>();
|
||||
} else
|
||||
OgreAssert(false, "Bad parameter " + what);
|
||||
return 0;
|
||||
} else {
|
||||
OgreAssert(false, "bad argument " + command);
|
||||
return 0;
|
||||
}
|
||||
});
|
||||
lua_setglobal(L, "ecs_character");
|
||||
lua_pushcfunction(L, [](lua_State *L) -> int {
|
||||
OgreAssert(lua_gettop(L) == 3, "Bad parameters");
|
||||
luaL_checktype(L, 1, LUA_TNUMBER); // parent
|
||||
luaL_checktype(L, 2, LUA_TNUMBER); // object
|
||||
luaL_checktype(L, 3, LUA_TSTRING); // slot
|
||||
int parent = lua_tointeger(L, 1);
|
||||
int object = lua_tointeger(L, 2);
|
||||
Ogre::String slot = lua_tostring(L, 3);
|
||||
flecs::entity parent_e = idmap.get_entity(parent);
|
||||
flecs::entity object_e = idmap.get_entity(object);
|
||||
PhysicsModule::controlPhysics(object_e, false);
|
||||
object_e.set<ParentSlot>({ parent_e, slot });
|
||||
return 0;
|
||||
});
|
||||
lua_setglobal(L, "ecs_set_slot");
|
||||
lua_pushcfunction(L, [](lua_State *L) -> int {
|
||||
flecs::entity e = ECS::get().lookup("player");
|
||||
int result = idmap.add_entity(e);
|
||||
lua_pushinteger(L, result);
|
||||
return result;
|
||||
});
|
||||
lua_setglobal(L, "ecs_get_player_entity");
|
||||
lua_pushcfunction(L, [](lua_State *L) -> int {
|
||||
luaL_checktype(L, 1, LUA_TNUMBER); // entity id
|
||||
int id = lua_tointeger(L, 1);
|
||||
flecs::entity e = idmap.get_entity(id);
|
||||
LuaEcsEntity *edata = static_cast<LuaEcsEntity *>(
|
||||
lua_newuserdata(L, sizeof(LuaEcsEntity)));
|
||||
new (edata) LuaEcsEntity();
|
||||
edata->e = e;
|
||||
edata->id = e;
|
||||
luaL_getmetatable(L, "FlecsEntityMetaTable");
|
||||
lua_setmetatable(L, -2);
|
||||
return 1;
|
||||
});
|
||||
lua_setglobal(L, "ecs_get_entity");
|
||||
luaL_newmetatable(L, "FlecsEntityMetaTable");
|
||||
lua_pushstring(L, "__index");
|
||||
lua_pushcfunction(L, [](lua_State *L) -> int {
|
||||
luaL_checktype(L, 1, LUA_TUSERDATA); //metatable
|
||||
luaL_checktype(L, 2, LUA_TSTRING); //function
|
||||
Ogre::String component = lua_tostring(L, 2);
|
||||
if (component == "components") {
|
||||
lua_pushvalue(L, 1);
|
||||
lua_pushcclosure(
|
||||
L,
|
||||
[](lua_State *L) -> int {
|
||||
luaL_checktype(L, lua_upvalueindex(1),
|
||||
LUA_TUSERDATA);
|
||||
LuaEcsEntity *ent = static_cast<
|
||||
LuaEcsEntity *>(lua_touserdata(
|
||||
L, lua_upvalueindex(1)));
|
||||
std::cout << ent->id << " called!!!\n";
|
||||
ent->e.each([&](flecs::id id) {
|
||||
flecs::entity cmp =
|
||||
ECS::get().entity(id);
|
||||
if (cmp.is_alive()) {
|
||||
const char *name =
|
||||
cmp.name();
|
||||
if (name)
|
||||
std::cout
|
||||
<< "component: "
|
||||
<< name
|
||||
<< std::endl;
|
||||
}
|
||||
});
|
||||
return 0;
|
||||
},
|
||||
1);
|
||||
} else if (component == "is_trigger") {
|
||||
lua_pushvalue(L, 1);
|
||||
lua_pushcclosure(
|
||||
L,
|
||||
[](lua_State *L) -> int {
|
||||
luaL_checktype(L, lua_upvalueindex(1),
|
||||
LUA_TUSERDATA);
|
||||
LuaEcsEntity *ent = static_cast<
|
||||
LuaEcsEntity *>(lua_touserdata(
|
||||
L, lua_upvalueindex(1)));
|
||||
lua_pushboolean(
|
||||
L, ent->e.has<EventTrigger>());
|
||||
return 1;
|
||||
},
|
||||
1);
|
||||
} else if (component == "is_character") {
|
||||
lua_pushvalue(L, 1);
|
||||
lua_pushcclosure(
|
||||
L,
|
||||
[](lua_State *L) -> int {
|
||||
luaL_checktype(L, lua_upvalueindex(1),
|
||||
LUA_TUSERDATA);
|
||||
LuaEcsEntity *ent = static_cast<
|
||||
LuaEcsEntity *>(lua_touserdata(
|
||||
L, lua_upvalueindex(1)));
|
||||
lua_pushboolean(
|
||||
L, ent->e.has<Character>());
|
||||
return 1;
|
||||
},
|
||||
1);
|
||||
} else if (component == "is_boat") {
|
||||
lua_pushvalue(L, 1);
|
||||
lua_pushcclosure(
|
||||
L,
|
||||
[](lua_State *L) -> int {
|
||||
luaL_checktype(L, lua_upvalueindex(1),
|
||||
LUA_TUSERDATA);
|
||||
LuaEcsEntity *ent = static_cast<
|
||||
LuaEcsEntity *>(lua_touserdata(
|
||||
L, lua_upvalueindex(1)));
|
||||
lua_pushboolean(L,
|
||||
ent->e.has<BoatBase>());
|
||||
return 1;
|
||||
},
|
||||
1);
|
||||
} else if (component == "is_player") {
|
||||
lua_pushvalue(L, 1);
|
||||
lua_pushcclosure(
|
||||
L,
|
||||
[](lua_State *L) -> int {
|
||||
luaL_checktype(L, lua_upvalueindex(1),
|
||||
LUA_TUSERDATA);
|
||||
LuaEcsEntity *ent = static_cast<
|
||||
LuaEcsEntity *>(lua_touserdata(
|
||||
L, lua_upvalueindex(1)));
|
||||
lua_pushboolean(L,
|
||||
return 1;
|
||||
},
|
||||
1);
|
||||
} else if (component == "is_player") {
|
||||
lua_pushvalue(L, 1);
|
||||
lua_pushcclosure(
|
||||
L,
|
||||
[](lua_State *L) -> int {
|
||||
luaL_checktype(L, lua_upvalueindex(1),
|
||||
LUA_TUSERDATA);
|
||||
LuaEcsEntity *ent = static_cast<
|
||||
LuaEcsEntity *>(lua_touserdata(
|
||||
L, lua_upvalueindex(1)));
|
||||
lua_pushboolean(L,
|
||||
ent->e.has<Player>());
|
||||
return 1;
|
||||
},
|
||||
1);
|
||||
} else
|
||||
lua_pushnil(L);
|
||||
return 1;
|
||||
});
|
||||
lua_settable(L, -3);
|
||||
return 1;
|
||||
},
|
||||
1);
|
||||
} else
|
||||
lua_pushnil(L);
|
||||
return 1;
|
||||
});
|
||||
lua_settable(L, -3);
|
||||
}
|
||||
|
||||
LuaData::~LuaData()
|
||||
{
|
||||
lua_close(L);
|
||||
lua_close(L);
|
||||
}
|
||||
|
||||
void LuaData::lateSetup()
|
||||
@@ -808,96 +820,95 @@ void LuaData::lateSetup()
|
||||
}
|
||||
}
|
||||
#endif
|
||||
Ogre::DataStreamPtr stream =
|
||||
Ogre::ResourceGroupManager::getSingleton().openResource(
|
||||
"data.lua", "LuaScripts");
|
||||
std::cout << "stream: " << stream->getAsString() << "\n";
|
||||
if (luaL_dostring(L, stream->getAsString().c_str()) != LUA_OK) {
|
||||
std::cout << "error: " << lua_tostring(L, -1) << "\n";
|
||||
OgreAssert(false, "Script failure");
|
||||
}
|
||||
Ogre::DataStreamPtr stream =
|
||||
Ogre::ResourceGroupManager::getSingleton().openResource(
|
||||
"data.lua", "LuaScripts");
|
||||
std::cout << "stream: " << stream->getAsString() << "\n";
|
||||
if (luaL_dostring(L, stream->getAsString().c_str()) != LUA_OK) {
|
||||
std::cout << "error: " << lua_tostring(L, -1) << "\n";
|
||||
OgreAssert(false, "Script failure");
|
||||
}
|
||||
|
||||
const char *lua_code = "\n\
|
||||
const char *lua_code = "\n\
|
||||
function stuff()\n\
|
||||
return 4\n\
|
||||
end\n\
|
||||
x = stuff()\n\
|
||||
";
|
||||
luaL_dostring(L, lua_code);
|
||||
lua_getglobal(L, "x");
|
||||
int x = lua_tonumber(L, 1);
|
||||
std::cout << "lua: " << x << "\n";
|
||||
luaL_dostring(L, lua_code);
|
||||
lua_getglobal(L, "x");
|
||||
int x = lua_tonumber(L, 1);
|
||||
std::cout << "lua: " << x << "\n";
|
||||
}
|
||||
|
||||
LuaModule::LuaModule(flecs::world &ecs)
|
||||
{
|
||||
ecs.module<LuaModule>();
|
||||
ecs.import <SlotsModule>();
|
||||
ecs.import <VehicleManagerModule>();
|
||||
ecs.module<LuaModule>();
|
||||
ecs.import <SlotsModule>();
|
||||
ecs.import <VehicleManagerModule>();
|
||||
ecs.import <PlayerActionModule>();
|
||||
ecs.component<LuaChildEventTrigger>();
|
||||
ecs.component<LuaBase>()
|
||||
.on_add([](LuaBase &lua) {
|
||||
lua.mLua = new LuaData;
|
||||
lua.setup_called = false;
|
||||
lua.startup_called = false;
|
||||
})
|
||||
.add(flecs::Singleton);
|
||||
ecs.component<LuaEvent>().add(flecs::Singleton);
|
||||
ecs.system<const EngineData, LuaBase>("LuaUpdate")
|
||||
.kind(flecs::OnUpdate)
|
||||
.each([](const EngineData &eng, LuaBase &lua) {
|
||||
if (!lua.setup_called) {
|
||||
lua.mLua->lateSetup();
|
||||
lua.mLua->call_handler("setup");
|
||||
lua.setup_called = true;
|
||||
}
|
||||
if (!lua.startup_called) {
|
||||
if (eng.startupDelay <= 0.0f) {
|
||||
lua.mLua->call_handler("startup");
|
||||
lua.startup_called = true;
|
||||
ecs.component<LuaChildEventTrigger>();
|
||||
ecs.component<LuaBase>()
|
||||
.on_add([](LuaBase &lua) {
|
||||
lua.mLua = new LuaData;
|
||||
lua.setup_called = false;
|
||||
lua.startup_called = false;
|
||||
})
|
||||
.add(flecs::Singleton);
|
||||
ecs.component<LuaEvent>().add(flecs::Singleton);
|
||||
ecs.system<const EngineData, LuaBase>("LuaUpdate")
|
||||
.kind(flecs::OnUpdate)
|
||||
.each([](const EngineData &eng, LuaBase &lua) {
|
||||
if (!lua.setup_called) {
|
||||
lua.mLua->lateSetup();
|
||||
lua.mLua->call_handler("setup");
|
||||
lua.setup_called = true;
|
||||
}
|
||||
if (!lua.startup_called) {
|
||||
if (eng.startupDelay <= 0.0f) {
|
||||
lua.mLua->call_handler("startup");
|
||||
lua.startup_called = true;
|
||||
}
|
||||
}
|
||||
});
|
||||
ecs.system<const EngineData, const LuaChildEventTrigger>(
|
||||
"CreateChildTrigger")
|
||||
.kind(flecs::OnUpdate)
|
||||
.without<EventTrigger>()
|
||||
.write<EventTrigger>()
|
||||
.each([](flecs::entity e, const EngineData &env,
|
||||
const LuaChildEventTrigger &lct) {
|
||||
Ogre::SceneNode *parentNode = nullptr;
|
||||
flecs::entity parent_e = lct.parent_e;
|
||||
if (parent_e.has<CharacterBase>()) {
|
||||
parentNode =
|
||||
parent_e.get<CharacterBase>().mBodyNode;
|
||||
OgreAssert(
|
||||
parent_e.get<CharacterBase>().mBodyNode,
|
||||
"bad node");
|
||||
}
|
||||
});
|
||||
ecs.system<const EngineData, const LuaChildEventTrigger>(
|
||||
"CreateChildTrigger")
|
||||
.kind(flecs::OnUpdate)
|
||||
.without<EventTrigger>()
|
||||
.write<EventTrigger>()
|
||||
.each([](flecs::entity e, const EngineData &env,
|
||||
const LuaChildEventTrigger &lct) {
|
||||
Ogre::SceneNode *parentNode = nullptr;
|
||||
flecs::entity parent_e = lct.parent_e;
|
||||
if (parent_e.has<CharacterBase>()) {
|
||||
parentNode =
|
||||
ECS::get<CharacterModule>()
|
||||
.characterNodes.at(parent_e);
|
||||
OgreAssert(parentNode, "bad node");
|
||||
|
||||
} else if (parent_e.has<BoatBase>()) {
|
||||
parentNode = parent_e.get<BoatBase>().mNode;
|
||||
OgreAssert(parent_e.get<BoatBase>().mNode,
|
||||
"bad node");
|
||||
} else
|
||||
return;
|
||||
EventTrigger &trigger = e.ensure<EventTrigger>();
|
||||
OgreAssert(parentNode, "bad parent");
|
||||
trigger.position = lct.position;
|
||||
trigger.halfheight = lct.halfheight;
|
||||
trigger.radius = lct.radius;
|
||||
trigger.event = lct.event;
|
||||
trigger.parent = parentNode;
|
||||
e.modified<EventTrigger>();
|
||||
});
|
||||
ecs.system<LuaBase, LuaEvent>("HandleLuaEvents")
|
||||
.kind(flecs::OnUpdate)
|
||||
.each([](LuaBase &base, LuaEvent &evt) {
|
||||
while (!evt.events.empty()) {
|
||||
LuaEvent::Event &ev = evt.events.front();
|
||||
base.mLua->call_handler(ev.event, ev.e1, ev.e2);
|
||||
evt.events.pop_front();
|
||||
}
|
||||
});
|
||||
} else if (parent_e.has<BoatBase>()) {
|
||||
parentNode = parent_e.get<BoatBase>().mNode;
|
||||
OgreAssert(parent_e.get<BoatBase>().mNode,
|
||||
"bad node");
|
||||
} else
|
||||
return;
|
||||
EventTrigger &trigger = e.ensure<EventTrigger>();
|
||||
OgreAssert(parentNode, "bad parent");
|
||||
trigger.position = lct.position;
|
||||
trigger.halfheight = lct.halfheight;
|
||||
trigger.radius = lct.radius;
|
||||
trigger.event = lct.event;
|
||||
trigger.parent = parentNode;
|
||||
e.modified<EventTrigger>();
|
||||
});
|
||||
ecs.system<LuaBase, LuaEvent>("HandleLuaEvents")
|
||||
.kind(flecs::OnUpdate)
|
||||
.each([](LuaBase &base, LuaEvent &evt) {
|
||||
while (!evt.events.empty()) {
|
||||
LuaEvent::Event &ev = evt.events.front();
|
||||
base.mLua->call_handler(ev.event, ev.e1, ev.e2);
|
||||
evt.events.pop_front();
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -58,8 +58,8 @@ PhysicsModule::PhysicsModule(flecs::world &ecs)
|
||||
|
||||
ecs.component<CharacterBody>().on_remove([](flecs::entity e,
|
||||
CharacterBody &body) {
|
||||
JPH::Character *ch =
|
||||
static_cast<JPH::Character *>(body.ch.get());
|
||||
std::shared_ptr<JPH::Character> ch =
|
||||
std::static_pointer_cast<JPH::Character>(body.ch);
|
||||
if (ch) {
|
||||
if (e.has<JPH::BodyID>())
|
||||
e.remove<JPH::BodyID>();
|
||||
@@ -187,9 +187,11 @@ PhysicsModule::PhysicsModule(flecs::world &ecs)
|
||||
const CharacterBase &base) {
|
||||
ZoneScopedN("SetupCharacterPh");
|
||||
CharacterBody &b = e.ensure<CharacterBody>();
|
||||
Ogre::SceneNode *n =
|
||||
ECS::get<CharacterModule>().characterNodes.at(
|
||||
e);
|
||||
b.ch.reset(JoltPhysicsWrapper::getSingleton()
|
||||
.createCharacter(base.mBodyNode,
|
||||
1.75f, 0.23f));
|
||||
.createCharacter(n, 1.75f, 0.23f));
|
||||
if (!e.has<CharacterDisablePhysics>())
|
||||
static_cast<JPH::Character *>(b.ch.get())
|
||||
->AddToPhysicsSystem(
|
||||
@@ -534,8 +536,11 @@ PhysicsModule::PhysicsModule(flecs::world &ecs)
|
||||
const CharacterBase &chbase,
|
||||
const CharacterBody &body, CharacterVelocity &gr) {
|
||||
ZoneScopedN("HandleVelocity");
|
||||
Ogre::SceneNode *n =
|
||||
ECS::get<CharacterModule>().characterNodes.at(
|
||||
e);
|
||||
if (e.has<InWater>() &&
|
||||
chbase.mBodyNode->_getDerivedPosition().y > -0.5f)
|
||||
n->_getDerivedPosition().y > -0.5f)
|
||||
e.remove<InWater>();
|
||||
Ogre::Vector3 v = gr.velocity;
|
||||
v.y = 0.0f;
|
||||
@@ -572,7 +577,10 @@ PhysicsModule::PhysicsModule(flecs::world &ecs)
|
||||
.each([this](flecs::entity e, CharacterBase &ch) {
|
||||
ZoneScopedNC("HandleSubmerge", 0xFF3030);
|
||||
float full_subm = 2.0f;
|
||||
Ogre::Vector3 pos = ch.mBodyNode->getPosition();
|
||||
Ogre::SceneNode *n =
|
||||
ECS::get<CharacterModule>().characterNodes.at(
|
||||
e);
|
||||
Ogre::Vector3 pos = n->getPosition();
|
||||
float current_subm = -Ogre::Math::Clamp(
|
||||
pos.y + Ogre::Math::Sin(ch.mTimer * 0.13f +
|
||||
130.0f) *
|
||||
@@ -595,23 +603,23 @@ PhysicsModule::PhysicsModule(flecs::world &ecs)
|
||||
CharacterBase &ch, const CharacterBody &body,
|
||||
CharacterVelocity &gr) {
|
||||
ZoneScopedNC("HandleVelocityNoPhysics", 0xFF4040);
|
||||
Ogre::SceneNode *n =
|
||||
ECS::get<CharacterModule>().characterNodes.at(
|
||||
e);
|
||||
Ogre::Vector3 v = gr.velocity;
|
||||
// v.y = 0.0f;
|
||||
ch.mBodyNode->_setDerivedPosition(
|
||||
ch.mBodyNode->_getDerivedPosition() +
|
||||
v * eng.delta);
|
||||
n->_setDerivedPosition(n->_getDerivedPosition() +
|
||||
v * eng.delta);
|
||||
gr.velocity = Ogre::Vector3::ZERO;
|
||||
if (e.has<JPH::BodyID>())
|
||||
JoltPhysicsWrapper::getSingleton()
|
||||
.setPositionAndRotation(
|
||||
e.get<JPH::BodyID>(),
|
||||
ch.mBodyNode
|
||||
->_getDerivedPosition(),
|
||||
ch.mBodyNode
|
||||
->_getDerivedOrientation(),
|
||||
n->_getDerivedPosition(),
|
||||
n->_getDerivedOrientation(),
|
||||
false);
|
||||
if (e.has<InWater>() &&
|
||||
ch.mBodyNode->_getDerivedPosition().y > -0.5f) {
|
||||
n->_getDerivedPosition().y > -0.5f) {
|
||||
e.remove<InWater>();
|
||||
ch.is_submerged = false;
|
||||
ZoneTextF("remove in water");
|
||||
@@ -646,26 +654,27 @@ void PhysicsModule::controlPhysics(flecs::entity e, bool enable)
|
||||
OgreAssert(e.has<CharacterBody>(), "No body component");
|
||||
OgreAssert(e.has<JPH::BodyID>(),
|
||||
"No body id in entity");
|
||||
}
|
||||
if (!JoltPhysicsWrapper::getSingleton().isAdded(
|
||||
e.get<JPH::BodyID>())) {
|
||||
Ogre::Vector3 position =
|
||||
e.get<CharacterBase>()
|
||||
.mBodyNode->_getDerivedPosition();
|
||||
Ogre::Quaternion orientation =
|
||||
e.get<CharacterBase>()
|
||||
.mBodyNode->_getDerivedOrientation();
|
||||
if (position.y >= -0.5f)
|
||||
e.remove<InWater>();
|
||||
JoltPhysicsWrapper::getSingleton()
|
||||
.setPositionAndRotation(e.get<JPH::BodyID>(),
|
||||
position, orientation,
|
||||
false);
|
||||
JoltPhysicsWrapper::getSingleton().addBody(
|
||||
e.get<JPH::BodyID>(),
|
||||
JPH::EActivation::Activate);
|
||||
}
|
||||
} else {
|
||||
Ogre::SceneNode *n =
|
||||
ECS::get<CharacterModule>().characterNodes.at(
|
||||
e);
|
||||
if (!JoltPhysicsWrapper::getSingleton().isAdded(
|
||||
e.get<JPH::BodyID>())) {
|
||||
Ogre::Vector3 position =
|
||||
n->_getDerivedPosition();
|
||||
Ogre::Quaternion orientation =
|
||||
n->_getDerivedOrientation();
|
||||
if (position.y >= -0.5f)
|
||||
e.remove<InWater>();
|
||||
JoltPhysicsWrapper::getSingleton()
|
||||
.setPositionAndRotation(
|
||||
e.get<JPH::BodyID>(), position,
|
||||
orientation, false);
|
||||
JoltPhysicsWrapper::getSingleton().addBody(
|
||||
e.get<JPH::BodyID>(),
|
||||
JPH::EActivation::Activate);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (e.has<CharacterBase>()) {
|
||||
e.add<CharacterDisablePhysics>();
|
||||
if (!e.has<CharacterBody>())
|
||||
@@ -673,14 +682,16 @@ void PhysicsModule::controlPhysics(flecs::entity e, bool enable)
|
||||
OgreAssert(e.has<CharacterBody>(), "No body component");
|
||||
OgreAssert(e.has<JPH::BodyID>(),
|
||||
"No body id in entity");
|
||||
}
|
||||
if (JoltPhysicsWrapper::getSingleton().isAdded(
|
||||
e.get<JPH::BodyID>()))
|
||||
JoltPhysicsWrapper::getSingleton().removeBody(
|
||||
e.get<JPH::BodyID>());
|
||||
Ogre::Vector3 position =
|
||||
e.get<CharacterBase>().mBodyNode->_getDerivedPosition();
|
||||
e.remove<InWater>();
|
||||
Ogre::SceneNode *n =
|
||||
ECS::get<CharacterModule>().characterNodes.at(
|
||||
e);
|
||||
if (JoltPhysicsWrapper::getSingleton().isAdded(
|
||||
e.get<JPH::BodyID>()))
|
||||
JoltPhysicsWrapper::getSingleton().removeBody(
|
||||
e.get<JPH::BodyID>());
|
||||
Ogre::Vector3 position = n->_getDerivedPosition();
|
||||
e.remove<InWater>();
|
||||
}
|
||||
}
|
||||
}
|
||||
bool PhysicsModule::raycastQuery(const Ogre::Vector3 &startPos,
|
||||
|
||||
@@ -318,10 +318,12 @@ PlayerActionModule::PlayerActionModule(flecs::world &ecs)
|
||||
ECS::get<CharacterManagerModule>()
|
||||
.getPlayer();
|
||||
if (player.is_valid()) {
|
||||
Ogre::SceneNode *n =
|
||||
ECS::get<CharacterModule>()
|
||||
.characterNodes.at(
|
||||
player);
|
||||
Ogre::Vector3 playerPos =
|
||||
player.get<CharacterBase>()
|
||||
.mBodyNode
|
||||
->_getDerivedPosition();
|
||||
n->_getDerivedPosition();
|
||||
list.UIquery(playerPos);
|
||||
} else {
|
||||
list.UIquery(cameraPos);
|
||||
@@ -479,10 +481,11 @@ out:;
|
||||
anode.position +
|
||||
anode.rotation * placeLocalOffset[place];
|
||||
if (ch.is_valid() && ch.has<CharacterBase>()) {
|
||||
ch.get<CharacterBase>()
|
||||
.mBodyNode->_setDerivedOrientation(newRotation);
|
||||
ch.get<CharacterBase>().mBodyNode->_setDerivedPosition(
|
||||
newPosition);
|
||||
Ogre::SceneNode *n =
|
||||
ECS::get<CharacterModule>().characterNodes.at(
|
||||
ch);
|
||||
n->_setDerivedOrientation(newRotation);
|
||||
n->_setDerivedPosition(newPosition);
|
||||
}
|
||||
if (actor >= 0) {
|
||||
town.get_mut<TownNPCs>().npcs[actor].position =
|
||||
|
||||
@@ -45,7 +45,10 @@ SlotsModule::SlotsModule(flecs::world &ecs)
|
||||
slot.removeSlot(e);
|
||||
return;
|
||||
}
|
||||
slot.addChild(ch.mBodyNode);
|
||||
Ogre::SceneNode *n =
|
||||
ECS::get<CharacterModule>()
|
||||
.characterNodes.at(e);
|
||||
slot.addChild(n);
|
||||
slot.createSlotData(e);
|
||||
std::cout << "base: "
|
||||
<< slot.getSlotBase()->getName();
|
||||
|
||||
@@ -13,8 +13,10 @@
|
||||
#include "TerrainModule.h"
|
||||
#include "physics.h"
|
||||
#include "PhysicsModule.h"
|
||||
#include "CharacterManagerModule.h"
|
||||
#include "items.h"
|
||||
#include "StaticGeometryModule.h"
|
||||
#include "CharacterAIModule.h"
|
||||
#include <tracy/Tracy.hpp>
|
||||
|
||||
namespace ECS
|
||||
@@ -28,6 +30,8 @@ StaticGeometryModule::StaticGeometryModule(flecs::world &ecs)
|
||||
{
|
||||
ZoneScoped;
|
||||
ecs.module<StaticGeometryModule>();
|
||||
ecs.import <CharacterManagerModule>();
|
||||
ecs.import <CharacterAIModule>();
|
||||
ecs.component<TerrainSlotParent>();
|
||||
ecs.component<TerrainItem>();
|
||||
ecs.component<FurnitureItem>();
|
||||
@@ -89,6 +93,19 @@ StaticGeometryModule::StaticGeometryModule(flecs::world &ecs)
|
||||
});
|
||||
if (!Ogre::MeshLodGenerator::getSingletonPtr())
|
||||
new Ogre::MeshLodGenerator();
|
||||
ecs.system<TerrainItem>("SetupTowns")
|
||||
.kind(flecs::OnUpdate)
|
||||
.without<TownNPCs>()
|
||||
.without<TownAI>()
|
||||
.each([&](flecs::entity e, TerrainItem &item) {
|
||||
Ogre::String props = item.properties;
|
||||
nlohmann::json jp = nlohmann::json::parse(props);
|
||||
if (jp.find("type") == jp.end())
|
||||
return;
|
||||
Ogre::String itemType = jp["type"].get<Ogre::String>();
|
||||
if (itemType == "town")
|
||||
Geometry::registerTownItem(e);
|
||||
});
|
||||
ecs.system("AddGeometryQueue").kind(flecs::OnUpdate).run([&](flecs::iter &it) {
|
||||
ZoneScopedN("AddGeometryQueue");
|
||||
std::list<flecs::entity> items;
|
||||
@@ -134,9 +151,8 @@ StaticGeometryModule::StaticGeometryModule(flecs::world &ecs)
|
||||
const TerrainItem &item) {
|
||||
items.push_back(e);
|
||||
});
|
||||
for (auto e : items) {
|
||||
for (auto e : items)
|
||||
createItemGeometry(e);
|
||||
}
|
||||
addQueue.pop_front();
|
||||
} else {
|
||||
output.push_back(item);
|
||||
|
||||
@@ -313,7 +313,7 @@ void createItemGeometry(flecs::entity e)
|
||||
e.set<TerrainItemNode>({ itemNode, geo });
|
||||
} else if (itemType == "town") {
|
||||
OgreAssert(geo, "Can't create static geometry");
|
||||
createTown(e, itemNode, geo);
|
||||
createTown(e, itemNode, geo);
|
||||
e.set<TerrainItemNode>({ itemNode, geo });
|
||||
std::cout << " town created: " << e.id() << std::endl;
|
||||
} else {
|
||||
@@ -381,5 +381,10 @@ flecs::entity createMeshGeometry(const Ogre::String &meshName,
|
||||
return e;
|
||||
}
|
||||
|
||||
void registerTownItem(flecs::entity e)
|
||||
{
|
||||
registerTown(e);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@@ -57,6 +57,7 @@ struct harbourMaker {
|
||||
void createItemGeometry(flecs::entity e);
|
||||
void destroyItemGeometry(flecs::entity e);
|
||||
void updateItemGeometry(flecs::entity e);
|
||||
void registerTownItem(flecs::entity e);
|
||||
flecs::entity createMeshGeometry(const Ogre::String &meshName,
|
||||
flecs::entity parente,
|
||||
Ogre::SceneNode *sceneNode,
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
#include "PhysicsModule.h"
|
||||
#include "LuaData.h"
|
||||
#include "PlayerActionModule.h"
|
||||
#include "CharacterModule.h"
|
||||
#include "CharacterManagerModule.h"
|
||||
#include "CharacterAIModule.h"
|
||||
#include "items.h"
|
||||
@@ -2142,13 +2143,95 @@ void runAllScriptsForTown(flecs::entity e)
|
||||
j["districts"] = districts;
|
||||
StaticGeometryModule::setItemProperties(e, j.dump());
|
||||
}
|
||||
struct Selector {
|
||||
int selection;
|
||||
Ogre::String result;
|
||||
Ogre::String label;
|
||||
std::vector<Ogre::String> options;
|
||||
Selector(const Ogre::String &label,
|
||||
const std::vector<Ogre::String> &options)
|
||||
: label(label)
|
||||
, options(options)
|
||||
, selection(-1)
|
||||
{
|
||||
}
|
||||
bool select()
|
||||
{
|
||||
bool changed = false;
|
||||
if (selection < 0)
|
||||
selection = 0;
|
||||
if (selection >= options.size())
|
||||
selection = (options.size() > 0) ? options.size() - 1 :
|
||||
0;
|
||||
if (options.size() == 0) {
|
||||
ImGui::Text("None: %s", label.c_str());
|
||||
return false;
|
||||
}
|
||||
if (ImGui::BeginCombo(label.c_str(),
|
||||
options[selection].c_str())) {
|
||||
int i;
|
||||
for (i = 0; i < options.size(); i++) {
|
||||
bool selected = selection == i;
|
||||
if (ImGui::Selectable(options[i].c_str(),
|
||||
selected)) {
|
||||
selection = i;
|
||||
changed = true;
|
||||
}
|
||||
}
|
||||
ImGui::EndCombo();
|
||||
}
|
||||
if (changed || result.empty())
|
||||
result = options[selection];
|
||||
return changed;
|
||||
}
|
||||
void set_default(const Ogre::String &def)
|
||||
{
|
||||
int index = -1;
|
||||
auto pos = std::find(options.begin(), options.end(), def);
|
||||
if (pos == options.end())
|
||||
index = -1;
|
||||
else {
|
||||
index = std::distance(options.begin(), pos);
|
||||
selection = index;
|
||||
result = options[index];
|
||||
}
|
||||
}
|
||||
};
|
||||
bool editNPCs(nlohmann::json &npcs)
|
||||
{
|
||||
struct slotEdit {
|
||||
const Ogre::String &label;
|
||||
const Ogre::String &slotBase;
|
||||
std::vector<Ogre::String> *options_m;
|
||||
std::vector<Ogre::String> *options_f;
|
||||
const Ogre::String &slot;
|
||||
};
|
||||
static std::vector<Ogre::String> faces_a_m, hairs_a_m, tops_a_m,
|
||||
bottoms_a_m, feet_a_m;
|
||||
static std::vector<Ogre::String> faces_a_f, hairs_a_f, tops_a_f,
|
||||
bottoms_a_f, feet_a_f;
|
||||
ZoneScoped;
|
||||
bool changed = false;
|
||||
ImGui::Text("NPC");
|
||||
int id = 0;
|
||||
struct slotEdit pslots_a[] = {
|
||||
{ "Face", "face", &faces_a_m, &faces_a_f, "slot_face" },
|
||||
{ "Hair", "hair", &hairs_a_m, &hairs_a_f, "slot_hair" },
|
||||
{ "Top", "top", &tops_a_m, &tops_a_f, "slot_top" },
|
||||
{ "Bottom", "bottom", &bottoms_a_m, &bottoms_a_f,
|
||||
"slot_bottom" },
|
||||
{ "Feet", "feet", &feet_a_m, &feet_a_f, "slot_feet" },
|
||||
};
|
||||
for (auto g : pslots_a) {
|
||||
g.options_m->clear();
|
||||
g.options_f->clear();
|
||||
ECS::get_mut<CharacterModule>().getSlotMeshes(
|
||||
"adult", "male", g.slotBase, *g.options_m);
|
||||
ECS::get_mut<CharacterModule>().getSlotMeshes(
|
||||
"adult", "female", g.slotBase, *g.options_f);
|
||||
}
|
||||
for (auto &npc : npcs) {
|
||||
Ogre::String meid = Ogre::StringConverter::toString(id);
|
||||
static char firstName[64] = { 0 };
|
||||
static char lastName[64] = { 0 };
|
||||
static char nickName[64] = { 0 };
|
||||
@@ -2163,6 +2246,21 @@ bool editNPCs(nlohmann::json &npcs)
|
||||
npc["tags"] = "";
|
||||
if (npc.find("sex") == npc.end())
|
||||
npc["sex"] = 1;
|
||||
if (npc["sex"].get<int>() == 0) {
|
||||
for (const auto &m : pslots_a) {
|
||||
if (npc.find(m.slot) == npc.end()) {
|
||||
npc[m.slot] = m.options_m->at(0);
|
||||
changed = true;
|
||||
}
|
||||
}
|
||||
} else if (npc["sex"].get<int>() == 1) {
|
||||
for (const auto &m : pslots_a) {
|
||||
if (npc.find(m.slot) == npc.end()) {
|
||||
npc[m.slot] = m.options_m->at(0);
|
||||
changed = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
strncpy(firstName, npc["firstName"].get<Ogre::String>().c_str(),
|
||||
sizeof(firstName));
|
||||
@@ -2173,51 +2271,77 @@ bool editNPCs(nlohmann::json &npcs)
|
||||
strncpy(tags, npc["tags"].get<Ogre::String>().c_str(),
|
||||
sizeof(tags));
|
||||
|
||||
ImGui::InputText(
|
||||
("Last name##" + Ogre::StringConverter::toString(id))
|
||||
.c_str(),
|
||||
lastName, sizeof(lastName));
|
||||
ImGui::InputText(("Last name##" + meid).c_str(), lastName,
|
||||
sizeof(lastName));
|
||||
if (ImGui::IsItemDeactivatedAfterEdit()) {
|
||||
npc["lastName"] = Ogre::String(lastName);
|
||||
changed = true;
|
||||
}
|
||||
ImGui::InputText(
|
||||
("First name##" + Ogre::StringConverter::toString(id))
|
||||
.c_str(),
|
||||
firstName, sizeof(firstName));
|
||||
ImGui::InputText(("First name##" + meid).c_str(), firstName,
|
||||
sizeof(firstName));
|
||||
if (ImGui::IsItemDeactivatedAfterEdit()) {
|
||||
npc["firstName"] = Ogre::String(firstName);
|
||||
changed = true;
|
||||
}
|
||||
ImGui::InputText(
|
||||
("Nickname##" + Ogre::StringConverter::toString(id))
|
||||
.c_str(),
|
||||
nickName, sizeof(nickName));
|
||||
ImGui::InputText(("Nickname##" + meid).c_str(), nickName,
|
||||
sizeof(nickName));
|
||||
if (ImGui::IsItemDeactivatedAfterEdit()) {
|
||||
npc["nickName"] = Ogre::String(nickName);
|
||||
changed = true;
|
||||
}
|
||||
Selector race(("Race##" + meid).c_str(), { "human" });
|
||||
Selector age(("Age##" + meid).c_str(), { "adult" });
|
||||
Selector sex(("Sex##" + meid).c_str(), { "male", "female" });
|
||||
if (npc.find("race") != npc.end())
|
||||
race.set_default(npc["race"].get<Ogre::String>());
|
||||
if (npc.find("age") != npc.end())
|
||||
age.set_default(npc["age"].get<Ogre::String>());
|
||||
if (npc.find("sex") != npc.end())
|
||||
sex.selection = npc["sex"].get<int>();
|
||||
if (race.select()) {
|
||||
npc["race"] = race.result;
|
||||
changed = true;
|
||||
}
|
||||
if (age.select()) {
|
||||
npc["age"] = age.result;
|
||||
changed = true;
|
||||
}
|
||||
/* sex is integer */
|
||||
if (sex.select()) {
|
||||
npc["sex"] = sex.selection;
|
||||
changed = true;
|
||||
}
|
||||
if (sex.selection == 0) {
|
||||
for (const auto &es : pslots_a) {
|
||||
Selector sel((es.label + "##" + meid).c_str(),
|
||||
*es.options_m);
|
||||
sel.set_default(
|
||||
npc[es.slot].get<Ogre::String>());
|
||||
if (sel.select()) {
|
||||
npc[es.slot] = sel.result;
|
||||
changed = true;
|
||||
}
|
||||
}
|
||||
} else if (sex.selection == 1) {
|
||||
for (const auto &es : pslots_a) {
|
||||
Selector sel((es.label + "##" + meid).c_str(),
|
||||
*es.options_f);
|
||||
sel.set_default(
|
||||
npc[es.slot].get<Ogre::String>());
|
||||
if (sel.select()) {
|
||||
npc[es.slot] = sel.result;
|
||||
changed = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ImGui::InputText(
|
||||
("Tags##" + Ogre::StringConverter::toString(id)).c_str(),
|
||||
tags, sizeof(tags));
|
||||
ImGui::InputText(("Tags##" + meid).c_str(), tags, sizeof(tags));
|
||||
if (ImGui::IsItemDeactivatedAfterEdit()) {
|
||||
npc["tags"] = Ogre::String(tags);
|
||||
changed = true;
|
||||
}
|
||||
|
||||
int selection = npc["sex"].get<int>();
|
||||
const char *items[] = { "Male", "Female" };
|
||||
if (ImGui::Combo(("Sex##" + Ogre::StringConverter::toString(id))
|
||||
.c_str(),
|
||||
&selection, items, 2))
|
||||
npc["sex"] = selection;
|
||||
if (ImGui::SmallButton(
|
||||
("Spawn##" + Ogre::StringConverter::toString(id))
|
||||
.c_str())) {
|
||||
int sex = npc["sex"].get<int>();
|
||||
const char *models[] = { "normal-male.glb",
|
||||
"normal-female.glb" };
|
||||
if (ImGui::SmallButton(("Spawn##" + meid).c_str())) {
|
||||
Ogre::Vector3 npcPosition;
|
||||
Ogre::Quaternion npcOrientation;
|
||||
from_json(npc["position"], npcPosition);
|
||||
@@ -2225,8 +2349,13 @@ bool editNPCs(nlohmann::json &npcs)
|
||||
|
||||
// FIXME: create TownCharacterManager and register NPCs through there
|
||||
ECS::get_mut<CharacterManagerModule>()
|
||||
.createCharacterData(models[sex], npcPosition,
|
||||
npcOrientation);
|
||||
.createCharacterData(
|
||||
npc["slot_face"].get<Ogre::String>(),
|
||||
npc["slot_hair"].get<Ogre::String>(),
|
||||
npc["slot_top"].get<Ogre::String>(),
|
||||
npc["slot_bottom"].get<Ogre::String>(),
|
||||
npc["slot_feet"].get<Ogre::String>(),
|
||||
npcPosition, npcOrientation);
|
||||
}
|
||||
if (ImGui::SmallButton(
|
||||
("Delete##" + Ogre::StringConverter::toString(id))
|
||||
@@ -2245,9 +2374,39 @@ bool editNPCs(nlohmann::json &npcs)
|
||||
ImGui::InputText("First name", firstName, sizeof(firstName));
|
||||
static char tags[256] = { 0 };
|
||||
ImGui::InputText("Tags", tags, sizeof(tags));
|
||||
static int selection = 0;
|
||||
const char *items[] = { "Male", "Female" };
|
||||
ImGui::Combo("Sex", &selection, items, 2);
|
||||
static Selector race("Race##new_npc", { "human" });
|
||||
static Selector age("Age##new_npc", { "adult" });
|
||||
static Selector sex("Sex##new_npc", { "male", "female" });
|
||||
changed = changed || race.select();
|
||||
changed = changed || age.select();
|
||||
changed = changed || sex.select();
|
||||
static Selector sel_face("Face##new_npc", {});
|
||||
static Selector sel_hair("Hair##new_npc", {});
|
||||
static Selector sel_top("Top##new_npc", {});
|
||||
static Selector sel_bottom("Bottom##new_npc", {});
|
||||
static Selector sel_feet("Feet##new_npc", {});
|
||||
if (changed || sel_face.options.size() == 0) {
|
||||
if (sex.selection == 0) {
|
||||
sel_face = Selector("Face##new_npc", faces_a_m);
|
||||
sel_hair = Selector("Hair##new_npc", hairs_a_m);
|
||||
sel_top = Selector("Top##new_npc", tops_a_m);
|
||||
sel_bottom = Selector("Bottom##new_npc",
|
||||
bottoms_a_m);
|
||||
sel_feet = Selector("Feet##new_npc", feet_a_m);
|
||||
} else if (sex.selection == 1) {
|
||||
sel_face = Selector("Face##new_npc", faces_a_f);
|
||||
sel_hair = Selector("Hair##new_npc", hairs_a_f);
|
||||
sel_top = Selector("Top##new_npc", tops_a_f);
|
||||
sel_bottom = Selector("Bottom##new_npc",
|
||||
bottoms_a_f);
|
||||
sel_feet = Selector("Feet##new_npc", feet_a_f);
|
||||
}
|
||||
}
|
||||
changed = changed || sel_face.select();
|
||||
changed = changed || sel_hair.select();
|
||||
changed = changed || sel_top.select();
|
||||
changed = changed || sel_bottom.select();
|
||||
changed = changed || sel_feet.select();
|
||||
if (ImGui::SmallButton("Add NPC")) {
|
||||
nlohmann::json npc;
|
||||
npc["lastName"] = Ogre::String(lastName);
|
||||
@@ -2261,7 +2420,12 @@ bool editNPCs(nlohmann::json &npcs)
|
||||
.sceneNode->_getDerivedOrientation();
|
||||
to_json(npc["position"], npcPosition);
|
||||
to_json(npc["orientation"], npcOrientation);
|
||||
npc["sex"] = selection;
|
||||
npc["sex"] = sex.selection;
|
||||
npc["slot_hair"] = sel_hair.result;
|
||||
npc["slot_face"] = sel_face.result;
|
||||
npc["slot_top"] = sel_top.result;
|
||||
npc["slot_bottom"] = sel_bottom.result;
|
||||
npc["slot_feet"] = sel_feet.result;
|
||||
npc["health"] = 100;
|
||||
npc["stamina"] = 100;
|
||||
npcs.push_back(npc);
|
||||
@@ -5524,6 +5688,7 @@ struct TownDecorateDoors : TownTask {
|
||||
};
|
||||
struct TownDecorateFurniture : TownTask {
|
||||
std::shared_future<bool> townDecorateFurnitureComplete;
|
||||
/* here we place all the furniture as scene objects */
|
||||
void createDecorateFurniture(flecs::entity e,
|
||||
const nlohmann::json &jdistrict, int index,
|
||||
Ogre::SceneNode *sceneNode,
|
||||
@@ -5601,11 +5766,11 @@ struct TownDecorateFurniture : TownTask {
|
||||
Ogre::Vector3::UNIT_Z *
|
||||
(float)z * 2.0f;
|
||||
Ogre::Vector3 offsetY(0, y * 4.0f, 0);
|
||||
if (furniture.find("mesh") !=
|
||||
furniture.end()) {
|
||||
Ogre::String meshName =
|
||||
furniture["mesh"]
|
||||
.get<Ogre::String>();
|
||||
static Ogre::String materialName1 = "";
|
||||
static Ogre::String materialName2 = "";
|
||||
auto getFurnitureMesh =
|
||||
[](const Ogre::String &meshName)
|
||||
-> Ogre::MeshPtr {
|
||||
Ogre::MeshPtr mesh =
|
||||
Ogre::MeshManager::getSingleton()
|
||||
.getByName(
|
||||
@@ -5624,6 +5789,70 @@ struct TownDecorateFurniture : TownTask {
|
||||
meshconf);
|
||||
}
|
||||
}
|
||||
return mesh;
|
||||
};
|
||||
auto placeFurnitureMesh = [](flecs::entity
|
||||
e,
|
||||
Ogre::MeshPtr
|
||||
mesh,
|
||||
const Ogre::Vector3
|
||||
&position,
|
||||
const Ogre::Quaternion
|
||||
&rotation) {
|
||||
Ogre::Entity *ent =
|
||||
ECS::get<EngineData>()
|
||||
.mScnMgr
|
||||
->createEntity(
|
||||
mesh->getName());
|
||||
Ogre::String tmpMatName =
|
||||
mesh->getSubMesh(0)
|
||||
->getMaterialName();
|
||||
if (tmpMatName.substr(0, 14) ==
|
||||
"furniture-sofa") {
|
||||
if (materialName2 == "")
|
||||
materialName2 =
|
||||
tmpMatName;
|
||||
else
|
||||
ent->setMaterialName(
|
||||
|
||||
materialName2);
|
||||
} else {
|
||||
if (materialName1 == "")
|
||||
materialName1 =
|
||||
tmpMatName;
|
||||
else
|
||||
ent->setMaterialName(
|
||||
materialName1);
|
||||
}
|
||||
Ogre::SceneNode *node =
|
||||
ECS::get<EngineData>()
|
||||
.mScnMgr
|
||||
->getRootSceneNode()
|
||||
->createChildSceneNode();
|
||||
node->_setDerivedPosition(
|
||||
position);
|
||||
node->_setDerivedOrientation(
|
||||
rotation);
|
||||
node->attachObject(ent);
|
||||
ent->setRenderingDistance(
|
||||
60.0f);
|
||||
addStaticBodyMesh(e, mesh,
|
||||
position,
|
||||
rotation);
|
||||
ECS::get()
|
||||
.entity()
|
||||
.child_of(e)
|
||||
.set<FurnitureInstance>(
|
||||
{ node });
|
||||
};
|
||||
if (furniture.find("mesh") !=
|
||||
furniture.end()) {
|
||||
Ogre::String meshName =
|
||||
furniture["mesh"]
|
||||
.get<Ogre::String>();
|
||||
Ogre::MeshPtr mesh =
|
||||
getFurnitureMesh(
|
||||
meshName);
|
||||
if (mesh) {
|
||||
int rotation = 2;
|
||||
if (jfcell.find(
|
||||
@@ -5638,6 +5867,25 @@ struct TownDecorateFurniture : TownTask {
|
||||
Ogre::Vector3::
|
||||
UNIT_Z *
|
||||
0.0f;
|
||||
Ogre::Vector3 furniturePosition =
|
||||
worldCenterPosition +
|
||||
offsetX +
|
||||
offsetZ +
|
||||
offsetY +
|
||||
offset;
|
||||
Ogre::Quaternion furnitureOrientation =
|
||||
worldCenterOrientation *
|
||||
Ogre::Quaternion(
|
||||
Ogre::Degree(
|
||||
90.0f *
|
||||
(float)rotation),
|
||||
Ogre::Vector3::
|
||||
UNIT_Y);
|
||||
placeFurnitureMesh(
|
||||
e, mesh,
|
||||
furniturePosition,
|
||||
furnitureOrientation);
|
||||
#if 0
|
||||
Ogre::Entity *ent =
|
||||
ECS::get<
|
||||
EngineData>()
|
||||
@@ -5686,6 +5934,7 @@ struct TownDecorateFurniture : TownTask {
|
||||
.child_of(e)
|
||||
.set<FurnitureInstance>(
|
||||
{ node });
|
||||
#endif
|
||||
#if 0
|
||||
if (furniture.find(
|
||||
"sensors") !=
|
||||
@@ -5879,6 +6128,7 @@ struct TownDecorateFurniture : TownTask {
|
||||
float radius =
|
||||
action["radius"]
|
||||
.get<float>();
|
||||
#if 0
|
||||
if (ECS::get()
|
||||
.has<ActionNodeList>()) {
|
||||
ActionNodeList::ActionNode
|
||||
@@ -5921,6 +6171,7 @@ struct TownDecorateFurniture : TownTask {
|
||||
ECS::modified<
|
||||
ActionNodeList>();
|
||||
}
|
||||
#endif
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -6025,12 +6276,209 @@ void createTown(flecs::entity e, Ogre::SceneNode *sceneNode,
|
||||
geo->build();
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
void registerTown(flecs::entity e)
|
||||
{
|
||||
ZoneScoped;
|
||||
registerTownNPCs(e);
|
||||
if (ECS::get().entity<CharacterAIModule>().is_valid())
|
||||
if (ECS::get().has<CharacterAIModule>()) {
|
||||
ECS::get_mut<CharacterAIModule>().createAI(e);
|
||||
ECS::modified<CharacterAIModule>();
|
||||
}
|
||||
}
|
||||
createTownActionNodes(e);
|
||||
}
|
||||
|
||||
void createTownActionNodes(flecs::entity e)
|
||||
{
|
||||
const TerrainItem &item = e.get<TerrainItem>();
|
||||
Ogre::String props = item.properties;
|
||||
nlohmann::json jprops = nlohmann::json::parse(props);
|
||||
#if 0
|
||||
Ogre::MaterialPtr townMaterial = createTownMaterial(e);
|
||||
#endif
|
||||
for (const auto &jdistrict : jprops["districts"]) {
|
||||
const nlohmann::json &jp = jdistrict;
|
||||
nlohmann::json jlots = nlohmann::json::array();
|
||||
float baseHeight = 4.0f;
|
||||
Ogre::Vector3 localPosition(0, 0, 0);
|
||||
Ogre::Quaternion localRotation = Ogre::Quaternion::IDENTITY;
|
||||
Ogre::Vector3 centerPosition = item.position;
|
||||
Ogre::Quaternion centerOrientation = item.orientation;
|
||||
float delevation = 0.0f;
|
||||
float radius = 50.0f;
|
||||
if (jp.find("elevation") != jp.end())
|
||||
delevation = jp["elevation"].get<float>();
|
||||
if (jp.find("radius") != jp.end())
|
||||
radius = jp["radius"].get<float>();
|
||||
from_json(jp["position"], localPosition);
|
||||
from_json(jp["rotation"], localRotation);
|
||||
centerPosition = centerPosition + localPosition +
|
||||
Ogre::Vector3(0, delevation, 0);
|
||||
centerOrientation = centerOrientation * localRotation;
|
||||
if (jdistrict.find("lots") != jdistrict.end())
|
||||
jlots = jdistrict["lots"];
|
||||
for (const auto &jb : jlots) {
|
||||
float angle = 0.0f;
|
||||
int depth = 10;
|
||||
int width = 10;
|
||||
float distance = radius;
|
||||
float elevation = 0.0f;
|
||||
std::cout << jb.dump() << std::endl;
|
||||
if (jb.find("angle") != jb.end())
|
||||
angle = jb["angle"].get<float>();
|
||||
if (jb.find("depth") != jb.end())
|
||||
depth = jb["depth"].get<int>();
|
||||
if (jb.find("width") != jb.end())
|
||||
width = jb["width"].get<int>();
|
||||
if (jb.find("elevation") != jb.end())
|
||||
elevation = jb["elevation"].get<float>();
|
||||
|
||||
OgreAssert(width > 1 && depth > 1 && baseHeight > 1,
|
||||
"Bad stuff happen");
|
||||
|
||||
Ogre::Quaternion rotation = Ogre::Quaternion(
|
||||
Ogre::Degree(angle), Ogre::Vector3::UNIT_Y);
|
||||
Ogre::Vector3 offset =
|
||||
centerOrientation * rotation *
|
||||
(Ogre::Vector3::UNIT_Z * distance);
|
||||
Ogre::Vector3 worldCenterPosition =
|
||||
centerPosition + offset +
|
||||
Ogre::Vector3(0, elevation, 0);
|
||||
Ogre::Quaternion worldCenterOrientation =
|
||||
centerOrientation * rotation;
|
||||
float outOffset = 1.05f;
|
||||
if (jb.find("furniture_cells") != jb.end()) {
|
||||
for (auto &jfcell : jb["furniture_cells"]) {
|
||||
int x = jfcell["x"].get<int>();
|
||||
int y = jfcell["y"].get<int>();
|
||||
int z = jfcell["z"].get<int>();
|
||||
nlohmann::json furniture =
|
||||
jfcell["furniture"];
|
||||
Ogre::Vector3 cellOffset(
|
||||
x * 2.0f, y * 4.0f, z * 2.0f);
|
||||
Ogre::Vector3 offsetX =
|
||||
worldCenterOrientation *
|
||||
Ogre::Vector3::UNIT_X *
|
||||
(float)x * 2.0f;
|
||||
Ogre::Vector3 offsetZ =
|
||||
worldCenterOrientation *
|
||||
Ogre::Vector3::UNIT_Z *
|
||||
(float)z * 2.0f;
|
||||
Ogre::Vector3 offsetY(0, y * 4.0f, 0);
|
||||
static Ogre::String materialName1 = "";
|
||||
static Ogre::String materialName2 = "";
|
||||
int rotation = 2;
|
||||
if (jfcell.find("rotation") !=
|
||||
jfcell.end())
|
||||
rotation = jfcell["rotation"]
|
||||
.get<int>();
|
||||
|
||||
Ogre::Vector3 offset =
|
||||
worldCenterOrientation *
|
||||
Ogre::Vector3::UNIT_Z * 0.0f;
|
||||
#if 0
|
||||
Ogre::Vector3 furniturePosition =
|
||||
worldCenterPosition + offsetX +
|
||||
offsetZ + offsetY + offset;
|
||||
Ogre::Quaternion furnitureOrientation =
|
||||
worldCenterOrientation *
|
||||
Ogre::Quaternion(
|
||||
Ogre::Degree(
|
||||
90.0f *
|
||||
(float)rotation),
|
||||
Ogre::Vector3::UNIT_Y);
|
||||
#endif
|
||||
|
||||
if (furniture.find("actions") !=
|
||||
furniture.end()) {
|
||||
for (const auto &action :
|
||||
furniture["actions"]) {
|
||||
std::cout
|
||||
<< "SENSOR: "
|
||||
<< action.dump()
|
||||
<< std::endl;
|
||||
std::cout
|
||||
<< furniture
|
||||
.dump()
|
||||
<< std::endl;
|
||||
Ogre::Vector3
|
||||
actionPosition;
|
||||
actionPosition.x =
|
||||
action["position_x"]
|
||||
.get<float>();
|
||||
actionPosition.y =
|
||||
action["position_y"]
|
||||
.get<float>();
|
||||
actionPosition.z =
|
||||
action["position_z"]
|
||||
.get<float>();
|
||||
Ogre::Quaternion worldSensorRotation =
|
||||
worldCenterOrientation *
|
||||
Ogre::Quaternion(
|
||||
Ogre::Degree(
|
||||
90.0f *
|
||||
(float)rotation),
|
||||
Ogre::Vector3::
|
||||
UNIT_Y);
|
||||
Ogre::Vector3 worldSensorPosition =
|
||||
worldCenterPosition +
|
||||
offsetX +
|
||||
offsetZ +
|
||||
offsetY +
|
||||
offset +
|
||||
worldSensorRotation *
|
||||
actionPosition;
|
||||
if (ECS::get()
|
||||
.has<ActionNodeList>()) {
|
||||
ActionNodeList::ActionNode
|
||||
anode;
|
||||
anode.action =
|
||||
action["action"]
|
||||
.get<Ogre::String>();
|
||||
anode.action_text =
|
||||
action["action_text"]
|
||||
.get<Ogre::String>();
|
||||
anode.radius =
|
||||
action["radius"]
|
||||
.get<float>();
|
||||
anode.height =
|
||||
action["height"]
|
||||
.get<float>();
|
||||
anode.props =
|
||||
action;
|
||||
anode.props
|
||||
["town"] =
|
||||
e.id();
|
||||
anode.props
|
||||
["index"] =
|
||||
-1;
|
||||
anode.position =
|
||||
worldSensorPosition;
|
||||
anode.rotation =
|
||||
worldSensorRotation;
|
||||
anode.dynamic =
|
||||
false;
|
||||
ECS::get_mut<
|
||||
ActionNodeList>()
|
||||
.addNode(
|
||||
anode);
|
||||
std::cout
|
||||
<< "action: "
|
||||
<< action.dump(
|
||||
4)
|
||||
<< std::endl;
|
||||
ECS::modified<
|
||||
ActionNodeList>();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,7 +2,8 @@
|
||||
#define __TOWN_H__
|
||||
#include <OgreMeshLodGenerator.h>
|
||||
#include <flecs.h>
|
||||
namespace Procedural {
|
||||
namespace Procedural
|
||||
{
|
||||
class TriangleBuffer;
|
||||
}
|
||||
namespace ECS
|
||||
@@ -21,6 +22,8 @@ void clampUV(flecs::entity e, Procedural::TriangleBuffer &tb,
|
||||
Ogre::MaterialPtr createTownMaterial(flecs::entity e, bool force = false);
|
||||
void createTown(flecs::entity e, Ogre::SceneNode *sceneNode,
|
||||
Ogre::StaticGeometry *geo);
|
||||
void registerTown(flecs::entity e);
|
||||
void createTownActionNodes(flecs::entity e);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
@@ -573,14 +573,13 @@ public:
|
||||
std::thread::hardware_concurrency() - 1)
|
||||
, mDebugRenderer(new DebugRenderer(scnMgr, cameraNode))
|
||||
, object_vs_broadphase_layer_filter{}
|
||||
, object_vs_object_layer_filter{}
|
||||
, object_vs_object_layer_filter{}
|
||||
, debugDraw(false)
|
||||
{
|
||||
static int instanceCount = 0;
|
||||
OgreAssert(instanceCount == 0, "Bad initialisation");
|
||||
instanceCount++;
|
||||
|
||||
|
||||
// This is the max amount of rigid bodies that you can add to the physics system. If you try to add more you'll get an error.
|
||||
// Note: This value is low because this is a simple test. For a real project use something in the order of 65536.
|
||||
const uint cMaxBodies = 65536;
|
||||
@@ -1510,14 +1509,14 @@ public:
|
||||
{
|
||||
return characterBodies.find(id) != characterBodies.end();
|
||||
}
|
||||
void destroyCharacter(JPH::Character *ch)
|
||||
void destroyCharacter(std::shared_ptr<JPH::Character> ch)
|
||||
{
|
||||
characterBodies.erase(characterBodies.find(ch->GetBodyID()));
|
||||
characters.erase(ch);
|
||||
characters.erase(ch.get());
|
||||
Ogre::SceneNode *node = id2node[ch->GetBodyID()];
|
||||
id2node.erase(ch->GetBodyID());
|
||||
node2id.erase(node);
|
||||
OGRE_DELETE ch;
|
||||
ch = nullptr;
|
||||
}
|
||||
};
|
||||
|
||||
@@ -1539,17 +1538,17 @@ JoltPhysicsWrapper::JoltPhysicsWrapper(Ogre::SceneManager *scnMgr,
|
||||
// Install trace and assert callbacks
|
||||
JPH::Trace = TraceImpl;
|
||||
JPH_IF_ENABLE_ASSERTS(JPH::AssertFailed = AssertFailedImpl;)
|
||||
|
||||
|
||||
// Create a factory, this class is responsible for creating instances of classes based on their name or hash and is mainly used for deserialization of saved data.
|
||||
// It is not directly used in this example but still required.
|
||||
JPH::Factory::sInstance = new JPH::Factory();
|
||||
// Register all physics types with the factory and install their collision handlers with the CollisionDispatch class.
|
||||
// If you have your own custom shape types you probably need to register their handlers with the CollisionDispatch before calling this function.
|
||||
// If you implement your own default material (PhysicsMaterial::sDefault) make sure to initialize it before this function or else this function will create one for you.
|
||||
// Register all physics types with the factory and install their collision handlers with the CollisionDispatch class.
|
||||
// If you have your own custom shape types you probably need to register their handlers with the CollisionDispatch before calling this function.
|
||||
// If you implement your own default material (PhysicsMaterial::sDefault) make sure to initialize it before this function or else this function will create one for you.
|
||||
JPH::RegisterTypes();
|
||||
|
||||
phys = std::make_unique<Physics>(scnMgr, cameraNode, nullptr, &contacts);
|
||||
phys = std::make_unique<Physics>(scnMgr, cameraNode, nullptr,
|
||||
&contacts);
|
||||
}
|
||||
|
||||
JoltPhysicsWrapper::~JoltPhysicsWrapper()
|
||||
@@ -1824,7 +1823,7 @@ bool JoltPhysicsWrapper::bodyIsCharacter(JPH::BodyID id) const
|
||||
return phys->bodyIsCharacter(id);
|
||||
}
|
||||
|
||||
void JoltPhysicsWrapper::destroyCharacter(JPH::Character *ch)
|
||||
void JoltPhysicsWrapper::destroyCharacter(std::shared_ptr<JPH::Character> ch)
|
||||
{
|
||||
phys->destroyCharacter(ch);
|
||||
}
|
||||
|
||||
@@ -219,6 +219,6 @@ public:
|
||||
bool raycastQuery(Ogre::Vector3 startPoint, Ogre::Vector3 endPoint,
|
||||
Ogre::Vector3 &position, JPH::BodyID &id);
|
||||
bool bodyIsCharacter(JPH::BodyID id) const;
|
||||
void destroyCharacter(JPH::Character *ch);
|
||||
void destroyCharacter(std::shared_ptr<JPH::Character> ch);
|
||||
};
|
||||
#endif
|
||||
|
||||
Reference in New Issue
Block a user