From d9bdbd6020285b81adf98e23c205993e71af6e3f Mon Sep 17 00:00:00 2001 From: Bryan Galdrikian Date: Mon, 10 Jun 2019 14:37:48 -0700 Subject: Hierarchy optimization bugfixes and improvements Takes a selection set and only merges those chunks --- .../include/NvBlastExtAuthoringFractureTool.h | 9 +- .../source/NvBlastExtAuthoringFractureToolImpl.cpp | 104 ++++++++++++--------- .../source/NvBlastExtAuthoringFractureToolImpl.h | 4 +- 3 files changed, 71 insertions(+), 46 deletions(-) (limited to 'sdk/extensions') diff --git a/sdk/extensions/authoring/include/NvBlastExtAuthoringFractureTool.h b/sdk/extensions/authoring/include/NvBlastExtAuthoringFractureTool.h index b11851e..8f9e3bb 100755 --- a/sdk/extensions/authoring/include/NvBlastExtAuthoringFractureTool.h +++ b/sdk/extensions/authoring/include/NvBlastExtAuthoringFractureTool.h @@ -497,14 +497,17 @@ class FractureTool Optimize chunk hierarhy for better runtime performance. It tries to unite chunks to groups of some size in order to transform flat hierarchy (all chunks are children of single root) to tree like hieracrhy with limited number of children for each chunk. - \param[in] maxAtLevel If number of children of some chunk less then maxAtLevel then it would be considered as already + \param[in] threshold If number of children of some chunk is less than threshold then it would be considered as already optimized and skipped. - \param[in] maxGroupSize Max number of children for processed chunks. \param[in] removeOriginalChunks. + \param[in] targetClusterSize Target number of children for processed chunks. + \param[in] chunksToMerge Which chunks are merge candidates. If NULL, all chunks will be merge candidates. + \param[in] mergeChunkCount Size of chunksToMerge array, if chunksToMerge != NULL. \param[in] adjChunks Optional index pairs to describe chunk adjacency. May be NULL. \param[in] adjChunksSize If 'adjChunks' is not NULL, the number of index pairs in the adjChunks array. 
\param[in] removeOriginalChunks If true, original chunks that are merged are removed. */ - virtual void uniteChunks(uint32_t maxAtLevel, uint32_t maxGroupSize, + virtual void uniteChunks(uint32_t threshold, uint32_t targetClusterSize, + const uint32_t* chunksToMerge, uint32_t mergeChunkCount, const NvcVec2i* adjChunks, uint32_t adjChunksSize, bool removeOriginalChunks = false) = 0; diff --git a/sdk/extensions/authoring/source/NvBlastExtAuthoringFractureToolImpl.cpp b/sdk/extensions/authoring/source/NvBlastExtAuthoringFractureToolImpl.cpp index f8d9a2d..2354474 100755 --- a/sdk/extensions/authoring/source/NvBlastExtAuthoringFractureToolImpl.cpp +++ b/sdk/extensions/authoring/source/NvBlastExtAuthoringFractureToolImpl.cpp @@ -2402,51 +2402,70 @@ bool VecIntComp(const std::pair& a, const std::pair depth(mChunkData.size(), 0); std::vector > chunkGraph(mChunkData.size()); - std::vector atEachDepth; std::vector childNumber(mChunkData.size(), 0); + std::vector chunksToRemove; - for (uint32_t i = 0; i < mChunkData.size(); ++i) + enum ChunkFlags + { + Mergeable = (1 << 0), + Merged = (1 << 1) + }; + + std::vector chunkFlags(mChunkData.size()); + + if (chunksToMerge == nullptr) + { + std::fill(chunkFlags.begin(), chunkFlags.end(), Mergeable); + } + else + { + // Seed all mergeable chunks with Mergeable flag + for (uint32_t chunkN = 0; chunkN < mergeChunkCount; ++chunkN) + { + const uint32_t chunkIndex = chunksToMerge[chunkN]; + chunkFlags[chunkIndex] |= Mergeable; + } + + // Make all descendants mergeable too + std::vector treeWalk; + for (uint32_t chunkIndex = 0; chunkIndex < mChunkData.size(); ++chunkIndex) + { + treeWalk.clear(); + int32_t walkIndex = (int32_t)chunkIndex; + do + { + if (chunkFlags[walkIndex] & Mergeable) + { + std::for_each(treeWalk.begin(), treeWalk.end(), [&chunkFlags](int32_t index) {chunkFlags[index] |= Mergeable; }); + break; + } + treeWalk.push_back(walkIndex); + } while ((walkIndex = mChunkData[walkIndex].parent) >= 0); + } + } + + int32_t maxDepth 
= 0; + + for (uint32_t i = 0; i < mChunkData.size(); ++i) { if (mChunkData[i].parent != -1) childNumber[getChunkIndex(mChunkData[i].parent)]++; depth[i] = getChunkDepth(mChunkData[i].chunkId); NVBLAST_ASSERT(depth[i] >= 0); - if (depth[i] >= 0) - { - if ((size_t)depth[i] >= atEachDepth.size()) - { - atEachDepth.resize(depth[i]+1, 0); - } - atEachDepth[depth[i]]++; - } + maxDepth = std::max(maxDepth, depth[i]); } - std::vector chunksToRemove; - - std::vector chunkFlags(mChunkData.size(), 0); - - enum ChunkFlags - { - ChunkUsage = (1 << 0), - MergedChunk = (1 << 1) - }; - - for (int32_t level = (int32_t)atEachDepth.size(); level--;) // go from leaves to trunk and rebuild hierarchy + for (int32_t level = maxDepth; level > 0; --level) // go from leaves to trunk and rebuild hierarchy { - if (atEachDepth[level] < maxChunksAtLevel) - continue; - std::vector cGroup; std::vector chunksToUnify; @@ -2455,7 +2474,7 @@ void FractureToolImpl::uniteChunks(uint32_t maxChunksAtLevel, uint32_t maxGroup, for (uint32_t ch = 0; ch < depth.size(); ++ch) { - if (depth[ch] == level && childNumber[getChunkIndex(mChunkData[ch].parent)] > maxChunksAtLevel) + if (depth[ch] == level && childNumber[getChunkIndex(mChunkData[ch].parent)] > threshold && (chunkFlags[ch] & Mergeable) != 0) { chunksToUnify.push_back(ch); NvcVec3 cp = fromPxShared(toPxShared(mChunkData[ch].meshData->getBoundingBox()).getCenter()); @@ -2480,42 +2499,43 @@ void FractureToolImpl::uniteChunks(uint32_t maxChunksAtLevel, uint32_t maxGroup, } rebuildAdjGraph(chunksToUnify, adjChunks, adjChunksSize, chunkGraph); - - for (uint32_t iter = 0; iter < 32 && chunksToUnify.size() > maxChunksAtLevel; ++iter) + for (uint32_t iter = 0; iter < 32 && chunksToUnify.size() > threshold; ++iter) { std::vector newChunksToUnify; for (uint32_t c = 0; c < chunksToUnify.size(); ++c) { - if (chunkFlags[chunksToUnify[c]] & ChunkUsage) + if ((chunkFlags[chunksToUnify[c]] & Mergeable) == 0) continue; - - chunkFlags[chunksToUnify[c]] |= ChunkUsage; + 
chunkFlags[chunksToUnify[c]] &= ~Mergeable; cGroup.push_back(chunksToUnify[c]); - for (uint32_t sc = 0; sc < cGroup.size() && cGroup.size() < maxGroup; ++sc) + for (uint32_t sc = 0; sc < cGroup.size() && cGroup.size() < targetClusterSize; ++sc) { uint32_t sid = cGroup[sc]; - for (uint32_t neighb = 0; neighb < chunkGraph[sid].size() && cGroup.size() < maxGroup; ++neighb) + for (uint32_t neighbN = 0; neighbN < chunkGraph[sid].size() && cGroup.size() < targetClusterSize; ++neighbN) { - if (chunkFlags[chunkGraph[sid][neighb]] & ChunkUsage) + const uint32_t chunkNeighb = chunkGraph[sid][neighbN]; + if (mChunkData[chunkNeighb].parent != mChunkData[sid].parent) + continue; + if ((chunkFlags[chunkNeighb] & Mergeable) == 0) continue; - cGroup.push_back(chunkGraph[sid][neighb]); - chunkFlags[chunkGraph[sid][neighb]] |= ChunkUsage; - } + chunkFlags[chunkNeighb] &= ~Mergeable; + cGroup.push_back(chunkNeighb); + } } if (cGroup.size() > 1) { uint32_t newChunk = stretchGroup(cGroup, chunkGraph); for (uint32_t chunk : cGroup) { - if (removeOriginalChunks && !(chunkFlags[chunk] & MergedChunk)) + if (removeOriginalChunks && !(chunkFlags[chunk] & Merged)) { chunksToRemove.push_back(chunk); } } cGroup.clear(); newChunksToUnify.push_back(newChunk); - chunkFlags.push_back(MergedChunk); + chunkFlags.push_back(Merged); } else { diff --git a/sdk/extensions/authoring/source/NvBlastExtAuthoringFractureToolImpl.h b/sdk/extensions/authoring/source/NvBlastExtAuthoringFractureToolImpl.h index fb3ba37..a8c59dc 100755 --- a/sdk/extensions/authoring/source/NvBlastExtAuthoringFractureToolImpl.h +++ b/sdk/extensions/authoring/source/NvBlastExtAuthoringFractureToolImpl.h @@ -375,7 +375,9 @@ public: bool deleteAllChildrenOfChunk(int32_t chunkId) override; - void uniteChunks(uint32_t maxAtLevel, uint32_t maxGroupSize, const NvcVec2i* adjChunks, uint32_t adjChunksSize, + void uniteChunks(uint32_t threshold, uint32_t targetClusterSize, + const uint32_t* chunksToMerge, uint32_t mergeChunkCount, + const 
NvcVec2i* adjChunks, uint32_t adjChunksSize, bool removeOriginalChunks = false) override; -- cgit v1.2.3