/*
Open Asset Import Library (assimp)
----------------------------------------------------------------------

Copyright (c) 2006-2020, assimp team

All rights reserved.

Redistribution and use of this software in source and binary forms,
with or without modification, are permitted provided that the
following conditions are met:

* Redistributions of source code must retain the above
  copyright notice, this list of conditions and the
  following disclaimer.

* Redistributions in binary form must reproduce the above
  copyright notice, this list of conditions and the
  following disclaimer in the documentation and/or other
  materials provided with the distribution.

* Neither the name of the assimp team, nor the names of its
  contributors may be used to endorse or promote products
  derived from this software without specific prior
  written permission of the assimp team.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
----------------------------------------------------------------------
*/

/// @file SplitByBoneCountProcess.cpp
/// Implementation of the SplitByBoneCount postprocessing step

// internal headers of the post-processing framework
#include "SplitByBoneCountProcess.h"
#include <assimp/postprocess.h>
#include <assimp/DefaultLogger.hpp>
#include <assimp/TinyFormatter.h>
#include <assimp/Exceptional.h>

#include <algorithm> // std::find, std::copy
#include <limits>
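
// Typical usage (illustrative sketch, not part of this translation unit): the step runs when
// aiProcess_SplitByBoneCount is requested, and the bone limit can be overridden through the
// AI_CONFIG_PP_SBBC_MAX_BONES importer property (default: AI_SBBC_DEFAULT_MAX_BONES).
// The file name and limit below are placeholder values.
//
//     Assimp::Importer importer;
//     importer.SetPropertyInteger(AI_CONFIG_PP_SBBC_MAX_BONES, 32);
//     const aiScene* scene = importer.ReadFile("model.fbx",
//             aiProcess_Triangulate | aiProcess_SplitByBoneCount);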

using namespace Assimp;
using namespace Assimp::Formatter;

// ------------------------------------------------------------------------------------------------
// Constructor
SplitByBoneCountProcess::SplitByBoneCountProcess()
{
    // set default, might be overridden by importer config
    mMaxBoneCount = AI_SBBC_DEFAULT_MAX_BONES;
}

// ------------------------------------------------------------------------------------------------
// Destructor
SplitByBoneCountProcess::~SplitByBoneCountProcess()
{
    // nothing to do here
}

// ------------------------------------------------------------------------------------------------
// Returns whether the processing step is present in the given flag.
bool SplitByBoneCountProcess::IsActive(unsigned int pFlags) const
{
    return !!(pFlags & aiProcess_SplitByBoneCount);
}

// ------------------------------------------------------------------------------------------------
// Updates internal properties
void SplitByBoneCountProcess::SetupProperties(const Importer* pImp)
{
    mMaxBoneCount = pImp->GetPropertyInteger(AI_CONFIG_PP_SBBC_MAX_BONES, AI_SBBC_DEFAULT_MAX_BONES);
}

// ------------------------------------------------------------------------------------------------
// Executes the post processing step on the given imported data.
void SplitByBoneCountProcess::Execute(aiScene* pScene)
{
    ASSIMP_LOG_DEBUG("SplitByBoneCountProcess begin");

    // early out
    bool isNecessary = false;
    for (unsigned int a = 0; a < pScene->mNumMeshes; ++a)
    {
        if (pScene->mMeshes[a]->mNumBones > mMaxBoneCount)
        {
            isNecessary = true;
            break;
        }
    }

    if (!isNecessary)
    {
        ASSIMP_LOG_DEBUG(format() << "SplitByBoneCountProcess early-out: no meshes with more than " << mMaxBoneCount << " bones.");
        return;
    }

    // we need to do something. Let's go.
    mSubMeshIndices.clear();
    mSubMeshIndices.resize(pScene->mNumMeshes);
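    // mSubMeshIndices maps each source mesh index to the indices of the meshes that replace it
    // in the rebuilt mesh array; UpdateNode() uses this mapping to patch the node mesh lists.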

    // build a new array of meshes for the scene
    std::vector<aiMesh*> meshes;
    for (unsigned int a = 0; a < pScene->mNumMeshes; ++a)
    {
        aiMesh* srcMesh = pScene->mMeshes[a];

        std::vector<aiMesh*> newMeshes;
        SplitMesh(pScene->mMeshes[a], newMeshes);

        // mesh was split
        if (!newMeshes.empty())
        {
            // store new meshes and indices of the new meshes
            for (unsigned int b = 0; b < newMeshes.size(); ++b)
            {
                mSubMeshIndices[a].push_back(static_cast<unsigned int>(meshes.size()));
                meshes.push_back(newMeshes[b]);
            }

            // and destroy the source mesh. It should be completely contained inside the new submeshes
            delete srcMesh;
        }
        else
        {
            // Mesh is kept unchanged - store its new place in the mesh array
            mSubMeshIndices[a].push_back(static_cast<unsigned int>(meshes.size()));
            meshes.push_back(srcMesh);
        }
    }

    // rebuild the scene's mesh array
    pScene->mNumMeshes = static_cast<unsigned int>(meshes.size());
    delete [] pScene->mMeshes;
    pScene->mMeshes = new aiMesh*[pScene->mNumMeshes];
    std::copy(meshes.begin(), meshes.end(), pScene->mMeshes);

    // recurse through all nodes and translate the node's mesh indices to fit the new mesh array
    UpdateNode(pScene->mRootNode);

    ASSIMP_LOG_DEBUG(format() << "SplitByBoneCountProcess end: split " << mSubMeshIndices.size() << " meshes into " << meshes.size() << " submeshes.");
}

// ------------------------------------------------------------------------------------------------
// Splits the given mesh by bone count.
void SplitByBoneCountProcess::SplitMesh(const aiMesh* pMesh, std::vector<aiMesh*>& poNewMeshes) const
{
    // skip if not necessary
    if (pMesh->mNumBones <= mMaxBoneCount)
    {
        return;
    }

    // necessary optimisation: build a list of all affecting bones for each vertex
    // TODO: (thom) maybe add a custom allocator here to avoid allocating tens of thousands of small arrays
    typedef std::pair<unsigned int, float> BoneWeight;
    std::vector<std::vector<BoneWeight> > vertexBones(pMesh->mNumVertices);
    for (unsigned int a = 0; a < pMesh->mNumBones; ++a)
    {
        const aiBone* bone = pMesh->mBones[a];
        for (unsigned int b = 0; b < bone->mNumWeights; ++b)
        {
            vertexBones[bone->mWeights[b].mVertexId].push_back(BoneWeight(a, bone->mWeights[b].mWeight));
        }
    }

    unsigned int numFacesHandled = 0;
    std::vector<bool> isFaceHandled(pMesh->mNumFaces, false);
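
    // Greedy submesh construction: each pass over the faces collects as many unhandled faces
    // as fit into the bone limit, builds one new mesh from them, and repeats until every face
    // has been assigned to a submesh.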
    while (numFacesHandled < pMesh->mNumFaces)
    {
        // which bones are used in the current submesh
        unsigned int numBones = 0;
        std::vector<bool> isBoneUsed(pMesh->mNumBones, false);
        // indices of the faces which are going to go into this submesh
        std::vector<unsigned int> subMeshFaces;
        subMeshFaces.reserve(pMesh->mNumFaces);
        // accumulated vertex count of all the faces in this submesh
        unsigned int numSubMeshVertices = 0;
        // a small local array of new bones for the current face. State of all used bones for that face
        // can only be updated AFTER the face is completely analysed. Thanks to imre for the fix.
        std::vector<unsigned int> newBonesAtCurrentFace;

        // add faces to the new submesh as long as all bones affecting the faces' vertices fit in the limit
        for (unsigned int a = 0; a < pMesh->mNumFaces; ++a)
        {
            // skip if the face is already stored in a submesh
            if (isFaceHandled[a])
            {
                continue;
            }

            const aiFace& face = pMesh->mFaces[a];
            // check every vertex if its bones would still fit into the current submesh
            for (unsigned int b = 0; b < face.mNumIndices; ++b)
            {
                const std::vector<BoneWeight>& vb = vertexBones[face.mIndices[b]];
                for (unsigned int c = 0; c < vb.size(); ++c)
                {
                    unsigned int boneIndex = vb[c].first;
                    // if the bone is already used in this submesh, it's ok
                    if (isBoneUsed[boneIndex])
                    {
                        continue;
                    }

                    // if it's not used, yet, we would need to add it. Store its bone index
                    if (std::find(newBonesAtCurrentFace.begin(), newBonesAtCurrentFace.end(), boneIndex) == newBonesAtCurrentFace.end())
                    {
                        newBonesAtCurrentFace.push_back(boneIndex);
                    }
                }
            }

            if (newBonesAtCurrentFace.size() > mMaxBoneCount)
            {
                throw DeadlyImportError("SplitByBoneCountProcess: Single face requires more bones than specified max bone count!");
            }

            // leave out the face if the new bones required for this face don't fit the bone count limit anymore
            if (numBones + newBonesAtCurrentFace.size() > mMaxBoneCount)
            {
                // discard the bones collected for this rejected face so they don't
                // carry over into the analysis of the following faces
                newBonesAtCurrentFace.clear();
                continue;
            }

            // mark all new bones as necessary
            while (!newBonesAtCurrentFace.empty())
            {
                unsigned int newIndex = newBonesAtCurrentFace.back();
                newBonesAtCurrentFace.pop_back(); // this also avoids the deallocation which comes with a clear()
                if (isBoneUsed[newIndex])
                {
                    continue;
                }

                isBoneUsed[newIndex] = true;
                numBones++;
            }

            // store the face index and the vertex count
            subMeshFaces.push_back(a);
            numSubMeshVertices += face.mNumIndices;

            // remember that this face is handled
            isFaceHandled[a] = true;
            numFacesHandled++;
        }

        // create a new mesh to hold this subset of the source mesh
        aiMesh* newMesh = new aiMesh;
        if (pMesh->mName.length > 0)
        {
            newMesh->mName.Set(format() << pMesh->mName.data << "_sub" << poNewMeshes.size());
        }
        newMesh->mMaterialIndex = pMesh->mMaterialIndex;
        newMesh->mPrimitiveTypes = pMesh->mPrimitiveTypes;
        poNewMeshes.push_back(newMesh);

        // create all the arrays for this mesh if the old mesh contained them
        newMesh->mNumVertices = numSubMeshVertices;
        newMesh->mNumFaces = static_cast<unsigned int>(subMeshFaces.size());
        newMesh->mVertices = new aiVector3D[newMesh->mNumVertices];
        if (pMesh->HasNormals())
        {
            newMesh->mNormals = new aiVector3D[newMesh->mNumVertices];
        }
        if (pMesh->HasTangentsAndBitangents())
        {
            newMesh->mTangents = new aiVector3D[newMesh->mNumVertices];
            newMesh->mBitangents = new aiVector3D[newMesh->mNumVertices];
        }
        for (unsigned int a = 0; a < AI_MAX_NUMBER_OF_TEXTURECOORDS; ++a)
        {
            if (pMesh->HasTextureCoords(a))
            {
                newMesh->mTextureCoords[a] = new aiVector3D[newMesh->mNumVertices];
            }
            newMesh->mNumUVComponents[a] = pMesh->mNumUVComponents[a];
        }
        for (unsigned int a = 0; a < AI_MAX_NUMBER_OF_COLOR_SETS; ++a)
        {
            if (pMesh->HasVertexColors(a))
            {
                newMesh->mColors[a] = new aiColor4D[newMesh->mNumVertices];
            }
        }

        // and copy over the data, generating faces with linear indices along the way
        newMesh->mFaces = new aiFace[subMeshFaces.size()];
        unsigned int nvi = 0; // next vertex index
        std::vector<unsigned int> previousVertexIndices(numSubMeshVertices, std::numeric_limits<unsigned int>::max()); // per new vertex: its index in the source mesh
        for (unsigned int a = 0; a < subMeshFaces.size(); ++a)
        {
            const aiFace& srcFace = pMesh->mFaces[subMeshFaces[a]];
            aiFace& dstFace = newMesh->mFaces[a];
            dstFace.mNumIndices = srcFace.mNumIndices;
            dstFace.mIndices = new unsigned int[dstFace.mNumIndices];

            // accumulate linearly all the vertices of the source face
            for (unsigned int b = 0; b < dstFace.mNumIndices; ++b)
            {
                unsigned int srcIndex = srcFace.mIndices[b];
                dstFace.mIndices[b] = nvi;
                previousVertexIndices[nvi] = srcIndex;

                newMesh->mVertices[nvi] = pMesh->mVertices[srcIndex];
                if (pMesh->HasNormals())
                {
                    newMesh->mNormals[nvi] = pMesh->mNormals[srcIndex];
                }
                if (pMesh->HasTangentsAndBitangents())
                {
                    newMesh->mTangents[nvi] = pMesh->mTangents[srcIndex];
                    newMesh->mBitangents[nvi] = pMesh->mBitangents[srcIndex];
                }
                for (unsigned int c = 0; c < AI_MAX_NUMBER_OF_TEXTURECOORDS; ++c)
                {
                    if (pMesh->HasTextureCoords(c))
                    {
                        newMesh->mTextureCoords[c][nvi] = pMesh->mTextureCoords[c][srcIndex];
                    }
                }
                for (unsigned int c = 0; c < AI_MAX_NUMBER_OF_COLOR_SETS; ++c)
                {
                    if (pMesh->HasVertexColors(c))
                    {
                        newMesh->mColors[c][nvi] = pMesh->mColors[c][srcIndex];
                    }
                }

                nvi++;
            }
        }

        ai_assert(nvi == numSubMeshVertices);

        // Create the bones for the new submesh: first create the bone array
        newMesh->mNumBones = 0;
        newMesh->mBones = new aiBone*[numBones];

        std::vector<unsigned int> mappedBoneIndex(pMesh->mNumBones, std::numeric_limits<unsigned int>::max());
        for (unsigned int a = 0; a < pMesh->mNumBones; ++a)
        {
            if (!isBoneUsed[a])
            {
                continue;
            }

            // create the new bone
            const aiBone* srcBone = pMesh->mBones[a];
            aiBone* dstBone = new aiBone;
            mappedBoneIndex[a] = newMesh->mNumBones;
            newMesh->mBones[newMesh->mNumBones++] = dstBone;
            dstBone->mName = srcBone->mName;
            dstBone->mOffsetMatrix = srcBone->mOffsetMatrix;
            dstBone->mNumWeights = 0;
        }

        ai_assert(newMesh->mNumBones == numBones);

        // iterate over all new vertices and count which bones affected its old vertex in the source mesh
        for (unsigned int a = 0; a < numSubMeshVertices; ++a)
        {
            unsigned int oldIndex = previousVertexIndices[a];
            const std::vector<BoneWeight>& bonesOnThisVertex = vertexBones[oldIndex];

            for (unsigned int b = 0; b < bonesOnThisVertex.size(); ++b)
            {
                unsigned int newBoneIndex = mappedBoneIndex[bonesOnThisVertex[b].first];
                if (newBoneIndex != std::numeric_limits<unsigned int>::max())
                {
                    newMesh->mBones[newBoneIndex]->mNumWeights++;
                }
            }
        }

        // allocate all bone weight arrays accordingly
        for (unsigned int a = 0; a < newMesh->mNumBones; ++a)
        {
            aiBone* bone = newMesh->mBones[a];
            ai_assert(bone->mNumWeights > 0);
            bone->mWeights = new aiVertexWeight[bone->mNumWeights];
            bone->mNumWeights = 0; // for counting up in the next step
        }

        // now copy all the bone vertex weights for all the vertices which made it into the new submesh
        for (unsigned int a = 0; a < numSubMeshVertices; ++a)
        {
            // find the source vertex for it in the source mesh
            unsigned int previousIndex = previousVertexIndices[a];
            // these bones were affecting it
            const std::vector<BoneWeight>& bonesOnThisVertex = vertexBones[previousIndex];
            // all of the bones affecting it should be present in the new submesh, or else
            // the face it comprises shouldn't be present
            for (unsigned int b = 0; b < bonesOnThisVertex.size(); ++b)
            {
                unsigned int newBoneIndex = mappedBoneIndex[bonesOnThisVertex[b].first];
                ai_assert(newBoneIndex != std::numeric_limits<unsigned int>::max());
                aiVertexWeight* dstWeight = newMesh->mBones[newBoneIndex]->mWeights + newMesh->mBones[newBoneIndex]->mNumWeights;
                newMesh->mBones[newBoneIndex]->mNumWeights++;

                dstWeight->mVertexId = a;
                dstWeight->mWeight = bonesOnThisVertex[b].second;
            }
        }

        // I have the strange feeling that this will break apart at some point in time...
    }
}

// ------------------------------------------------------------------------------------------------
// Recursively updates the node's mesh list to account for the changed mesh list
void SplitByBoneCountProcess::UpdateNode(aiNode* pNode) const
{
    // rebuild the node's mesh index list
    if (pNode->mNumMeshes > 0)
    {
        std::vector<unsigned int> newMeshList;
        for (unsigned int a = 0; a < pNode->mNumMeshes; ++a)
        {
            unsigned int srcIndex = pNode->mMeshes[a];
            const std::vector<unsigned int>& replaceMeshes = mSubMeshIndices[srcIndex];
            newMeshList.insert(newMeshList.end(), replaceMeshes.begin(), replaceMeshes.end());
        }

        delete [] pNode->mMeshes;
        pNode->mNumMeshes = static_cast<unsigned int>(newMeshList.size());
        pNode->mMeshes = new unsigned int[pNode->mNumMeshes];
        std::copy(newMeshList.begin(), newMeshList.end(), pNode->mMeshes);
    }

    // do that also recursively for all children
    for (unsigned int a = 0; a < pNode->mNumChildren; ++a)
    {
        UpdateNode(pNode->mChildren[a]);
    }
}