/*
Open Asset Import Library (assimp)
----------------------------------------------------------------------

Copyright (c) 2006-2012, assimp team
All rights reserved.

Redistribution and use of this software in source and binary forms,
with or without modification, are permitted provided that the
following conditions are met:

* Redistributions of source code must retain the above
  copyright notice, this list of conditions and the
  following disclaimer.

* Redistributions in binary form must reproduce the above
  copyright notice, this list of conditions and the
  following disclaimer in the documentation and/or other
  materials provided with the distribution.

* Neither the name of the assimp team, nor the names of its
  contributors may be used to endorse or promote products
  derived from this software without specific prior
  written permission of the assimp team.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

----------------------------------------------------------------------
*/


/// @file SplitByBoneCountProcess.cpp
/// Implementation of the SplitByBoneCount postprocessing step

#include "AssimpPCH.h"

// internal headers of the post-processing framework
#include "SplitByBoneCountProcess.h"

#include <limits>

using namespace Assimp;

// ------------------------------------------------------------------------------------------------
// Constructor
SplitByBoneCountProcess::SplitByBoneCountProcess()
{
  // set default, might be overridden by importer config
  mMaxBoneCount = AI_SBBC_DEFAULT_MAX_BONES;
}

// ------------------------------------------------------------------------------------------------
// Destructor
SplitByBoneCountProcess::~SplitByBoneCountProcess()
{
  // nothing to do here
}

// ------------------------------------------------------------------------------------------------
// Returns whether the processing step is present in the given flag field.
bool SplitByBoneCountProcess::IsActive( unsigned int pFlags) const
{
  return !!(pFlags & aiProcess_SplitByBoneCount);
}

// ------------------------------------------------------------------------------------------------
// Updates internal properties
void SplitByBoneCountProcess::SetupProperties(const Importer* pImp)
{
  mMaxBoneCount = pImp->GetPropertyInteger(AI_CONFIG_PP_SBBC_MAX_BONES,AI_SBBC_DEFAULT_MAX_BONES);
}
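
// A minimal client-side sketch of how this step is typically driven (illustration only; the
// file name below is a placeholder). The bone limit is read from the importer property above,
// so an application sets it before importing and requests the step via its post-process flag:
//
//   Assimp::Importer importer;
//   importer.SetPropertyInteger( AI_CONFIG_PP_SBBC_MAX_BONES, 60);
//   const aiScene* scene = importer.ReadFile( "character.dae",
//     aiProcess_Triangulate | aiProcess_SplitByBoneCount);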

// ------------------------------------------------------------------------------------------------
// Executes the post processing step on the given imported data.
void SplitByBoneCountProcess::Execute( aiScene* pScene)
{
  DefaultLogger::get()->debug("SplitByBoneCountProcess begin");

  // early out
  bool isNecessary = false;
  for( size_t a = 0; a < pScene->mNumMeshes; ++a)
    if( pScene->mMeshes[a]->mNumBones > mMaxBoneCount )
      isNecessary = true;

  if( !isNecessary )
  {
    DefaultLogger::get()->debug( boost::str( boost::format( "SplitByBoneCountProcess early-out: no meshes with more than %d bones.") % mMaxBoneCount));
    return;
  }

  // we need to do something. Let's go.
  mSubMeshIndices.clear();
  mSubMeshIndices.resize( pScene->mNumMeshes);

  // build a new array of meshes for the scene
  std::vector<aiMesh*> meshes;

  for( size_t a = 0; a < pScene->mNumMeshes; ++a)
  {
    aiMesh* srcMesh = pScene->mMeshes[a];

    std::vector<aiMesh*> newMeshes;
    SplitMesh( pScene->mMeshes[a], newMeshes);

    // mesh was split
    if( !newMeshes.empty() )
    {
      // store new meshes and indices of the new meshes
      for( size_t b = 0; b < newMeshes.size(); ++b)
      {
        mSubMeshIndices[a].push_back( meshes.size());
        meshes.push_back( newMeshes[b]);
      }

      // and destroy the source mesh. It should be completely contained inside the new submeshes
      delete srcMesh;
    }
    else
    {
      // Mesh is kept unchanged - store its new place in the mesh array
      mSubMeshIndices[a].push_back( meshes.size());
      meshes.push_back( srcMesh);
    }
  }
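
  // At this point mSubMeshIndices maps every original mesh index to the indices of its
  // replacement meshes in the new array (a single entry if the mesh was left untouched).
  // UpdateNode() below consumes this mapping to rewrite the nodes' mesh references.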

  // rebuild the scene's mesh array
  pScene->mNumMeshes = meshes.size();
  delete [] pScene->mMeshes;
  pScene->mMeshes = new aiMesh*[pScene->mNumMeshes];
  std::copy( meshes.begin(), meshes.end(), pScene->mMeshes);

  // recurse through all nodes and translate the node's mesh indices to fit the new mesh array
  UpdateNode( pScene->mRootNode);

  DefaultLogger::get()->debug( boost::str( boost::format( "SplitByBoneCountProcess end: split %d meshes into %d submeshes.") % mSubMeshIndices.size() % meshes.size()));
}

// ------------------------------------------------------------------------------------------------
// Splits the given mesh by bone count.
void SplitByBoneCountProcess::SplitMesh( const aiMesh* pMesh, std::vector<aiMesh*>& poNewMeshes) const
{
  // skip if not necessary
  if( pMesh->mNumBones <= mMaxBoneCount )
    return;

  // necessary optimisation: build a list of all affecting bones for each vertex
  // TODO: (thom) maybe add a custom allocator here to avoid allocating tens of thousands of small arrays
  typedef std::pair<size_t, float> BoneWeight;
  std::vector< std::vector<BoneWeight> > vertexBones( pMesh->mNumVertices);
  for( size_t a = 0; a < pMesh->mNumBones; ++a)
  {
    const aiBone* bone = pMesh->mBones[a];
    for( size_t b = 0; b < bone->mNumWeights; ++b)
      vertexBones[ bone->mWeights[b].mVertexId ].push_back( BoneWeight( a, bone->mWeights[b].mWeight));
  }

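  // Greedy packing: each iteration of the outer while loop below starts a new submesh and
  // pulls in every not-yet-handled face whose vertices' bones still fit into mMaxBoneCount
  // when combined with the bones already used by that submesh. The loop repeats until all
  // faces have been assigned to some submesh.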
  size_t numFacesHandled = 0;
  std::vector<bool> isFaceHandled( pMesh->mNumFaces, false);
  while( numFacesHandled < pMesh->mNumFaces )
  {
    // which bones are used in the current submesh
    size_t numBones = 0;
    std::vector<bool> isBoneUsed( pMesh->mNumBones, false);
    // indices of the faces which are going to go into this submesh
    std::vector<size_t> subMeshFaces;
    subMeshFaces.reserve( pMesh->mNumFaces);
    // accumulated vertex count of all the faces in this submesh
    size_t numSubMeshVertices = 0;
    // a small local array of new bones for the current face. State of all used bones for that face
    // can only be updated AFTER the face is completely analysed. Thanks to imre for the fix.
    std::vector<size_t> newBonesAtCurrentFace;

    // add faces to the new submesh as long as all bones affecting the faces' vertices fit in the limit
    for( size_t a = 0; a < pMesh->mNumFaces; ++a)
    {
      // skip if the face is already stored in a submesh
      if( isFaceHandled[a] )
        continue;

      const aiFace& face = pMesh->mFaces[a];
      // check every vertex if its bones would still fit into the current submesh
      for( size_t b = 0; b < face.mNumIndices; ++b )
      {
        const std::vector<BoneWeight>& vb = vertexBones[face.mIndices[b]];
        for( size_t c = 0; c < vb.size(); ++c)
        {
          size_t boneIndex = vb[c].first;
          // if the bone is already used in this submesh, it's ok
          if( isBoneUsed[boneIndex] )
            continue;

          // if it's not used yet, we would need to add it. Store its bone index
          if( std::find( newBonesAtCurrentFace.begin(), newBonesAtCurrentFace.end(), boneIndex) == newBonesAtCurrentFace.end() )
            newBonesAtCurrentFace.push_back( boneIndex);
        }
      }

      // leave out the face if the new bones required for this face don't fit the bone count limit anymore
      if( numBones + newBonesAtCurrentFace.size() > mMaxBoneCount )
      {
        // discard the scratch list as well, otherwise bones collected for this rejected face
        // would leak into the check for the next face
        newBonesAtCurrentFace.clear();
        continue;
      }

      // mark all new bones as necessary
      while( !newBonesAtCurrentFace.empty() )
      {
        size_t newIndex = newBonesAtCurrentFace.back();
        newBonesAtCurrentFace.pop_back(); // this also avoids the deallocation which comes with a clear()
        if( isBoneUsed[newIndex] )
          continue;

        isBoneUsed[newIndex] = true;
        numBones++;
      }

      // store the face index and the vertex count
      subMeshFaces.push_back( a);
      numSubMeshVertices += face.mNumIndices;

      // remember that this face is handled
      isFaceHandled[a] = true;
      numFacesHandled++;
    }
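
    // Note on cost: every pass of the enclosing while loop rescans the whole face list, so the
    // worst case is on the order of (number of faces) * (number of generated submeshes).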

    // create a new mesh to hold this subset of the source mesh
    aiMesh* newMesh = new aiMesh;
    if( pMesh->mName.length > 0 )
      newMesh->mName.Set( boost::str( boost::format( "%s_sub%d") % pMesh->mName.data % poNewMeshes.size()));
    newMesh->mMaterialIndex = pMesh->mMaterialIndex;
    newMesh->mPrimitiveTypes = pMesh->mPrimitiveTypes;
    poNewMeshes.push_back( newMesh);

    // create all the arrays for this mesh if the old mesh contained them
    newMesh->mNumVertices = numSubMeshVertices;
    newMesh->mNumFaces = subMeshFaces.size();
    newMesh->mVertices = new aiVector3D[newMesh->mNumVertices];
    if( pMesh->HasNormals() )
      newMesh->mNormals = new aiVector3D[newMesh->mNumVertices];
    if( pMesh->HasTangentsAndBitangents() )
    {
      newMesh->mTangents = new aiVector3D[newMesh->mNumVertices];
      newMesh->mBitangents = new aiVector3D[newMesh->mNumVertices];
    }
    for( size_t a = 0; a < AI_MAX_NUMBER_OF_TEXTURECOORDS; ++a )
    {
      if( pMesh->HasTextureCoords( a) )
        newMesh->mTextureCoords[a] = new aiVector3D[newMesh->mNumVertices];
      newMesh->mNumUVComponents[a] = pMesh->mNumUVComponents[a];
    }
    for( size_t a = 0; a < AI_MAX_NUMBER_OF_COLOR_SETS; ++a )
    {
      if( pMesh->HasVertexColors( a) )
        newMesh->mColors[a] = new aiColor4D[newMesh->mNumVertices];
    }

    // and copy over the data, generating faces with linear indices along the way
    newMesh->mFaces = new aiFace[subMeshFaces.size()];
    size_t nvi = 0; // next vertex index
    std::vector<size_t> previousVertexIndices( numSubMeshVertices, std::numeric_limits<size_t>::max()); // per new vertex: its index in the source mesh
    for( size_t a = 0; a < subMeshFaces.size(); ++a )
    {
      const aiFace& srcFace = pMesh->mFaces[subMeshFaces[a]];
      aiFace& dstFace = newMesh->mFaces[a];
      dstFace.mNumIndices = srcFace.mNumIndices;
      dstFace.mIndices = new unsigned int[dstFace.mNumIndices];

      // accumulate linearly all the vertices of the source face
      for( size_t b = 0; b < dstFace.mNumIndices; ++b )
      {
        size_t srcIndex = srcFace.mIndices[b];
        dstFace.mIndices[b] = nvi;
        previousVertexIndices[nvi] = srcIndex;

        newMesh->mVertices[nvi] = pMesh->mVertices[srcIndex];
        if( pMesh->HasNormals() )
          newMesh->mNormals[nvi] = pMesh->mNormals[srcIndex];
        if( pMesh->HasTangentsAndBitangents() )
        {
          newMesh->mTangents[nvi] = pMesh->mTangents[srcIndex];
          newMesh->mBitangents[nvi] = pMesh->mBitangents[srcIndex];
        }
        for( size_t c = 0; c < AI_MAX_NUMBER_OF_TEXTURECOORDS; ++c )
        {
          if( pMesh->HasTextureCoords( c) )
            newMesh->mTextureCoords[c][nvi] = pMesh->mTextureCoords[c][srcIndex];
        }
        for( size_t c = 0; c < AI_MAX_NUMBER_OF_COLOR_SETS; ++c )
        {
          if( pMesh->HasVertexColors( c) )
            newMesh->mColors[c][nvi] = pMesh->mColors[c][srcIndex];
        }

        nvi++;
      }
    }

    ai_assert( nvi == numSubMeshVertices );
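
    // Note: faces are written with strictly sequential vertex indices, so a source vertex that is
    // referenced by several faces is copied once per reference. This keeps the copy logic simple
    // at the cost of some duplicated vertex data in the submeshes.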

    // Create the bones for the new submesh: first create the bone array
    newMesh->mNumBones = 0;
    newMesh->mBones = new aiBone*[numBones];

    std::vector<size_t> mappedBoneIndex( pMesh->mNumBones, std::numeric_limits<size_t>::max());
    for( size_t a = 0; a < pMesh->mNumBones; ++a )
    {
      if( !isBoneUsed[a] )
        continue;

      // create the new bone
      const aiBone* srcBone = pMesh->mBones[a];
      aiBone* dstBone = new aiBone;
      mappedBoneIndex[a] = newMesh->mNumBones;
      newMesh->mBones[newMesh->mNumBones++] = dstBone;
      dstBone->mName = srcBone->mName;
      dstBone->mOffsetMatrix = srcBone->mOffsetMatrix;
      dstBone->mNumWeights = 0;
    }

    ai_assert( newMesh->mNumBones == numBones );

    // iterate over all new vertices and count which bones affect each one's source vertex in the original mesh
    for( size_t a = 0; a < numSubMeshVertices; ++a )
    {
      size_t oldIndex = previousVertexIndices[a];
      const std::vector<BoneWeight>& bonesOnThisVertex = vertexBones[oldIndex];

      for( size_t b = 0; b < bonesOnThisVertex.size(); ++b )
      {
        size_t newBoneIndex = mappedBoneIndex[ bonesOnThisVertex[b].first ];
        if( newBoneIndex != std::numeric_limits<size_t>::max() )
          newMesh->mBones[newBoneIndex]->mNumWeights++;
      }
    }

    // allocate all bone weight arrays accordingly
    for( size_t a = 0; a < newMesh->mNumBones; ++a )
    {
      aiBone* bone = newMesh->mBones[a];
      ai_assert( bone->mNumWeights > 0 );
      bone->mWeights = new aiVertexWeight[bone->mNumWeights];
      bone->mNumWeights = 0; // for counting up in the next step
    }

    // now copy all the bone vertex weights for all the vertices which made it into the new submesh
    for( size_t a = 0; a < numSubMeshVertices; ++a)
    {
      // find the source vertex for it in the source mesh
      size_t previousIndex = previousVertexIndices[a];
      // these bones were affecting it
      const std::vector<BoneWeight>& bonesOnThisVertex = vertexBones[previousIndex];
      // all of the bones affecting it should be present in the new submesh, or else
      // the face it belongs to shouldn't be present there either
      for( size_t b = 0; b < bonesOnThisVertex.size(); ++b)
      {
        size_t newBoneIndex = mappedBoneIndex[ bonesOnThisVertex[b].first ];
        ai_assert( newBoneIndex != std::numeric_limits<size_t>::max() );
        aiVertexWeight* dstWeight = newMesh->mBones[newBoneIndex]->mWeights + newMesh->mBones[newBoneIndex]->mNumWeights;
        newMesh->mBones[newBoneIndex]->mNumWeights++;

        dstWeight->mVertexId = a;
        dstWeight->mWeight = bonesOnThisVertex[b].second;
      }
    }

    // I have the strange feeling that this will break apart at some point in time...
  }
}

// ------------------------------------------------------------------------------------------------
// Recursively updates the node's mesh list to account for the changed mesh list
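// A node that referenced a split mesh now references all of the resulting submeshes, so its
// mesh index list may grow; a mesh that was left intact simply maps to its (possibly shifted)
// new position in the rebuilt mesh array.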
void SplitByBoneCountProcess::UpdateNode( aiNode* pNode) const
{
  // rebuild the node's mesh index list
  if( pNode->mNumMeshes > 0 )
  {
    std::vector<size_t> newMeshList;
    for( size_t a = 0; a < pNode->mNumMeshes; ++a)
    {
      size_t srcIndex = pNode->mMeshes[a];
      const std::vector<size_t>& replaceMeshes = mSubMeshIndices[srcIndex];
      newMeshList.insert( newMeshList.end(), replaceMeshes.begin(), replaceMeshes.end());
    }

    delete [] pNode->mMeshes;
    pNode->mNumMeshes = newMeshList.size();
    pNode->mMeshes = new unsigned int[pNode->mNumMeshes];
    std::copy( newMeshList.begin(), newMeshList.end(), pNode->mMeshes);
  }

  // do the same recursively for all children
  for( size_t a = 0; a < pNode->mNumChildren; ++a )
  {
    UpdateNode( pNode->mChildren[a]);
  }
}