Merge branch 'master' into gltf2_sparseAccessor_import

pull/3219/head
Kim Kulling 2020-05-25 16:11:01 +02:00 committed by GitHub
commit 78c9ac2889
52 changed files with 288 additions and 193 deletions

View File

@ -539,7 +539,7 @@ void Discreet3DSImporter::AddNodeToGraph(aiScene *pcSOut, aiNode *pcOut,
ai_assert(nullptr != anim);
if (pcIn->aCameraRollKeys.size() > 1) {
ASSIMP_LOG_DEBUG("3DS: Converting camera roll track ...");
ASSIMP_LOG_VERBOSE_DEBUG("3DS: Converting camera roll track ...");
// Camera roll keys - in fact they're just rotations
// around the camera's z axis. The angles are given
@ -558,7 +558,7 @@ void Discreet3DSImporter::AddNodeToGraph(aiScene *pcSOut, aiNode *pcOut,
#if 0
if (pcIn->aTargetPositionKeys.size() > 1)
{
ASSIMP_LOG_DEBUG("3DS: Converting target track ...");
ASSIMP_LOG_VERBOSE_DEBUG("3DS: Converting target track ...");
// Camera or spot light - need to convert the separate
// target position channel to our representation

View File

@ -147,7 +147,7 @@ D3MFOpcPackage::D3MFOpcPackage(IOSystem* pIOHandler, const std::string& rFile)
}
}
ASSIMP_LOG_DEBUG(rootFile);
ASSIMP_LOG_VERBOSE_DEBUG(rootFile);
mZipArchive->Close(fileStream);

View File

@ -207,7 +207,7 @@ void AC3DImporter::LoadObjectSection(std::vector<Object> &objects) {
light->mName.length = ::ai_snprintf(light->mName.data, MAXLEN, "ACLight_%i", static_cast<unsigned int>(mLights->size()) - 1);
obj.name = std::string(light->mName.data);
ASSIMP_LOG_DEBUG("AC3D: Light source encountered");
ASSIMP_LOG_VERBOSE_DEBUG("AC3D: Light source encountered");
obj.type = Object::Light;
} else if (!ASSIMP_strincmp(buffer, "group", 5)) {
obj.type = Object::Group;
@ -294,7 +294,7 @@ void AC3DImporter::LoadObjectSection(std::vector<Object> &objects) {
// example writes no surf chunks
if (!Q3DWorkAround) {
ASSIMP_LOG_WARN("AC3D: SURF token was expected");
ASSIMP_LOG_DEBUG("Continuing with Quick3D Workaround enabled");
ASSIMP_LOG_VERBOSE_DEBUG("Continuing with Quick3D Workaround enabled");
}
--buffer; // make sure the line is processed a second time
// break; --- see fix notes above

View File

@ -614,7 +614,7 @@ void ASEImporter::AddNodes(const std::vector<BaseNode *> &nodes,
node->mNumChildren++;
// What we did is so great, it is at least worth a debug message
ASSIMP_LOG_DEBUG("ASE: Generating separate target node (" + snode->mName + ")");
ASSIMP_LOG_VERBOSE_DEBUG("ASE: Generating separate target node (" + snode->mName + ")");
}
}

View File

@ -493,37 +493,30 @@ void BVHLoader::CreateAnimation(aiScene *pScene) {
for (unsigned int fr = 0; fr < mAnimNumFrames; ++fr) {
aiMatrix4x4 temp;
aiMatrix3x3 rotMatrix;
for (BVHLoader::ChannelType channel = Channel_RotationX; channel <= Channel_RotationZ; channel = (BVHLoader::ChannelType)(channel + 1)) {
//Find channel in node
std::map<BVHLoader::ChannelType, int>::iterator mapIter = channelMap.find(channel);
if (mapIter == channelMap.end())
throw DeadlyImportError("Missing rotation channel in node " + nodeName);
else {
int channelIdx = mapIter->second;
const float angle = node.mChannelValues[fr * node.mChannels.size() + channelIdx] * float(AI_MATH_PI) / 180.0f;
// Compute rotation transformations in the right order
switch (channel) {
case Channel_RotationX:
aiMatrix4x4::RotationX(angle, temp);
rotMatrix *= aiMatrix3x3(temp);
break;
case Channel_RotationY:
aiMatrix4x4::RotationY(angle, temp);
rotMatrix *= aiMatrix3x3(temp);
break;
case Channel_RotationZ:
aiMatrix4x4::RotationZ(angle, temp);
rotMatrix *= aiMatrix3x3(temp);
break;
default:
break;
}
}
}
// translate ZXY euler angels into a quaternion
for (unsigned int channelIdx = 0; channelIdx < node.mChannels.size(); ++ channelIdx) {
switch (node.mChannels[channelIdx]) {
case Channel_RotationX:
{
const float angle = node.mChannelValues[fr * node.mChannels.size() + channelIdx] * float(AI_MATH_PI) / 180.0f;
aiMatrix4x4::RotationX( angle, temp); rotMatrix *= aiMatrix3x3( temp);
}
break;
case Channel_RotationY:
{
const float angle = node.mChannelValues[fr * node.mChannels.size() + channelIdx] * float(AI_MATH_PI) / 180.0f;
aiMatrix4x4::RotationY( angle, temp); rotMatrix *= aiMatrix3x3( temp);
}
break;
case Channel_RotationZ:
{
const float angle = node.mChannelValues[fr * node.mChannels.size() + channelIdx] * float(AI_MATH_PI) / 180.0f;
aiMatrix4x4::RotationZ( angle, temp); rotMatrix *= aiMatrix3x3( temp);
}
break;
default:
break;
}
}
rotkey->mTime = double(fr);
rotkey->mValue = aiQuaternion(rotMatrix);
++rotkey;

View File

@ -353,7 +353,7 @@ void SectionParser ::Next() {
}
#ifdef ASSIMP_BUILD_BLENDER_DEBUG
ASSIMP_LOG_DEBUG(current.id);
ASSIMP_LOG_VERBOSE_DEBUG(current.id);
#endif
}

View File

@ -290,7 +290,7 @@ aiNode *COBImporter::BuildNodes(const Node &root, const Scene &scin, aiScene *fi
}
std::unique_ptr<const Material> defmat;
if (!min) {
ASSIMP_LOG_DEBUG(format() << "Could not resolve material index "
ASSIMP_LOG_VERBOSE_DEBUG(format() << "Could not resolve material index "
<< reflist.first << " - creating default material for this slot");
defmat.reset(min = new Material());

View File

@ -284,7 +284,7 @@ void ColladaParser::ReadContents() {
ReadStructure();
} else {
ASSIMP_LOG_DEBUG_F("Ignoring global element <", mReader->getNodeName(), ">.");
ASSIMP_LOG_VERBOSE_DEBUG_F("Ignoring global element <", mReader->getNodeName(), ">.");
SkipElement();
}
} else {

View File

@ -135,7 +135,7 @@ public:
for(;splitter->length() && splitter->at(0) != '}'; splitter++, cnt++);
splitter++;
ASSIMP_LOG_DEBUG((Formatter::format("DXF: skipped over control group ("),cnt," lines)"));
ASSIMP_LOG_VERBOSE_DEBUG((Formatter::format("DXF: skipped over control group ("),cnt," lines)"));
}
} catch(std::logic_error&) {
ai_assert(!splitter);

View File

@ -241,7 +241,7 @@ void DXFImporter::ConvertMeshes(aiScene* pScene, DXF::FileData& output) {
}
}
ASSIMP_LOG_DEBUG_F("DXF: Unexpanded polycount is ", icount, ", vertex count is ", vcount);
ASSIMP_LOG_VERBOSE_DEBUG_F("DXF: Unexpanded polycount is ", icount, ", vertex count is ", vcount);
}
if (! output.blocks.size() ) {
@ -473,7 +473,7 @@ void DXFImporter::ParseBlocks(DXF::LineReader& reader, DXF::FileData& output) {
++reader;
}
ASSIMP_LOG_DEBUG_F("DXF: got ", output.blocks.size()," entries in BLOCKS" );
ASSIMP_LOG_VERBOSE_DEBUG_F("DXF: got ", output.blocks.size()," entries in BLOCKS" );
}
// ------------------------------------------------------------------------------------------------
@ -549,7 +549,7 @@ void DXFImporter::ParseEntities(DXF::LineReader& reader, DXF::FileData& output)
++reader;
}
ASSIMP_LOG_DEBUG_F( "DXF: got ", block.lines.size()," polylines and ", block.insertions.size(),
ASSIMP_LOG_VERBOSE_DEBUG_F( "DXF: got ", block.lines.size()," polylines and ", block.insertions.size(),
" inserted blocks in ENTITIES" );
}

View File

@ -53,6 +53,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <stdint.h>
#include <assimp/Exceptional.h>
#include <assimp/ByteSwapper.h>
#include <assimp/DefaultLogger.hpp>
namespace Assimp {
namespace FBX {
@ -426,7 +427,8 @@ bool ReadScope(TokenList& output_tokens, const char* input, const char*& cursor,
// TODO: Test FBX Binary files newer than the 7500 version to check if the 64 bits address behaviour is consistent
void TokenizeBinary(TokenList& output_tokens, const char* input, size_t length)
{
ai_assert(input);
ai_assert(input);
ASSIMP_LOG_DEBUG("Tokenizing binary FBX file");
if(length < 0x1b) {
TokenizeError("file is too short",0);
@ -451,6 +453,7 @@ void TokenizeBinary(TokenList& output_tokens, const char* input, size_t length)
/*Result ignored*/ ReadByte(input, cursor, input + length);
/*Result ignored*/ ReadByte(input, cursor, input + length);
const uint32_t version = ReadWord(input, cursor, input + length);
ASSIMP_LOG_DEBUG_F("FBX version: ", version);
const bool is64bits = version >= 7500;
const char *end = input + length;
while (cursor < end ) {

View File

@ -105,7 +105,7 @@ FBXConverter::FBXConverter(aiScene *out, const Document &doc, bool removeEmptyBo
// The idea here is to traverse all objects to find these Textures and convert them,
// so later during material conversion it will find converted texture in the textures_converted array.
if (doc.Settings().readTextures) {
ConvertOrphantEmbeddedTextures();
ConvertOrphanedEmbeddedTextures();
}
ConvertRootNode();
@ -1542,10 +1542,10 @@ void FBXConverter::ConvertCluster(std::vector<aiBone *> &local_mesh_bones, const
aiBone *bone = nullptr;
if (bone_map.count(deformer_name)) {
ASSIMP_LOG_DEBUG_F("retrieved bone from lookup ", bone_name.C_Str(), ". Deformer:", deformer_name);
ASSIMP_LOG_VERBOSE_DEBUG_F("retrieved bone from lookup ", bone_name.C_Str(), ". Deformer:", deformer_name);
bone = bone_map[deformer_name];
} else {
ASSIMP_LOG_DEBUG_F("created new bone ", bone_name.C_Str(), ". Deformer: ", deformer_name);
ASSIMP_LOG_VERBOSE_DEBUG_F("created new bone ", bone_name.C_Str(), ". Deformer: ", deformer_name);
bone = new aiBone();
bone->mName = bone_name;
@ -2719,7 +2719,7 @@ void FBXConverter::GenerateNodeAnimations(std::vector<aiNodeAnim *> &node_anims,
if (doc.Settings().optimizeEmptyAnimationCurves &&
IsRedundantAnimationData(target, comp, (chain[i]->second))) {
FBXImporter::LogDebug("dropping redundant animation channel for node " + target.Name());
FBXImporter::LogVerboseDebug("dropping redundant animation channel for node " + target.Name());
continue;
}
@ -3467,7 +3467,7 @@ void FBXConverter::TransferDataToScene() {
}
}
void FBXConverter::ConvertOrphantEmbeddedTextures() {
void FBXConverter::ConvertOrphanedEmbeddedTextures() {
// in C++14 it could be:
// for (auto&& [id, object] : objects)
for (auto &&id_and_object : doc.Objects()) {

View File

@ -412,7 +412,7 @@ private:
// ------------------------------------------------------------------------------------------------
// FBX file could have embedded textures not connected to anything
void ConvertOrphantEmbeddedTextures();
void ConvertOrphanedEmbeddedTextures();
private:
// 0: not assigned yet, others: index is value - 1

View File

@ -55,6 +55,8 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "FBXDocumentUtil.h"
#include "FBXProperties.h"
#include <assimp/DefaultLogger.hpp>
#include <memory>
#include <functional>
#include <map>
@ -264,6 +266,8 @@ Document::Document(const Parser& parser, const ImportSettings& settings)
: settings(settings)
, parser(parser)
{
ASSIMP_LOG_DEBUG("Creating FBX Document");
// Cannot use array default initialization syntax because vc8 fails on it
for (auto &timeStamp : creationTimeStamp) {
timeStamp = 0;
@ -308,6 +312,7 @@ void Document::ReadHeader() {
const Scope& shead = *ehead->Compound();
fbxVersion = ParseTokenAsInt(GetRequiredToken(GetRequiredElement(shead,"FBXVersion",ehead),0));
ASSIMP_LOG_DEBUG_F("FBX Version: ", fbxVersion);
// While we may have some success with newer files, we don't support
// the older 6.n fbx format

View File

@ -146,6 +146,8 @@ void FBXImporter::InternReadFile(const std::string &pFile, aiScene *pScene, IOSy
ThrowException("Could not open file for reading");
}
ASSIMP_LOG_DEBUG("Reading FBX file");
// read entire file into memory - no streaming for this, fbx
// files can grow large, but the assimp output data structure
// then becomes very large, too. Assimp doesn't support

View File

@ -383,7 +383,7 @@ Video::Video(uint64_t id, const Element& element, const Document& doc, const std
} catch (const runtime_error& runtimeError)
{
//we don't need the content data for contents that has already been loaded
ASSIMP_LOG_DEBUG_F("Caught exception in FBXMaterial (likely because content was already loaded): ",
ASSIMP_LOG_VERBOSE_DEBUG_F("Caught exception in FBXMaterial (likely because content was already loaded): ",
runtimeError.what());
}
}

View File

@ -59,6 +59,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <assimp/ParsingUtils.h>
#include <assimp/fast_atof.h>
#include <assimp/ByteSwapper.h>
#include <assimp/DefaultLogger.hpp>
#include <iostream>
@ -220,6 +221,7 @@ Parser::Parser (const TokenList& tokens, bool is_binary)
, cursor(tokens.begin())
, is_binary(is_binary)
{
ASSIMP_LOG_DEBUG("Parsing FBX tokens");
root.reset(new Scope(*this,true));
}

View File

@ -54,6 +54,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "FBXTokenizer.h"
#include "FBXUtil.h"
#include <assimp/Exceptional.h>
#include <assimp/DefaultLogger.hpp>
namespace Assimp {
namespace FBX {
@ -134,7 +135,8 @@ void ProcessDataToken( TokenList& output_tokens, const char*& start, const char*
// ------------------------------------------------------------------------------------------------
void Tokenize(TokenList& output_tokens, const char* input)
{
ai_assert(input);
ai_assert(input);
ASSIMP_LOG_DEBUG("Tokenizing ASCII FBX file");
// line and column numbers numbers are one-based
unsigned int line = 1;

View File

@ -221,7 +221,7 @@ void ProcessBooleanHalfSpaceDifference(const Schema_2x3::IfcHalfSpaceSolid *hs,
result.mVerts.pop_back();
}
}
IFCImporter::LogDebug("generating CSG geometry by plane clipping (IfcBooleanClippingResult)");
IFCImporter::LogVerboseDebug("generating CSG geometry by plane clipping (IfcBooleanClippingResult)");
}
// ------------------------------------------------------------------------------------------------
@ -658,7 +658,7 @@ void ProcessPolygonalBoundedBooleanHalfSpaceDifference(const Schema_2x3::IfcPoly
}
}
}
IFCImporter::LogDebug("generating CSG geometry by plane clipping with polygonal bounding (IfcBooleanClippingResult)");
IFCImporter::LogVerboseDebug("generating CSG geometry by plane clipping with polygonal bounding (IfcBooleanClippingResult)");
}
// ------------------------------------------------------------------------------------------------
@ -706,7 +706,7 @@ void ProcessBooleanExtrudedAreaSolidDifference(const Schema_2x3::IfcExtrudedArea
vit += pcount;
}
IFCImporter::LogDebug("generating CSG geometry by geometric difference to a solid (IfcExtrudedAreaSolid)");
IFCImporter::LogVerboseDebug("generating CSG geometry by geometric difference to a solid (IfcExtrudedAreaSolid)");
}
// ------------------------------------------------------------------------------------------------

View File

@ -223,7 +223,7 @@ public:
}
if ( (std::string)curveSegment.Transition != "CONTINUOUS" ) {
IFCImporter::LogDebug("ignoring transition code on composite curve segment, only continuous transitions are supported");
IFCImporter::LogVerboseDebug("ignoring transition code on composite curve segment, only continuous transitions are supported");
}
curves.push_back( CurveEntry(bc,IsTrue(curveSegment.SameSense)) );

View File

@ -319,7 +319,7 @@ void ProcessRevolvedAreaSolid(const Schema_2x3::IfcRevolvedAreaSolid& solid, Tem
ConvertAxisPlacement(trafo, solid.Position);
result.Transform(trafo);
IFCImporter::LogDebug("generate mesh procedurally by radial extrusion (IfcRevolvedAreaSolid)");
IFCImporter::LogVerboseDebug("generate mesh procedurally by radial extrusion (IfcRevolvedAreaSolid)");
}
// ------------------------------------------------------------------------------------------------
@ -457,7 +457,7 @@ void ProcessSweptDiskSolid(const Schema_2x3::IfcSweptDiskSolid &solid, TempMesh&
}
}
IFCImporter::LogDebug("generate mesh procedurally by sweeping a disk along a curve (IfcSweptDiskSolid)");
IFCImporter::LogVerboseDebug("generate mesh procedurally by sweeping a disk along a curve (IfcSweptDiskSolid)");
}
// ------------------------------------------------------------------------------------------------
@ -660,7 +660,7 @@ void ProcessExtrudedArea(const Schema_2x3::IfcExtrudedAreaSolid& solid, const Te
IFCImporter::LogWarn("failed to resolve all openings, presumably their topology is not supported by Assimp");
}
IFCImporter::LogDebug("generate mesh procedurally by extrusion (IfcExtrudedAreaSolid)");
IFCImporter::LogVerboseDebug("generate mesh procedurally by extrusion (IfcExtrudedAreaSolid)");
// If this is an opening element, store both the extruded mesh and the 2D profile mesh
// it was created from. Return an empty mesh to the caller.

View File

@ -641,14 +641,14 @@ aiNode *ProcessSpatialStructure(aiNode *parent, const Schema_2x3::IfcProduct &el
bool skipGeometry = false;
if (conv.settings.skipSpaceRepresentations) {
if (el.ToPtr<Schema_2x3::IfcSpace>()) {
IFCImporter::LogDebug("skipping IfcSpace entity due to importer settings");
IFCImporter::LogVerboseDebug("skipping IfcSpace entity due to importer settings");
skipGeometry = true;
}
}
if (conv.settings.skipAnnotations) {
if (el.ToPtr<Schema_2x3::IfcAnnotation>()) {
IFCImporter::LogDebug("skipping IfcAnnotation entity due to importer settings");
IFCImporter::LogVerboseDebug("skipping IfcAnnotation entity due to importer settings");
return nullptr;
}
}
@ -856,7 +856,7 @@ void ProcessSpatialStructures(ConversionData &conv) {
if (!prod) {
continue;
}
IFCImporter::LogDebug("looking at spatial structure `" + (prod->Name ? prod->Name.Get() : "unnamed") + "`" + (prod->ObjectType ? " which is of type " + prod->ObjectType.Get() : ""));
IFCImporter::LogVerboseDebug("looking at spatial structure `" + (prod->Name ? prod->Name.Get() : "unnamed") + "`" + (prod->ObjectType ? " which is of type " + prod->ObjectType.Get() : ""));
// the primary sites are referenced by an IFCRELAGGREGATES element which assigns them to the IFCPRODUCT
const STEP::DB::RefMap &refs = conv.db.GetRefs();
@ -868,7 +868,7 @@ void ProcessSpatialStructures(ConversionData &conv) {
// comparing pointer values is not sufficient, we would need to cast them to the same type first
// as there is multiple inheritance in the game.
if (def.GetID() == prod->GetID()) {
IFCImporter::LogDebug("selecting this spatial structure as root structure");
IFCImporter::LogVerboseDebug("selecting this spatial structure as root structure");
// got it, this is one primary site.
nodes.push_back(ProcessSpatialStructure(NULL, *prod, conv, NULL));
}

View File

@ -1359,7 +1359,7 @@ bool GenerateOpenings(std::vector<TempOpening>& openings,
break;
}
else {
IFCImporter::LogDebug("merging overlapping openings");
IFCImporter::LogVerboseDebug("merging overlapping openings");
ExtractVerticesFromClipper(poly[0].outer, temp_contour, false);
// Generate the union of the bounding boxes

View File

@ -162,7 +162,7 @@ void TempMesh::RemoveDegenerates()
}
if(drop) {
IFCImporter::LogDebug("removing degenerate faces");
IFCImporter::LogVerboseDebug("removing degenerate faces");
}
}
@ -437,7 +437,7 @@ void TempMesh::RemoveAdjacentDuplicates() {
base += cnt;
}
if(drop) {
IFCImporter::LogDebug("removing duplicate vertices");
IFCImporter::LogVerboseDebug("removing duplicate vertices");
}
}

View File

@ -422,7 +422,7 @@ void LWOImporter::InternReadFile(const std::string &pFile,
// So we use a separate implementation.
ComputeNormals(mesh, smoothingGroups, _mSurfaces[j]);
} else {
ASSIMP_LOG_DEBUG("LWO2: No need to compute normals, they're already there");
ASSIMP_LOG_VERBOSE_DEBUG("LWO2: No need to compute normals, they're already there");
}
++p;
}

View File

@ -154,7 +154,7 @@ bool LWOImporter::HandleTextures(aiMaterial *pcMat, const TextureList &in, aiTex
static_assert(sizeof(aiUVTransform) / sizeof(ai_real) == 5, "sizeof(aiUVTransform)/sizeof(ai_real) == 5");
pcMat->AddProperty(&trafo, 1, AI_MATKEY_UVTRANSFORM(type, cur));
}
ASSIMP_LOG_DEBUG("LWO2: Setting up non-UV mapping");
ASSIMP_LOG_VERBOSE_DEBUG("LWO2: Setting up non-UV mapping");
}
// The older LWOB format does not use indirect references to clips.

View File

@ -101,7 +101,7 @@ void LWS::Element::Parse(const char *&buffer) {
SkipSpaces(&buffer);
if (children.back().tokens[0] == "Plugin") {
ASSIMP_LOG_DEBUG("LWS: Skipping over plugin-specific data");
ASSIMP_LOG_VERBOSE_DEBUG("LWS: Skipping over plugin-specific data");
// strange stuff inside Plugin/Endplugin blocks. Needn't
// follow LWS syntax, so we skip over it

View File

@ -888,7 +888,7 @@ void MD3Importer::InternReadFile( const std::string& pFile, aiScene* pScene, IOS
if (it != skins.textures.end()) {
texture_name = &*( _texture_name = (*it).second).begin();
ASSIMP_LOG_DEBUG_F("MD3: Assigning skin texture ", (*it).second, " to surface ", pcSurfaces->NAME);
ASSIMP_LOG_VERBOSE_DEBUG_F("MD3: Assigning skin texture ", (*it).second, " to surface ", pcSurfaces->NAME);
(*it).resolved = true; // mark entry as resolved
}

View File

@ -168,7 +168,7 @@ void OgreBinarySerializer::RollbackHeader() {
void OgreBinarySerializer::SkipBytes(size_t numBytes) {
#if (OGRE_BINARY_SERIALIZER_DEBUG == 1)
ASSIMP_LOG_DEBUG_F("Skipping ", numBytes, " bytes");
ASSIMP_LOG_VERBOSE_DEBUG_F("Skipping ", numBytes, " bytes");
#endif
m_reader->IncPtr(numBytes);
@ -207,8 +207,8 @@ Mesh *OgreBinarySerializer::ImportMesh(MemoryStreamReader *stream) {
void OgreBinarySerializer::ReadMesh(Mesh *mesh) {
mesh->hasSkeletalAnimations = Read<bool>();
ASSIMP_LOG_DEBUG("Reading Mesh");
ASSIMP_LOG_DEBUG_F(" - Skeletal animations: ", mesh->hasSkeletalAnimations ? "true" : "false");
ASSIMP_LOG_VERBOSE_DEBUG("Reading Mesh");
ASSIMP_LOG_VERBOSE_DEBUG_F(" - Skeletal animations: ", mesh->hasSkeletalAnimations ? "true" : "false");
if (!AtEnd()) {
uint16_t id = ReadHeader();
@ -364,9 +364,9 @@ void OgreBinarySerializer::ReadSubMesh(Mesh *mesh) {
submesh->indexData->faceCount = static_cast<uint32_t>(submesh->indexData->count / 3);
submesh->indexData->is32bit = Read<bool>();
ASSIMP_LOG_DEBUG_F("Reading SubMesh ", mesh->subMeshes.size());
ASSIMP_LOG_DEBUG_F(" - Material: '", submesh->materialRef, "'");
ASSIMP_LOG_DEBUG_F(" - Uses shared geometry: ", submesh->usesSharedVertexData ? "true" : "false");
ASSIMP_LOG_VERBOSE_DEBUG_F("Reading SubMesh ", mesh->subMeshes.size());
ASSIMP_LOG_VERBOSE_DEBUG_F(" - Material: '", submesh->materialRef, "'");
ASSIMP_LOG_VERBOSE_DEBUG_F(" - Uses shared geometry: ", submesh->usesSharedVertexData ? "true" : "false");
// Index buffer
if (submesh->indexData->count > 0) {
@ -374,7 +374,7 @@ void OgreBinarySerializer::ReadSubMesh(Mesh *mesh) {
uint8_t *indexBuffer = ReadBytes(numBytes);
submesh->indexData->buffer = MemoryStreamPtr(new Assimp::MemoryIOStream(indexBuffer, numBytes, true));
ASSIMP_LOG_DEBUG_F(" - ", submesh->indexData->faceCount,
ASSIMP_LOG_VERBOSE_DEBUG_F(" - ", submesh->indexData->faceCount,
" faces from ", submesh->indexData->count, (submesh->indexData->is32bit ? " 32bit" : " 16bit"),
" indexes of ", numBytes, " bytes");
}
@ -475,7 +475,7 @@ void OgreBinarySerializer::ReadSubMeshNames(Mesh *mesh) {
}
submesh->name = ReadLine();
ASSIMP_LOG_DEBUG_F(" - SubMesh ", submesh->index, " name '", submesh->name, "'");
ASSIMP_LOG_VERBOSE_DEBUG_F(" - SubMesh ", submesh->index, " name '", submesh->name, "'");
if (!AtEnd())
id = ReadHeader();
@ -488,7 +488,7 @@ void OgreBinarySerializer::ReadSubMeshNames(Mesh *mesh) {
void OgreBinarySerializer::ReadGeometry(VertexData *dest) {
dest->count = Read<uint32_t>();
ASSIMP_LOG_DEBUG_F(" - Reading geometry of ", dest->count, " vertices");
ASSIMP_LOG_VERBOSE_DEBUG_F(" - Reading geometry of ", dest->count, " vertices");
if (!AtEnd()) {
uint16_t id = ReadHeader();
@ -536,7 +536,7 @@ void OgreBinarySerializer::ReadGeometryVertexElement(VertexData *dest) {
element.offset = Read<uint16_t>();
element.index = Read<uint16_t>();
ASSIMP_LOG_DEBUG_F(" - Vertex element ", element.SemanticToString(), " of type ",
ASSIMP_LOG_VERBOSE_DEBUG_F(" - Vertex element ", element.SemanticToString(), " of type ",
element.TypeToString(), " index=", element.index, " source=", element.source);
dest->vertexElements.push_back(element);
@ -557,7 +557,7 @@ void OgreBinarySerializer::ReadGeometryVertexBuffer(VertexData *dest) {
uint8_t *vertexBuffer = ReadBytes(numBytes);
dest->vertexBindings[bindIndex] = MemoryStreamPtr(new Assimp::MemoryIOStream(vertexBuffer, numBytes, true));
ASSIMP_LOG_DEBUG_F(" - Read vertex buffer for source ", bindIndex, " of ", numBytes, " bytes");
ASSIMP_LOG_VERBOSE_DEBUG_F(" - Read vertex buffer for source ", bindIndex, " of ", numBytes, " bytes");
}
void OgreBinarySerializer::ReadEdgeList(Mesh * /*mesh*/) {
@ -807,7 +807,7 @@ void OgreBinarySerializer::ReadSkeleton(Skeleton *skeleton) {
<< " Supported versions: " << SKELETON_VERSION_1_8 << " and " << SKELETON_VERSION_1_1);
}
ASSIMP_LOG_DEBUG("Reading Skeleton");
ASSIMP_LOG_VERBOSE_DEBUG("Reading Skeleton");
bool firstBone = true;
bool firstAnim = true;
@ -821,7 +821,7 @@ void OgreBinarySerializer::ReadSkeleton(Skeleton *skeleton) {
}
case SKELETON_BONE: {
if (firstBone) {
ASSIMP_LOG_DEBUG(" - Bones");
ASSIMP_LOG_VERBOSE_DEBUG(" - Bones");
firstBone = false;
}
@ -834,7 +834,7 @@ void OgreBinarySerializer::ReadSkeleton(Skeleton *skeleton) {
}
case SKELETON_ANIMATION: {
if (firstAnim) {
ASSIMP_LOG_DEBUG(" - Animations");
ASSIMP_LOG_VERBOSE_DEBUG(" - Animations");
firstAnim = false;
}
@ -874,7 +874,7 @@ void OgreBinarySerializer::ReadBone(Skeleton *skeleton) {
throw DeadlyImportError(Formatter::format() << "Ogre Skeleton bone indexes not contiguous. Error at bone index " << bone->id);
}
ASSIMP_LOG_DEBUG_F(" ", bone->id, " ", bone->name);
ASSIMP_LOG_VERBOSE_DEBUG_F(" ", bone->id, " ", bone->name);
skeleton->bones.push_back(bone);
}
@ -919,7 +919,7 @@ void OgreBinarySerializer::ReadSkeletonAnimation(Skeleton *skeleton) {
skeleton->animations.push_back(anim);
ASSIMP_LOG_DEBUG_F(" ", anim->name, " (", anim->length, " sec, ", anim->tracks.size(), " tracks)");
ASSIMP_LOG_VERBOSE_DEBUG_F(" ", anim->name, " (", anim->length, " sec, ", anim->tracks.size(), " tracks)");
}
void OgreBinarySerializer::ReadSkeletonAnimationTrack(Skeleton * /*skeleton*/, Animation *dest) {

View File

@ -175,7 +175,7 @@ aiMaterial* OgreImporter::ReadMaterial(const std::string &pFile, Assimp::IOSyste
if (materialFile) {
break;
}
ASSIMP_LOG_DEBUG_F( "Source file for material '", materialName, "' ", potentialFiles[i], " does not exist");
ASSIMP_LOG_VERBOSE_DEBUG_F( "Source file for material '", materialName, "' ", potentialFiles[i], " does not exist");
}
if (!materialFile)
{
@ -201,7 +201,7 @@ aiMaterial* OgreImporter::ReadMaterial(const std::string &pFile, Assimp::IOSyste
ss << &data[0];
}
ASSIMP_LOG_DEBUG_F("Reading material '", materialName, "'");
ASSIMP_LOG_VERBOSE_DEBUG_F("Reading material '", materialName, "'");
aiMaterial *material = new aiMaterial();
m_textures.clear();
@ -245,7 +245,7 @@ aiMaterial* OgreImporter::ReadMaterial(const std::string &pFile, Assimp::IOSyste
return material;
}
ASSIMP_LOG_DEBUG_F("material '", materialName, "'");
ASSIMP_LOG_VERBOSE_DEBUG_F("material '", materialName, "'");
while(linePart != partBlockEnd)
{
@ -353,7 +353,7 @@ bool OgreImporter::ReadTechnique(const std::string &techniqueName, stringstream
return false;
}
ASSIMP_LOG_DEBUG_F(" technique '", techniqueName, "'");
ASSIMP_LOG_VERBOSE_DEBUG_F(" technique '", techniqueName, "'");
const string partPass = "pass";
@ -389,7 +389,7 @@ bool OgreImporter::ReadPass(const std::string &passName, stringstream &ss, aiMat
return false;
}
ASSIMP_LOG_DEBUG_F(" pass '", passName, "'");
ASSIMP_LOG_VERBOSE_DEBUG_F(" pass '", passName, "'");
const string partAmbient = "ambient";
const string partDiffuse = "diffuse";
@ -416,7 +416,7 @@ bool OgreImporter::ReadPass(const std::string &passName, stringstream &ss, aiMat
ss >> r >> g >> b;
const aiColor3D color(r, g, b);
ASSIMP_LOG_DEBUG_F( " ", linePart, " ", r, " ", g, " ", b);
ASSIMP_LOG_VERBOSE_DEBUG_F( " ", linePart, " ", r, " ", g, " ", b);
if (linePart == partAmbient)
{
@ -455,7 +455,7 @@ bool OgreImporter::ReadTextureUnit(const std::string &textureUnitName, stringstr
return false;
}
ASSIMP_LOG_DEBUG_F(" texture_unit '", textureUnitName, "'");
ASSIMP_LOG_VERBOSE_DEBUG_F(" texture_unit '", textureUnitName, "'");
const string partTexture = "texture";
const string partTextCoordSet = "tex_coord_set";
@ -490,7 +490,7 @@ bool OgreImporter::ReadTextureUnit(const std::string &textureUnitName, stringstr
if (posSuffix != string::npos && posUnderscore != string::npos && posSuffix > posUnderscore)
{
string identifier = Ogre::ToLower(textureRef.substr(posUnderscore, posSuffix - posUnderscore));
ASSIMP_LOG_DEBUG_F( "Detecting texture type from filename postfix '", identifier, "'");
ASSIMP_LOG_VERBOSE_DEBUG_F( "Detecting texture type from filename postfix '", identifier, "'");
if (identifier == "_n" || identifier == "_nrm" || identifier == "_nrml" || identifier == "_normal" || identifier == "_normals" || identifier == "_normalmap")
{
@ -580,7 +580,7 @@ bool OgreImporter::ReadTextureUnit(const std::string &textureUnitName, stringstr
unsigned int textureTypeIndex = m_textures[textureType];
m_textures[textureType]++;
ASSIMP_LOG_DEBUG_F( " texture '", textureRef, "' type ", textureType,
ASSIMP_LOG_VERBOSE_DEBUG_F( " texture '", textureRef, "' type ", textureType,
" index ", textureTypeIndex, " UV ", uvCoord);
aiString assimpTextureRef(textureRef);

View File

@ -268,7 +268,7 @@ void OgreXmlSerializer::ReadMesh(MeshXml *mesh) {
throw DeadlyImportError("Root node is <" + m_currentNodeName + "> expecting <mesh>");
}
ASSIMP_LOG_DEBUG("Reading Mesh");
ASSIMP_LOG_VERBOSE_DEBUG("Reading Mesh");
NextNode();
@ -294,7 +294,7 @@ void OgreXmlSerializer::ReadMesh(MeshXml *mesh) {
ReadBoneAssignments(mesh->sharedVertexData);
} else if (m_currentNodeName == nnSkeletonLink) {
mesh->skeletonRef = ReadAttribute<std::string>("name");
ASSIMP_LOG_DEBUG_F("Read skeleton link ", mesh->skeletonRef);
ASSIMP_LOG_VERBOSE_DEBUG_F("Read skeleton link ", mesh->skeletonRef);
NextNode();
}
// Assimp incompatible/ignored nodes
@ -305,7 +305,7 @@ void OgreXmlSerializer::ReadMesh(MeshXml *mesh) {
void OgreXmlSerializer::ReadGeometry(VertexDataXml *dest) {
dest->count = ReadAttribute<uint32_t>("vertexcount");
ASSIMP_LOG_DEBUG_F(" - Reading geometry of ", dest->count, " vertices");
ASSIMP_LOG_VERBOSE_DEBUG_F(" - Reading geometry of ", dest->count, " vertices");
NextNode();
while (m_currentNodeName == nnVertexBuffer) {
@ -325,19 +325,19 @@ void OgreXmlSerializer::ReadGeometryVertexBuffer(VertexDataXml *dest) {
}
if (positions) {
ASSIMP_LOG_DEBUG(" - Contains positions");
ASSIMP_LOG_VERBOSE_DEBUG(" - Contains positions");
dest->positions.reserve(dest->count);
}
if (normals) {
ASSIMP_LOG_DEBUG(" - Contains normals");
ASSIMP_LOG_VERBOSE_DEBUG(" - Contains normals");
dest->normals.reserve(dest->count);
}
if (tangents) {
ASSIMP_LOG_DEBUG(" - Contains tangents");
ASSIMP_LOG_VERBOSE_DEBUG(" - Contains tangents");
dest->tangents.reserve(dest->count);
}
if (uvs > 0) {
ASSIMP_LOG_DEBUG_F(" - Contains ", uvs, " texture coords");
ASSIMP_LOG_VERBOSE_DEBUG_F(" - Contains ", uvs, " texture coords");
dest->uvs.resize(uvs);
for (size_t i = 0, len = dest->uvs.size(); i < len; ++i) {
dest->uvs[i].reserve(dest->count);
@ -464,9 +464,9 @@ void OgreXmlSerializer::ReadSubMesh(MeshXml *mesh) {
submesh->usesSharedVertexData = ReadAttribute<bool>(anUseSharedVertices);
}
ASSIMP_LOG_DEBUG_F("Reading SubMesh ", mesh->subMeshes.size());
ASSIMP_LOG_DEBUG_F(" - Material: '", submesh->materialRef, "'");
ASSIMP_LOG_DEBUG_F(" - Uses shared geometry: ", (submesh->usesSharedVertexData ? "true" : "false"));
ASSIMP_LOG_VERBOSE_DEBUG_F("Reading SubMesh ", mesh->subMeshes.size());
ASSIMP_LOG_VERBOSE_DEBUG_F(" - Material: '", submesh->materialRef, "'");
ASSIMP_LOG_VERBOSE_DEBUG_F(" - Uses shared geometry: ", (submesh->usesSharedVertexData ? "true" : "false"));
// TODO: maybe we have always just 1 faces and 1 geometry and always in this order. this loop will only work correct, when the order
// of faces and geometry changed, and not if we have more than one of one
@ -505,7 +505,7 @@ void OgreXmlSerializer::ReadSubMesh(MeshXml *mesh) {
}
if (submesh->indexData->faces.size() == submesh->indexData->faceCount) {
ASSIMP_LOG_DEBUG_F(" - Faces ", submesh->indexData->faceCount);
ASSIMP_LOG_VERBOSE_DEBUG_F(" - Faces ", submesh->indexData->faceCount);
} else {
throw DeadlyImportError(Formatter::format() << "Read only " << submesh->indexData->faces.size() << " faces when should have read " << submesh->indexData->faceCount);
}
@ -571,7 +571,7 @@ void OgreXmlSerializer::ReadBoneAssignments(VertexDataXml *dest) {
}
}
ASSIMP_LOG_DEBUG_F(" - ", dest->boneAssignments.size(), " bone assignments");
ASSIMP_LOG_VERBOSE_DEBUG_F(" - ", dest->boneAssignments.size(), " bone assignments");
}
// Skeleton
@ -648,7 +648,7 @@ void OgreXmlSerializer::ReadSkeleton(Skeleton *skeleton) {
throw DeadlyImportError("Root node is <" + m_currentNodeName + "> expecting <skeleton>");
}
ASSIMP_LOG_DEBUG("Reading Skeleton");
ASSIMP_LOG_VERBOSE_DEBUG("Reading Skeleton");
// Optional blend mode from root node
if (HasAttribute("blendmode")) {
@ -678,7 +678,7 @@ void OgreXmlSerializer::ReadAnimations(Skeleton *skeleton) {
throw DeadlyImportError("Cannot read <animations> for a Skeleton without bones");
}
ASSIMP_LOG_DEBUG(" - Animations");
ASSIMP_LOG_VERBOSE_DEBUG(" - Animations");
NextNode();
while (m_currentNodeName == nnAnimation) {
@ -693,7 +693,7 @@ void OgreXmlSerializer::ReadAnimations(Skeleton *skeleton) {
ReadAnimationTracks(anim);
skeleton->animations.push_back(anim);
ASSIMP_LOG_DEBUG_F(" ", anim->name, " (", anim->length, " sec, ", anim->tracks.size(), " tracks)");
ASSIMP_LOG_VERBOSE_DEBUG_F(" ", anim->name, " (", anim->length, " sec, ", anim->tracks.size(), " tracks)");
}
}
@ -793,7 +793,7 @@ static bool BoneCompare(Bone *a, Bone *b) {
}
void OgreXmlSerializer::ReadBones(Skeleton *skeleton) {
ASSIMP_LOG_DEBUG(" - Bones");
ASSIMP_LOG_VERBOSE_DEBUG(" - Bones");
NextNode();
while (m_currentNodeName == nnBone) {
@ -851,7 +851,7 @@ void OgreXmlSerializer::ReadBones(Skeleton *skeleton) {
as per the Ogre skeleton spec. It might be more that other (later) code in this imported does not break. */
for (size_t i = 0, len = skeleton->bones.size(); i < len; ++i) {
Bone *b = skeleton->bones[i];
ASSIMP_LOG_DEBUG_F(" ", b->id, " ", b->name);
ASSIMP_LOG_VERBOSE_DEBUG_F(" ", b->id, " ", b->name);
if (b->id != static_cast<uint16_t>(i)) {
throw DeadlyImportError(Formatter::format() << "Bone ids are not in sequence starting from 0. Missing index " << i);

View File

@ -514,7 +514,7 @@ bool PLY::DOM::SkipComments(std::vector<char> &buffer)
// ------------------------------------------------------------------------------------------------
bool PLY::DOM::ParseHeader(IOStreamBuffer<char> &streamBuffer, std::vector<char> &buffer, bool isBinary) {
ASSIMP_LOG_DEBUG("PLY::DOM::ParseHeader() begin");
ASSIMP_LOG_VERBOSE_DEBUG("PLY::DOM::ParseHeader() begin");
// parse all elements
while (!buffer.empty())
@ -543,14 +543,14 @@ bool PLY::DOM::ParseHeader(IOStreamBuffer<char> &streamBuffer, std::vector<char>
if (!isBinary) // it would occur an error, if binary data start with values as space or line end.
SkipSpacesAndLineEnd(buffer);
ASSIMP_LOG_DEBUG("PLY::DOM::ParseHeader() succeeded");
ASSIMP_LOG_VERBOSE_DEBUG("PLY::DOM::ParseHeader() succeeded");
return true;
}
// ------------------------------------------------------------------------------------------------
bool PLY::DOM::ParseElementInstanceLists(IOStreamBuffer<char> &streamBuffer, std::vector<char> &buffer, PLYImporter* loader)
{
ASSIMP_LOG_DEBUG("PLY::DOM::ParseElementInstanceLists() begin");
ASSIMP_LOG_VERBOSE_DEBUG("PLY::DOM::ParseElementInstanceLists() begin");
alElementData.resize(alElements.size());
std::vector<PLY::Element>::const_iterator i = alElements.begin();
@ -571,7 +571,7 @@ bool PLY::DOM::ParseElementInstanceLists(IOStreamBuffer<char> &streamBuffer, std
}
}
ASSIMP_LOG_DEBUG("PLY::DOM::ParseElementInstanceLists() succeeded");
ASSIMP_LOG_VERBOSE_DEBUG("PLY::DOM::ParseElementInstanceLists() succeeded");
return true;
}
@ -582,7 +582,7 @@ bool PLY::DOM::ParseElementInstanceListsBinary(IOStreamBuffer<char> &streamBuffe
PLYImporter* loader,
bool p_bBE)
{
ASSIMP_LOG_DEBUG("PLY::DOM::ParseElementInstanceListsBinary() begin");
ASSIMP_LOG_VERBOSE_DEBUG("PLY::DOM::ParseElementInstanceListsBinary() begin");
alElementData.resize(alElements.size());
std::vector<PLY::Element>::const_iterator i = alElements.begin();
@ -602,7 +602,7 @@ bool PLY::DOM::ParseElementInstanceListsBinary(IOStreamBuffer<char> &streamBuffe
}
}
ASSIMP_LOG_DEBUG("PLY::DOM::ParseElementInstanceListsBinary() succeeded");
ASSIMP_LOG_VERBOSE_DEBUG("PLY::DOM::ParseElementInstanceListsBinary() succeeded");
return true;
}
@ -615,11 +615,11 @@ bool PLY::DOM::ParseInstanceBinary(IOStreamBuffer<char> &streamBuffer, DOM* p_pc
std::vector<char> buffer;
streamBuffer.getNextLine(buffer);
ASSIMP_LOG_DEBUG("PLY::DOM::ParseInstanceBinary() begin");
ASSIMP_LOG_VERBOSE_DEBUG("PLY::DOM::ParseInstanceBinary() begin");
if (!p_pcOut->ParseHeader(streamBuffer, buffer, true))
{
ASSIMP_LOG_DEBUG("PLY::DOM::ParseInstanceBinary() failure");
ASSIMP_LOG_VERBOSE_DEBUG("PLY::DOM::ParseInstanceBinary() failure");
return false;
}
@ -628,10 +628,10 @@ bool PLY::DOM::ParseInstanceBinary(IOStreamBuffer<char> &streamBuffer, DOM* p_pc
const char* pCur = (char*)&buffer[0];
if (!p_pcOut->ParseElementInstanceListsBinary(streamBuffer, buffer, pCur, bufferSize, loader, p_bBE))
{
ASSIMP_LOG_DEBUG("PLY::DOM::ParseInstanceBinary() failure");
ASSIMP_LOG_VERBOSE_DEBUG("PLY::DOM::ParseInstanceBinary() failure");
return false;
}
ASSIMP_LOG_DEBUG("PLY::DOM::ParseInstanceBinary() succeeded");
ASSIMP_LOG_VERBOSE_DEBUG("PLY::DOM::ParseInstanceBinary() succeeded");
return true;
}
@ -644,11 +644,11 @@ bool PLY::DOM::ParseInstance(IOStreamBuffer<char> &streamBuffer, DOM* p_pcOut, P
std::vector<char> buffer;
streamBuffer.getNextLine(buffer);
ASSIMP_LOG_DEBUG("PLY::DOM::ParseInstance() begin");
ASSIMP_LOG_VERBOSE_DEBUG("PLY::DOM::ParseInstance() begin");
if (!p_pcOut->ParseHeader(streamBuffer, buffer, false))
{
ASSIMP_LOG_DEBUG("PLY::DOM::ParseInstance() failure");
ASSIMP_LOG_VERBOSE_DEBUG("PLY::DOM::ParseInstance() failure");
return false;
}
@ -656,10 +656,10 @@ bool PLY::DOM::ParseInstance(IOStreamBuffer<char> &streamBuffer, DOM* p_pcOut, P
streamBuffer.getNextLine(buffer);
if (!p_pcOut->ParseElementInstanceLists(streamBuffer, buffer, loader))
{
ASSIMP_LOG_DEBUG("PLY::DOM::ParseInstance() failure");
ASSIMP_LOG_VERBOSE_DEBUG("PLY::DOM::ParseInstance() failure");
return false;
}
ASSIMP_LOG_DEBUG("PLY::DOM::ParseInstance() succeeded");
ASSIMP_LOG_VERBOSE_DEBUG("PLY::DOM::ParseInstance() succeeded");
return true;
}

View File

@ -1395,6 +1395,7 @@ inline void AssetMetadata::Read(Document &doc) {
//
inline void Asset::ReadBinaryHeader(IOStream &stream, std::vector<char> &sceneData) {
ASSIMP_LOG_DEBUG("Reading GLTF2 binary");
GLB_Header header;
if (stream.Read(&header, sizeof(header), 1) != 1) {
throw DeadlyImportError("GLTF: Unable to read the file header");
@ -1458,6 +1459,7 @@ inline void Asset::ReadBinaryHeader(IOStream &stream, std::vector<char> &sceneDa
}
inline void Asset::Load(const std::string &pFile, bool isBinary) {
ASSIMP_LOG_DEBUG("Loading GLTF2 asset");
mCurrentAssetDir.clear();
/*int pos = std::max(int(pFile.rfind('/')), int(pFile.rfind('\\')));
if (pos != int(std::string::npos)) */
@ -1489,7 +1491,7 @@ inline void Asset::Load(const std::string &pFile, bool isBinary) {
}
// parse the JSON document
ASSIMP_LOG_DEBUG("Parsing GLTF2 JSON");
Document doc;
doc.ParseInsitu(&sceneData[0]);

View File

@ -286,6 +286,7 @@ static aiMaterial *ImportMaterial(std::vector<int> &embeddedTexIdxs, Asset &r, M
void glTF2Importer::ImportMaterials(glTF2::Asset &r) {
const unsigned int numImportedMaterials = unsigned(r.materials.Size());
ASSIMP_LOG_DEBUG_F("Importing ", numImportedMaterials, " materials");
Material defaultMaterial;
mScene->mNumMaterials = numImportedMaterials + 1;
@ -333,6 +334,7 @@ static inline bool CheckValidFacesIndices(aiFace *faces, unsigned nFaces, unsign
#endif // ASSIMP_BUILD_DEBUG
void glTF2Importer::ImportMeshes(glTF2::Asset &r) {
ASSIMP_LOG_DEBUG_F("Importing ", r.meshes.Size(), " meshes");
std::vector<aiMesh *> meshes;
unsigned int k = 0;
@ -662,10 +664,12 @@ void glTF2Importer::ImportMeshes(glTF2::Asset &r) {
void glTF2Importer::ImportCameras(glTF2::Asset &r) {
if (!r.cameras.Size()) return;
mScene->mNumCameras = r.cameras.Size();
mScene->mCameras = new aiCamera *[r.cameras.Size()];
const unsigned int numCameras = r.cameras.Size();
ASSIMP_LOG_DEBUG_F("Importing ", numCameras, " cameras");
mScene->mNumCameras = numCameras;
mScene->mCameras = new aiCamera *[numCameras];
for (size_t i = 0; i < r.cameras.Size(); ++i) {
for (size_t i = 0; i < numCameras; ++i) {
Camera &cam = r.cameras[i];
aiCamera *aicam = mScene->mCameras[i] = new aiCamera();
@ -696,10 +700,12 @@ void glTF2Importer::ImportLights(glTF2::Asset &r) {
if (!r.lights.Size())
return;
mScene->mNumLights = r.lights.Size();
mScene->mLights = new aiLight *[r.lights.Size()];
const unsigned int numLights = r.lights.Size();
ASSIMP_LOG_DEBUG_F("Importing ", numLights, " lights");
mScene->mNumLights = numLights;
mScene->mLights = new aiLight *[numLights];
for (size_t i = 0; i < r.lights.Size(); ++i) {
for (size_t i = 0; i < numLights; ++i) {
Light &light = r.lights[i];
aiLight *ail = mScene->mLights[i] = new aiLight();
@ -958,6 +964,7 @@ void glTF2Importer::ImportNodes(glTF2::Asset &r) {
if (!r.scene) {
throw DeadlyImportError("GLTF: No scene");
}
ASSIMP_LOG_DEBUG("Importing nodes");
std::vector<Ref<Node>> rootNodes = r.scene->nodes;
@ -1137,13 +1144,15 @@ std::unordered_map<unsigned int, AnimationSamplers> GatherSamplers(Animation &an
void glTF2Importer::ImportAnimations(glTF2::Asset &r) {
if (!r.scene) return;
mScene->mNumAnimations = r.animations.Size();
const unsigned numAnimations = r.animations.Size();
ASSIMP_LOG_DEBUG_F("Importing ", numAnimations, " animations");
mScene->mNumAnimations = numAnimations;
if (mScene->mNumAnimations == 0) {
return;
}
mScene->mAnimations = new aiAnimation *[mScene->mNumAnimations];
for (unsigned int i = 0; i < r.animations.Size(); ++i) {
mScene->mAnimations = new aiAnimation *[numAnimations];
for (unsigned int i = 0; i < numAnimations; ++i) {
Animation &anim = r.animations[i];
aiAnimation *ai_anim = new aiAnimation();
@ -1249,6 +1258,8 @@ void glTF2Importer::ImportEmbeddedTextures(glTF2::Asset &r) {
if (numEmbeddedTexs == 0)
return;
ASSIMP_LOG_DEBUG_F("Importing ", numEmbeddedTexs, " embedded textures");
mScene->mTextures = new aiTexture *[numEmbeddedTexs];
// Add the embedded textures
@ -1288,6 +1299,7 @@ void glTF2Importer::ImportEmbeddedTextures(glTF2::Asset &r) {
}
void glTF2Importer::ImportCommonMetadata(glTF2::Asset& a) {
ASSIMP_LOG_DEBUG("Importing metadata");
ai_assert(mScene->mMetaData == nullptr);
const bool hasVersion = !a.asset.version.empty();
const bool hasGenerator = !a.asset.generator.empty();
@ -1307,6 +1319,9 @@ void glTF2Importer::ImportCommonMetadata(glTF2::Asset& a) {
}
void glTF2Importer::InternReadFile(const std::string &pFile, aiScene *pScene, IOSystem *pIOHandler) {
ASSIMP_LOG_DEBUG("Reading GLTF2 file");
// clean all member arrays
meshOffsets.clear();
embeddedTexIdxs.clear();

View File

@ -392,7 +392,7 @@ ASSIMP_API aiReturn aiDetachLogStream(const aiLogStream *stream) {
if (it == gActiveLogStreams.end()) {
return AI_FAILURE;
}
DefaultLogger::get()->detatchStream(it->second);
DefaultLogger::get()->detachStream( it->second );
delete it->second;
gActiveLogStreams.erase(it);
@ -416,7 +416,7 @@ ASSIMP_API void aiDetachAllLogStreams(void) {
}
for (LogStreamMap::iterator it = gActiveLogStreams.begin(); it != gActiveLogStreams.end(); ++it) {
logger->detatchStream(it->second);
logger->detachStream( it->second );
delete it->second;
}
gActiveLogStreams.clear();

View File

@ -177,6 +177,18 @@ void Logger::debug(const char* message) {
return OnDebug(message);
}
// ----------------------------------------------------------------------------------
void Logger::verboseDebug(const char *message) {
// SECURITY FIX: otherwise it's easy to produce overruns since
// sometimes importers will include data from the input file
// (i.e. node names) in their messages.
if (strlen(message) > MAX_LOG_MESSAGE_LENGTH) {
return;
}
return OnVerboseDebug(message);
}
// ----------------------------------------------------------------------------------
void Logger::info(const char* message) {
@ -251,7 +263,7 @@ void DefaultLogger::kill() {
// ----------------------------------------------------------------------------------
// Debug message
void DefaultLogger::OnDebug( const char* message ) {
if ( m_Severity == Logger::NORMAL ) {
if ( m_Severity < Logger::DEBUG ) {
return;
}
@ -262,6 +274,19 @@ void DefaultLogger::OnDebug( const char* message ) {
WriteToStreams( msg, Logger::Debugging );
}
// Verbose debug message
void DefaultLogger::OnVerboseDebug(const char *message) {
if (m_Severity < Logger::VERBOSE) {
return;
}
static const size_t Size = MAX_LOG_MESSAGE_LENGTH + 16;
char msg[Size];
ai_snprintf(msg, Size, "Debug, T%u: %s", GetThreadID(), message);
WriteToStreams(msg, Logger::Debugging);
}
// ----------------------------------------------------------------------------------
// Logs an info
void DefaultLogger::OnInfo( const char* message ){
@ -320,7 +345,7 @@ bool DefaultLogger::attachStream( LogStream *pStream, unsigned int severity ) {
// ----------------------------------------------------------------------------------
// Detach a stream
bool DefaultLogger::detatchStream( LogStream *pStream, unsigned int severity ) {
bool DefaultLogger::detachStream( LogStream *pStream, unsigned int severity ) {
if ( nullptr == pStream ) {
return false;
}

View File

@ -75,7 +75,7 @@ void PrefixString(aiString& string,const char* prefix, unsigned int len) {
return;
if (len+string.length>=MAXLEN-1) {
ASSIMP_LOG_DEBUG("Can't add an unique prefix because the string is too long");
ASSIMP_LOG_VERBOSE_DEBUG("Can't add an unique prefix because the string is too long");
ai_assert(false);
return;
}

View File

@ -225,7 +225,7 @@ void ScenePreprocessor::ProcessAnimation (aiAnimation* anim)
q.mTime = 0.;
q.mValue = rotation;
ASSIMP_LOG_DEBUG("ScenePreprocessor: Dummy rotation track has been generated");
ASSIMP_LOG_VERBOSE_DEBUG("ScenePreprocessor: Dummy rotation track has been generated");
} else {
ai_assert(channel->mRotationKeys);
}
@ -240,7 +240,7 @@ void ScenePreprocessor::ProcessAnimation (aiAnimation* anim)
q.mTime = 0.;
q.mValue = scaling;
ASSIMP_LOG_DEBUG("ScenePreprocessor: Dummy scaling track has been generated");
ASSIMP_LOG_VERBOSE_DEBUG("ScenePreprocessor: Dummy scaling track has been generated");
} else {
ai_assert(channel->mScalingKeys);
}
@ -255,7 +255,7 @@ void ScenePreprocessor::ProcessAnimation (aiAnimation* anim)
q.mTime = 0.;
q.mValue = position;
ASSIMP_LOG_DEBUG("ScenePreprocessor: Dummy position track has been generated");
ASSIMP_LOG_VERBOSE_DEBUG("ScenePreprocessor: Dummy position track has been generated");
} else {
ai_assert(channel->mPositionKeys);
}
@ -264,7 +264,7 @@ void ScenePreprocessor::ProcessAnimation (aiAnimation* anim)
}
if (anim->mDuration == -1.) {
ASSIMP_LOG_DEBUG("ScenePreprocessor: Setting animation duration");
ASSIMP_LOG_VERBOSE_DEBUG("ScenePreprocessor: Setting animation duration");
anim->mDuration = last - std::min( first, 0. );
}
}

View File

@ -181,7 +181,7 @@ void CatmullClarkSubdivider::Subdivide (
aiMesh* i = smesh[s];
// FIX - mPrimitiveTypes might not yet be initialized
if (i->mPrimitiveTypes && (i->mPrimitiveTypes & (aiPrimitiveType_LINE|aiPrimitiveType_POINT))==i->mPrimitiveTypes) {
ASSIMP_LOG_DEBUG("Catmull-Clark Subdivider: Skipping pure line/point mesh");
ASSIMP_LOG_VERBOSE_DEBUG("Catmull-Clark Subdivider: Skipping pure line/point mesh");
if (discard_input) {
out[s] = i;
@ -346,7 +346,7 @@ void CatmullClarkSubdivider::InternSubdivide (
// Report the number of bad edges. bad edges are referenced by less than two
// faces in the mesh. They occur at outer model boundaries in non-closed
// shapes.
ASSIMP_LOG_DEBUG_F("Catmull-Clark Subdivider: got ", bad_cnt, " bad edges touching only one face (totally ",
ASSIMP_LOG_VERBOSE_DEBUG_F("Catmull-Clark Subdivider: got ", bad_cnt, " bad edges touching only one face (totally ",
static_cast<unsigned int>(edges.size()), " edges). ");
}}
@ -405,7 +405,7 @@ void CatmullClarkSubdivider::InternSubdivide (
}
ai_assert(haveit);
if (!haveit) {
ASSIMP_LOG_DEBUG("Catmull-Clark Subdivider: Index not used");
ASSIMP_LOG_VERBOSE_DEBUG("Catmull-Clark Subdivider: Index not used");
}
break;
}

View File

@ -82,7 +82,7 @@ void ArmaturePopulate::Execute(aiScene *out) {
for (std::pair<aiBone *, aiNode *> kvp : bone_stack) {
aiBone *bone = kvp.first;
aiNode *bone_node = kvp.second;
ASSIMP_LOG_DEBUG_F("active node lookup: ", bone->mName.C_Str());
ASSIMP_LOG_VERBOSE_DEBUG_F("active node lookup: ", bone->mName.C_Str());
// lcl transform grab - done in generate_nodes :)
// bone->mOffsetMatrix = bone_node->mTransformation;
@ -178,7 +178,7 @@ void ArmaturePopulate::BuildBoneStack(aiNode *,
if (node == nullptr) {
node_stack.clear();
BuildNodeList(root_node, node_stack);
ASSIMP_LOG_DEBUG_F("Resetting bone stack: nullptr element ", bone->mName.C_Str());
ASSIMP_LOG_VERBOSE_DEBUG_F("Resetting bone stack: nullptr element ", bone->mName.C_Str());
node = GetNodeFromStack(bone->mName, node_stack);
@ -188,7 +188,7 @@ void ArmaturePopulate::BuildBoneStack(aiNode *,
}
}
ASSIMP_LOG_DEBUG_F("Successfully added bone[", bone->mName.C_Str(), "] to stack and bone node is: ", node->mName.C_Str());
ASSIMP_LOG_VERBOSE_DEBUG_F("Successfully added bone[", bone->mName.C_Str(), "] to stack and bone node is: ", node->mName.C_Str());
bone_stack.insert(std::pair<aiBone *, aiNode *>(bone, node));
}
@ -202,7 +202,7 @@ aiNode *ArmaturePopulate::GetArmatureRoot(aiNode *bone_node,
std::vector<aiBone *> &bone_list) {
while (bone_node) {
if (!IsBoneNode(bone_node->mName, bone_list)) {
ASSIMP_LOG_DEBUG_F("GetArmatureRoot() Found valid armature: ", bone_node->mName.C_Str());
ASSIMP_LOG_VERBOSE_DEBUG_F("GetArmatureRoot() Found valid armature: ", bone_node->mName.C_Str());
return bone_node;
}

View File

@ -299,7 +299,7 @@ void FlipUVsProcess::ProcessMaterial (aiMaterial* _mat)
for (unsigned int a = 0; a < mat->mNumProperties;++a) {
aiMaterialProperty* prop = mat->mProperties[a];
if( !prop ) {
ASSIMP_LOG_DEBUG( "Property is null" );
ASSIMP_LOG_VERBOSE_DEBUG( "Property is null" );
continue;
}

View File

@ -289,7 +289,7 @@ evil_jump_outside:
if (!mesh->mNumFaces) {
//The whole mesh consists of degenerated faces
//signal upward, that this mesh should be deleted.
ASSIMP_LOG_DEBUG("FindDegeneratesProcess removed a mesh full of degenerated primitives");
ASSIMP_LOG_VERBOSE_DEBUG("FindDegeneratesProcess removed a mesh full of degenerated primitives");
return true;
}
}

View File

@ -124,7 +124,7 @@ void FindInvalidDataProcess::Execute(aiScene *pScene) {
if (2 == result) {
// remove this mesh
delete pScene->mMeshes[a];
AI_DEBUG_INVALIDATE_PTR(pScene->mMeshes[a]);
pScene->mMeshes[a] = NULL;
meshMapping[a] = UINT_MAX;
continue;

View File

@ -355,7 +355,7 @@ ai_real ImproveCacheLocalityProcess::ProcessMesh( aiMesh* pMesh, unsigned int me
// very intense verbose logging ... prepare for much text if there are many meshes
if ( DefaultLogger::get()->getLogSeverity() == Logger::VERBOSE) {
ASSIMP_LOG_DEBUG_F("Mesh %u | ACMR in: ", meshNum, " out: ", fACMR, " | ~", fACMR2, ((fACMR - fACMR2) / fACMR) * 100.f);
ASSIMP_LOG_VERBOSE_DEBUG_F("Mesh %u | ACMR in: ", meshNum, " out: ", fACMR, " | ~", fACMR2, ((fACMR - fACMR2) / fACMR) * 100.f);
}
fACMR2 *= pMesh->mNumFaces;

View File

@ -373,7 +373,7 @@ int JoinVerticesProcess::ProcessMesh( aiMesh* pMesh, unsigned int meshIndex)
}
if (!DefaultLogger::isNullLogger() && DefaultLogger::get()->getLogSeverity() == Logger::VERBOSE) {
ASSIMP_LOG_DEBUG_F(
ASSIMP_LOG_VERBOSE_DEBUG_F(
"Mesh ",meshIndex,
" (",
(pMesh->mName.length ? pMesh->mName.data : "unnamed"),

View File

@ -122,7 +122,7 @@ void RemoveRedundantMatsProcess::Execute( aiScene* pScene)
// Keep this material even if no mesh references it
abReferenced[i] = true;
ASSIMP_LOG_DEBUG_F( "Found positive match in exclusion list: \'", name.data, "\'");
ASSIMP_LOG_VERBOSE_DEBUG_F( "Found positive match in exclusion list: \'", name.data, "\'");
}
}
}

View File

@ -490,7 +490,7 @@ bool TriangulateProcess::TriangulateMesh( aiMesh* pMesh)
// drop dumb 0-area triangles - deactivated for now:
//FindDegenerates post processing step can do the same thing
//if (std::fabs(GetArea2D(temp_verts[i[0]],temp_verts[i[1]],temp_verts[i[2]])) < 1e-5f) {
// ASSIMP_LOG_DEBUG("Dropping triangle with area 0");
// ASSIMP_LOG_VERBOSE_DEBUG("Dropping triangle with area 0");
// --curOut;
// delete[] f->mIndices;

View File

@ -80,7 +80,7 @@ public:
/** @brief Creates a logging instance.
* @param name Name for log file. Only valid in combination
* with the aiDefaultLogStream_FILE flag.
* @param severity Log severity, VERBOSE turns on debug messages
* @param severity Log severity, DEBUG turns on debug messages and VERBOSE turns on all messages.
* @param defStreams Default log streams to be attached. Any bitwise
* combination of the aiDefaultLogStream enumerated values.
* If #aiDefaultLogStream_FILE is specified but an empty string is
@ -127,8 +127,8 @@ public:
unsigned int severity);
// ----------------------------------------------------------------------
/** @copydoc Logger::detatchStream */
bool detatchStream(LogStream *pStream,
/** @copydoc Logger::detachStream */
bool detachStream(LogStream *pStream,
unsigned int severity);
private:
@ -141,9 +141,12 @@ private:
/** @briefDestructor */
~DefaultLogger();
/** @brief Logs debug infos, only been written when severity level VERBOSE is set */
/** @brief Logs debug infos, only been written when severity level DEBUG or higher is set */
void OnDebug(const char* message);
/** @brief Logs debug infos, only been written when severity level VERBOSE is set */
void OnVerboseDebug(const char *message);
/** @brief Logs an info message */
void OnInfo(const char* message);

View File

@ -72,7 +72,7 @@ for(LineSplitter splitter(stream);splitter;++splitter) {
if (strtol(splitter[2]) > 5) { .. }
}
ASSIMP_LOG_DEBUG_F("Current line is: ", splitter.get_index());
ASSIMP_LOG_VERBOSE_DEBUG_F("Current line is: ", splitter.get_index());
}
@endcode
*/

View File

@ -94,6 +94,12 @@ public:
}
}
static void LogVerboseDebug(const Formatter::format& message) {
if (!DefaultLogger::isNullLogger()) {
ASSIMP_LOG_VERBOSE_DEBUG(Prefix()+(std::string)message);
}
}
// https://sourceforge.net/tracker/?func=detail&atid=1067632&aid=3358562&group_id=226462
#if !defined(__GNUC__) || !defined(__APPLE__) || __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6)
@ -125,6 +131,12 @@ public:
}
}
// ------------------------------------------------------------------------------------------------
static void LogVerboseDebug (const char* message) {
if (!DefaultLogger::isNullLogger()) {
LogVerboseDebug(Formatter::format(message));
}
}
#endif
private:

View File

@ -74,7 +74,8 @@ public:
*/
enum LogSeverity {
NORMAL, //!< Normal granularity of logging
VERBOSE //!< Debug infos will be logged, too
DEBUG, //!< Debug messages will be logged, but not verbose debug messages.
VERBOSE //!< All messages will be logged
};
// ----------------------------------------------------------------------
@ -103,6 +104,12 @@ public:
void debug(const char* message);
void debug(const std::string &message);
// ----------------------------------------------------------------------
/** @brief Writes a debug message
* @param message Debug message*/
void verboseDebug(const char *message);
void verboseDebug(const std::string &message);
// ----------------------------------------------------------------------
/** @brief Writes a info message
* @param message Info message*/
@ -154,7 +161,7 @@ public:
* if the result is 0 the stream is detached from the Logger and
* the caller retakes the possession of the stream.
* @return true if the stream has been detached, false otherwise.*/
virtual bool detatchStream(LogStream *pStream,
virtual bool detachStream(LogStream *pStream,
unsigned int severity = Debugging | Err | Warn | Info) = 0;
protected:
@ -178,6 +185,16 @@ protected:
*/
virtual void OnDebug(const char* message)= 0;
// ----------------------------------------------------------------------
/**
* @brief Called as a request to write a specific verbose debug message
* @param message Debug message. Never longer than
* MAX_LOG_MESSAGE_LENGTH characters (excluding the '0').
* @note The message string is only valid until the scope of
* the function is left.
*/
virtual void OnVerboseDebug(const char *message) = 0;
// ----------------------------------------------------------------------
/**
* @brief Called as a request to write a specific info message
@ -255,6 +272,11 @@ void Logger::debug(const std::string &message) {
return debug(message.c_str());
}
// ----------------------------------------------------------------------------------
inline void Logger::verboseDebug(const std::string &message) {
return verboseDebug(message.c_str());
}
// ----------------------------------------------------------------------------------
inline
void Logger::error(const std::string &message) {
@ -273,33 +295,37 @@ void Logger::info(const std::string &message) {
return info(message.c_str());
}
// ------------------------------------------------------------------------------------------------
#define ASSIMP_LOG_WARN_F(string,...)\
DefaultLogger::get()->warn((Formatter::format(string),__VA_ARGS__))
#define ASSIMP_LOG_ERROR_F(string,...)\
DefaultLogger::get()->error((Formatter::format(string),__VA_ARGS__))
#define ASSIMP_LOG_DEBUG_F(string,...)\
DefaultLogger::get()->debug((Formatter::format(string),__VA_ARGS__))
#define ASSIMP_LOG_INFO_F(string,...)\
DefaultLogger::get()->info((Formatter::format(string),__VA_ARGS__))
#define ASSIMP_LOG_WARN(string)\
DefaultLogger::get()->warn(string)
#define ASSIMP_LOG_ERROR(string)\
DefaultLogger::get()->error(string)
#define ASSIMP_LOG_DEBUG(string)\
DefaultLogger::get()->debug(string)
#define ASSIMP_LOG_INFO(string)\
DefaultLogger::get()->info(string)
} // Namespace Assimp
// ------------------------------------------------------------------------------------------------
#define ASSIMP_LOG_WARN_F(string, ...) \
Assimp::DefaultLogger::get()->warn((Assimp::Formatter::format(string), __VA_ARGS__))
#define ASSIMP_LOG_ERROR_F(string, ...) \
Assimp::DefaultLogger::get()->error((Assimp::Formatter::format(string), __VA_ARGS__))
#define ASSIMP_LOG_DEBUG_F(string, ...) \
Assimp::DefaultLogger::get()->debug((Assimp::Formatter::format(string), __VA_ARGS__))
#define ASSIMP_LOG_VERBOSE_DEBUG_F(string, ...) \
Assimp::DefaultLogger::get()->verboseDebug((Assimp::Formatter::format(string), __VA_ARGS__))
#define ASSIMP_LOG_INFO_F(string, ...) \
Assimp::DefaultLogger::get()->info((Assimp::Formatter::format(string), __VA_ARGS__))
#define ASSIMP_LOG_WARN(string) \
Assimp::DefaultLogger::get()->warn(string)
#define ASSIMP_LOG_ERROR(string) \
Assimp::DefaultLogger::get()->error(string)
#define ASSIMP_LOG_DEBUG(string) \
Assimp::DefaultLogger::get()->debug(string)
#define ASSIMP_LOG_VERBOSE_DEBUG(string) \
Assimp::DefaultLogger::get()->verboseDebug(string)
#define ASSIMP_LOG_INFO(string) \
Assimp::DefaultLogger::get()->info(string)
#endif // !! INCLUDED_AI_LOGGER_H
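
Taken together, the Logger changes above split the old two-level switch (NORMAL/VERBOSE) into three severities: ASSIMP_LOG_DEBUG output is filtered at DEBUG, while the new ASSIMP_LOG_VERBOSE_DEBUG output only appears at VERBOSE. A minimal sketch of how a client application might opt into the extra output, assuming the rest of the public DefaultLogger API is unchanged; the log file name is purely illustrative:

#include <assimp/DefaultLogger.hpp>
#include <assimp/Logger.hpp>

using namespace Assimp;

int main() {
    // Logger::DEBUG would show debug() output but still suppress verboseDebug();
    // Logger::VERBOSE enables both.
    DefaultLogger::create("assimp_import.log", Logger::VERBOSE);

    ASSIMP_LOG_DEBUG("visible at DEBUG and VERBOSE severity");
    ASSIMP_LOG_VERBOSE_DEBUG("visible only at VERBOSE severity");

    DefaultLogger::kill();
    return 0;
}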

View File

@ -66,6 +66,11 @@ public:
(void)message; //this avoids compiler warnings
}
/** @brief Logs a verbose debug message */
void OnVerboseDebug(const char *message) {
(void)message; //this avoids compiler warnings
}
/** @brief Logs an info message */
void OnInfo(const char* message) {
(void)message; //this avoids compiler warnings
@ -88,7 +93,7 @@ public:
}
/** @brief Detach a still attached stream from logger */
bool detatchStream(LogStream *pStream, unsigned int severity) {
bool detachStream(LogStream *pStream, unsigned int severity) {
(void)pStream; (void)severity; //this avoids compiler warnings
return false;
}
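
Because OnVerboseDebug() is declared pure virtual on Logger, any logger implemented outside the library now has to provide it as well; the stub logger in the last hunk is the in-tree example. A minimal sketch of such a user-side subclass under that assumption; ConsoleLogger and its output prefixes are illustrative, not part of the change:

#include <assimp/Logger.hpp>
#include <assimp/LogStream.hpp>
#include <iostream>

class ConsoleLogger : public Assimp::Logger {
public:
    explicit ConsoleLogger(LogSeverity severity) : Assimp::Logger(severity) {}

    // Stream management is not needed for a simple console logger.
    bool attachStream(Assimp::LogStream*, unsigned int) override { return false; }
    bool detachStream(Assimp::LogStream*, unsigned int) override { return false; }

protected:
    void OnDebug(const char *msg) override        { std::cout << "Debug:   " << msg << '\n'; }
    void OnVerboseDebug(const char *msg) override { std::cout << "Verbose: " << msg << '\n'; }
    void OnInfo(const char *msg) override         { std::cout << "Info:    " << msg << '\n'; }
    void OnWarn(const char *msg) override         { std::cout << "Warn:    " << msg << '\n'; }
    void OnError(const char *msg) override        { std::cout << "Error:   " << msg << '\n'; }
};

Registering it via DefaultLogger::set(new ConsoleLogger(Assimp::Logger::VERBOSE)) would route all importer log calls, including the new verbose ones, to stdout.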