Build formatting into DeadlyImportError
parent 974252bd8f
commit 0ffcdf160e
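The change is mechanical across the touched loaders: DeadlyImportError, DeadlyExportError and the per-loader ThrowException helpers now take a variadic argument pack and build the message internally through Assimp::Formatter::format, so call sites no longer pre-assemble the string with "format() <<" or string concatenation. A typical before/after pair, quoted from the XFileParser hunks below:

    // before: the caller formats the message first
    throw DeadlyImportError(format() << "Line " << mLineNumber << ": " << pText);

    // after: the variadic constructor formats the parts itself
    throw DeadlyImportError("Line ", mLineNumber, ": ", args...);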
@@ -71,6 +71,13 @@ static const aiImporterDesc desc = {
     "bvh"
 };
 
+// ------------------------------------------------------------------------------------------------
+// Aborts the file reading with an exception
+template<typename... T>
+AI_WONT_RETURN void BVHLoader::ThrowException(T&&... args) {
+    throw DeadlyImportError(mFileName, ":", mLine, " - ", args...);
+}
+
 // ------------------------------------------------------------------------------------------------
 // Constructor to be privately used by Importer
 BVHLoader::BVHLoader() :
@@ -176,12 +183,12 @@ aiNode *BVHLoader::ReadNode() {
     // first token is name
     std::string nodeName = GetNextToken();
     if (nodeName.empty() || nodeName == "{")
-        ThrowException(format() << "Expected node name, but found \"" << nodeName << "\".");
+        ThrowException("Expected node name, but found \"", nodeName, "\".");
 
     // then an opening brace should follow
     std::string openBrace = GetNextToken();
     if (openBrace != "{")
-        ThrowException(format() << "Expected opening brace \"{\", but found \"" << openBrace << "\".");
+        ThrowException("Expected opening brace \"{\", but found \"", openBrace, "\".");
 
     // Create a node
     aiNode *node = new aiNode(nodeName);
@@ -211,7 +218,7 @@ aiNode *BVHLoader::ReadNode() {
                 siteToken.clear();
                 siteToken = GetNextToken();
                 if (siteToken != "Site")
-                    ThrowException(format() << "Expected \"End Site\" keyword, but found \"" << token << " " << siteToken << "\".");
+                    ThrowException("Expected \"End Site\" keyword, but found \"", token, " ", siteToken, "\".");
 
                 aiNode *child = ReadEndSite(nodeName);
                 child->mParent = node;
@@ -221,7 +228,7 @@ aiNode *BVHLoader::ReadNode() {
             break;
         } else {
             // everything else is a parse error
-            ThrowException(format() << "Unknown keyword \"" << token << "\".");
+            ThrowException("Unknown keyword \"", token, "\".");
         }
     }
 
@@ -242,7 +249,7 @@ aiNode *BVHLoader::ReadEndSite(const std::string &pParentName) {
     // check opening brace
     std::string openBrace = GetNextToken();
     if (openBrace != "{")
-        ThrowException(format() << "Expected opening brace \"{\", but found \"" << openBrace << "\".");
+        ThrowException("Expected opening brace \"{\", but found \"", openBrace, "\".");
 
     // Create a node
     aiNode *node = new aiNode("EndSite_" + pParentName);
@@ -261,7 +268,7 @@ aiNode *BVHLoader::ReadEndSite(const std::string &pParentName) {
             break;
         } else {
             // everything else is a parse error
-            ThrowException(format() << "Unknown keyword \"" << token << "\".");
+            ThrowException("Unknown keyword \"", token, "\".");
         }
     }
 
@@ -307,7 +314,7 @@ void BVHLoader::ReadNodeChannels(BVHLoader::Node &pNode) {
         else if (channelToken == "Zrotation")
             pNode.mChannels.push_back(Channel_RotationZ);
         else
-            ThrowException(format() << "Invalid channel specifier \"" << channelToken << "\".");
+            ThrowException("Invalid channel specifier \"", channelToken, "\".");
     }
 }
 
@@ -317,7 +324,7 @@ void BVHLoader::ReadMotion(aiScene * /*pScene*/) {
     // Read number of frames
     std::string tokenFrames = GetNextToken();
     if (tokenFrames != "Frames:")
-        ThrowException(format() << "Expected frame count \"Frames:\", but found \"" << tokenFrames << "\".");
+        ThrowException("Expected frame count \"Frames:\", but found \"", tokenFrames, "\".");
 
     float numFramesFloat = GetNextTokenAsFloat();
     mAnimNumFrames = (unsigned int)numFramesFloat;
@@ -326,7 +333,7 @@ void BVHLoader::ReadMotion(aiScene * /*pScene*/) {
     std::string tokenDuration1 = GetNextToken();
     std::string tokenDuration2 = GetNextToken();
     if (tokenDuration1 != "Frame" || tokenDuration2 != "Time:")
-        ThrowException(format() << "Expected frame duration \"Frame Time:\", but found \"" << tokenDuration1 << " " << tokenDuration2 << "\".");
+        ThrowException("Expected frame duration \"Frame Time:\", but found \"", tokenDuration1, " ", tokenDuration2, "\".");
 
     mAnimTickDuration = GetNextTokenAsFloat();
 
@@ -393,17 +400,11 @@ float BVHLoader::GetNextTokenAsFloat() {
     ctoken = fast_atoreal_move<float>(ctoken, result);
 
     if (ctoken != token.c_str() + token.length())
-        ThrowException(format() << "Expected a floating point number, but found \"" << token << "\".");
+        ThrowException("Expected a floating point number, but found \"", token, "\".");
 
     return result;
 }
 
-// ------------------------------------------------------------------------------------------------
-// Aborts the file reading with an exception
-AI_WONT_RETURN void BVHLoader::ThrowException(const std::string &pError) {
-    throw DeadlyImportError(format() << mFileName << ":" << mLine << " - " << pError);
-}
-
 // ------------------------------------------------------------------------------------------------
 // Constructs an animation for the motion data and stores it in the given scene
 void BVHLoader::CreateAnimation(aiScene *pScene) {
@@ -134,7 +134,8 @@ protected:
     float GetNextTokenAsFloat();
 
     /** Aborts the file reading with an exception */
-    AI_WONT_RETURN void ThrowException(const std::string &pError) AI_WONT_RETURN_SUFFIX;
+    template<typename... T>
+    AI_WONT_RETURN void ThrowException(T&&... args) AI_WONT_RETURN_SUFFIX;
 
     /** Constructs an animation for the motion data and stores it in the given scene */
     void CreateAnimation(aiScene *pScene);
@@ -149,7 +149,7 @@ bool isValidCustomDataType(const int cdtype) {
 
 bool readCustomData(std::shared_ptr<ElemBase> &out, const int cdtype, const size_t cnt, const FileDatabase &db) {
     if (!isValidCustomDataType(cdtype)) {
-        throw Error((Formatter::format(), "CustomData.type ", cdtype, " out of index"));
+        throw Error("CustomData.type ", cdtype, " out of index");
     }
 
     const CustomDataTypeDescription cdtd = customDataTypeDescriptions[cdtype];
@@ -130,9 +130,7 @@ void DNAParser::Parse() {
 
         uint16_t n = stream.GetI2();
         if (n >= types.size()) {
-            throw DeadlyImportError((format(),
-                    "BlenderDNA: Invalid type index in structure name", n,
-                    " (there are only ", types.size(), " entries)"));
+            throw DeadlyImportError("BlenderDNA: Invalid type index in structure name", n, " (there are only ", types.size(), " entries)");
         }
 
         // maintain separate indexes
@@ -151,9 +149,7 @@ void DNAParser::Parse() {
 
         uint16_t j = stream.GetI2();
         if (j >= types.size()) {
-            throw DeadlyImportError((format(),
-                    "BlenderDNA: Invalid type index in structure field ", j,
-                    " (there are only ", types.size(), " entries)"));
+            throw DeadlyImportError("BlenderDNA: Invalid type index in structure field ", j, " (there are only ", types.size(), " entries)");
         }
         s.fields.push_back(Field());
         Field &f = s.fields.back();
@@ -164,9 +160,7 @@ void DNAParser::Parse() {
 
         j = stream.GetI2();
         if (j >= names.size()) {
-            throw DeadlyImportError((format(),
-                    "BlenderDNA: Invalid name index in structure field ", j,
-                    " (there are only ", names.size(), " entries)"));
+            throw DeadlyImportError("BlenderDNA: Invalid name index in structure field ", j, " (there are only ", names.size(), " entries)");
         }
 
         f.name = names[j];
@@ -188,9 +182,7 @@ void DNAParser::Parse() {
         if (*f.name.rbegin() == ']') {
             const std::string::size_type rb = f.name.find('[');
             if (rb == std::string::npos) {
-                throw DeadlyImportError((format(),
-                        "BlenderDNA: Encountered invalid array declaration ",
-                        f.name));
+                throw DeadlyImportError("BlenderDNA: Encountered invalid array declaration ", f.name);
             }
 
             f.flags |= FieldFlag_Array;
@@ -83,9 +83,10 @@ class ObjectCache;
  * ancestry. */
 // -------------------------------------------------------------------------------
 struct Error : DeadlyImportError {
-    Error(const std::string &s) :
-            DeadlyImportError(s) {
-        // empty
+    template<typename... T>
+    explicit Error(T&&... args)
+        : DeadlyImportError(args...)
+    {
     }
 };
 
@@ -57,9 +57,7 @@ const Field& Structure :: operator [] (const std::string& ss) const
 {
     std::map<std::string, size_t>::const_iterator it = indices.find(ss);
     if (it == indices.end()) {
-        throw Error((Formatter::format(),
-            "BlendDNA: Did not find a field named `",ss,"` in structure `",name,"`"
-            ));
+        throw Error("BlendDNA: Did not find a field named `",ss,"` in structure `",name,"`");
     }
 
     return fields[(*it).second];
@@ -76,9 +74,7 @@ const Field* Structure :: Get (const std::string& ss) const
 const Field& Structure :: operator [] (const size_t i) const
 {
     if (i >= fields.size()) {
-        throw Error((Formatter::format(),
-            "BlendDNA: There is no field with index `",i,"` in structure `",name,"`"
-            ));
+        throw Error("BlendDNA: There is no field with index `",i,"` in structure `",name,"`");
     }
 
     return fields[i];
@@ -109,9 +105,7 @@ void Structure :: ReadFieldArray(T (& out)[M], const char* name, const FileDatab
 
         // is the input actually an array?
         if (!(f.flags & FieldFlag_Array)) {
-            throw Error((Formatter::format(),"Field `",name,"` of structure `",
-                this->name,"` ought to be an array of size ",M
-                ));
+            throw Error("Field `",name,"` of structure `",this->name,"` ought to be an array of size ",M);
         }
 
         db.reader->IncPtr(f.offset);
@@ -148,9 +142,9 @@ void Structure :: ReadFieldArray2(T (& out)[M][N], const char* name, const FileD
 
         // is the input actually an array?
         if (!(f.flags & FieldFlag_Array)) {
-            throw Error((Formatter::format(),"Field `",name,"` of structure `",
+            throw Error("Field `",name,"` of structure `",
                 this->name,"` ought to be an array of size ",M,"*",N
-                ));
+                );
         }
 
         db.reader->IncPtr(f.offset);
@@ -195,8 +189,8 @@ bool Structure :: ReadFieldPtr(TOUT<T>& out, const char* name, const FileDatabas
 
     // sanity check, should never happen if the genblenddna script is right
     if (!(f->flags & FieldFlag_Pointer)) {
-        throw Error((Formatter::format(),"Field `",name,"` of structure `",
-            this->name,"` ought to be a pointer"));
+        throw Error("Field `",name,"` of structure `",
+            this->name,"` ought to be a pointer");
     }
 
     db.reader->IncPtr(f->offset);
@@ -241,8 +235,8 @@ bool Structure :: ReadFieldPtr(TOUT<T> (&out)[N], const char* name,
 #ifdef _DEBUG
     // sanity check, should never happen if the genblenddna script is right
     if ((FieldFlag_Pointer|FieldFlag_Pointer) != (f->flags & (FieldFlag_Pointer|FieldFlag_Pointer))) {
-        throw Error((Formatter::format(),"Field `",name,"` of structure `",
-            this->name,"` ought to be a pointer AND an array"));
+        throw Error("Field `",name,"` of structure `",
+            this->name,"` ought to be a pointer AND an array");
     }
 #endif // _DEBUG
 
@@ -322,8 +316,8 @@ bool Structure::ReadCustomDataPtr(std::shared_ptr<ElemBase>&out, int cdtype, con
 
     // sanity check, should never happen if the genblenddna script is right
     if (!(f->flags & FieldFlag_Pointer)) {
-        throw Error((Formatter::format(), "Field `", name, "` of structure `",
-                this->name, "` ought to be a pointer"));
+        throw Error("Field `", name, "` of structure `",
+                this->name, "` ought to be a pointer");
     }
 
     db.reader->IncPtr(f->offset);
@@ -369,8 +363,8 @@ bool Structure::ReadFieldPtrVector(vector<TOUT<T>>&out, const char* name, const
 
     // sanity check, should never happen if the genblenddna script is right
     if (!(f->flags & FieldFlag_Pointer)) {
-        throw Error((Formatter::format(), "Field `", name, "` of structure `",
-                this->name, "` ought to be a pointer"));
+        throw Error("Field `", name, "` of structure `",
+                this->name, "` ought to be a pointer");
     }
 
     db.reader->IncPtr(f->offset);
@@ -428,9 +422,9 @@ bool Structure :: ResolvePointer(TOUT<T>& out, const Pointer & ptrval, const Fil
     // and check if it matches the type which we expect.
     const Structure& ss = db.dna[block->dna_index];
     if (ss != s) {
-        throw Error((Formatter::format(),"Expected target to be of type `",s.name,
+        throw Error("Expected target to be of type `",s.name,
             "` but seemingly it is a `",ss.name,"` instead"
-            ));
+            );
     }
 
     // try to retrieve the object from the cache
@@ -614,16 +608,14 @@ const FileBlockHead* Structure :: LocateFileBlockForAddress(const Pointer & ptrv
     if (it == db.entries.end()) {
         // this is crucial, pointers may not be invalid.
         // this is either a corrupted file or an attempted attack.
-        throw DeadlyImportError((Formatter::format(),"Failure resolving pointer 0x",
-            std::hex,ptrval.val,", no file block falls into this address range"
-            ));
+        throw DeadlyImportError("Failure resolving pointer 0x",
+            std::hex,ptrval.val,", no file block falls into this address range");
     }
     if (ptrval.val >= (*it).address.val + (*it).size) {
-        throw DeadlyImportError((Formatter::format(),"Failure resolving pointer 0x",
+        throw DeadlyImportError("Failure resolving pointer 0x",
             std::hex,ptrval.val,", nearest file block starting at 0x",
             (*it).address.val," ends at 0x",
-            (*it).address.val + (*it).size
-            ));
+            (*it).address.val + (*it).size);
     }
     return &*it;
 }
@@ -676,7 +668,7 @@ template <typename T> inline void ConvertDispatcher(T& out, const Structure& in,
         out = static_cast<T>(db.reader->GetF8());
     }
     else {
-        throw DeadlyImportError("Unknown source for conversion to primitive data type: "+in.name);
+        throw DeadlyImportError("Unknown source for conversion to primitive data type: ", in.name);
     }
 }
 
@@ -784,9 +776,7 @@ const Structure& DNA :: operator [] (const std::string& ss) const
 {
     std::map<std::string, size_t>::const_iterator it = indices.find(ss);
     if (it == indices.end()) {
-        throw Error((Formatter::format(),
-            "BlendDNA: Did not find a structure named `",ss,"`"
-            ));
+        throw Error("BlendDNA: Did not find a structure named `",ss,"`");
     }
 
     return structures[(*it).second];
@@ -803,9 +793,7 @@ const Structure* DNA :: Get (const std::string& ss) const
 const Structure& DNA :: operator [] (const size_t i) const
 {
     if (i >= structures.size()) {
-        throw Error((Formatter::format(),
-            "BlendDNA: There is no structure with index `",i,"`"
-            ));
+        throw Error("BlendDNA: There is no structure with index `",i,"`");
     }
 
     return structures[i];
@@ -748,9 +748,8 @@ void BlenderImporter::BuildMaterials(ConversionData &conv_data) {
 void BlenderImporter::CheckActualType(const ElemBase *dt, const char *check) {
     ai_assert(dt);
     if (strcmp(dt->dna_type, check)) {
-        ThrowException((format(),
-                "Expected object at ", std::hex, dt, " to be of type `", check,
-                "`, but it claims to be a `", dt->dna_type, "`instead"));
+        ThrowException("Expected object at ", std::hex, dt, " to be of type `", check,
+                "`, but it claims to be a `", dt->dna_type, "`instead");
     }
 }
 
@@ -1251,7 +1251,7 @@ void ColladaLoader::CreateAnimation(aiScene *pScene, const ColladaParser &pParse
 
         // time count and value count must match
         if (e.mTimeAccessor->mCount != e.mValueAccessor->mCount)
-            throw DeadlyImportError(format() << "Time count / value count mismatch in animation channel \"" << e.mChannel->mTarget << "\".");
+            throw DeadlyImportError("Time count / value count mismatch in animation channel \"", e.mChannel->mTarget, "\".");
 
         if (e.mTimeAccessor->mCount > 0) {
             // find bounding times
@@ -66,6 +66,13 @@ using namespace Assimp;
 using namespace Assimp::Collada;
 using namespace Assimp::Formatter;
 
+// ------------------------------------------------------------------------------------------------
+// Aborts the file reading with an exception
+template<typename... T>
+AI_WONT_RETURN void ColladaParser::ThrowException(T&&... args) const {
+    throw DeadlyImportError("Collada: ", mFileName, " - ", args...);
+}
+
 // ------------------------------------------------------------------------------------------------
 // Constructor to be privately used by Importer
 ColladaParser::ColladaParser(IOSystem *pIOHandler, const std::string &pFile) :
@@ -853,7 +860,7 @@ void ColladaParser::ReadControllerJoints(Collada::Controller &pController) {
 
             // local URLS always start with a '#'. We don't support global URLs
             if (attrSource[0] != '#')
-                ThrowException(format() << "Unsupported URL format in \"" << attrSource << "\" in source attribute of <joints> data <input> element");
+                ThrowException("Unsupported URL format in \"", attrSource, "\" in source attribute of <joints> data <input> element");
             attrSource++;
 
             // parse source URL to corresponding source
@@ -862,7 +869,7 @@ void ColladaParser::ReadControllerJoints(Collada::Controller &pController) {
             else if (strcmp(attrSemantic, "INV_BIND_MATRIX") == 0)
                 pController.mJointOffsetMatrixSource = attrSource;
             else
-                ThrowException(format() << "Unknown semantic \"" << attrSemantic << "\" in <joints> data <input> element");
+                ThrowException("Unknown semantic \"", attrSemantic, "\" in <joints> data <input> element");
 
             // skip inner data, if present
             if (!mReader->isEmptyElement())
@@ -904,7 +911,7 @@ void ColladaParser::ReadControllerWeights(Collada::Controller &pController) {
 
             // local URLS always start with a '#'. We don't support global URLs
             if (attrSource[0] != '#')
-                ThrowException(format() << "Unsupported URL format in \"" << attrSource << "\" in source attribute of <vertex_weights> data <input> element");
+                ThrowException("Unsupported URL format in \"", attrSource, "\" in source attribute of <vertex_weights> data <input> element");
             channel.mAccessor = attrSource + 1;
 
             // parse source URL to corresponding source
@@ -913,7 +920,7 @@ void ColladaParser::ReadControllerWeights(Collada::Controller &pController) {
             else if (strcmp(attrSemantic, "WEIGHT") == 0)
                 pController.mWeightInputWeights = channel;
             else
-                ThrowException(format() << "Unknown semantic \"" << attrSemantic << "\" in <vertex_weights> data <input> element");
+                ThrowException("Unknown semantic \"", attrSemantic, "\" in <vertex_weights> data <input> element");
 
             // skip inner data, if present
             if (!mReader->isEmptyElement())
@@ -1901,7 +1908,7 @@ void ColladaParser::ReadAccessor(const std::string &pID) {
     int attrSource = GetAttribute("source");
     const char *source = mReader->getAttributeValue(attrSource);
     if (source[0] != '#')
-        ThrowException(format() << "Unknown reference format in url \"" << source << "\" in source attribute of <accessor> element.");
+        ThrowException("Unknown reference format in url \"", source, "\" in source attribute of <accessor> element.");
     int attrCount = GetAttribute("count");
     unsigned int count = (unsigned int)mReader->getAttributeValueAsInt(attrCount);
     int attrOffset = TestAttribute("offset");
@@ -1968,7 +1975,7 @@ void ColladaParser::ReadAccessor(const std::string &pID) {
                 else if (name == "V")
                     acc.mSubOffset[1] = acc.mParams.size();
                 //else
-                //    DefaultLogger::get()->warn( format() << "Unknown accessor parameter \"" << name << "\". Ignoring data channel." );
+                //    DefaultLogger::get()->warn( "Unknown accessor parameter \"", name, "\". Ignoring data channel." );
             }
 
             // read data type
@@ -1989,7 +1996,7 @@ void ColladaParser::ReadAccessor(const std::string &pID) {
                 // skip remaining stuff of this element, if any
                 SkipElement();
             } else {
-                ThrowException(format() << "Unexpected sub element <" << mReader->getNodeName() << "> in tag <accessor>");
+                ThrowException("Unexpected sub element <", mReader->getNodeName(), "> in tag <accessor>");
             }
         } else if (mReader->getNodeType() == irr::io::EXN_ELEMENT_END) {
             if (strcmp(mReader->getNodeName(), "accessor") != 0)
@@ -2012,7 +2019,7 @@ void ColladaParser::ReadVertexData(Mesh &pMesh) {
             if (IsElement("input")) {
                 ReadInputChannel(pMesh.mPerVertexData);
             } else {
-                ThrowException(format() << "Unexpected sub element <" << mReader->getNodeName() << "> in tag <vertices>");
+                ThrowException("Unexpected sub element <", mReader->getNodeName(), "> in tag <vertices>");
             }
         } else if (mReader->getNodeType() == irr::io::EXN_ELEMENT_END) {
             if (strcmp(mReader->getNodeName(), "vertices") != 0)
@@ -2096,11 +2103,11 @@ void ColladaParser::ReadIndexData(Mesh &pMesh) {
             } else if (IsElement("ph")) {
                 SkipElement("ph");
             } else {
-                ThrowException(format() << "Unexpected sub element <" << mReader->getNodeName() << "> in tag <" << elementName << ">");
+                ThrowException("Unexpected sub element <", mReader->getNodeName(), "> in tag <", elementName, ">");
             }
         } else if (mReader->getNodeType() == irr::io::EXN_ELEMENT_END) {
             if (mReader->getNodeName() != elementName)
-                ThrowException(format() << "Expected end of <" << elementName << "> element.");
+                ThrowException("Expected end of <", elementName, "> element.");
 
             break;
         }
@@ -2132,7 +2139,7 @@ void ColladaParser::ReadInputChannel(std::vector<InputChannel> &poChannels) {
     int attrSource = GetAttribute("source");
     const char *source = mReader->getAttributeValue(attrSource);
     if (source[0] != '#')
-        ThrowException(format() << "Unknown reference format in url \"" << source << "\" in source attribute of <input> element.");
+        ThrowException("Unknown reference format in url \"", source, "\" in source attribute of <input> element.");
     channel.mAccessor = source + 1; // skipping the leading #, hopefully the remaining text is the accessor ID only
 
     // read index offset, if per-index <input>
@@ -2146,7 +2153,7 @@ void ColladaParser::ReadInputChannel(std::vector<InputChannel> &poChannels) {
     if (attrSet > -1) {
         attrSet = mReader->getAttributeValueAsInt(attrSet);
         if (attrSet < 0)
-            ThrowException(format() << "Invalid index \"" << (attrSet) << "\" in set attribute of <input> element");
+            ThrowException("Invalid index \"", (attrSet), "\" in set attribute of <input> element");
 
         channel.mIndex = attrSet;
     }
@@ -2369,7 +2376,7 @@ void ColladaParser::ExtractDataObjectFromChannel(const InputChannel &pInput, siz
 
     const Accessor &acc = *pInput.mResolved;
     if (pLocalIndex >= acc.mCount)
-        ThrowException(format() << "Invalid data index (" << pLocalIndex << "/" << acc.mCount << ") in primitive specification");
+        ThrowException("Invalid data index (", pLocalIndex, "/", acc.mCount, ") in primitive specification");
 
     // get a pointer to the start of the data object referred to by the accessor and the local index
     const ai_real *dataObject = &(acc.mData->mValues[0]) + acc.mOffset + pLocalIndex * acc.mStride;
@@ -2781,12 +2788,6 @@ void ColladaParser::ReadScene() {
         }
     }
 
-// ------------------------------------------------------------------------------------------------
-// Aborts the file reading with an exception
-AI_WONT_RETURN void ColladaParser::ThrowException(const std::string &pError) const {
-    throw DeadlyImportError(format() << "Collada: " << mFileName << " - " << pError);
-}
-
 void ColladaParser::ReportWarning(const char *msg, ...) {
     ai_assert(nullptr != msg);
 
@@ -2833,17 +2834,17 @@ void ColladaParser::SkipElement(const char *pElement) {
 void ColladaParser::TestOpening(const char *pName) {
     // read element start
     if (!mReader->read()) {
-        ThrowException(format() << "Unexpected end of file while beginning of <" << pName << "> element.");
+        ThrowException("Unexpected end of file while beginning of <", pName, "> element.");
     }
     // whitespace in front is ok, just read again if found
     if (mReader->getNodeType() == irr::io::EXN_TEXT) {
         if (!mReader->read()) {
-            ThrowException(format() << "Unexpected end of file while reading beginning of <" << pName << "> element.");
+            ThrowException("Unexpected end of file while reading beginning of <", pName, "> element.");
         }
     }
 
     if (mReader->getNodeType() != irr::io::EXN_ELEMENT || strcmp(mReader->getNodeName(), pName) != 0) {
-        ThrowException(format() << "Expected start of <" << pName << "> element.");
+        ThrowException("Expected start of <", pName, "> element.");
     }
 }
 
@@ -2862,18 +2863,18 @@ void ColladaParser::TestClosing(const char *pName) {
 
     // if not, read some more
     if (!mReader->read()) {
-        ThrowException(format() << "Unexpected end of file while reading end of <" << pName << "> element.");
+        ThrowException("Unexpected end of file while reading end of <", pName, "> element.");
     }
     // whitespace in front is ok, just read again if found
     if (mReader->getNodeType() == irr::io::EXN_TEXT) {
         if (!mReader->read()) {
-            ThrowException(format() << "Unexpected end of file while reading end of <" << pName << "> element.");
+            ThrowException("Unexpected end of file while reading end of <", pName, "> element.");
         }
     }
 
     // but this has the be the closing tag, or we're lost
     if (mReader->getNodeType() != irr::io::EXN_ELEMENT_END || strcmp(mReader->getNodeName(), pName) != 0) {
-        ThrowException(format() << "Expected end of <" << pName << "> element.");
+        ThrowException("Expected end of <", pName, "> element.");
     }
 }
 
@@ -2882,7 +2883,7 @@ void ColladaParser::TestClosing(const char *pName) {
 int ColladaParser::GetAttribute(const char *pAttr) const {
     int index = TestAttribute(pAttr);
     if (index == -1) {
-        ThrowException(format() << "Expected attribute \"" << pAttr << "\" for element <" << mReader->getNodeName() << ">.");
+        ThrowException("Expected attribute \"", pAttr, "\" for element <", mReader->getNodeName(), ">.");
     }
 
     // attribute not found -> throw an exception
@@ -242,7 +242,9 @@ protected:
 protected:
     /** Aborts the file reading with an exception */
     AI_WONT_RETURN void ThrowException(const std::string &pError) const AI_WONT_RETURN_SUFFIX;
+    template<typename... T>
+    AI_WONT_RETURN void ThrowException(T&&... args) const AI_WONT_RETURN_SUFFIX;
 
     void ReportWarning(const char *msg, ...);
 
     /** Skips all data until the end node of the current element */
@@ -383,7 +385,7 @@ template <typename Type>
 const Type &ColladaParser::ResolveLibraryReference(const std::map<std::string, Type> &pLibrary, const std::string &pURL) const {
     typename std::map<std::string, Type>::const_iterator it = pLibrary.find(pURL);
     if (it == pLibrary.end())
-        ThrowException(Formatter::format() << "Unable to resolve library reference \"" << pURL << "\".");
+        ThrowException("Unable to resolve library reference \"", pURL, "\".");
     return it->second;
 }
 
@@ -187,8 +187,8 @@ Mesh *OgreBinarySerializer::ImportMesh(MemoryStreamReader *stream) {
     /// @todo Check what we can actually support.
     std::string version = serializer.ReadLine();
     if (version != MESH_VERSION_1_8) {
-        throw DeadlyExportError(Formatter::format() << "Mesh version " << version << " not supported by this importer. Run OgreMeshUpgrader tool on the file and try again."
-                << " Supported versions: " << MESH_VERSION_1_8);
+        throw DeadlyExportError("Mesh version ", version, " not supported by this importer. Run OgreMeshUpgrader tool on the file and try again.",
+                " Supported versions: ", MESH_VERSION_1_8);
     }
 
     Mesh *mesh = new Mesh();
@@ -471,7 +471,7 @@ void OgreBinarySerializer::ReadSubMeshNames(Mesh *mesh) {
         uint16_t submeshIndex = Read<uint16_t>();
         SubMesh *submesh = mesh->GetSubMesh(submeshIndex);
         if (!submesh) {
-            throw DeadlyImportError(Formatter::format() << "Ogre Mesh does not include submesh " << submeshIndex << " referenced in M_SUBMESH_NAME_TABLE_ELEMENT. Invalid mesh file.");
+            throw DeadlyImportError("Ogre Mesh does not include submesh ", submeshIndex, " referenced in M_SUBMESH_NAME_TABLE_ELEMENT. Invalid mesh file.");
         }
 
         submesh->name = ReadLine();
@@ -803,8 +803,8 @@ void OgreBinarySerializer::ReadSkeleton(Skeleton *skeleton) {
     // This deserialization supports both versions of the skeleton spec
     std::string version = ReadLine();
     if (version != SKELETON_VERSION_1_8 && version != SKELETON_VERSION_1_1) {
-        throw DeadlyExportError(Formatter::format() << "Skeleton version " << version << " not supported by this importer."
-                << " Supported versions: " << SKELETON_VERSION_1_8 << " and " << SKELETON_VERSION_1_1);
+        throw DeadlyExportError("Skeleton version ", version, " not supported by this importer.",
+                " Supported versions: ", SKELETON_VERSION_1_8, " and ", SKELETON_VERSION_1_1);
     }
 
     ASSIMP_LOG_VERBOSE_DEBUG("Reading Skeleton");
@@ -871,7 +871,7 @@ void OgreBinarySerializer::ReadBone(Skeleton *skeleton) {
 
     // Bone indexes need to start from 0 and be contiguous
     if (bone->id != skeleton->bones.size()) {
-        throw DeadlyImportError(Formatter::format() << "Ogre Skeleton bone indexes not contiguous. Error at bone index " << bone->id);
+        throw DeadlyImportError("Ogre Skeleton bone indexes not contiguous. Error at bone index ", bone->id);
     }
 
     ASSIMP_LOG_VERBOSE_DEBUG_F(" ", bone->id, " ", bone->name);
@@ -889,7 +889,7 @@ void OgreBinarySerializer::ReadBoneParent(Skeleton *skeleton) {
     if (child && parent)
         parent->AddChild(child);
     else
-        throw DeadlyImportError(Formatter::format() << "Failed to find bones for parenting: Child id " << childId << " for parent id " << parentId);
+        throw DeadlyImportError("Failed to find bones for parenting: Child id ", childId, " for parent id ", parentId);
 }
 
 void OgreBinarySerializer::ReadSkeletonAnimation(Skeleton *skeleton) {
@@ -926,7 +926,7 @@ void OgreBinarySerializer::ReadSkeletonAnimationTrack(Skeleton * /*skeleton*/, A
     uint16_t boneId = Read<uint16_t>();
     Bone *bone = dest->parentSkeleton->BoneById(boneId);
     if (!bone) {
-        throw DeadlyImportError(Formatter::format() << "Cannot read animation track, target bone " << boneId << " not in target Skeleton");
+        throw DeadlyImportError("Cannot read animation track, target bone ", boneId, " not in target Skeleton");
     }
 
     VertexAnimationTrack track;
@@ -476,7 +476,7 @@ void SubMesh::Reset(){
 
 aiMesh *SubMesh::ConvertToAssimpMesh(Mesh *parent) {
     if (operationType != OT_TRIANGLE_LIST) {
-        throw DeadlyImportError(Formatter::format() << "Only mesh operation type OT_TRIANGLE_LIST is supported. Found " << operationType);
+        throw DeadlyImportError("Only mesh operation type OT_TRIANGLE_LIST is supported. Found ", operationType);
     }
 
     aiMesh *dest = new aiMesh();
@@ -944,7 +944,7 @@ void Bone::AddChild(Bone *bone) {
     if (!bone)
         return;
     if (bone->IsParented())
-        throw DeadlyImportError("Attaching child Bone that is already parented: " + bone->name);
+        throw DeadlyImportError("Attaching child Bone that is already parented: ", bone->name);
 
     bone->parent = this;
     bone->parentId = id;
@@ -963,7 +963,7 @@ void Bone::CalculateWorldMatrixAndDefaultPose(Skeleton *skeleton) {
     for (auto boneId : children) {
         Bone *child = skeleton->BoneById(boneId);
         if (!child) {
-            throw DeadlyImportError(Formatter::format() << "CalculateWorldMatrixAndDefaultPose: Failed to find child bone " << boneId << " for parent " << id << " " << name);
+            throw DeadlyImportError("CalculateWorldMatrixAndDefaultPose: Failed to find child bone ", boneId, " for parent ", id, " ", name);
         }
         child->CalculateWorldMatrixAndDefaultPose(skeleton);
     }
@@ -983,7 +983,7 @@ aiNode *Bone::ConvertToAssimpNode(Skeleton *skeleton, aiNode *parentNode) {
     for (size_t i = 0, len = children.size(); i < len; ++i) {
         Bone *child = skeleton->BoneById(children[i]);
         if (!child) {
-            throw DeadlyImportError(Formatter::format() << "ConvertToAssimpNode: Failed to find child bone " << children[i] << " for parent " << id << " " << name);
+            throw DeadlyImportError("ConvertToAssimpNode: Failed to find child bone ", children[i], " for parent ", id, " ", name);
         }
         node->mChildren[i] = child->ConvertToAssimpNode(skeleton, node);
     }
@@ -1022,7 +1022,7 @@ aiNodeAnim *VertexAnimationTrack::ConvertToAssimpAnimationNode(Skeleton *skeleto
 
     Bone *bone = skeleton->BoneByName(boneName);
     if (!bone) {
-        throw DeadlyImportError("VertexAnimationTrack::ConvertToAssimpAnimationNode: Failed to find bone " + boneName + " from parent Skeleton");
+        throw DeadlyImportError("VertexAnimationTrack::ConvertToAssimpAnimationNode: Failed to find bone ", boneName, " from parent Skeleton");
     }
 
     // Keyframes
@@ -59,9 +59,9 @@ namespace Ogre {
 AI_WONT_RETURN void ThrowAttibuteError(const XmlReader *reader, const std::string &name, const std::string &error = "") AI_WONT_RETURN_SUFFIX;
 AI_WONT_RETURN void ThrowAttibuteError(const XmlReader *reader, const std::string &name, const std::string &error) {
     if (!error.empty()) {
-        throw DeadlyImportError(error + " in node '" + std::string(reader->getNodeName()) + "' and attribute '" + name + "'");
+        throw DeadlyImportError(error, " in node '", std::string(reader->getNodeName()), "' and attribute '", name, "'");
     } else {
-        throw DeadlyImportError("Attribute '" + name + "' does not exist in node '" + std::string(reader->getNodeName()) + "'");
+        throw DeadlyImportError("Attribute '", name, "' does not exist in node '", std::string(reader->getNodeName()), "'");
     }
 }
 
@@ -265,7 +265,7 @@ MeshXml *OgreXmlSerializer::ImportMesh(XmlReader *reader) {
 
 void OgreXmlSerializer::ReadMesh(MeshXml *mesh) {
     if (NextNode() != nnMesh) {
-        throw DeadlyImportError("Root node is <" + m_currentNodeName + "> expecting <mesh>");
+        throw DeadlyImportError("Root node is <", m_currentNodeName, "> expecting <mesh>");
     }
 
     ASSIMP_LOG_VERBOSE_DEBUG("Reading Mesh");
@@ -430,18 +430,18 @@ void OgreXmlSerializer::ReadGeometryVertexBuffer(VertexDataXml *dest) {
 
     // Sanity checks
     if (dest->positions.size() != dest->count) {
-        throw DeadlyImportError(Formatter::format() << "Read only " << dest->positions.size() << " positions when should have read " << dest->count);
+        throw DeadlyImportError("Read only ", dest->positions.size(), " positions when should have read ", dest->count);
     }
     if (normals && dest->normals.size() != dest->count) {
-        throw DeadlyImportError(Formatter::format() << "Read only " << dest->normals.size() << " normals when should have read " << dest->count);
+        throw DeadlyImportError("Read only ", dest->normals.size(), " normals when should have read ", dest->count);
     }
     if (tangents && dest->tangents.size() != dest->count) {
-        throw DeadlyImportError(Formatter::format() << "Read only " << dest->tangents.size() << " tangents when should have read " << dest->count);
+        throw DeadlyImportError("Read only ", dest->tangents.size(), " tangents when should have read ", dest->count);
     }
     for (unsigned int i = 0; i < dest->uvs.size(); ++i) {
         if (dest->uvs[i].size() != dest->count) {
-            throw DeadlyImportError(Formatter::format() << "Read only " << dest->uvs[i].size()
-                << " uvs for uv index " << i << " when should have read " << dest->count);
+            throw DeadlyImportError("Read only ", dest->uvs[i].size(),
+                " uvs for uv index ", i, " when should have read ", dest->count);
         }
     }
 }
@@ -507,7 +507,7 @@ void OgreXmlSerializer::ReadSubMesh(MeshXml *mesh) {
             if (submesh->indexData->faces.size() == submesh->indexData->faceCount) {
                 ASSIMP_LOG_VERBOSE_DEBUG_F(" - Faces ", submesh->indexData->faceCount);
             } else {
-                throw DeadlyImportError(Formatter::format() << "Read only " << submesh->indexData->faces.size() << " faces when should have read " << submesh->indexData->faceCount);
+                throw DeadlyImportError("Read only ", submesh->indexData->faces.size(), " faces when should have read ", submesh->indexData->faceCount);
             }
         } else if (m_currentNodeName == nnGeometry) {
             if (submesh->usesSharedVertexData) {
@@ -632,20 +632,20 @@ XmlReaderPtr OgreXmlSerializer::OpenReader(Assimp::IOSystem *pIOHandler, const s
 
     std::unique_ptr<IOStream> file(pIOHandler->Open(filename));
     if (!file.get()) {
-        throw DeadlyImportError("Failed to open skeleton file " + filename);
+        throw DeadlyImportError("Failed to open skeleton file ", filename);
     }
 
     std::unique_ptr<CIrrXML_IOStreamReader> stream(new CIrrXML_IOStreamReader(file.get()));
     XmlReaderPtr reader = XmlReaderPtr(irr::io::createIrrXMLReader(stream.get()));
     if (!reader.get()) {
-        throw DeadlyImportError("Failed to create XML reader for skeleton file " + filename);
+        throw DeadlyImportError("Failed to create XML reader for skeleton file ", filename);
     }
     return reader;
 }
 
 void OgreXmlSerializer::ReadSkeleton(Skeleton *skeleton) {
     if (NextNode() != nnSkeleton) {
-        throw DeadlyImportError("Root node is <" + m_currentNodeName + "> expecting <skeleton>");
+        throw DeadlyImportError("Root node is <", m_currentNodeName, "> expecting <skeleton>");
     }
 
     ASSIMP_LOG_VERBOSE_DEBUG("Reading Skeleton");
@@ -687,7 +687,7 @@ void OgreXmlSerializer::ReadAnimations(Skeleton *skeleton) {
         anim->length = ReadAttribute<float>("length");
 
         if (NextNode() != nnTracks) {
-            throw DeadlyImportError(Formatter::format() << "No <tracks> found in <animation> " << anim->name);
+            throw DeadlyImportError("No <tracks> found in <animation> ", anim->name);
         }
 
         ReadAnimationTracks(anim);
@@ -705,7 +705,7 @@ void OgreXmlSerializer::ReadAnimationTracks(Animation *dest) {
         track.boneName = ReadAttribute<std::string>("bone");
 
         if (NextNode() != nnKeyFrames) {
-            throw DeadlyImportError(Formatter::format() << "No <keyframes> found in <track> " << dest->name);
+            throw DeadlyImportError("No <keyframes> found in <track> ", dest->name);
        }
 
         ReadAnimationKeyFrames(dest, &track);
@@ -732,7 +732,7 @@ void OgreXmlSerializer::ReadAnimationKeyFrames(Animation *anim, VertexAnimationT
                 float angle = ReadAttribute<float>("angle");
 
                 if (NextNode() != nnAxis) {
-                    throw DeadlyImportError("No axis specified for keyframe rotation in animation " + anim->name);
+                    throw DeadlyImportError("No axis specified for keyframe rotation in animation ", anim->name);
                 }
 
                 aiVector3D axis;
@@ -774,7 +774,7 @@ void OgreXmlSerializer::ReadBoneHierarchy(Skeleton *skeleton) {
         if (bone && parent)
             parent->AddChild(bone);
         else
-            throw DeadlyImportError("Failed to find bones for parenting: Child " + name + " for parent " + parentName);
+            throw DeadlyImportError("Failed to find bones for parenting: Child ", name, " for parent ", parentName);
     }
 
     // Calculate bone matrices for root bones. Recursively calculates their children.
@@ -813,7 +813,7 @@ void OgreXmlSerializer::ReadBones(Skeleton *skeleton) {
                 float angle = ReadAttribute<float>("angle");
 
                 if (NextNode() != nnAxis) {
-                    throw DeadlyImportError(Formatter::format() << "No axis specified for bone rotation in bone " << bone->id);
+                    throw DeadlyImportError("No axis specified for bone rotation in bone ", bone->id);
                 }
 
                 aiVector3D axis;
@@ -854,7 +854,7 @@ void OgreXmlSerializer::ReadBones(Skeleton *skeleton) {
         ASSIMP_LOG_VERBOSE_DEBUG_F(" ", b->id, " ", b->name);
 
         if (b->id != static_cast<uint16_t>(i)) {
-            throw DeadlyImportError(Formatter::format() << "Bone ids are not in sequence starting from 0. Missing index " << i);
+            throw DeadlyImportError("Bone ids are not in sequence starting from 0. Missing index ", i);
         }
     }
 }
@@ -82,6 +82,17 @@ static void dummy_free(void * /*opaque*/, void *address) {
 
 #endif // !! ASSIMP_BUILD_NO_COMPRESSED_X
 
+// ------------------------------------------------------------------------------------------------
+// Throws an exception with a line number and the given text.
+template<typename... T>
+AI_WONT_RETURN void XFileParser::ThrowException(T&&... args) {
+    if (mIsBinaryFormat) {
+        throw DeadlyImportError(args...);
+    } else {
+        throw DeadlyImportError("Line ", mLineNumber, ": ", args...);
+    }
+}
+
 // ------------------------------------------------------------------------------------------------
 // Constructor. Creates a data structure out of the XFile given in the memory block.
 XFileParser::XFileParser(const std::vector<char> &pBuffer) :
@@ -122,13 +133,13 @@ XFileParser::XFileParser(const std::vector<char> &pBuffer) :
             mIsBinaryFormat = true;
             compressed = true;
         } else
-            ThrowException(format() << "Unsupported xfile format '" << mP[8] << mP[9] << mP[10] << mP[11] << "'");
+            ThrowException("Unsupported xfile format '", mP[8], mP[9], mP[10], mP[11], "'");
 
         // float size
         mBinaryFloatSize = (unsigned int)(mP[12] - 48) * 1000 + (unsigned int)(mP[13] - 48) * 100 + (unsigned int)(mP[14] - 48) * 10 + (unsigned int)(mP[15] - 48);
 
         if (mBinaryFloatSize != 32 && mBinaryFloatSize != 64)
-            ThrowException(format() << "Unknown float size " << mBinaryFloatSize << " specified in xfile header.");
+            ThrowException("Unknown float size ", mBinaryFloatSize, " specified in xfile header.");
 
         // The x format specifies size in bits, but we work in bytes
         mBinaryFloatSize /= 8;
@@ -864,7 +875,7 @@ void XFileParser::ParseDataObjectAnimationKey(AnimBone *pAnimBone) {
         }
 
         default:
-            ThrowException(format() << "Unknown key type " << keyType << " in animation.");
+            ThrowException("Unknown key type ", keyType, " in animation.");
             break;
         } // end switch
 
@@ -1355,16 +1366,6 @@ aiColor3D XFileParser::ReadRGB() {
     return color;
 }
 
-// ------------------------------------------------------------------------------------------------
-// Throws an exception with a line number and the given text.
-AI_WONT_RETURN void XFileParser::ThrowException(const std::string &pText) {
-    if (mIsBinaryFormat) {
-        throw DeadlyImportError(pText);
-    } else {
-        throw DeadlyImportError(format() << "Line " << mLineNumber << ": " << pText);
-    }
-}
-
 // ------------------------------------------------------------------------------------------------
 // Filters the imported hierarchy for some degenerated cases that some exporters produce.
 void XFileParser::FilterHierarchy(XFile::Node *pNode) {
@@ -133,7 +133,8 @@ protected:
     aiColor4D ReadRGBA();
 
     /** Throws an exception with a line number and the given text. */
-    AI_WONT_RETURN void ThrowException( const std::string& pText) AI_WONT_RETURN_SUFFIX;
+    template<typename... T>
+    AI_WONT_RETURN void ThrowException(T&&... args) AI_WONT_RETURN_SUFFIX;
 
     /**
      * @brief Filters the imported hierarchy for some degenerated cases that some exporters produce.
@@ -200,6 +200,7 @@ SET( Common_SRCS
   Common/simd.cpp
   Common/material.cpp
   Common/AssertHandler.cpp
+  Common/Exceptional.cpp
 )
 SOURCE_GROUP(Common FILES ${Common_SRCS})
 
@@ -0,0 +1,58 @@
+/*
+---------------------------------------------------------------------------
+Open Asset Import Library (assimp)
+---------------------------------------------------------------------------
+
+Copyright (c) 2006-2020, assimp team
+
+All rights reserved.
+
+Redistribution and use of this software in source and binary forms,
+with or without modification, are permitted provided that the following
+conditions are met:
+
+* Redistributions of source code must retain the above
+  copyright notice, this list of conditions and the
+  following disclaimer.
+
+* Redistributions in binary form must reproduce the above
+  copyright notice, this list of conditions and the
+  following disclaimer in the documentation and/or other
+  materials provided with the distribution.
+
+* Neither the name of the assimp team, nor the names of its
+  contributors may be used to endorse or promote products
+  derived from this software without specific prior
+  written permission of the assimp team.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+---------------------------------------------------------------------------
+*/
+
+/** @file  Exceptional.cpp
+
+Implementations of the exception classes.
+
+*/
+
+#include <assimp/Exceptional.h>
+#include <assimp/TinyFormatter.h>
+
+DeadlyErrorBase::DeadlyErrorBase(const std::string& errorText)
+    : runtime_error(errorText)
+{}
+
+DeadlyErrorBase::DeadlyErrorBase(Assimp::Formatter::format f)
+    : DeadlyErrorBase(std::string(f))
+{
+}
@@ -47,6 +47,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #endif
 
 #include <assimp/DefaultIOStream.h>
+#include <assimp/TinyFormatter.h>
 #include <stdexcept>
 
 using std::runtime_error;
@@ -55,25 +56,41 @@ using std::runtime_error;
 #pragma warning(disable : 4275)
 #endif
 
+class ASSIMP_API DeadlyErrorBase : public runtime_error {
+protected:
+    /** Constructor with arguments */
+    explicit DeadlyErrorBase(const std::string& errorText);
+
+    explicit DeadlyErrorBase(Assimp::Formatter::format f);
+
+    template<typename... T, typename U>
+    explicit DeadlyErrorBase(Assimp::Formatter::format f, U&& u, T&&... args)
+        : DeadlyErrorBase(std::move(f << u), args...)
+    {
+    }
+};
+
 // ---------------------------------------------------------------------------
 /** FOR IMPORTER PLUGINS ONLY: Simple exception class to be thrown if an
  * unrecoverable error occurs while importing. Loading APIs return
  * nullptr instead of a valid aiScene then. */
-class DeadlyImportError : public runtime_error {
+class ASSIMP_API DeadlyImportError : public DeadlyErrorBase {
 public:
     /** Constructor with arguments */
-    explicit DeadlyImportError(const std::string &errorText) :
-            runtime_error(errorText) {
-        // empty
+    template<typename... T>
+    explicit DeadlyImportError(T&&... args)
+        : DeadlyErrorBase(Assimp::Formatter::format(), args...)
+    {
     }
 };
 
-class DeadlyExportError : public runtime_error {
+class ASSIMP_API DeadlyExportError : public DeadlyErrorBase {
 public:
     /** Constructor with arguments */
-    explicit DeadlyExportError(const std::string &errorText) :
-            runtime_error(errorText) {
-        // empty
+    template<typename... T>
+    explicit DeadlyExportError(T&&... args)
+        : DeadlyErrorBase(Assimp::Formatter::format(), args...)
+    {
     }
 };
 
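The DeadlyErrorBase added above builds the message by peeling one argument off the pack per constructor call: it streams the first remaining argument into the Formatter::format object (f << u), then delegates to itself with the shortened pack, until only the formatter is left and gets converted to std::string for runtime_error. A simplified, self-contained sketch of that recursion (stand-in types only, not the actual assimp headers):

    #include <iostream>
    #include <sstream>
    #include <stdexcept>
    #include <string>
    #include <utility>

    // Minimal stand-in for Assimp::Formatter::format: an ostringstream wrapper
    // that is movable and convertible to std::string.
    struct format {
        std::ostringstream underlying;
        format() = default;
        format(format &&other) : underlying(std::move(other.underlying)) {}
        template <typename T>
        format &operator<<(const T &value) { underlying << value; return *this; }
        operator std::string() const { return underlying.str(); }
    };

    // Same recursion as DeadlyErrorBase: stream one argument, recurse on the rest.
    struct ErrorBase : std::runtime_error {
        explicit ErrorBase(const std::string &text) : std::runtime_error(text) {}
        explicit ErrorBase(format f) : ErrorBase(std::string(f)) {}
        template <typename U, typename... T>
        explicit ErrorBase(format f, U &&u, T &&... args)
            : ErrorBase(std::move(f << u), std::forward<T>(args)...) {}
    };

    // Importer-facing type: starts the chain with an empty formatter.
    struct ImportError : ErrorBase {
        template <typename... T>
        explicit ImportError(T &&... args)
            : ErrorBase(format(), std::forward<T>(args)...) {}
    };

    int main() {
        try {
            throw ImportError("Line ", 42, ": unexpected token '", "}", "'");
        } catch (const ImportError &e) {
            std::cout << e.what() << "\n"; // prints: Line 42: unexpected token '}'
        }
    }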
@@ -61,9 +61,10 @@ template<class TDeriving>
 class LogFunctions {
 public:
     // ------------------------------------------------------------------------------------------------
-    static void ThrowException(const std::string& msg)
+    template<typename... T>
+    static void ThrowException(T&&... args)
     {
-        throw DeadlyImportError(Prefix()+msg);
+        throw DeadlyImportError(Prefix(), args...);
     }
 
     // ------------------------------------------------------------------------------------------------
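For importers that derive from the LogFunctions<TDeriving> mixin, call sites change the same way: the message parts are passed straight through and the importer-specific Prefix() stays the first argument. An illustrative, hypothetical call site (names are made up for the example):

    // inside an importer deriving from LogFunctions<MyImporter>
    if (vertexCount > maxVertices) {
        ThrowException("Too many vertices: ", vertexCount, " (limit is ", maxVertices, ")");
    }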
@@ -88,6 +88,9 @@ public:
         underlying << sin;
     }
 
+    basic_formatter(basic_formatter&& other)
+        : underlying(std::move(other.underlying)) {
+    }
+
     // The problem described here:
     // https://sourceforge.net/tracker/?func=detail&atid=1067632&aid=3358562&group_id=226462
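The move constructor matters because DeadlyErrorBase hands the formatter on by value at every recursion step (": DeadlyErrorBase(std::move(f << u), args...)"); the std::stringstream wrapped by basic_formatter is not copyable, so without a move constructor that chain of delegating constructor calls would not compile.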