size_t is 32bit on some platforms

Also assert that size_t is not smaller than uint32_t (probably not necessary).
Note: 32-bit builds will still crash with an out-of-memory error if a really large
model is loaded, since that much memory cannot be allocated in total, let alone
contiguously.

pull/4160/head
parent 9433fc526a
commit aaae3e3a10
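For illustration only, not part of the patch: a minimal, standalone sketch of what the added static_assert guarantees, namely that widening a uint32_t scene length into size_t can never truncate. The variable names in the example are invented.

    #include <cstddef>
    #include <cstdint>
    #include <limits>

    // Compile-time check: every value a uint32_t can hold must also fit into size_t.
    // On common platforms size_t is 32 or 64 bits wide, so this is expected to pass;
    // it documents (and enforces) the assumption rather than guarding a realistic build.
    static_assert(std::numeric_limits<std::uint32_t>::max() <= std::numeric_limits<std::size_t>::max(),
                  "size_t must be at least 32bits");

    int main() {
        std::uint32_t sceneLength = std::numeric_limits<std::uint32_t>::max(); // worst case from a GLB header
        std::size_t bytes = static_cast<std::size_t>(sceneLength);             // cannot truncate, per the assert
        return bytes == sceneLength ? 0 : 1;
    }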
@@ -1147,6 +1147,7 @@ inline void Asset::ReadBinaryHeader(IOStream &stream) {
     AI_SWAP4(header.length);
     AI_SWAP4(header.sceneLength);

+    static_assert(std::numeric_limits<uint32_t>::max() <= std::numeric_limits<size_t>::max(), "size_t must be at least 32bits");
     mSceneLength = static_cast<size_t>(header.sceneLength); // Can't be larger than 4GB (max. uint32_t)

     mBodyOffset = sizeof(header) + mSceneLength;
@@ -1184,7 +1185,7 @@ inline void Asset::Load(const std::string &pFile, bool isBinary) {
     }

     // Binary format only supports up to 4GB of JSON so limit it there to avoid extreme memory allocation
-    if (mSceneLength > std::numeric_limits<uint32_t>::max()) {
+    if (mSceneLength >= std::numeric_limits<uint32_t>::max()) {
         throw DeadlyImportError("GLTF: JSON size greater than 4GB");
     }

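A hedged reading of the > to >= change above (the helper below is illustrative, not assimp API): when size_t is only 32 bits wide, mSceneLength can never exceed uint32_t's maximum, so the old strict comparison could never fire; >= still rejects the one remaining edge case, the exact maximum, where mSceneLength + 1 (as in the resize(mSceneLength + 1) call further down) would wrap around to zero.

    #include <cstddef>
    #include <cstdint>
    #include <limits>

    // Illustrative helper mirroring the importer's check: true when a scene of
    // 'sceneLength' bytes can safely be buffered with one extra byte for '\0'.
    bool sceneLengthIsUsable(std::size_t sceneLength) {
        // The importer's '>=' rejection corresponds to '<' acceptance here: the exact
        // maximum is refused because sceneLength + 1 would overflow a 32-bit size_t.
        return sceneLength < std::numeric_limits<std::uint32_t>::max();
    }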
@@ -1777,7 +1777,8 @@ inline void Asset::ReadBinaryHeader(IOStream &stream, std::vector<char> &sceneData) {
     }

     // read the scene data, ensure null termination
+    static_assert(std::numeric_limits<uint32_t>::max() <= std::numeric_limits<size_t>::max(), "size_t must be at least 32bits");
     mSceneLength = chunk.chunkLength; // Can't be larger than 4GB (max. uint32_t)
     sceneData.resize(mSceneLength + 1);
     sceneData[mSceneLength] = '\0';

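As a side note, the null-termination pattern in this hunk boils down to reserving length + 1 bytes so the JSON chunk can be handed to a parser that expects a C string. The helper below is a sketch with invented names, not code from the importer.

    #include <cstddef>
    #include <cstdint>
    #include <cstring>
    #include <vector>

    // Copy 'length' bytes of JSON into a buffer that carries an explicit trailing '\0',
    // mirroring sceneData.resize(mSceneLength + 1) and sceneData[mSceneLength] = '\0' above.
    std::vector<char> terminatedCopy(const char *json, std::uint32_t length) {
        std::vector<char> buffer(static_cast<std::size_t>(length) + 1u, '\0');
        if (length != 0) {
            std::memcpy(buffer.data(), json, length);
        }
        return buffer; // buffer[length] == '\0'
    }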
@@ -1836,7 +1836,7 @@ inline void Asset::Load(const std::string &pFile, bool isBinary) {
     mBodyLength = 0;

     // Binary format only supports up to 4GB of JSON, use that as a maximum
-    if (mSceneLength > std::numeric_limits<uint32_t>::max()) {
+    if (mSceneLength >= std::numeric_limits<uint32_t>::max()) {
         throw DeadlyImportError("GLTF: JSON size greater than 4GB");
     }
