Merge branch 'master' into deprecated_gltfpbr_macros
commit 75e7d0f6ef

@@ -56,7 +56,7 @@ IF(ASSIMP_HUNTER_ENABLED)
    add_definitions(-DASSIMP_USE_HUNTER)
ENDIF()

-PROJECT(Assimp VERSION 5.1.0)
+PROJECT(Assimp VERSION 5.1.3)

# All supported options ###############################################
@@ -318,13 +318,53 @@ void BlenderImporter::ExtractScene(Scene &out, const FileDatabase &file) {
#endif
}

// ------------------------------------------------------------------------------------------------
void BlenderImporter::ParseSubCollection(const Blender::Scene &in, aiNode *root, std::shared_ptr<Collection> collection, ConversionData &conv_data) {

    std::deque<Object *> root_objects;
    // Count number of objects
    for (std::shared_ptr<CollectionObject> cur = std::static_pointer_cast<CollectionObject>(collection->gobject.first); cur; cur = cur->next) {
        if (cur->ob) {
            root_objects.push_back(cur->ob);
        }
    }
    std::deque<Collection *> root_children;
    // Count number of child nodes
    for (std::shared_ptr<CollectionChild> cur = std::static_pointer_cast<CollectionChild>(collection->children.first); cur; cur = cur->next) {
        if (cur->collection) {
            root_children.push_back(cur->collection.get());
        }
    }
    root->mNumChildren = static_cast<unsigned int>(root_objects.size() + root_children.size());
    root->mChildren = new aiNode *[root->mNumChildren]();

    for (unsigned int i = 0; i < static_cast<unsigned int>(root_objects.size()); ++i) {
        root->mChildren[i] = ConvertNode(in, root_objects[i], conv_data, aiMatrix4x4());
        root->mChildren[i]->mParent = root;
    }

    // For each subcollection create a new node to represent it
    unsigned int iterator = static_cast<unsigned int>(root_objects.size());
    for (std::shared_ptr<CollectionChild> cur = std::static_pointer_cast<CollectionChild>(collection->children.first); cur; cur = cur->next) {
        if (cur->collection) {
            root->mChildren[iterator] = new aiNode(cur->collection->id.name + 2); // skip over the name prefix 'OB'
            root->mChildren[iterator]->mParent = root;
            ParseSubCollection(in, root->mChildren[iterator], cur->collection, conv_data);
        }
        iterator += 1;
    }
}

// ------------------------------------------------------------------------------------------------
void BlenderImporter::ConvertBlendFile(aiScene *out, const Scene &in, const FileDatabase &file) {
    ConversionData conv(file);

    // FIXME it must be possible to take the hierarchy directly from
    // the file. This is terrible. Here, we're first looking for
    // all objects which don't have parent objects at all -
    aiNode *root = out->mRootNode = new aiNode("<BlenderRoot>");
    // Iterate over all objects directly under master_collection,
    // If in.master_collection == null, then we're parsing something older.
    if (in.master_collection) {
        ParseSubCollection(in, root, in.master_collection, conv);
    } else {
        std::deque<const Object *> no_parents;
        for (std::shared_ptr<Base> cur = std::static_pointer_cast<Base>(in.base.first); cur; cur = cur->next) {
            if (cur->object) {

@@ -347,14 +387,13 @@ void BlenderImporter::ConvertBlendFile(aiScene *out, const Scene &in, const File
            ThrowException("Expected at least one object with no parent");
        }

-       aiNode *root = out->mRootNode = new aiNode("<BlenderRoot>");

        root->mNumChildren = static_cast<unsigned int>(no_parents.size());
        root->mChildren = new aiNode *[root->mNumChildren]();
        for (unsigned int i = 0; i < root->mNumChildren; ++i) {
            root->mChildren[i] = ConvertNode(in, no_parents[i], conv, aiMatrix4x4());
            root->mChildren[i]->mParent = root;
        }
    }

    BuildMaterials(conv);
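The new ParseSubCollection mirrors how Blender 2.80+ stores its scene graph: a Collection owns a linked list of CollectionObject entries (its own objects) and a linked list of CollectionChild entries (nested collections), and each nested collection becomes an aiNode of its own. The following is a minimal, self-contained sketch of that recursion using simplified stand-in types; Object, Collection, Node and the helpers below are illustrative only, not the assimp or Blender DNA definitions.

#include <iostream>
#include <memory>
#include <string>
#include <vector>

// Simplified stand-ins for the Blender DNA types the importer reads.
struct Object { std::string name; };

struct CollectionObject {                        // singly linked list of objects
    std::shared_ptr<CollectionObject> next;
    Object *ob = nullptr;
};

struct Collection;
struct CollectionChild {                         // singly linked list of nested collections
    std::shared_ptr<CollectionChild> next;
    std::shared_ptr<Collection> collection;
};

struct Collection {
    std::string name;
    std::shared_ptr<CollectionObject> gobject;   // first object entry
    std::shared_ptr<CollectionChild> children;   // first child-collection entry
};

// Stand-in for aiNode: a named node owning its children.
struct Node {
    std::string name;
    std::vector<std::unique_ptr<Node>> children;
};

// Same recursion shape as ParseSubCollection: objects become leaf nodes,
// every nested collection becomes a child node that is filled recursively.
void ParseSubCollection(const Collection &col, Node &parent) {
    for (auto cur = col.gobject; cur; cur = cur->next) {
        if (cur->ob) {
            parent.children.push_back(std::make_unique<Node>(Node{cur->ob->name, {}}));
        }
    }
    for (auto cur = col.children; cur; cur = cur->next) {
        if (cur->collection) {
            auto child = std::make_unique<Node>(Node{cur->collection->name, {}});
            ParseSubCollection(*cur->collection, *child);
            parent.children.push_back(std::move(child));
        }
    }
}

void Print(const Node &n, int depth = 0) {
    std::cout << std::string(depth * 2, ' ') << n.name << '\n';
    for (const auto &c : n.children) {
        Print(*c, depth + 1);
    }
}

int main() {
    Object cube{"Cube"};

    auto inner = std::make_shared<Collection>();
    inner->name = "Inner";
    inner->gobject = std::make_shared<CollectionObject>();
    inner->gobject->ob = &cube;

    Collection master;
    master.name = "Master";
    master.children = std::make_shared<CollectionChild>();
    master.children->collection = inner;

    Node root{"<BlenderRoot>", {}};
    ParseSubCollection(master, root);
    Print(root);   // prints: <BlenderRoot> / Inner / Cube
}

Running it prints the three-level hierarchy <BlenderRoot> / Inner / Cube, the same shape the importer builds starting from master_collection.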
@@ -78,6 +78,7 @@ struct ElemBase;
namespace Blender {
struct Scene;
struct Object;
struct Collection;
struct Mesh;
struct Camera;
struct Lamp;

@@ -116,6 +117,7 @@ protected:
    void InternReadFile(const std::string &pFile, aiScene *pScene, IOSystem *pIOHandler) override;
    void ParseBlendFile(Blender::FileDatabase &out, std::shared_ptr<IOStream> stream);
    void ExtractScene(Blender::Scene &out, const Blender::FileDatabase &file);
    void ParseSubCollection(const Blender::Scene &in, aiNode *root, std::shared_ptr<Blender::Collection> collection, Blender::ConversionData &conv_data);
    void ConvertBlendFile(aiScene *out, const Blender::Scene &in, const Blender::FileDatabase &file);

private:
@@ -94,6 +94,52 @@ void Structure ::Convert<Group>(
    db.reader->IncPtr(size);
}

//--------------------------------------------------------------------------------
template <>
void Structure::Convert<CollectionObject>(
        CollectionObject &dest,
        const FileDatabase &db) const {

    ReadFieldPtr<ErrorPolicy_Fail>(dest.next, "*next", db);
    {
        //std::shared_ptr<CollectionObject> prev;
        //ReadFieldPtr<ErrorPolicy_Fail>(prev, "*prev", db);
        //dest.prev = prev.get();

        std::shared_ptr<Object> ob;
        ReadFieldPtr<ErrorPolicy_Igno>(ob, "*ob", db);
        dest.ob = ob.get();
    }

    db.reader->IncPtr(size);
}

//--------------------------------------------------------------------------------
template <>
void Structure::Convert<CollectionChild>(
        CollectionChild &dest,
        const FileDatabase &db) const {

    ReadFieldPtr<ErrorPolicy_Fail>(dest.prev, "*prev", db);
    ReadFieldPtr<ErrorPolicy_Fail>(dest.next, "*next", db);
    ReadFieldPtr<ErrorPolicy_Igno>(dest.collection, "*collection", db);

    db.reader->IncPtr(size);
}

//--------------------------------------------------------------------------------
template <>
void Structure::Convert<Collection>(
        Collection &dest,
        const FileDatabase &db) const {

    ReadField<ErrorPolicy_Fail>(dest.id, "id", db);
    ReadField<ErrorPolicy_Fail>(dest.gobject, "gobject", db);
    ReadField<ErrorPolicy_Fail>(dest.children, "children", db);

    db.reader->IncPtr(size);
}

//--------------------------------------------------------------------------------
template <>
void Structure ::Convert<MTex>(

@@ -660,6 +706,7 @@ void Structure ::Convert<Scene>(
    ReadFieldPtr<ErrorPolicy_Warn>(dest.camera, "*camera", db);
    ReadFieldPtr<ErrorPolicy_Warn>(dest.world, "*world", db);
    ReadFieldPtr<ErrorPolicy_Warn>(dest.basact, "*basact", db);
    ReadFieldPtr<ErrorPolicy_Warn>(dest.master_collection, "*master_collection", db);
    ReadField<ErrorPolicy_Igno>(dest.base, "base", db);

    db.reader->IncPtr(size);

@@ -833,6 +880,9 @@ void DNA::RegisterConverters() {
    converters["Image"] = DNA::FactoryPair(&Structure::Allocate<Image>, &Structure::Convert<Image>);
    converters["CustomData"] = DNA::FactoryPair(&Structure::Allocate<CustomData>, &Structure::Convert<CustomData>);
    converters["CustomDataLayer"] = DNA::FactoryPair(&Structure::Allocate<CustomDataLayer>, &Structure::Convert<CustomDataLayer>);
    converters["Collection"] = DNA::FactoryPair(&Structure::Allocate<Collection>, &Structure::Convert<Collection>);
    converters["CollectionChild"] = DNA::FactoryPair(&Structure::Allocate<CollectionChild>, &Structure::Convert<CollectionChild>);
    converters["CollectionObject"] = DNA::FactoryPair(&Structure::Allocate<CollectionObject>, &Structure::Convert<CollectionObject>);
}

#endif // ASSIMP_BUILD_NO_BLEND_IMPORTER
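RegisterConverters pairs an allocation function with a conversion function under each DNA structure name, so the .blend reader can instantiate and fill the matching C++ type when it meets that structure in the file; the hunk above extends the registry with Collection, CollectionChild and CollectionObject. Below is a minimal, self-contained sketch of that factory-pair idea with simplified stand-ins (ElemBase, FactoryPair and the registry map here are illustrative, not the assimp types; the real conversion callback also receives the FileDatabase).

#include <functional>
#include <iostream>
#include <map>
#include <memory>
#include <string>

// Stand-in base class for all converted DNA structures.
struct ElemBase { virtual ~ElemBase() = default; };

struct Collection : ElemBase { std::string id; };

// A factory pair: one callable allocates the object, one fills it from file data.
struct FactoryPair {
    std::function<std::shared_ptr<ElemBase>()> allocate;
    std::function<void(ElemBase &)> convert;   // real code also gets the FileDatabase here
};

int main() {
    std::map<std::string, FactoryPair> converters;

    // Registration, analogous to converters["Collection"] = DNA::FactoryPair(...)
    converters["Collection"] = FactoryPair{
        [] { return std::make_shared<Collection>(); },
        [](ElemBase &dest) { static_cast<Collection &>(dest).id = "demo"; }
    };

    // Dispatch by structure name, as the DNA reader would do.
    const auto &pair = converters.at("Collection");
    std::shared_ptr<ElemBase> elem = pair.allocate();
    pair.convert(*elem);
    std::cout << static_cast<Collection &>(*elem).id << '\n';   // prints: demo
}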
@@ -107,6 +107,7 @@ namespace Blender {
struct Object;
struct MTex;
struct Image;
struct Collection;

#include <memory>
@@ -147,6 +148,26 @@ struct Group : ElemBase {
    std::shared_ptr<GroupObject> gobject;
};

// -------------------------------------------------------------------------------
struct CollectionObject : ElemBase {
    //CollectionObject* prev;
    std::shared_ptr<CollectionObject> next;
    Object *ob;
};

// -------------------------------------------------------------------------------
struct CollectionChild : ElemBase {
    std::shared_ptr<CollectionChild> next, prev;
    std::shared_ptr<Collection> collection;
};

// -------------------------------------------------------------------------------
struct Collection : ElemBase {
    ID id FAIL;
    ListBase gobject; // CollectionObject
    ListBase children; // CollectionChild
};

// -------------------------------------------------------------------------------
struct World : ElemBase {
    ID id FAIL;

@@ -729,11 +750,12 @@ struct Scene : ElemBase {
    std::shared_ptr<Object> camera WARN;
    std::shared_ptr<World> world WARN;
    std::shared_ptr<Base> basact WARN;
    std::shared_ptr<Collection> master_collection WARN;

    ListBase base;

    Scene() :
-           ElemBase(), camera(), world(), basact() {
+           ElemBase(), camera(), world(), basact(), master_collection() {
        // empty
    }
};
@@ -62,6 +62,12 @@ template <> void Structure :: Convert<Group> (
    ) const
;

template <> void Structure::Convert<Collection>(
    Collection& dest,
    const FileDatabase& db
    ) const
;

template <> void Structure :: Convert<MTex> (
    MTex& dest,
    const FileDatabase& db
@@ -3127,7 +3127,12 @@ aiNodeAnim* FBXConverter::GenerateSimpleNodeAnim(const std::string& name,
        if (chain[i] == iterEnd)
            continue;

        if (i == TransformationComp_Rotation || i == TransformationComp_PreRotation
                || i == TransformationComp_PostRotation || i == TransformationComp_GeometricRotation) {
            keyframeLists[i] = GetRotationKeyframeList((*chain[i]).second, start, stop);
        } else {
            keyframeLists[i] = GetKeyframeList((*chain[i]).second, start, stop);
        }

        for (KeyFrameListList::const_iterator it = keyframeLists[i].begin(); it != keyframeLists[i].end(); ++it) {
            const KeyTimeList& times = *std::get<0>(*it);
|
|||
return inputs; // pray for NRVO :-)
|
||||
}
|
||||
|
||||
FBXConverter::KeyFrameListList FBXConverter::GetRotationKeyframeList(const std::vector<const AnimationCurveNode *> &nodes,
|
||||
int64_t start, int64_t stop) {
|
||||
KeyFrameListList inputs;
|
||||
inputs.reserve(nodes.size() * 3);
|
||||
|
||||
//give some breathing room for rounding errors
|
||||
const int64_t adj_start = start - 10000;
|
||||
const int64_t adj_stop = stop + 10000;
|
||||
|
||||
for (const AnimationCurveNode *node : nodes) {
|
||||
ai_assert(node);
|
||||
|
||||
const AnimationCurveMap &curves = node->Curves();
|
||||
for (const AnimationCurveMap::value_type &kv : curves) {
|
||||
|
||||
unsigned int mapto;
|
||||
if (kv.first == "d|X") {
|
||||
mapto = 0;
|
||||
} else if (kv.first == "d|Y") {
|
||||
mapto = 1;
|
||||
} else if (kv.first == "d|Z") {
|
||||
mapto = 2;
|
||||
} else {
|
||||
FBXImporter::LogWarn("ignoring scale animation curve, did not recognize target component");
|
||||
continue;
|
||||
}
|
||||
|
||||
const AnimationCurve *const curve = kv.second;
|
||||
ai_assert(curve->GetKeys().size() == curve->GetValues().size());
|
||||
ai_assert(curve->GetKeys().size());
|
||||
|
||||
//get values within the start/stop time window
|
||||
std::shared_ptr<KeyTimeList> Keys(new KeyTimeList());
|
||||
std::shared_ptr<KeyValueList> Values(new KeyValueList());
|
||||
const size_t count = curve->GetKeys().size();
|
||||
|
||||
int64_t tp = curve->GetKeys().at(0);
|
||||
float vp = curve->GetValues().at(0);
|
||||
Keys->push_back(tp);
|
||||
Values->push_back(vp);
|
||||
if (count > 1) {
|
||||
int64_t tc = curve->GetKeys().at(1);
|
||||
float vc = curve->GetValues().at(1);
|
||||
for (size_t n = 1; n < count; n++) {
|
||||
while (std::abs(vc - vp) >= 180.0f) {
|
||||
float step = std::floor(float(tc - tp) / (vc - vp) * 179.0f);
|
||||
int64_t tnew = tp + int64_t(step);
|
||||
float vnew = vp + (vc - vp) * step / float(tc - tp);
|
||||
if (tnew >= adj_start && tnew <= adj_stop) {
|
||||
Keys->push_back(tnew);
|
||||
Values->push_back(vnew);
|
||||
}
|
||||
tp = tnew;
|
||||
vp = vnew;
|
||||
}
|
||||
if (tc >= adj_start && tc <= adj_stop) {
|
||||
Keys->push_back(tc);
|
||||
Values->push_back(vc);
|
||||
}
|
||||
if (n + 1 < count) {
|
||||
tp = tc;
|
||||
vp = vc;
|
||||
tc = curve->GetKeys().at(n + 1);
|
||||
vc = curve->GetValues().at(n + 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
inputs.push_back(std::make_tuple(Keys, Values, mapto));
|
||||
}
|
||||
}
|
||||
return inputs;
|
||||
}
|
||||
|
||||
KeyTimeList FBXConverter::GetKeyTimeList(const KeyFrameListList &inputs) {
|
||||
ai_assert(!inputs.empty());
|
||||
|
||||
|
@@ -3464,7 +3542,7 @@ void FBXConverter::ConvertRotationKeys(aiNodeAnim *na, const std::vector<const A
    ai_assert(nodes.size());

    // XXX see notes in ConvertScaleKeys()
-   const std::vector<KeyFrameList> &inputs = GetKeyframeList(nodes, start, stop);
+   const std::vector<KeyFrameList> &inputs = GetRotationKeyframeList(nodes, start, stop);
    const KeyTimeList &keys = GetKeyTimeList(inputs);

    na->mNumRotationKeys = static_cast<unsigned int>(keys.size());
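GetRotationKeyframeList differs from the generic GetKeyframeList in one respect: whenever two consecutive Euler keys are 180 degrees or more apart, it inserts synthetic intermediate keys along the segment so that the later quaternion conversion cannot flip to the shorter arc. Below is a small, self-contained sketch of that idea using a simpler even subdivision; the Key type and Subdivide() are illustrative only and deliberately omit details of the real code, such as clamping inserted keys to the requested start/stop window.

#include <cmath>
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <vector>

struct Key { std::int64_t time; float value; };   // one Euler-angle sample in degrees

// Insert evenly spaced intermediate keys so that adjacent samples always differ
// by less than 180 degrees; quaternion interpolation then follows the intended arc.
std::vector<Key> Subdivide(const std::vector<Key> &in) {
    std::vector<Key> out;
    if (in.empty()) {
        return out;
    }
    out.push_back(in.front());

    for (std::size_t n = 1; n < in.size(); ++n) {
        const Key &prev = in[n - 1];
        const Key &curr = in[n];

        // Number of pieces needed so every piece spans less than 180 degrees.
        const int pieces = 1 + static_cast<int>(std::abs(curr.value - prev.value) / 180.0f);
        for (int k = 1; k < pieces; ++k) {
            const float t = static_cast<float>(k) / static_cast<float>(pieces);
            out.push_back({prev.time + static_cast<std::int64_t>(t * float(curr.time - prev.time)),
                           prev.value + t * (curr.value - prev.value)});
        }
        out.push_back(curr);
    }
    return out;
}

int main() {
    // Two keys 350 degrees apart: naive quaternion interpolation would take the
    // short way round (-10 degrees) instead of the authored +350-degree sweep.
    const std::vector<Key> keys = {{0, 0.0f}, {1000, 350.0f}};
    for (const Key &k : Subdivide(keys)) {
        std::cout << k.time << " -> " << k.value << '\n';   // 0 -> 0, 500 -> 175, 1000 -> 350
    }
}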
@@ -361,6 +361,7 @@ private:

    // ------------------------------------------------------------------------------------------------
    KeyFrameListList GetKeyframeList(const std::vector<const AnimationCurveNode*>& nodes, int64_t start, int64_t stop);
    KeyFrameListList GetRotationKeyframeList(const std::vector<const AnimationCurveNode*>& nodes, int64_t start, int64_t stop);

    // ------------------------------------------------------------------------------------------------
    KeyTimeList GetKeyTimeList(const KeyFrameListList& inputs);
@@ -235,11 +235,9 @@ void X3DImporter::ParseFile(const std::string &file, IOSystem *pIOHandler) {

bool X3DImporter::CanRead(const std::string &pFile, IOSystem * /*pIOHandler*/, bool checkSig) const {
    if (checkSig) {
-       std::string::size_type pos = pFile.find_last_of(".x3d");
-       if (pos != std::string::npos) {
+       if (GetExtension(pFile) == "x3d")
            return true;
-       }
    }

    return false;
}
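The find_last_of-based check in CanRead was subtly wrong: std::string::find_last_of(".x3d") does not search for the substring ".x3d", it searches for the last occurrence of any of the characters '.', 'x', '3' or 'd', so almost every filename passed the test. Comparing the extracted extension is the reliable check. A small self-contained illustration (GetExtensionLower is a hypothetical helper, not the assimp utility):

#include <algorithm>
#include <cctype>
#include <iostream>
#include <string>

// Hypothetical helper: lower-cased extension of a path, without the dot.
static std::string GetExtensionLower(const std::string &file) {
    const std::string::size_type pos = file.find_last_of('.');
    if (pos == std::string::npos) return {};
    std::string ext = file.substr(pos + 1);
    std::transform(ext.begin(), ext.end(), ext.begin(),
                   [](unsigned char c) { return static_cast<char>(std::tolower(c)); });
    return ext;
}

int main() {
    const std::string file = "model.obj";

    // find_last_of(".x3d") matches any of '.', 'x', '3', 'd' -- here the '.' in ".obj".
    std::cout << (file.find_last_of(".x3d") != std::string::npos) << '\n'; // 1 (false positive)

    // Extension comparison gives the intended answer.
    std::cout << (GetExtensionLower(file) == "x3d") << '\n';               // 0
}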
@@ -72,7 +72,7 @@ void X3DImporter::startReadGroup(XmlNode &node) {

    // if "USE" defined then find already defined element.
    if (!use.empty()) {
        X3DNodeElementBase *ne = nullptr;
        X3DNodeElementBase *ne(nullptr);
        ne = MACRO_USE_CHECKANDAPPLY(node, def, use, ENET_Group, ne);
    } else {
        ParseHelper_Group_Begin(); // create new grouping element and go deeper if node has children.

@@ -110,7 +110,7 @@ void X3DImporter::startReadStaticGroup(XmlNode &node) {

    // if "USE" defined then find already defined element.
    if (!use.empty()) {
        X3DNodeElementBase *ne = nullptr;
        X3DNodeElementBase *ne(nullptr);

        ne = MACRO_USE_CHECKANDAPPLY(node, def, use, ENET_Group, ne);
    } else {

@@ -153,7 +153,7 @@ void X3DImporter::startReadSwitch(XmlNode &node) {

    // if "USE" defined then find already defined element.
    if (!use.empty()) {
        X3DNodeElementBase *ne=nullptr;
        X3DNodeElementBase *ne(nullptr);

        ne = MACRO_USE_CHECKANDAPPLY(node, def, use, ENET_Group, ne);
    } else {

@@ -226,8 +226,13 @@ void X3DImporter::startReadTransform(XmlNode &node) {
    // if "USE" defined then find already defined element.
    if (!use.empty()) {
        X3DNodeElementBase *ne(nullptr);

        bool newgroup = (nullptr == mNodeElementCur);
        if (newgroup)
            ParseHelper_Group_Begin();
        ne = MACRO_USE_CHECKANDAPPLY(node, def, use, ENET_Group, ne);
        if (newgroup && isNodeEmpty(node)) {
            ParseHelper_Node_Exit();
        }
    } else {
        ParseHelper_Group_Begin(); // create new grouping element and go deeper if node has children.
        // at this place new group mode created and made current, so we can name it.
@@ -60,14 +60,12 @@ namespace Assimp {
/// \param [in] pType - type of element to find.
/// \param [out] pNE - pointer to found node element.
inline X3DNodeElementBase *X3DImporter::MACRO_USE_CHECKANDAPPLY(XmlNode &node, std::string pDEF, std::string pUSE, X3DElemType pType, X3DNodeElementBase *pNE) {
-   if (nullptr == mNodeElementCur) {
-       printf("here\n");
-   }
    checkNodeMustBeEmpty(node);
    if (!pDEF.empty())
        Assimp::Throw_DEF_And_USE(node.name());
    if (!FindNodeElement(pUSE, pType, &pNE))
        Assimp::Throw_USE_NotFound(node.name(), pUSE);
+   ai_assert(nullptr != mNodeElementCur);
    mNodeElementCur->Children.push_back(pNE); /* add found object as child to current element */

    return pNE;
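MACRO_USE_CHECKANDAPPLY implements X3D's DEF/USE sharing as shown above: a node carrying USE="name" must be empty, must not also carry DEF, and simply re-attaches the element previously declared with DEF="name" as a child of the current element. A minimal, self-contained sketch of that bookkeeping (Element and DefUseRegistry are illustrative stand-ins, not the importer's types):

#include <iostream>
#include <map>
#include <stdexcept>
#include <string>
#include <vector>

// Illustrative scene element: named node with non-owning children (for brevity).
struct Element {
    std::string def;                  // DEF name, may be empty
    std::vector<Element *> children;
};

struct DefUseRegistry {
    std::map<std::string, Element *> byDef;

    // DEF="name": remember the element so later USE references can find it.
    void RegisterDef(Element &e) {
        if (!e.def.empty()) byDef[e.def] = &e;
    }

    // USE="name": look the element up and attach it to the current parent.
    Element &ApplyUse(const std::string &use, Element &current) {
        auto it = byDef.find(use);
        if (it == byDef.end()) throw std::runtime_error("USE not found: " + use);
        current.children.push_back(it->second);
        return *it->second;
    }
};

int main() {
    Element root{"", {}}, box{"MyBox", {}};
    DefUseRegistry reg;
    reg.RegisterDef(box);                        // <Shape DEF="MyBox">...</Shape>
    reg.ApplyUse("MyBox", root);                 // <Shape USE="MyBox"/> elsewhere in the file
    std::cout << root.children.size() << '\n';   // 1: the DEF'd element was reused
}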
@@ -300,7 +300,7 @@ public:

    inline unsigned int GetIndex() const { return index; }

-   operator bool() const { return vector != 0; }
+   operator bool() const { return vector != nullptr && index < vector->size(); }

    T *operator->() { return (*vector)[index]; }
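The strengthened conversion operator treats the indexed reference as valid only when it points at a container and its index is in range, so stale or out-of-range indices read from a malformed glTF file are caught at the validity check rather than at dereference. A tiny self-contained sketch of such an indexed handle (this Ref is a simplified illustration, not the assimp glTF class):

#include <iostream>
#include <string>
#include <vector>

// Illustrative indexed handle: refers to an element of an external vector by index.
template <class T>
struct Ref {
    std::vector<T *> *vector;
    unsigned int index;

    // Valid only when the container exists and the index is inside it.
    explicit operator bool() const { return vector != nullptr && index < vector->size(); }
    T *operator->() { return (*vector)[index]; }
};

int main() {
    std::string a = "buffer0";
    std::vector<std::string *> storage{&a};

    Ref<std::string> ok{&storage, 0};
    Ref<std::string> stale{&storage, 5};   // index past the end, e.g. from a bad file

    std::cout << bool(ok) << ' ' << bool(stale) << '\n';   // 1 0
    if (ok) std::cout << ok->size() << '\n';               // safe to dereference
}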
@@ -600,6 +600,10 @@ inline void Buffer::Read(Value &obj, Asset &r) {
inline bool Buffer::LoadFromStream(IOStream &stream, size_t length, size_t baseOffset) {
    byteLength = length ? length : stream.FileSize();

    if (byteLength > stream.FileSize()) {
        throw DeadlyImportError("GLTF: Invalid byteLength exceeds size of actual data.");
    }

    if (baseOffset) {
        stream.Seek(baseOffset, aiOrigin_SET);
    }

@@ -809,6 +813,11 @@ inline void Accessor::Sparse::PatchData(unsigned int elementSize) {
    }

    offset *= elementSize;

    if (offset + elementSize > data.size()) {
        throw DeadlyImportError("Invalid sparse accessor. Byte offset for patching points outside allocated memory.");
    }

    std::memcpy(data.data() + offset, pValues, elementSize);

    pValues += elementSize;
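Both hunks apply the same defensive rule to values that come straight from the glTF file: validate a declared length or offset against what is actually available (the file size, the allocated buffer) before reading or patching, and throw instead of running past the end. A small self-contained sketch of that check around a memcpy-style patch (PatchElement and its names are illustrative):

#include <cstddef>
#include <cstring>
#include <iostream>
#include <stdexcept>
#include <vector>

// Patch 'elementSize' bytes at element index 'index' inside 'data', after checking
// that the computed byte range really lies inside the buffer we allocated.
void PatchElement(std::vector<unsigned char> &data,
                  std::size_t index, std::size_t elementSize,
                  const unsigned char *values) {
    const std::size_t offset = index * elementSize;
    if (elementSize == 0 || offset + elementSize > data.size()) {
        throw std::runtime_error("patch offset points outside allocated memory");
    }
    std::memcpy(data.data() + offset, values, elementSize);
}

int main() {
    std::vector<unsigned char> data(16, 0);
    const unsigned char patch[4] = {1, 2, 3, 4};

    PatchElement(data, 2, 4, patch);            // bytes 8..11: fine
    std::cout << int(data[8]) << '\n';          // 1

    try {
        PatchElement(data, 5, 4, patch);        // bytes 20..23: rejected
    } catch (const std::exception &e) {
        std::cout << e.what() << '\n';
    }
}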
@@ -135,7 +135,9 @@ public:
    /** Extract a particular vertex from a anim mesh and interleave all components */
    explicit Vertex(const aiAnimMesh* msh, unsigned int idx) {
        ai_assert(idx < msh->mNumVertices);
        if (msh->HasPositions()) {
            position = msh->mVertices[idx];
        }

        if (msh->HasNormals()) {
            normal = msh->mNormals[idx];
Binary files not shown.
@@ -1,11 +0,0 @@
-All 'mirror' files are not absolutely correct. That's mainly
-because it's difficult convert Max' handling of mirroring to
-our's.
-
-In other words: TO DO, but only if someone REALLY needs it.
-
--------------------------------------------------------------
-
-To see how it should look like - test/ReferenceImages
-Note that the viewer has no 'decal' texture mapping mode, so
-the usual clamping is used.

Binary files not shown.
@@ -114,6 +114,12 @@ TEST(utBlenderImporter, importBlenderDefault271) {
    ASSERT_NE(nullptr, scene);
}

TEST(utBlenderImporter, importBlenderDefault293) {
    Assimp::Importer importer;
    const aiScene *scene = importer.ReadFile(ASSIMP_TEST_MODELS_DIR "/BLEND/BlenderDefault_276.blend", aiProcess_ValidateDataStructure);
    ASSERT_NE(nullptr, scene);
}

TEST(utBlenderImporter, importCubeHierarchy_248) {
    Assimp::Importer importer;
    const aiScene *scene = importer.ReadFile(ASSIMP_TEST_MODELS_DIR "/BLEND/CubeHierarchy_248.blend", aiProcess_ValidateDataStructure);