Merge branch 'master' into fix-pbrt-exporter
commit 0fbd2f5894

@@ -44,7 +44,6 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

 #ifndef ASSIMP_BUILD_NO_ASE_IMPORTER

 #ifndef ASSIMP_BUILD_NO_3DS_IMPORTER

 // internal headers

@@ -322,21 +321,6 @@ void ASEImporter::BuildAnimations(const std::vector<BaseNode *> &nodes) {
         aiNodeAnim *nd = pcAnim->mChannels[iNum++] = new aiNodeAnim();
         nd->mNodeName.Set(me->mName + ".Target");

-        // If there is no input position channel we will need
-        // to supply the default position from the node's
-        // local transformation matrix.
-        /*TargetAnimationHelper helper;
-        if (me->mAnim.akeyPositions.empty())
-        {
-            aiMatrix4x4& mat = (*i)->mTransform;
-            helper.SetFixedMainAnimationChannel(aiVector3D(
-                mat.a4, mat.b4, mat.c4));
-        }
-        else helper.SetMainAnimationChannel (&me->mAnim.akeyPositions);
-        helper.SetTargetAnimationChannel (&me->mTargetAnim.akeyPositions);
-
-        helper.Process(&me->mTargetAnim.akeyPositions);*/
-
         // Allocate the key array and fill it
         nd->mNumPositionKeys = (unsigned int)me->mTargetAnim.akeyPositions.size();
         nd->mPositionKeys = new aiVectorKey[nd->mNumPositionKeys];

@@ -342,8 +342,7 @@ void ReadData(const char*& sbegin_out, const char*& send_out, const char* input,


 // ------------------------------------------------------------------------------------------------
-bool ReadScope(TokenList& output_tokens, const char* input, const char*& cursor, const char* end, bool const is64bits)
-{
+bool ReadScope(TokenList &output_tokens, StackAllocator &token_allocator, const char *input, const char *&cursor, const char *end, bool const is64bits) {
     // the first word contains the offset at which this block ends
     const uint64_t end_offset = is64bits ? ReadDoubleWord(input, cursor, end) : ReadWord(input, cursor, end);

@@ -409,7 +408,7 @@ bool ReadScope(TokenList& output_tokens, const char* input, const char*& cursor,

     // XXX this is vulnerable to stack overflowing ..
     while(Offset(input, cursor) < end_offset - sentinel_block_length) {
-        ReadScope(output_tokens, input, cursor, input + end_offset - sentinel_block_length, is64bits);
+        ReadScope(output_tokens, token_allocator, input, cursor, input + end_offset - sentinel_block_length, is64bits);
     }
     output_tokens.push_back(new_Token(cursor, cursor + 1, TokenType_CLOSE_BRACKET, Offset(input, cursor) ));

@@ -432,8 +431,7 @@ bool ReadScope(TokenList& output_tokens, const char* input, const char*& cursor,

 // ------------------------------------------------------------------------------------------------
 // TODO: Test FBX Binary files newer than the 7500 version to check if the 64 bits address behaviour is consistent
-void TokenizeBinary(TokenList& output_tokens, const char* input, size_t length)
-{
+void TokenizeBinary(TokenList &output_tokens, const char *input, size_t length, StackAllocator &token_allocator) {
     ai_assert(input);
     ASSIMP_LOG_DEBUG("Tokenizing binary FBX file");

@@ -466,7 +464,7 @@ void TokenizeBinary(TokenList& output_tokens, const char* input, size_t length)
     try
     {
         while (cursor < end ) {
-            if (!ReadScope(output_tokens, input, cursor, input + length, is64bits)) {
+            if (!ReadScope(output_tokens, token_allocator, input, cursor, input + length, is64bits)) {
                 break;
             }
         }

@@ -243,7 +243,7 @@ FileGlobalSettings::FileGlobalSettings(const Document &doc, std::shared_ptr<cons
 }

 // ------------------------------------------------------------------------------------------------
-Document::Document(const Parser& parser, const ImportSettings& settings) :
+Document::Document(Parser& parser, const ImportSettings& settings) :
         settings(settings), parser(parser) {
     ASSIMP_LOG_DEBUG("Creating FBX Document");

@@ -265,13 +265,17 @@ Document::Document(const Parser& parser, const ImportSettings& settings) :
 }

 // ------------------------------------------------------------------------------------------------
-Document::~Document() {
+Document::~Document()
+{
+    // The document does not own the memory for the following objects, but we need to call their d'tor
+    // so they can properly free memory like string members:

     for (ObjectMap::value_type &v : objects) {
-        delete v.second;
+        delete_LazyObject(v.second);
     }

     for (ConnectionMap::value_type &v : src_connections) {
-        delete v.second;
+        delete_Connection(v.second);
     }
     // |dest_connections| contain the same Connection objects as the |src_connections|
 }

@@ -356,9 +360,11 @@ void Document::ReadObjects() {
         DOMError("no Objects dictionary found");
     }

+    StackAllocator &allocator = parser.GetAllocator();

     // add a dummy entry to represent the Model::RootNode object (id 0),
     // which is only indirectly defined in the input file
-    objects[0] = new LazyObject(0L, *eobjects, *this);
+    objects[0] = new_LazyObject(0L, *eobjects, *this);

     const Scope& sobjects = *eobjects->Compound();
     for(const ElementMap::value_type& el : sobjects.Elements()) {

@@ -387,7 +393,7 @@ void Document::ReadObjects() {
             delete foundObject->second;
         }

-        objects[id] = new LazyObject(id, *el.second, *this);
+        objects[id] = new_LazyObject(id, *el.second, *this);

         // grab all animation stacks upfront since there is no listing of them
         if(!strcmp(el.first.c_str(),"AnimationStack")) {

@@ -454,7 +460,9 @@ void Document::ReadPropertyTemplates() {
 }

 // ------------------------------------------------------------------------------------------------
-void Document::ReadConnections() {
+void Document::ReadConnections()
+{
+    StackAllocator &allocator = parser.GetAllocator();
     const Scope &sc = parser.GetRootScope();
     // read property templates from "Definitions" section
     const Element* const econns = sc["Connections"];

@@ -494,7 +502,7 @@ void Document::ReadConnections() {
         }

         // add new connection
-        const Connection* const c = new Connection(insertionOrder++,src,dest,prop,*this);
+        const Connection* const c = new_Connection(insertionOrder++,src,dest,prop,*this);
         src_connections.insert(ConnectionMap::value_type(src,c));
         dest_connections.insert(ConnectionMap::value_type(dest,c));
     }

@@ -81,6 +81,10 @@ class BlendShape;
 class Skin;
 class Cluster;

+#define new_LazyObject new (allocator.Allocate(sizeof(LazyObject))) LazyObject
+#define new_Connection new (allocator.Allocate(sizeof(Connection))) Connection
+#define delete_LazyObject(_p) (_p)->~LazyObject()
+#define delete_Connection(_p) (_p)->~Connection()
+
 /** Represents a delay-parsed FBX objects. Many objects in the scene
  * are not needed by assimp, so it makes no sense to parse them
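The new_*/delete_* macros above swap plain heap new/delete for placement new into the parser's StackAllocator plus an explicit destructor call. A minimal, self-contained sketch of that pattern, using a hypothetical Arena class and Widget type rather than the actual assimp classes:

#include <cstddef>
#include <new>
#include <string>
#include <vector>

// Hypothetical arena: hands out raw bytes, releases everything at once in its destructor.
class Arena {
public:
    ~Arena() { for (unsigned char *b : blocks_) delete[] b; }
    void *Allocate(std::size_t n) {
        blocks_.push_back(new unsigned char[n]);
        return blocks_.back();
    }
private:
    std::vector<unsigned char *> blocks_;
};

struct Widget {
    std::string name; // non-trivial member: its destructor must still run
    explicit Widget(const char *n) : name(n) {}
};

// Same shape as the new_LazyObject / delete_LazyObject macros above.
#define new_Widget new (arena.Allocate(sizeof(Widget))) Widget
#define delete_Widget(_p) (_p)->~Widget()

int main() {
    Arena arena;
    Widget *w = new_Widget("root"); // placement new: constructed inside the arena
    delete_Widget(w);               // run the destructor only; the arena reclaims the bytes later
}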
@@ -1073,7 +1077,7 @@ private:
 /** DOM root for a FBX file */
 class Document {
 public:
-    Document(const Parser& parser, const ImportSettings& settings);
+    Document(Parser& parser, const ImportSettings& settings);

     ~Document();

@@ -1157,7 +1161,7 @@ private:
     const ImportSettings& settings;

     ObjectMap objects;
-    const Parser& parser;
+    Parser& parser;

     PropertyTemplateMap templates;
     ConnectionMap src_connections;

@@ -152,19 +152,19 @@ void FBXImporter::InternReadFile(const std::string &pFile, aiScene *pScene, IOSy
     // broad-phase tokenized pass in which we identify the core
     // syntax elements of FBX (brackets, commas, key:value mappings)
     TokenList tokens;
+    Assimp::StackAllocator tempAllocator;
     try {

         bool is_binary = false;
         if (!strncmp(begin, "Kaydara FBX Binary", 18)) {
             is_binary = true;
-            TokenizeBinary(tokens, begin, contents.size());
+            TokenizeBinary(tokens, begin, contents.size(), tempAllocator);
         } else {
-            Tokenize(tokens, begin);
+            Tokenize(tokens, begin, tempAllocator);
         }

         // use this information to construct a very rudimentary
         // parse-tree representing the FBX scope structure
-        Parser parser(tokens, is_binary);
+        Parser parser(tokens, tempAllocator, is_binary);

         // take the raw parse-tree and convert it to a FBX DOM
         Document doc(parser, mSettings);

@@ -183,9 +183,11 @@ void FBXImporter::InternReadFile(const std::string &pFile, aiScene *pScene, IOSy
         // assimp universal format (M)
         SetFileScale(size_relative_to_cm * 0.01f);

-        std::for_each(tokens.begin(), tokens.end(), Util::delete_fun<Token>());
+        // This collection does not own the memory for the tokens, but we need to call their d'tor
+        std::for_each(tokens.begin(), tokens.end(), Util::destructor_fun<Token>());
+
     } catch (std::exception &) {
-        std::for_each(tokens.begin(), tokens.end(), Util::delete_fun<Token>());
+        std::for_each(tokens.begin(), tokens.end(), Util::destructor_fun<Token>());
        throw;
     }
 }
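None of this changes the public import API: the temporary token/DOM allocator above is created and torn down entirely inside one ReadFile call. A small sketch of an ordinary FBX import, with the file name as a placeholder:

#include <assimp/Importer.hpp>
#include <assimp/postprocess.h>
#include <assimp/scene.h>
#include <cstdio>

int main() {
    Assimp::Importer importer;
    // "model.fbx" is a placeholder path; the StackAllocator lives only for the duration of this call.
    const aiScene *scene = importer.ReadFile("model.fbx", aiProcess_Triangulate);
    if (scene == nullptr) {
        std::printf("import failed: %s\n", importer.GetErrorString());
        return 1;
    }
    std::printf("meshes: %u\n", scene->mNumMeshes);
    return 0;
}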
@@ -116,8 +116,11 @@ namespace Assimp {
 namespace FBX {

 // ------------------------------------------------------------------------------------------------
-Element::Element(const Token& key_token, Parser& parser) : key_token(key_token) {
+Element::Element(const Token& key_token, Parser& parser) :
+        key_token(key_token), compound(nullptr)
+{
     TokenPtr n = nullptr;
+    StackAllocator &allocator = parser.GetAllocator();
     do {
         n = parser.AdvanceToNextToken();
         if(!n) {

@@ -146,7 +149,7 @@ Element::Element(const Token& key_token, Parser& parser) : key_token(key_token)
     }

     if (n->Type() == TokenType_OPEN_BRACKET) {
-        compound.reset(new Scope(parser));
+        compound = new_Scope(parser);

         // current token should be a TOK_CLOSE_BRACKET
         n = parser.CurrentToken();

@@ -164,6 +167,15 @@ Element::Element(const Token& key_token, Parser& parser) : key_token(key_token)
 }

 // ------------------------------------------------------------------------------------------------
+Element::~Element()
+{
+    if (compound) {
+        delete_Scope(compound);
+    }
+
+    // no need to delete tokens, they are owned by the parser
+}
+
 Scope::Scope(Parser& parser,bool topLevel)
 {
     if(!topLevel) {

@@ -173,6 +185,7 @@ Scope::Scope(Parser& parser,bool topLevel)
         }
     }

+    StackAllocator &allocator = parser.GetAllocator();
     TokenPtr n = parser.AdvanceToNextToken();
     if (n == nullptr) {
         ParseError("unexpected end of file");

@@ -207,22 +220,27 @@ Scope::Scope(Parser& parser,bool topLevel)
 }

 // ------------------------------------------------------------------------------------------------
-Scope::~Scope() {
+Scope::~Scope()
+{
+    // This collection does not own the memory for the elements, but we need to call their d'tor:
+
     for (ElementMap::value_type &v : elements) {
-        delete v.second;
+        delete_Element(v.second);
     }
 }

 // ------------------------------------------------------------------------------------------------
-Parser::Parser (const TokenList& tokens, bool is_binary)
-: tokens(tokens)
-, last()
-, current()
-, cursor(tokens.begin())
-, is_binary(is_binary)
+Parser::Parser(const TokenList &tokens, StackAllocator &allocator, bool is_binary) :
+        tokens(tokens), allocator(allocator), last(), current(), cursor(tokens.begin()), is_binary(is_binary)
 {
     ASSIMP_LOG_DEBUG("Parsing FBX tokens");
-    root.reset(new Scope(*this,true));
+    root = new_Scope(*this, true);
+}
+
+// ------------------------------------------------------------------------------------------------
+Parser::~Parser()
+{
+    delete_Scope(root);
 }

 // ------------------------------------------------------------------------------------------------

@@ -52,6 +52,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #include <assimp/LogAux.h>
 #include <assimp/fast_atof.h>

+#include "Common/StackAllocator.h"
 #include "FBXCompileConfig.h"
 #include "FBXTokenizer.h"

@@ -63,14 +64,14 @@ class Parser;
 class Element;

 // XXX should use C++11's unique_ptr - but assimp's need to keep working with 03
-typedef std::vector< Scope* > ScopeList;
-typedef std::fbx_unordered_multimap< std::string, Element* > ElementMap;
-typedef std::pair<ElementMap::const_iterator,ElementMap::const_iterator> ElementCollection;
+using ScopeList = std::vector<Scope*>;
+using ElementMap = std::fbx_unordered_multimap< std::string, Element*>;
+using ElementCollection = std::pair<ElementMap::const_iterator,ElementMap::const_iterator>;

-# define new_Scope new Scope
-# define new_Element new Element
+#define new_Scope new (allocator.Allocate(sizeof(Scope))) Scope
+#define new_Element new (allocator.Allocate(sizeof(Element))) Element
+#define delete_Scope(_p) (_p)->~Scope()
+#define delete_Element(_p) (_p)->~Element()

 /** FBX data entity that consists of a key:value tuple.
  *

@@ -82,15 +83,16 @@ typedef std::pair<ElementMap::const_iterator,ElementMap::const_iterator> Element
  *  @endverbatim
  *
  *  As can be seen in this sample, elements can contain nested #Scope
- *  as their trailing member. **/
+ *  as their trailing member.
+**/
 class Element
 {
 public:
     Element(const Token& key_token, Parser& parser);
-    ~Element() = default;
+    ~Element();

     const Scope* Compound() const {
-        return compound.get();
+        return compound;
     }

     const Token& KeyToken() const {

@@ -104,7 +106,7 @@ public:
 private:
     const Token& key_token;
     TokenList tokens;
-    std::unique_ptr<Scope> compound;
+    Scope* compound;
 };

 /** FBX data entity that consists of a 'scope', a collection

@@ -159,8 +161,8 @@ class Parser
 public:
     /** Parse given a token list. Does not take ownership of the tokens -
     *  the objects must persist during the entire parser lifetime */
-    Parser (const TokenList& tokens,bool is_binary);
-    ~Parser() = default;
+    Parser(const TokenList &tokens, StackAllocator &allocator, bool is_binary);
+    ~Parser();

     const Scope& GetRootScope() const {
         return *root;

@@ -170,6 +172,10 @@ public:
         return is_binary;
     }

+    StackAllocator &GetAllocator() {
+        return allocator;
+    }
+
 private:
     friend class Scope;
     friend class Element;

@@ -180,10 +186,10 @@ private:

 private:
     const TokenList& tokens;
+    StackAllocator &allocator;
     TokenPtr last, current;
     TokenList::const_iterator cursor;
-    std::unique_ptr<Scope> root;
+    Scope *root;

     const bool is_binary;
 };

@@ -94,7 +94,8 @@ AI_WONT_RETURN void TokenizeError(const std::string& message, unsigned int line,

 // process a potential data token up to 'cur', adding it to 'output_tokens'.
 // ------------------------------------------------------------------------------------------------
-void ProcessDataToken( TokenList& output_tokens, const char*& start, const char*& end,
+void ProcessDataToken(TokenList &output_tokens, StackAllocator &token_allocator,
+        const char*& start, const char*& end,
         unsigned int line,
         unsigned int column,
         TokenType type = TokenType_DATA,

@@ -131,8 +132,7 @@ void ProcessDataToken( TokenList& output_tokens, const char*& start, const char*
 }

 // ------------------------------------------------------------------------------------------------
-void Tokenize(TokenList& output_tokens, const char* input)
-{
+void Tokenize(TokenList &output_tokens, const char *input, StackAllocator &token_allocator) {
     ai_assert(input);
     ASSIMP_LOG_DEBUG("Tokenizing ASCII FBX file");

@@ -164,7 +164,7 @@ void Tokenize(TokenList& output_tokens, const char* input)
                 in_double_quotes = false;
                 token_end = cur;

-                ProcessDataToken(output_tokens,token_begin,token_end,line,column);
+                ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column);
                 pending_data_token = false;
             }
             continue;

@@ -181,30 +181,30 @@ void Tokenize(TokenList& output_tokens, const char* input)
             continue;

         case ';':
-            ProcessDataToken(output_tokens,token_begin,token_end,line,column);
+            ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column);
             comment = true;
             continue;

         case '{':
-            ProcessDataToken(output_tokens,token_begin,token_end, line, column);
+            ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column);
             output_tokens.push_back(new_Token(cur,cur+1,TokenType_OPEN_BRACKET,line,column));
             continue;

         case '}':
-            ProcessDataToken(output_tokens,token_begin,token_end,line,column);
+            ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column);
             output_tokens.push_back(new_Token(cur,cur+1,TokenType_CLOSE_BRACKET,line,column));
             continue;

         case ',':
             if (pending_data_token) {
-                ProcessDataToken(output_tokens,token_begin,token_end,line,column,TokenType_DATA,true);
+                ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column, TokenType_DATA, true);
             }
             output_tokens.push_back(new_Token(cur,cur+1,TokenType_COMMA,line,column));
             continue;

         case ':':
             if (pending_data_token) {
-                ProcessDataToken(output_tokens,token_begin,token_end,line,column,TokenType_KEY,true);
+                ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column, TokenType_KEY, true);
             }
             else {
                 TokenizeError("unexpected colon", line, column);

@@ -226,7 +226,7 @@ void Tokenize(TokenList& output_tokens, const char* input)
                 }
             }

-            ProcessDataToken(output_tokens,token_begin,token_end,line,column,type);
+            ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column, type);
         }

         pending_data_token = false;

@@ -47,6 +47,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #define INCLUDED_AI_FBX_TOKENIZER_H

 #include "FBXCompileConfig.h"
+#include "Common/StackAllocator.h"
 #include <assimp/ai_assert.h>
 #include <assimp/defs.h>
 #include <vector>

@@ -157,7 +158,8 @@ private:
 typedef const Token* TokenPtr;
 typedef std::vector< TokenPtr > TokenList;

-#define new_Token new Token
+#define new_Token new (token_allocator.Allocate(sizeof(Token))) Token
+#define delete_Token(_p) (_p)->~Token()


 /** Main FBX tokenizer function. Transform input buffer into a list of preprocessed tokens.

@@ -167,7 +169,7 @@ typedef std::vector< TokenPtr > TokenList;
 *  @param output_tokens Receives a list of all tokens in the input data.
 *  @param input_buffer Textual input buffer to be processed, 0-terminated.
 *  @throw DeadlyImportError if something goes wrong */
-void Tokenize(TokenList& output_tokens, const char* input);
+void Tokenize(TokenList &output_tokens, const char *input, StackAllocator &tokenAllocator);


 /** Tokenizer function for binary FBX files.

@@ -178,7 +180,7 @@ void Tokenize(TokenList& output_tokens, const char* input);
 *  @param input_buffer Binary input buffer to be processed.
 *  @param length Length of input buffer, in bytes. There is no 0-terminal.
 *  @throw DeadlyImportError if something goes wrong */
-void TokenizeBinary(TokenList& output_tokens, const char* input, size_t length);
+void TokenizeBinary(TokenList &output_tokens, const char *input, size_t length, StackAllocator &tokenAllocator);


 } // ! FBX

@@ -66,6 +66,17 @@ struct delete_fun
     }
 };

+/** helper for std::for_each to call the destructor on all items in a container without freeing their heap*/
+template <typename T>
+struct destructor_fun {
+    void operator()(const volatile T* del) {
+        if (del) {
+            del->~T();
+        }
+    }
+};
+

 /** Get a string representation for a #TokenType. */
 const char* TokenTypeString(TokenType t);
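destructor_fun is the arena-friendly counterpart of delete_fun: it ends each object's lifetime without returning its storage, which is what the tokenizer needs now that tokens live inside the StackAllocator. A minimal illustration of the idea, with a hypothetical Item type and a plain buffer standing in for the allocator block (the functor below mirrors Util::destructor_fun above):

#include <algorithm>
#include <new>
#include <string>
#include <vector>

template <typename T>
struct destructor_fun {
    void operator()(const volatile T *del) {
        if (del) {
            del->~T();
        }
    }
};

struct Item {
    std::string payload; // non-trivial, so skipping the destructor would leak
};

int main() {
    // Storage that outlives the objects, standing in for a StackAllocator block.
    alignas(Item) unsigned char slab[4 * sizeof(Item)];
    std::vector<Item *> items;
    for (int i = 0; i < 4; ++i) {
        items.push_back(new (slab + i * sizeof(Item)) Item{std::string(100, 'x')});
    }
    // End every lifetime, but leave the slab itself alone; the block is released as a whole elsewhere.
    std::for_each(items.begin(), items.end(), destructor_fun<Item>());
}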
@@ -578,7 +578,7 @@ void XFileImporter::ConvertMaterials( aiScene* pScene, std::vector<XFile::Materi
                 aiString name;
                 pScene->mMaterials[b]->Get( AI_MATKEY_NAME, name);
                 if( strcmp( name.C_Str(), oldMat.mName.data()) == 0 ) {
-                    oldMat.sceneIndex = a;
+                    oldMat.sceneIndex = b;
                     break;
                 }
             }

@@ -44,6 +44,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * glTF Extensions Support:
 *   KHR_materials_pbrSpecularGlossiness full
+*   KHR_materials_specular full
 *   KHR_materials_unlit full
 *   KHR_lights_punctual full
 *   KHR_materials_sheen full

@@ -710,6 +711,7 @@ const vec4 defaultBaseColor = { 1, 1, 1, 1 };
 const vec3 defaultEmissiveFactor = { 0, 0, 0 };
 const vec4 defaultDiffuseFactor = { 1, 1, 1, 1 };
 const vec3 defaultSpecularFactor = { 1, 1, 1 };
+const vec3 defaultSpecularColorFactor = { 0, 0, 0 };
 const vec3 defaultSheenFactor = { 0, 0, 0 };
 const vec3 defaultAttenuationColor = { 1, 1, 1 };

@@ -753,6 +755,16 @@ struct PbrSpecularGlossiness {
     void SetDefaults();
 };

+struct MaterialSpecular {
+    float specularFactor;
+    vec3 specularColorFactor;
+    TextureInfo specularTexture;
+    TextureInfo specularColorTexture;
+
+    MaterialSpecular() { SetDefaults(); }
+    void SetDefaults();
+};
+
 struct MaterialSheen {
     vec3 sheenColorFactor;
     float sheenRoughnessFactor;

@@ -817,6 +829,9 @@ struct Material : public Object {
     //extension: KHR_materials_pbrSpecularGlossiness
     Nullable<PbrSpecularGlossiness> pbrSpecularGlossiness;

+    //extension: KHR_materials_specular
+    Nullable<MaterialSpecular> materialSpecular;
+
     //extension: KHR_materials_sheen
     Nullable<MaterialSheen> materialSheen;

@@ -1099,6 +1114,7 @@ public:
     //! Keeps info about the enabled extensions
     struct Extensions {
         bool KHR_materials_pbrSpecularGlossiness;
+        bool KHR_materials_specular;
         bool KHR_materials_unlit;
         bool KHR_lights_punctual;
         bool KHR_texture_transform;

@@ -1114,6 +1130,7 @@ public:

         Extensions() :
                 KHR_materials_pbrSpecularGlossiness(false),
+                KHR_materials_specular(false),
                 KHR_materials_unlit(false),
                 KHR_lights_punctual(false),
                 KHR_texture_transform(false),

@@ -1264,6 +1264,19 @@ inline void Material::Read(Value &material, Asset &r) {
         }
     }

+    if (r.extensionsUsed.KHR_materials_specular) {
+        if (Value *curMatSpecular = FindObject(*extensions, "KHR_materials_specular")) {
+            MaterialSpecular specular;
+
+            ReadMember(*curMatSpecular, "specularFactor", specular.specularFactor);
+            ReadTextureProperty(r, *curMatSpecular, "specularTexture", specular.specularTexture);
+            ReadMember(*curMatSpecular, "specularColorFactor", specular.specularColorFactor);
+            ReadTextureProperty(r, *curMatSpecular, "specularColorTexture", specular.specularColorTexture);
+
+            this->materialSpecular = Nullable<MaterialSpecular>(specular);
+        }
+    }
+
     // Extension KHR_texture_transform is handled in ReadTextureProperty

     if (r.extensionsUsed.KHR_materials_sheen) {

@@ -1361,6 +1374,12 @@ inline void PbrSpecularGlossiness::SetDefaults() {
     glossinessFactor = 1.0f;
 }

+inline void MaterialSpecular::SetDefaults() {
+    //KHR_materials_specular properties
+    SetVector(specularColorFactor, defaultSpecularColorFactor);
+    specularFactor = 0.f;
+}
+
 inline void MaterialSheen::SetDefaults() {
     //KHR_materials_sheen properties
     SetVector(sheenColorFactor, defaultSheenFactor);

@@ -2047,6 +2066,7 @@ inline void Asset::ReadExtensionsUsed(Document &doc) {
     }

     CHECK_EXT(KHR_materials_pbrSpecularGlossiness);
+    CHECK_EXT(KHR_materials_specular);
     CHECK_EXT(KHR_materials_unlit);
     CHECK_EXT(KHR_lights_punctual);
     CHECK_EXT(KHR_texture_transform);

@@ -45,6 +45,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * glTF Extensions Support:
 *   KHR_materials_pbrSpecularGlossiness: full
+*   KHR_materials_specular: full
 *   KHR_materials_unlit: full
 *   KHR_materials_sheen: full
 *   KHR_materials_clearcoat: full

@@ -418,6 +418,26 @@ namespace glTF2 {
             exts.AddMember("KHR_materials_unlit", unlit, w.mAl);
         }

+        if (m.materialSpecular.isPresent) {
+            Value materialSpecular(rapidjson::Type::kObjectType);
+            materialSpecular.SetObject();
+
+            MaterialSpecular &specular = m.materialSpecular.value;
+
+            if (specular.specularFactor != 0.0f) {
+                WriteFloat(materialSpecular, specular.specularFactor, "specularFactor", w.mAl);
+                WriteTex(materialSpecular, specular.specularTexture, "specularTexture", w.mAl);
+            }
+            if (specular.specularColorFactor[0] != defaultSpecularColorFactor[0] && specular.specularColorFactor[1] != defaultSpecularColorFactor[1] && specular.specularColorFactor[2] != defaultSpecularColorFactor[2]) {
+                WriteVec(materialSpecular, specular.specularColorFactor, "specularColorFactor", w.mAl);
+                WriteTex(materialSpecular, specular.specularColorTexture, "specularColorTexture", w.mAl);
+            }
+
+            if (!materialSpecular.ObjectEmpty()) {
+                exts.AddMember("KHR_materials_specular", materialSpecular, w.mAl);
+            }
+        }
+
         if (m.materialSheen.isPresent) {
             Value materialSheen(rapidjson::Type::kObjectType);

@@ -929,6 +949,10 @@ namespace glTF2 {
             exts.PushBack(StringRef("KHR_materials_unlit"), mAl);
         }

+        if (this->mAsset.extensionsUsed.KHR_materials_specular) {
+            exts.PushBack(StringRef("KHR_materials_specular"), mAl);
+        }
+
         if (this->mAsset.extensionsUsed.KHR_materials_sheen) {
             exts.PushBack(StringRef("KHR_materials_sheen"), mAl);
         }

@@ -640,11 +640,10 @@ aiReturn glTF2Exporter::GetMatColor(const aiMaterial &mat, vec3 &prop, const cha
     return result;
 }

+// This extension has been deprecated, only export with the specific flag enabled, defaults to false. Uses KHR_material_specular default.
 bool glTF2Exporter::GetMatSpecGloss(const aiMaterial &mat, glTF2::PbrSpecularGlossiness &pbrSG) {
     bool result = false;
     // If has Glossiness, a Specular Color or Specular Texture, use the KHR_materials_pbrSpecularGlossiness extension
-    // NOTE: This extension is being considered for deprecation (Dec 2020), may be replaced by KHR_material_specular
-
     if (mat.Get(AI_MATKEY_GLOSSINESS_FACTOR, pbrSG.glossinessFactor) == AI_SUCCESS) {
         result = true;
     } else {

@@ -674,6 +673,25 @@ bool glTF2Exporter::GetMatSpecGloss(const aiMaterial &mat, glTF2::PbrSpecularGlo
     return result;
 }

+bool glTF2Exporter::GetMatSpecular(const aiMaterial &mat, glTF2::MaterialSpecular &specular) {
+    // Specular requires either/or, default factors of zero disables specular, so do not export
+    if (GetMatColor(mat, specular.specularColorFactor, AI_MATKEY_COLOR_SPECULAR) != AI_SUCCESS || mat.Get(AI_MATKEY_SPECULAR_FACTOR, specular.specularFactor) != AI_SUCCESS) {
+        return false;
+    }
+    // The spec states that the default is 1.0 and [1.0, 1.0, 1.0]. We if both are 0, which should disable specular. Otherwise, if one is 0, set to 1.0
+    const bool colorFactorIsZero = specular.specularColorFactor[0] == defaultSpecularColorFactor[0] && specular.specularColorFactor[1] == defaultSpecularColorFactor[1] && specular.specularColorFactor[2] == defaultSpecularColorFactor[2];
+    if (specular.specularFactor == 0.0f && colorFactorIsZero) {
+        return false;
+    } else if (specular.specularFactor == 0.0f) {
+        specular.specularFactor = 1.0f;
+    } else if (colorFactorIsZero) {
+        specular.specularColorFactor[0] = specular.specularColorFactor[1] = specular.specularColorFactor[2] = 1.0f;
+    }
+    GetMatTex(mat, specular.specularColorTexture, aiTextureType_SPECULAR);
+    GetMatTex(mat, specular.specularTexture, aiTextureType_SPECULAR);
+    return true;
+}
+
 bool glTF2Exporter::GetMatSheen(const aiMaterial &mat, glTF2::MaterialSheen &sheen) {
     // Return true if got any valid Sheen properties or textures
     if (GetMatColor(mat, sheen.sheenColorFactor, AI_MATKEY_SHEEN_COLOR_FACTOR) != aiReturn_SUCCESS) {

@@ -818,9 +836,9 @@ void glTF2Exporter::ExportMaterials() {
             m->alphaMode = alphaMode.C_Str();
         }

-        {
+        // This extension has been deprecated, only export with the specific flag enabled, defaults to false. Uses KHR_material_specular default.
+        if (mProperties->GetPropertyBool(AI_CONFIG_USE_GLTF_PBR_SPECULAR_GLOSSINESS)) {
             // KHR_materials_pbrSpecularGlossiness extension
-            // NOTE: This extension is being considered for deprecation (Dec 2020)
             PbrSpecularGlossiness pbrSG;
             if (GetMatSpecGloss(mat, pbrSG)) {
                 mAsset->extensionsUsed.KHR_materials_pbrSpecularGlossiness = true;

@@ -837,7 +855,12 @@ void glTF2Exporter::ExportMaterials() {
         } else {
             // These extensions are not compatible with KHR_materials_unlit or KHR_materials_pbrSpecularGlossiness
             if (!m->pbrSpecularGlossiness.isPresent) {
-                // Sheen
+                MaterialSpecular specular;
+                if (GetMatSpecular(mat, specular)) {
+                    mAsset->extensionsUsed.KHR_materials_specular = true;
+                    m->materialSpecular = Nullable<MaterialSpecular>(specular);
+                }
+
                 MaterialSheen sheen;
                 if (GetMatSheen(mat, sheen)) {
                     mAsset->extensionsUsed.KHR_materials_sheen = true;
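On the export side, GetMatSpecular() above derives the new extension from the ordinary assimp material keys, so a caller only needs to fill AI_MATKEY_COLOR_SPECULAR and AI_MATKEY_SPECULAR_FACTOR before exporting. A hedged sketch (the output path and values are made up, and the scene is assumed to have at least one material):

#include <assimp/Exporter.hpp>
#include <assimp/material.h>
#include <assimp/scene.h>

// Assumes 'scene' is a valid aiScene owned by the caller, with at least one material.
void TagSpecularAndExport(aiScene *scene) {
    aiMaterial *mat = scene->mMaterials[0];

    aiColor3D specColor(1.0f, 0.9f, 0.8f); // example values
    float specFactor = 0.5f;
    mat->AddProperty(&specColor, 1, AI_MATKEY_COLOR_SPECULAR);
    mat->AddProperty(&specFactor, 1, AI_MATKEY_SPECULAR_FACTOR);

    // With both keys present (and not both zero), the glTF2 exporter above
    // emits a KHR_materials_specular block for this material.
    Assimp::Exporter exporter;
    exporter.Export(scene, "gltf2", "out.gltf");
}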
@@ -76,6 +76,7 @@ struct OcclusionTextureInfo;
 struct Node;
 struct Texture;
 struct PbrSpecularGlossiness;
+struct MaterialSpecular;
 struct MaterialSheen;
 struct MaterialClearcoat;
 struct MaterialTransmission;

@@ -117,6 +118,7 @@ protected:
     aiReturn GetMatColor(const aiMaterial &mat, glTF2::vec4 &prop, const char *propName, int type, int idx) const;
     aiReturn GetMatColor(const aiMaterial &mat, glTF2::vec3 &prop, const char *propName, int type, int idx) const;
     bool GetMatSpecGloss(const aiMaterial &mat, glTF2::PbrSpecularGlossiness &pbrSG);
+    bool GetMatSpecular(const aiMaterial &mat, glTF2::MaterialSpecular &specular);
     bool GetMatSheen(const aiMaterial &mat, glTF2::MaterialSheen &sheen);
     bool GetMatClearcoat(const aiMaterial &mat, glTF2::MaterialClearcoat &clearcoat);
     bool GetMatTransmission(const aiMaterial &mat, glTF2::MaterialTransmission &transmission);

@@ -278,8 +278,19 @@ static aiMaterial *ImportMaterial(std::vector<int> &embeddedTexIdxs, Asset &r, M
     aimat->AddProperty(&alphaMode, AI_MATKEY_GLTF_ALPHAMODE);
     aimat->AddProperty(&mat.alphaCutoff, 1, AI_MATKEY_GLTF_ALPHACUTOFF);

+    // KHR_materials_specular
+    if (mat.materialSpecular.isPresent) {
+        MaterialSpecular &specular = mat.materialSpecular.value;
+        // Default values of zero disables Specular
+        if (std::memcmp(specular.specularColorFactor, defaultSpecularColorFactor, sizeof(glTFCommon::vec3)) != 0 || specular.specularFactor != 0.0f) {
+            SetMaterialColorProperty(r, specular.specularColorFactor, aimat, AI_MATKEY_COLOR_SPECULAR);
+            aimat->AddProperty(&specular.specularFactor, 1, AI_MATKEY_SPECULAR_FACTOR);
+            SetMaterialTextureProperty(embeddedTexIdxs, r, specular.specularTexture, aimat, aiTextureType_SPECULAR);
+            SetMaterialTextureProperty(embeddedTexIdxs, r, specular.specularColorTexture, aimat, aiTextureType_SPECULAR);
+        }
+    }
     // pbrSpecularGlossiness
-    if (mat.pbrSpecularGlossiness.isPresent) {
+    else if (mat.pbrSpecularGlossiness.isPresent) {
         PbrSpecularGlossiness &pbrSG = mat.pbrSpecularGlossiness.value;

         SetMaterialColorProperty(r, pbrSG.diffuseFactor, aimat, AI_MATKEY_COLOR_DIFFUSE);
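After import, the values parsed above land in the standard material keys, so client code can query them without knowing anything about the glTF extension. A small sketch, with the file name as a placeholder:

#include <assimp/Importer.hpp>
#include <assimp/material.h>
#include <assimp/scene.h>
#include <cstdio>

int main() {
    Assimp::Importer importer;
    const aiScene *scene = importer.ReadFile("material_test.gltf", 0);
    if (scene == nullptr || scene->mNumMaterials == 0) {
        return 1;
    }

    const aiMaterial *mat = scene->mMaterials[0];
    aiColor3D specColor(0.f, 0.f, 0.f);
    float specFactor = 0.f;
    if (mat->Get(AI_MATKEY_COLOR_SPECULAR, specColor) == AI_SUCCESS &&
        mat->Get(AI_MATKEY_SPECULAR_FACTOR, specFactor) == AI_SUCCESS) {
        std::printf("specular: factor=%f color=(%f %f %f)\n",
                specFactor, specColor.r, specColor.g, specColor.b);
    }
    return 0;
}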
@@ -194,6 +194,8 @@ SET( Common_SRCS
   Common/ScenePreprocessor.cpp
   Common/ScenePreprocessor.h
   Common/SkeletonMeshBuilder.cpp
+  Common/StackAllocator.h
+  Common/StackAllocator.inl
   Common/StandardShapes.cpp
   Common/TargetAnimation.cpp
   Common/TargetAnimation.h

@@ -1349,6 +1349,9 @@ void SceneCombiner::Copy(aiMetadata **_dest, const aiMetadata *src) {
         case AI_AIVECTOR3D:
             out.mData = new aiVector3D(*static_cast<aiVector3D *>(in.mData));
             break;
+        case AI_AIMETADATA:
+            out.mData = new aiMetadata(*static_cast<aiMetadata *>(in.mData));
+            break;
         default:
             ai_assert(false);
             break;
@@ -0,0 +1,92 @@
+/*
[standard assimp BSD license header, Copyright (c) 2006-2022, assimp team]
+*/
+
+/** @file StackAllocator.h
+ *  @brief A very bare-bone allocator class that is suitable when
+ *  allocating many small objects, e.g. during parsing.
+ *  Individual objects are not freed, instead only the whole memory
+ *  can be deallocated.
+ */
+#ifndef AI_STACK_ALLOCATOR_H_INC
+#define AI_STACK_ALLOCATOR_H_INC
+
+#include <vector>
+#include <stdint.h>
+#include <stddef.h>
+
+namespace Assimp {
+
+/** @brief A very bare-bone allocator class that is suitable when
+ *  allocating many small objects, e.g. during parsing.
+ *  Individual objects are not freed, instead only the whole memory
+ *  can be deallocated.
+ */
+class StackAllocator {
+public:
+    /// @brief Constructs the allocator
+    inline StackAllocator();
+    /// @brief Destructs the allocator and frees all memory
+    inline ~StackAllocator();
+
+    // non copyable
+    StackAllocator(const StackAllocator &) = delete;
+    StackAllocator &operator=(const StackAllocator &) = delete;
+
+    /// @brief Returns a pointer to byteSize bytes of heap memory that persists
+    /// for the lifetime of the allocator (or until FreeAll is called).
+    inline void *Allocate(size_t byteSize);
+
+    /// @brief Releases all the memory owned by this allocator.
+    // Memory provided through function Allocate is not valid anymore after this function has been called.
+    inline void FreeAll();
+
+private:
+    constexpr const static size_t g_maxBytesPerBlock = 64 * 1024 * 1024; // The maximum size (in bytes) of a block
+    constexpr const static size_t g_startBytesPerBlock = 16 * 1024; // Size of the first block. Next blocks will double in size until maximum size of g_maxBytesPerBlock
+    size_t m_blockAllocationSize = g_startBytesPerBlock; // Block size of the current block
+    size_t m_subIndex = g_maxBytesPerBlock; // The current byte offset in the current block
+    std::vector<uint8_t *> m_storageBlocks; // A list of blocks
+};
+
+} // namespace Assimp
+
+#include "StackAllocator.inl"
+
+#endif // include guard

@@ -0,0 +1,82 @@
+/*
[standard assimp BSD license header, Copyright (c) 2006-2022, assimp team]
+*/
+
+#include "StackAllocator.h"
+#include <assimp/ai_assert.h>
+
+using namespace Assimp;
+
+inline StackAllocator::StackAllocator() {
+}
+
+inline StackAllocator::~StackAllocator() {
+    FreeAll();
+}
+
+inline void *StackAllocator::Allocate(size_t byteSize) {
+    if (m_subIndex + byteSize > m_blockAllocationSize) // start a new block
+    {
+        // double block size every time, up to maximum of g_maxBytesPerBlock.
+        // Block size must be at least as large as byteSize, but we want to use this for small allocations anyway.
+        m_blockAllocationSize = std::max(std::min(m_blockAllocationSize * 2, g_maxBytesPerBlock), byteSize);
+        uint8_t *data = new uint8_t[m_blockAllocationSize];
+        m_storageBlocks.emplace_back(data);
+        m_subIndex = byteSize;
+        return data;
+    }
+
+    uint8_t *data = m_storageBlocks.back();
+    data += m_subIndex;
+    m_subIndex += byteSize;
+
+    return data;
+}
+
+inline void StackAllocator::FreeAll() {
+    for (size_t i = 0; i < m_storageBlocks.size(); i++) {
+        delete [] m_storageBlocks[i];
+    }
+    std::vector<uint8_t *> empty;
+    m_storageBlocks.swap(empty);
+    // start over:
+    m_blockAllocationSize = g_startBytesPerBlock;
+    m_subIndex = g_maxBytesPerBlock;
+}
|
|
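Usage note (not part of the patch): the allocator above hands out bump-pointer chunks from geometrically growing blocks and only releases memory wholesale. A minimal sketch, assuming StackAllocator lives in the Assimp namespace as defined above, is default-constructible, and exposes exactly the Allocate/FreeAll calls shown; the function name is hypothetical:

    // Hypothetical sketch: many small allocations, one bulk release.
    #include "StackAllocator.h"

    void TokenBufferSketch() {
        Assimp::StackAllocator allocator;          // assumed default-constructible, as above
        for (int i = 0; i < 1000; ++i) {
            void *chunk = allocator.Allocate(64);  // bump-pointer within the current block; once the
                                                   // block is full, a new, doubled block is started
            (void)chunk;                           // a real caller would placement-construct tokens here
        }
        allocator.FreeAll();                       // frees every block; also invoked by the destructor
    }

This is the design the FBX tokenizer relies on: individual tokens are never freed one by one, so per-allocation bookkeeping can be skipped entirely.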
@ -40,6 +40,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <assimp/cimport.h>
#include <assimp/Importer.hpp>
#include <assimp/Exporter.hpp>
#include <assimp/scene.h>
#include <assimp/postprocess.h>

@ -53,6 +54,13 @@ extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t dataSize) {
    const aiScene *sc = importer.ReadFileFromMemory(data, dataSize,
            aiProcessPreset_TargetRealtime_Quality, nullptr);

    if (sc == nullptr) {
        return 0;
    }

    Exporter exporter;
    exporter.ExportToBlob(sc, "fbx");

    aiDetachLogStream(&stream);

    return 0;
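For context, the fuzz target above calls ExportToBlob only to exercise the FBX export path and discards the result. A hedged sketch of how a normal caller might walk the returned blob chain; the function name and printout are illustrative only:

    #include <assimp/Exporter.hpp>
    #include <assimp/cexport.h>
    #include <assimp/scene.h>
    #include <cstdio>

    // Illustrative only: print the size of every part of an exported blob chain.
    void DumpBlobSketch(const aiScene *scene) {
        Assimp::Exporter exporter;
        const aiExportDataBlob *blob = exporter.ExportToBlob(scene, "fbx");
        for (const aiExportDataBlob *part = blob; part != nullptr; part = part->next) {
            // part->name distinguishes sub-blobs (e.g. external buffers); the main part's name is typically empty
            std::printf("blob part '%s': %zu bytes\n", part->name.C_Str(), part->size);
        }
        // the blob is owned by the Exporter and stays valid until the next export call
        // or until the Exporter is destroyed
    }
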
@ -1065,6 +1065,17 @@ enum aiComponent
 */
#define AI_CONFIG_EXPORT_POINT_CLOUDS "EXPORT_POINT_CLOUDS"

/** @brief Specifies whether to use the deprecated KHR_materials_pbrSpecularGlossiness extension.
 *
 * When this flag is undefined, any material with specularity will use the new KHR_materials_specular
 * extension. Enabling this flag reverts to the deprecated extension. Note that exporting
 * KHR_materials_pbrSpecularGlossiness together with extensions other than KHR_materials_unlit is
 * unsupported, including the basic pbrMetallicRoughness spec.
 *
 * Property type: Bool. Default value: false.
 */
#define AI_CONFIG_USE_GLTF_PBR_SPECULAR_GLOSSINESS "USE_GLTF_PBR_SPECULAR_GLOSSINESS"

/**
 * @brief Specifies the blob name assimp uses for exporting.
 *
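For context, the flag added above is read as a boolean export property. A minimal sketch of opting back into the deprecated extension; the file paths and the function name are placeholders, not part of the patch:

    #include <assimp/Exporter.hpp>
    #include <assimp/Importer.hpp>
    #include <assimp/config.h>
    #include <assimp/postprocess.h>
    #include <assimp/scene.h>

    // Sketch: export a glTF scene with KHR_materials_pbrSpecularGlossiness enabled.
    bool ExportWithSpecGlossSketch() {
        Assimp::Importer importer;
        const aiScene *scene = importer.ReadFile("input.gltf", aiProcess_ValidateDataStructure);
        if (scene == nullptr) {
            return false;
        }

        Assimp::ExportProperties props;
        props.SetPropertyBool(AI_CONFIG_USE_GLTF_PBR_SPECULAR_GLOSSINESS, true);

        Assimp::Exporter exporter;
        return exporter.Export(scene, "glb2", "output.glb", 0u, &props) == aiReturn_SUCCESS;
    }

The unit test further below passes the same property to Exporter::Export in exactly this way.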
@ -1,38 +1,71 @@
/*
---------------------------------------------------------------------------
Open Asset Import Library (assimp)
---------------------------------------------------------------------------
Copyright (c) 2006-2022, assimp team
All rights reserved.

(standard assimp BSD license text, identical to the header above)
---------------------------------------------------------------------------
*/
#include "UnitTestPCH.h"
#include <assimp/cexport.h>
#include <assimp/Exporter.hpp>

#ifndef ASSIMP_BUILD_NO_EXPORT

class ExporterTest : public ::testing::Test {
public:
    void SetUp() override {
        ex = new Assimp::Exporter();
        im = new Assimp::Importer();

        pTest = im->ReadFile(ASSIMP_TEST_MODELS_DIR "/X/test.x", aiProcess_ValidateDataStructure);
    }

    void TearDown() override {
        delete ex;
        delete im;
    }

protected:
    const aiScene* pTest;
    Assimp::Exporter* ex;
    Assimp::Importer* im;
};

// ------------------------------------------------------------------------------------------------
TEST_F(ExporterTest, testExportToFile) {
    const char* file = "unittest_output.dae";
    EXPECT_EQ(AI_SUCCESS, ex->Export(pTest, "collada", file));
@ -41,8 +74,7 @@ TEST_F(ExporterTest, testExportToFile)
}

// ------------------------------------------------------------------------------------------------
TEST_F(ExporterTest, testExportToBlob) {
    const aiExportDataBlob* blob = ex->ExportToBlob(pTest, "collada");
    ASSERT_TRUE(blob);
    EXPECT_TRUE(blob->data);
@ -56,8 +88,7 @@ TEST_F(ExporterTest, testExportToBlob)
}

// ------------------------------------------------------------------------------------------------
TEST_F(ExporterTest, testCppExportInterface) {
    EXPECT_TRUE(ex->GetExportFormatCount() > 0);
    for (size_t i = 0; i < ex->GetExportFormatCount(); ++i) {
        const aiExportFormatDesc* const desc = ex->GetExportFormatDescription(i);
@ -71,14 +102,13 @@ TEST_F(ExporterTest, testCppExportInterface)
}

// ------------------------------------------------------------------------------------------------
TEST_F(ExporterTest, testCExportInterface) {
    EXPECT_TRUE(aiGetExportFormatCount() > 0);
    for (size_t i = 0; i < aiGetExportFormatCount(); ++i) {
        const aiExportFormatDesc* const desc = aiGetExportFormatDescription(i);
        EXPECT_TRUE(desc);
        // rest has already been validated by testCppExportInterface
    }
}

#endif

@ -219,9 +219,15 @@ TEST_F(utglTF2ImportExport, importglTF2AndExport_KHR_materials_pbrSpecularGlossi
    const aiScene *scene = importer.ReadFile(ASSIMP_TEST_MODELS_DIR "/glTF2/BoxTextured-glTF-pbrSpecularGlossiness/BoxTextured.gltf",
            aiProcess_ValidateDataStructure);
    EXPECT_NE(nullptr, scene);

    // Export with specular glossiness disabled
    EXPECT_EQ(aiReturn_SUCCESS, exporter.Export(scene, "glb2", ASSIMP_TEST_MODELS_DIR "/glTF2/BoxTextured-glTF-pbrSpecularGlossiness/BoxTextured_out.glb"));

    // Export with specular glossiness enabled
    ExportProperties props;
    props.SetPropertyBool(AI_CONFIG_USE_GLTF_PBR_SPECULAR_GLOSSINESS, true);
    EXPECT_EQ(aiReturn_SUCCESS, exporter.Export(scene, "glb2", ASSIMP_TEST_MODELS_DIR "/glTF2/BoxTextured-glTF-pbrSpecularGlossiness/BoxTextured_out.glb", 0, &props));

    // And re-import
    EXPECT_TRUE(importerMatTest(ASSIMP_TEST_MODELS_DIR "/glTF2/BoxTextured-glTF-pbrSpecularGlossiness/BoxTextured_out.glb", true));
}