All allocation changes
parent 1d6ed840fb
commit 2b3c49cb93
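Editor's note: the commit also adds Common/StackAllocator.h and Common/StackAllocator.cpp (registered in the CMakeLists.txt hunk at the bottom), but the new files themselves are not part of this view. Every call site below assumes only a single method, Allocate(size_t), returning raw storage. A minimal sketch of such a stack (arena) allocator follows; the block size, growth policy, and member names are illustrative guesses, not the committed implementation:

// Minimal sketch, not the committed implementation: a growing arena that
// hands out bump-allocated storage and frees everything in its destructor.
#include <cstddef>
#include <cstdlib>
#include <vector>

class StackAllocator {
public:
    StackAllocator() = default;
    ~StackAllocator() {
        for (void *block : m_blocks) {
            std::free(block); // wholesale release; no per-object frees
        }
    }

    void *Allocate(size_t size) {
        // round sizes up so successive allocations stay suitably aligned
        size = (size + alignof(std::max_align_t) - 1) & ~(alignof(std::max_align_t) - 1);
        if (m_blocks.empty() || m_used + size > kBlockSize) {
            // error handling omitted in this sketch
            m_blocks.push_back(std::malloc(size > kBlockSize ? size : kBlockSize));
            m_used = 0;
        }
        void *p = static_cast<char *>(m_blocks.back()) + m_used;
        m_used += size;
        return p;
    }

private:
    static constexpr size_t kBlockSize = 64 * 1024; // illustrative block size
    std::vector<void *> m_blocks;
    size_t m_used = 0;
};

The hunks below thread one such allocator through tokenizing and DOM construction, then drop all of its memory at once when the import finishes.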
FBXBinaryTokenizer.cpp

@@ -341,8 +341,7 @@ void ReadData(const char*& sbegin_out, const char*& send_out, const char* input,
 
 // ------------------------------------------------------------------------------------------------
-bool ReadScope(TokenList& output_tokens, const char* input, const char*& cursor, const char* end, bool const is64bits)
-{
+bool ReadScope(TokenList &output_tokens, StackAllocator &token_allocator, const char *input, const char *&cursor, const char *end, bool const is64bits) {
     // the first word contains the offset at which this block ends
     const uint64_t end_offset = is64bits ? ReadDoubleWord(input, cursor, end) : ReadWord(input, cursor, end);
 
 
@@ -408,7 +407,7 @@ bool ReadScope(TokenList& output_tokens, const char* input, const char*& cursor,
 
     // XXX this is vulnerable to stack overflowing ..
     while(Offset(input, cursor) < end_offset - sentinel_block_length) {
-        ReadScope(output_tokens, input, cursor, input + end_offset - sentinel_block_length, is64bits);
+        ReadScope(output_tokens, token_allocator, input, cursor, input + end_offset - sentinel_block_length, is64bits);
     }
     output_tokens.push_back(new_Token(cursor, cursor + 1, TokenType_CLOSE_BRACKET, Offset(input, cursor) ));
 
@@ -431,8 +430,7 @@ bool ReadScope(TokenList& output_tokens, const char* input, const char*& cursor,
 
 // ------------------------------------------------------------------------------------------------
 // TODO: Test FBX Binary files newer than the 7500 version to check if the 64 bits address behaviour is consistent
-void TokenizeBinary(TokenList& output_tokens, const char* input, size_t length)
-{
+void TokenizeBinary(TokenList &output_tokens, const char *input, size_t length, StackAllocator &token_allocator) {
     ai_assert(input);
     ASSIMP_LOG_DEBUG("Tokenizing binary FBX file");
 
@@ -465,7 +463,7 @@ void TokenizeBinary(TokenList& output_tokens, const char* input, size_t length)
     try
     {
         while (cursor < end ) {
-            if (!ReadScope(output_tokens, input, cursor, input + length, is64bits)) {
+            if (!ReadScope(output_tokens, token_allocator, input, cursor, input + length, is64bits)) {
                 break;
             }
         }
FBXDocument.cpp

@@ -235,7 +235,7 @@ FileGlobalSettings::FileGlobalSettings(const Document &doc, std::shared_ptr<cons
 }
 
 // ------------------------------------------------------------------------------------------------
-Document::Document(const Parser& parser, const ImportSettings& settings) :
+Document::Document(Parser& parser, const ImportSettings& settings) :
         settings(settings), parser(parser) {
     ASSIMP_LOG_DEBUG("Creating FBX Document");
 
@@ -258,13 +258,6 @@ Document::Document(const Parser& parser, const ImportSettings& settings) :
 
 // ------------------------------------------------------------------------------------------------
 Document::~Document() {
-    for(ObjectMap::value_type& v : objects) {
-        delete v.second;
-    }
-
-    for(ConnectionMap::value_type& v : src_connections) {
-        delete v.second;
-    }
     // |dest_connections| contain the same Connection objects as the |src_connections|
 }
 
@@ -348,9 +341,11 @@ void Document::ReadObjects() {
         DOMError("no Objects dictionary found");
     }
 
+    StackAllocator &allocator = parser.GetAllocator();
+
     // add a dummy entry to represent the Model::RootNode object (id 0),
     // which is only indirectly defined in the input file
-    objects[0] = new LazyObject(0L, *eobjects, *this);
+    objects[0] = new_LazyObject(0L, *eobjects, *this);
 
     const Scope& sobjects = *eobjects->Compound();
     for(const ElementMap::value_type& el : sobjects.Elements()) {
 
@@ -377,7 +372,7 @@ void Document::ReadObjects() {
             DOMWarning("encountered duplicate object id, ignoring first occurrence",el.second);
         }
 
-        objects[id] = new LazyObject(id, *el.second, *this);
+        objects[id] = new_LazyObject(id, *el.second, *this);
 
         // grab all animation stacks upfront since there is no listing of them
         if(!strcmp(el.first.c_str(),"AnimationStack")) {
 
@@ -444,8 +439,10 @@ void Document::ReadPropertyTemplates() {
 }
 
 // ------------------------------------------------------------------------------------------------
-void Document::ReadConnections() {
-    const Scope& sc = parser.GetRootScope();
+void Document::ReadConnections()
+{
+    StackAllocator &allocator = parser.GetAllocator();
+    const Scope &sc = parser.GetRootScope();
     // read property templates from "Definitions" section
     const Element* const econns = sc["Connections"];
     if(!econns || !econns->Compound()) {
 
@@ -484,7 +481,7 @@ void Document::ReadConnections() {
         }
 
         // add new connection
-        const Connection* const c = new Connection(insertionOrder++,src,dest,prop,*this);
+        const Connection* const c = new_Connection(insertionOrder++,src,dest,prop,*this);
         src_connections.insert(ConnectionMap::value_type(src,c));
         dest_connections.insert(ConnectionMap::value_type(dest,c));
     }
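Editor's note: the deleted loops in Document::~Document() are the point of this file's changes. LazyObject and Connection instances are now placement-new'd into the parser's StackAllocator (via the new_LazyObject/new_Connection macros in the next file), so there is no per-object delete; their storage is released in one step when the allocator dies. A tiny demo of that ownership model, using the sketch allocator above with Node as a hypothetical stand-in for LazyObject/Connection:

#include <new> // placement new

struct Node {
    int id;
    explicit Node(int i) : id(i) {}
};

void Demo() {
    StackAllocator allocator; // sketch allocator from the note above
    Node *a = new (allocator.Allocate(sizeof(Node))) Node(1);
    Node *b = new (allocator.Allocate(sizeof(Node))) Node(2);
    (void)a; (void)b; // use a and b freely; no delete statements anywhere
} // ~StackAllocator() reclaims both objects' storage at once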
FBXDocument.h

@@ -80,6 +80,8 @@ class BlendShape;
 class Skin;
 class Cluster;
 
+#define new_LazyObject new (allocator.Allocate(sizeof(LazyObject))) LazyObject
+#define new_Connection new (allocator.Allocate(sizeof(Connection))) Connection
 
 /** Represents a delay-parsed FBX objects. Many objects in the scene
  * are not needed by assimp, so it makes no sense to parse them
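Editor's note: new_LazyObject and new_Connection are deliberately unhygienic macros. They refer to a variable literally named allocator, so they compile only where such a name is in scope, which is why ReadObjects() and ReadConnections() above each begin by fetching the allocator from the parser. Spelled out, the expansion is mechanical:

// What the preprocessor does with the call in Document::ReadObjects():
StackAllocator &allocator = parser.GetAllocator();
objects[0] = new_LazyObject(0L, *eobjects, *this);
// ...expands to a placement new into allocator-owned storage:
objects[0] = new (allocator.Allocate(sizeof(LazyObject))) LazyObject(0L, *eobjects, *this);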
FBXImporter.cpp

@@ -160,17 +160,18 @@ void FBXImporter::InternReadFile(const std::string &pFile, aiScene *pScene, IOSy
        TokenList tokens;
        try {
+               Assimp::StackAllocator tempAllocator;
                bool is_binary = false;
                if (!strncmp(begin, "Kaydara FBX Binary", 18)) {
                        is_binary = true;
-                       TokenizeBinary(tokens, begin, contents.size());
+                       TokenizeBinary(tokens, begin, contents.size(), tempAllocator);
                } else {
-                       Tokenize(tokens, begin);
+                       Tokenize(tokens, begin, tempAllocator);
                }
 
                // use this information to construct a very rudimentary
                // parse-tree representing the FBX scope structure
-               Parser parser(tokens, is_binary);
+               Parser parser(tokens, tempAllocator, is_binary);
 
                // take the raw parse-tree and convert it to a FBX DOM
                Document doc(parser, settings);
 
@@ -189,10 +190,7 @@ void FBXImporter::InternReadFile(const std::string &pFile, aiScene *pScene, IOSy
                // Set FBX file scale is relative to CM must be converted to M for
                // assimp universal format (M)
                SetFileScale(size_relative_to_cm * 0.01f);
-
-               std::for_each(tokens.begin(), tokens.end(), Util::delete_fun<Token>());
        } catch (std::exception &) {
-               std::for_each(tokens.begin(), tokens.end(), Util::delete_fun<Token>());
                throw;
        }
 }
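Editor's note: with tempAllocator owning every token, the two std::for_each(..., Util::delete_fun<Token>()) cleanups, one on the success path and one on the exception path, become unnecessary; stack unwinding destroys the allocator either way. The resulting control flow, reduced to its lifetime-relevant skeleton (a sketch, not the full importer code):

TokenList tokens; // holds raw Token pointers, but owns none of them
try {
    Assimp::StackAllocator tempAllocator; // owns all token and DOM storage
    bool is_binary = !strncmp(begin, "Kaydara FBX Binary", 18);
    if (is_binary) {
        TokenizeBinary(tokens, begin, contents.size(), tempAllocator);
    } else {
        Tokenize(tokens, begin, tempAllocator);
    }
    Parser parser(tokens, tempAllocator, is_binary);
    Document doc(parser, settings);
    // ... convert the DOM into the aiScene ...
} catch (std::exception &) {
    throw; // no manual token cleanup; tempAllocator unwinds with the stack
}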
FBXParser.cpp

@@ -117,6 +117,7 @@ namespace FBX {
 // ------------------------------------------------------------------------------------------------
 Element::Element(const Token& key_token, Parser& parser) : key_token(key_token) {
     TokenPtr n = nullptr;
+    StackAllocator &allocator = parser.GetAllocator();
     do {
         n = parser.AdvanceToNextToken();
         if(!n) {
 
@@ -145,7 +146,7 @@ Element::Element(const Token& key_token, Parser& parser) : key_token(key_token)
     }
 
     if (n->Type() == TokenType_OPEN_BRACKET) {
-        compound.reset(new Scope(parser));
+        compound.reset(new_Scope(parser));
 
         // current token should be a TOK_CLOSE_BRACKET
         n = parser.CurrentToken();
 
@@ -178,6 +179,7 @@ Scope::Scope(Parser& parser,bool topLevel)
         }
     }
 
+    StackAllocator &allocator = parser.GetAllocator();
     TokenPtr n = parser.AdvanceToNextToken();
     if (n == nullptr) {
         ParseError("unexpected end of file");
 
@@ -208,22 +210,16 @@ Scope::Scope(Parser& parser,bool topLevel)
 }
 
 // ------------------------------------------------------------------------------------------------
-Scope::~Scope() {
-    for(ElementMap::value_type& v : elements) {
-        delete v.second;
-    }
+Scope::~Scope()
+{
 }
 
 // ------------------------------------------------------------------------------------------------
-Parser::Parser (const TokenList& tokens, bool is_binary)
-    : tokens(tokens)
-    , last()
-    , current()
-    , cursor(tokens.begin())
-    , is_binary(is_binary)
+Parser::Parser(const TokenList &tokens, StackAllocator &allocator, bool is_binary) :
+        tokens(tokens), allocator(allocator), last(), current(), cursor(tokens.begin()), is_binary(is_binary)
 {
     ASSIMP_LOG_DEBUG("Parsing FBX tokens");
-    root.reset(new Scope(*this,true));
+    root = new_Scope(*this, true);
 }
 
 // ------------------------------------------------------------------------------------------------
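Editor's note: Scope::~Scope() loses its delete loop for the same reason as Document::~Document(): the elements now live in the allocator. Worth noting is what placement new does not do. Nothing in the hunks shown here runs the pooled objects' destructors, so the scheme reclaims raw storage only. The conventional companion of placement new, shown for contrast, would be an explicit destructor call (a sketch; the commit does not do this):

// Construct in place...
Scope *s = new (allocator.Allocate(sizeof(Scope))) Scope(parser, true);
// ...and, if per-object teardown were required, destroy in place:
s->~Scope(); // not called anywhere in the hunks shown in this diff
// The raw storage itself is reclaimed only when the allocator dies.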
FBXParser.h

@@ -52,6 +52,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #include <assimp/LogAux.h>
 #include <assimp/fast_atof.h>
 
+#include "Common/StackAllocator.h"
 #include "FBXCompileConfig.h"
 #include "FBXTokenizer.h"
 
@@ -68,8 +69,8 @@ typedef std::fbx_unordered_multimap< std::string, Element* > ElementMap;
 
 typedef std::pair<ElementMap::const_iterator,ElementMap::const_iterator> ElementCollection;
 
-#  define new_Scope new Scope
-#  define new_Element new Element
+#define new_Scope new (allocator.Allocate(sizeof(Scope))) Scope
+#define new_Element new (allocator.Allocate(sizeof(Element))) Element
 
 
 /** FBX data entity that consists of a key:value tuple.
 
@@ -159,17 +160,21 @@ class Parser
 public:
     /** Parse given a token list. Does not take ownership of the tokens -
      * the objects must persist during the entire parser lifetime */
-    Parser (const TokenList& tokens,bool is_binary);
+    Parser(const TokenList &tokens, StackAllocator &allocator, bool is_binary);
     ~Parser();
 
     const Scope& GetRootScope() const {
-        return *root.get();
+        return *root;
     }
 
     bool IsBinary() const {
         return is_binary;
     }
 
+    StackAllocator &GetAllocator() {
+        return allocator;
+    }
+
 private:
     friend class Scope;
     friend class Element;
 
@@ -180,10 +185,10 @@ private:
 
 private:
     const TokenList& tokens;
+    StackAllocator &allocator;
     TokenPtr last, current;
     TokenList::const_iterator cursor;
-    std::unique_ptr<Scope> root;
+    Scope *root;
 
     const bool is_binary;
 };
FBXTokenizer.cpp

@@ -96,7 +96,8 @@ AI_WONT_RETURN void TokenizeError(const std::string& message, unsigned int line,
 
 // process a potential data token up to 'cur', adding it to 'output_tokens'.
 // ------------------------------------------------------------------------------------------------
-void ProcessDataToken( TokenList& output_tokens, const char*& start, const char*& end,
+void ProcessDataToken(TokenList &output_tokens, StackAllocator &token_allocator,
+                      const char*& start, const char*& end,
                       unsigned int line,
                       unsigned int column,
                       TokenType type = TokenType_DATA,
 
@@ -133,8 +134,7 @@ void ProcessDataToken( TokenList& output_tokens, const char*& start, const char*
 }
 
 // ------------------------------------------------------------------------------------------------
-void Tokenize(TokenList& output_tokens, const char* input)
-{
+void Tokenize(TokenList &output_tokens, const char *input, StackAllocator &token_allocator) {
     ai_assert(input);
     ASSIMP_LOG_DEBUG("Tokenizing ASCII FBX file");
 
@@ -166,7 +166,7 @@ void Tokenize(TokenList& output_tokens, const char* input)
                 in_double_quotes = false;
                 token_end = cur;
 
-                ProcessDataToken(output_tokens,token_begin,token_end,line,column);
+                ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column);
                 pending_data_token = false;
             }
             continue;
 
@@ -183,30 +183,30 @@ void Tokenize(TokenList& output_tokens, const char* input)
             continue;
 
         case ';':
-            ProcessDataToken(output_tokens,token_begin,token_end,line,column);
+            ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column);
            comment = true;
            continue;
 
         case '{':
-            ProcessDataToken(output_tokens,token_begin,token_end, line, column);
+            ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column);
            output_tokens.push_back(new_Token(cur,cur+1,TokenType_OPEN_BRACKET,line,column));
            continue;
 
         case '}':
-            ProcessDataToken(output_tokens,token_begin,token_end,line,column);
+            ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column);
            output_tokens.push_back(new_Token(cur,cur+1,TokenType_CLOSE_BRACKET,line,column));
            continue;
 
         case ',':
            if (pending_data_token) {
-                ProcessDataToken(output_tokens,token_begin,token_end,line,column,TokenType_DATA,true);
+                ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column, TokenType_DATA, true);
            }
            output_tokens.push_back(new_Token(cur,cur+1,TokenType_COMMA,line,column));
            continue;
 
         case ':':
            if (pending_data_token) {
-                ProcessDataToken(output_tokens,token_begin,token_end,line,column,TokenType_KEY,true);
+                ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column, TokenType_KEY, true);
            }
            else {
                TokenizeError("unexpected colon", line, column);
 
@@ -228,7 +228,7 @@ void Tokenize(TokenList& output_tokens, const char* input)
            }
        }
 
-        ProcessDataToken(output_tokens,token_begin,token_end,line,column,type);
+        ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column, type);
    }
 
    pending_data_token = false;
FBXTokenizer.h

@@ -47,6 +47,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #define INCLUDED_AI_FBX_TOKENIZER_H
 
 #include "FBXCompileConfig.h"
+#include "Common/StackAllocator.h"
 #include <assimp/ai_assert.h>
 #include <assimp/defs.h>
 #include <vector>
 
@@ -158,7 +159,7 @@ private:
 typedef const Token* TokenPtr;
 typedef std::vector< TokenPtr > TokenList;
 
-#define new_Token new Token
+#define new_Token new (token_allocator.Allocate(sizeof(Token))) Token
 
 
 /** Main FBX tokenizer function. Transform input buffer into a list of preprocessed tokens.
 
@@ -168,7 +169,7 @@ typedef std::vector< TokenPtr > TokenList;
 * @param output_tokens Receives a list of all tokens in the input data.
 * @param input_buffer Textual input buffer to be processed, 0-terminated.
 * @throw DeadlyImportError if something goes wrong */
-void Tokenize(TokenList& output_tokens, const char* input);
+void Tokenize(TokenList &output_tokens, const char *input, StackAllocator &tokenAllocator);
 
 
 /** Tokenizer function for binary FBX files.
 
@@ -179,7 +180,7 @@ void Tokenize(TokenList& output_tokens, const char* input);
 * @param input_buffer Binary input buffer to be processed.
 * @param length Length of input buffer, in bytes. There is no 0-terminal.
 * @throw DeadlyImportError if something goes wrong */
-void TokenizeBinary(TokenList& output_tokens, const char* input, size_t length);
+void TokenizeBinary(TokenList &output_tokens, const char *input, size_t length, StackAllocator &tokenAllocator);
 
 } // ! FBX
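Editor's note: one subtlety in this header. The declarations name the parameter tokenAllocator, while the definitions in FBXTokenizer.cpp name it token_allocator. That is legal C++ (parameter names in declarations are documentation only), but the exact spelling token_allocator is load-bearing in the definitions, because new_Token expands textually to token_allocator.Allocate(...):

// Declaration: the name tokenAllocator is purely cosmetic here.
void Tokenize(TokenList &output_tokens, const char *input, StackAllocator &tokenAllocator);

// Definition: must spell the parameter token_allocator, or every
// new_Token(...) in the body fails to compile after macro expansion.
void Tokenize(TokenList &output_tokens, const char *input, StackAllocator &token_allocator) {
    // ... new_Token(cur, cur + 1, TokenType_COMMA, line, column) works here ...
}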
CMakeLists.txt

@@ -193,6 +193,8 @@ SET( Common_SRCS
   Common/ScenePreprocessor.cpp
   Common/ScenePreprocessor.h
   Common/SkeletonMeshBuilder.cpp
+  Common/StackAllocator.h
+  Common/StackAllocator.cpp
   Common/StandardShapes.cpp
   Common/TargetAnimation.cpp
   Common/TargetAnimation.h