All allocation changes

pull/4494/head
Florian Born 2022-04-20 12:33:39 +02:00
parent 1d6ed840fb
commit 2b3c49cb93
9 changed files with 55 additions and 56 deletions
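For context: this commit replaces the FBX importer's many small heap allocations (Token, Scope, Element, LazyObject and Connection objects) with placement new into a StackAllocator arena, so that all of this memory can be released wholesale instead of through individual delete calls. Below is a minimal sketch of the kind of bump-pointer arena the call sites imply. The real Common/StackAllocator.h added by this commit is not part of this diff, so apart from the Allocate(size_t) method visible at the call sites, the block size, growth policy, and member names here are assumptions.

#include <cstddef>
#include <cstdlib>
#include <vector>

class StackAllocator {
public:
    StackAllocator() = default;

    // Free the blocks in one sweep. Objects placement-new'd into the arena
    // are not destroyed individually, which is why the hunks below empty
    // Scope::~Scope and Document::~Document instead of deleting children.
    ~StackAllocator() {
        for (void *block : m_blocks) {
            std::free(block);
        }
    }

    // Bump-pointer allocation: carve byteCount bytes out of the current
    // block, opening a fresh block whenever the current one is exhausted.
    void *Allocate(size_t byteCount) {
        // round up to pointer alignment so any of the FBX node types fit
        byteCount = (byteCount + sizeof(void *) - 1) & ~(sizeof(void *) - 1);
        if (byteCount >= BlockSize) {
            // oversized request: dedicated block; force a fresh block next time
            m_blocks.push_back(std::malloc(byteCount));
            m_used = BlockSize;
            return m_blocks.back();
        }
        if (m_blocks.empty() || m_used + byteCount > BlockSize) {
            m_blocks.push_back(std::malloc(BlockSize));
            m_used = 0;
        }
        void *result = static_cast<char *>(m_blocks.back()) + m_used;
        m_used += byteCount;
        return result;
    }

private:
    static const size_t BlockSize = 64 * 1024; // assumed block size
    std::vector<void *> m_blocks;              // every block ever allocated
    size_t m_used = 0;                         // bytes used in the last block
};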

code/AssetLib/FBX/FBXBinaryTokenizer.cpp

@@ -341,8 +341,7 @@ void ReadData(const char*& sbegin_out, const char*& send_out, const char* input,
// ------------------------------------------------------------------------------------------------
bool ReadScope(TokenList& output_tokens, const char* input, const char*& cursor, const char* end, bool const is64bits)
{
bool ReadScope(TokenList &output_tokens, StackAllocator &token_allocator, const char *input, const char *&cursor, const char *end, bool const is64bits) {
// the first word contains the offset at which this block ends
const uint64_t end_offset = is64bits ? ReadDoubleWord(input, cursor, end) : ReadWord(input, cursor, end);
@@ -408,7 +407,7 @@ bool ReadScope(TokenList& output_tokens, const char* input, const char*& cursor,
// XXX this is vulnerable to stack overflowing ..
while(Offset(input, cursor) < end_offset - sentinel_block_length) {
ReadScope(output_tokens, input, cursor, input + end_offset - sentinel_block_length, is64bits);
ReadScope(output_tokens, token_allocator, input, cursor, input + end_offset - sentinel_block_length, is64bits);
}
output_tokens.push_back(new_Token(cursor, cursor + 1, TokenType_CLOSE_BRACKET, Offset(input, cursor) ));
@@ -431,8 +430,7 @@ bool ReadScope(TokenList& output_tokens, const char* input, const char*& cursor,
// ------------------------------------------------------------------------------------------------
// TODO: Test FBX Binary files newer than the 7500 version to check if the 64 bits address behaviour is consistent
void TokenizeBinary(TokenList& output_tokens, const char* input, size_t length)
{
void TokenizeBinary(TokenList &output_tokens, const char *input, size_t length, StackAllocator &token_allocator) {
ai_assert(input);
ASSIMP_LOG_DEBUG("Tokenizing binary FBX file");
@@ -465,7 +463,7 @@ void TokenizeBinary(TokenList& output_tokens, const char* input, size_t length)
try
{
while (cursor < end ) {
if (!ReadScope(output_tokens, input, cursor, input + length, is64bits)) {
if (!ReadScope(output_tokens, token_allocator, input, cursor, input + length, is64bits)) {
break;
}
}

code/AssetLib/FBX/FBXDocument.cpp

@@ -235,7 +235,7 @@ FileGlobalSettings::FileGlobalSettings(const Document &doc, std::shared_ptr<cons
}
// ------------------------------------------------------------------------------------------------
Document::Document(const Parser& parser, const ImportSettings& settings) :
Document::Document(Parser& parser, const ImportSettings& settings) :
settings(settings), parser(parser) {
ASSIMP_LOG_DEBUG("Creating FBX Document");
@@ -258,13 +258,6 @@ Document::Document(const Parser& parser, const ImportSettings& settings) :
// ------------------------------------------------------------------------------------------------
Document::~Document() {
for(ObjectMap::value_type& v : objects) {
delete v.second;
}
for(ConnectionMap::value_type& v : src_connections) {
delete v.second;
}
// |dest_connections| contain the same Connection objects as the |src_connections|
}
@@ -348,9 +341,11 @@ void Document::ReadObjects() {
DOMError("no Objects dictionary found");
}
StackAllocator &allocator = parser.GetAllocator();
// add a dummy entry to represent the Model::RootNode object (id 0),
// which is only indirectly defined in the input file
objects[0] = new LazyObject(0L, *eobjects, *this);
objects[0] = new_LazyObject(0L, *eobjects, *this);
const Scope& sobjects = *eobjects->Compound();
for(const ElementMap::value_type& el : sobjects.Elements()) {
@@ -377,7 +372,7 @@ void Document::ReadObjects() {
DOMWarning("encountered duplicate object id, ignoring first occurrence",el.second);
}
objects[id] = new LazyObject(id, *el.second, *this);
objects[id] = new_LazyObject(id, *el.second, *this);
// grab all animation stacks upfront since there is no listing of them
if(!strcmp(el.first.c_str(),"AnimationStack")) {
@@ -444,8 +439,10 @@ void Document::ReadPropertyTemplates() {
}
// ------------------------------------------------------------------------------------------------
void Document::ReadConnections() {
const Scope& sc = parser.GetRootScope();
void Document::ReadConnections()
{
StackAllocator &allocator = parser.GetAllocator();
const Scope &sc = parser.GetRootScope();
// read connections from the "Connections" section
const Element* const econns = sc["Connections"];
if(!econns || !econns->Compound()) {
@@ -484,7 +481,7 @@ void Document::ReadConnections() {
}
// add new connection
const Connection* const c = new Connection(insertionOrder++,src,dest,prop,*this);
const Connection* const c = new_Connection(insertionOrder++,src,dest,prop,*this);
src_connections.insert(ConnectionMap::value_type(src,c));
dest_connections.insert(ConnectionMap::value_type(dest,c));
}

code/AssetLib/FBX/FBXDocument.h

@@ -80,6 +80,8 @@ class BlendShape;
class Skin;
class Cluster;
#define new_LazyObject new (allocator.Allocate(sizeof(LazyObject))) LazyObject
#define new_Connection new (allocator.Allocate(sizeof(Connection))) Connection
/** Represents a delay-parsed FBX object. Many objects in the scene
* are not needed by assimp, so it makes no sense to parse them
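The new_LazyObject and new_Connection macros above expect a variable named allocator to be in scope at each call site, which is why ReadObjects() and ReadConnections() in FBXDocument.cpp now begin with StackAllocator &allocator = parser.GetAllocator();. For illustration, the ReadObjects() line shown earlier,

objects[id] = new_LazyObject(id, *el.second, *this);

expands to a placement new into the parser's arena:

objects[id] = new (allocator.Allocate(sizeof(LazyObject))) LazyObject(id, *el.second, *this);

Because the arena owns that storage, the objects are never passed to delete individually; that is what lets Document::~Document drop its delete loops.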

code/AssetLib/FBX/FBXImporter.cpp

@@ -160,17 +160,18 @@ void FBXImporter::InternReadFile(const std::string &pFile, aiScene *pScene, IOSy
TokenList tokens;
try {
Assimp::StackAllocator tempAllocator;
bool is_binary = false;
if (!strncmp(begin, "Kaydara FBX Binary", 18)) {
is_binary = true;
TokenizeBinary(tokens, begin, contents.size());
TokenizeBinary(tokens, begin, contents.size(), tempAllocator);
} else {
Tokenize(tokens, begin);
Tokenize(tokens, begin, tempAllocator);
}
// use this information to construct a very rudimentary
// parse-tree representing the FBX scope structure
Parser parser(tokens, is_binary);
Parser parser(tokens, tempAllocator, is_binary);
// take the raw parse-tree and convert it to a FBX DOM
Document doc(parser, settings);
@@ -189,10 +190,7 @@ void FBXImporter::InternReadFile(const std::string &pFile, aiScene *pScene, IOSy
// Set FBX file scale is relative to CM must be converted to M for
// assimp universal format (M)
SetFileScale(size_relative_to_cm * 0.01f);
std::for_each(tokens.begin(), tokens.end(), Util::delete_fun<Token>());
} catch (std::exception &) {
std::for_each(tokens.begin(), tokens.end(), Util::delete_fun<Token>());
throw;
}
}
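A note on lifetimes in the hunk above: tempAllocator now owns the memory of every Token, and, once handed to the Parser, of every Scope and Element as well, so the two std::for_each(tokens.begin(), tokens.end(), Util::delete_fun<Token>()) cleanup calls on the success and exception paths become unnecessary. Condensed, the resulting pattern is:

Assimp::StackAllocator tempAllocator;            // arena for tokens and parse tree
TokenizeBinary(tokens, begin, contents.size(), tempAllocator);
Parser parser(tokens, tempAllocator, is_binary); // Scopes/Elements go into the arena too
Document doc(parser, settings);
// ... convert the FBX DOM to the aiScene ...
// no per-token delete loop: the arena is released when tempAllocator goes
// out of scope, on the normal path and the exception path alike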

code/AssetLib/FBX/FBXParser.cpp

@@ -117,6 +117,7 @@ namespace FBX {
// ------------------------------------------------------------------------------------------------
Element::Element(const Token& key_token, Parser& parser) : key_token(key_token) {
TokenPtr n = nullptr;
StackAllocator &allocator = parser.GetAllocator();
do {
n = parser.AdvanceToNextToken();
if(!n) {
@@ -145,7 +146,7 @@ Element::Element(const Token& key_token, Parser& parser) : key_token(key_token)
}
if (n->Type() == TokenType_OPEN_BRACKET) {
compound.reset(new Scope(parser));
compound.reset(new_Scope(parser));
// current token should be a TOK_CLOSE_BRACKET
n = parser.CurrentToken();
@@ -178,6 +179,7 @@ Scope::Scope(Parser& parser,bool topLevel)
}
}
StackAllocator &allocator = parser.GetAllocator();
TokenPtr n = parser.AdvanceToNextToken();
if (n == nullptr) {
ParseError("unexpected end of file");
@@ -208,22 +210,16 @@ Scope::Scope(Parser& parser,bool topLevel)
}
// ------------------------------------------------------------------------------------------------
Scope::~Scope() {
for(ElementMap::value_type& v : elements) {
delete v.second;
}
Scope::~Scope()
{
}
// ------------------------------------------------------------------------------------------------
Parser::Parser (const TokenList& tokens, bool is_binary)
: tokens(tokens)
, last()
, current()
, cursor(tokens.begin())
, is_binary(is_binary)
Parser::Parser(const TokenList &tokens, StackAllocator &allocator, bool is_binary) :
tokens(tokens), allocator(allocator), last(), current(), cursor(tokens.begin()), is_binary(is_binary)
{
ASSIMP_LOG_DEBUG("Parsing FBX tokens");
root.reset(new Scope(*this,true));
root = new_Scope(*this, true);
}
// ------------------------------------------------------------------------------------------------

code/AssetLib/FBX/FBXParser.h

@@ -52,6 +52,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <assimp/LogAux.h>
#include <assimp/fast_atof.h>
#include "Common/StackAllocator.h"
#include "FBXCompileConfig.h"
#include "FBXTokenizer.h"
@@ -68,8 +69,8 @@ typedef std::fbx_unordered_multimap< std::string, Element* > ElementMap;
typedef std::pair<ElementMap::const_iterator,ElementMap::const_iterator> ElementCollection;
# define new_Scope new Scope
# define new_Element new Element
#define new_Scope new (allocator.Allocate(sizeof(Scope))) Scope
#define new_Element new (allocator.Allocate(sizeof(Element))) Element
/** FBX data entity that consists of a key:value tuple.
@@ -159,17 +160,21 @@ class Parser
public:
/** Parse given a token list. Does not take ownership of the tokens -
* the objects must persist during the entire parser lifetime */
Parser (const TokenList& tokens,bool is_binary);
Parser(const TokenList &tokens, StackAllocator &allocator, bool is_binary);
~Parser();
const Scope& GetRootScope() const {
return *root.get();
return *root;
}
bool IsBinary() const {
return is_binary;
}
StackAllocator &GetAllocator() {
return allocator;
}
private:
friend class Scope;
friend class Element;
@@ -180,10 +185,10 @@ private:
private:
const TokenList& tokens;
StackAllocator &allocator;
TokenPtr last, current;
TokenList::const_iterator cursor;
std::unique_ptr<Scope> root;
Scope *root;
const bool is_binary;
};
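One subtlety in the header change above: turning root from std::unique_ptr<Scope> into a raw Scope * is a correctness requirement, not a style choice. The root scope is now created with new_Scope, that is, placement-new'd into the arena, and a unique_ptr issuing delete on arena-backed memory from ~Parser would be undefined behavior because the pointer never came from a plain new expression. Sketched:

// after this commit: the arena owns the storage, the Parser merely drops the pointer
root = new_Scope(*this, true);  // placement new via the macro above

// pre-change equivalent, invalid under arena allocation:
// std::unique_ptr<Scope> root;  (its implicit delete in ~Parser would
// mismatch the placement allocation)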

code/AssetLib/FBX/FBXTokenizer.cpp

@@ -96,7 +96,8 @@ AI_WONT_RETURN void TokenizeError(const std::string& message, unsigned int line,
// process a potential data token up to 'cur', adding it to 'output_tokens'.
// ------------------------------------------------------------------------------------------------
void ProcessDataToken( TokenList& output_tokens, const char*& start, const char*& end,
void ProcessDataToken(TokenList &output_tokens, StackAllocator &token_allocator,
const char*& start, const char*& end,
unsigned int line,
unsigned int column,
TokenType type = TokenType_DATA,
@@ -133,8 +134,7 @@ void ProcessDataToken( TokenList& output_tokens, const char*& start, const char*
}
// ------------------------------------------------------------------------------------------------
void Tokenize(TokenList& output_tokens, const char* input)
{
void Tokenize(TokenList &output_tokens, const char *input, StackAllocator &token_allocator) {
ai_assert(input);
ASSIMP_LOG_DEBUG("Tokenizing ASCII FBX file");
@@ -166,7 +166,7 @@ void Tokenize(TokenList& output_tokens, const char* input)
in_double_quotes = false;
token_end = cur;
ProcessDataToken(output_tokens,token_begin,token_end,line,column);
ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column);
pending_data_token = false;
}
continue;
@@ -183,30 +183,30 @@ void Tokenize(TokenList& output_tokens, const char* input)
continue;
case ';':
ProcessDataToken(output_tokens,token_begin,token_end,line,column);
ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column);
comment = true;
continue;
case '{':
ProcessDataToken(output_tokens,token_begin,token_end, line, column);
ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column);
output_tokens.push_back(new_Token(cur,cur+1,TokenType_OPEN_BRACKET,line,column));
continue;
case '}':
ProcessDataToken(output_tokens,token_begin,token_end,line,column);
ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column);
output_tokens.push_back(new_Token(cur,cur+1,TokenType_CLOSE_BRACKET,line,column));
continue;
case ',':
if (pending_data_token) {
ProcessDataToken(output_tokens,token_begin,token_end,line,column,TokenType_DATA,true);
ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column, TokenType_DATA, true);
}
output_tokens.push_back(new_Token(cur,cur+1,TokenType_COMMA,line,column));
continue;
case ':':
if (pending_data_token) {
ProcessDataToken(output_tokens,token_begin,token_end,line,column,TokenType_KEY,true);
ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column, TokenType_KEY, true);
}
else {
TokenizeError("unexpected colon", line, column);
@@ -228,7 +228,7 @@ void Tokenize(TokenList& output_tokens, const char* input)
}
}
ProcessDataToken(output_tokens,token_begin,token_end,line,column,type);
ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column, type);
}
pending_data_token = false;

code/AssetLib/FBX/FBXTokenizer.h

@@ -47,6 +47,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#define INCLUDED_AI_FBX_TOKENIZER_H
#include "FBXCompileConfig.h"
#include "Common/StackAllocator.h"
#include <assimp/ai_assert.h>
#include <assimp/defs.h>
#include <vector>
@@ -158,7 +159,7 @@ private:
typedef const Token* TokenPtr;
typedef std::vector< TokenPtr > TokenList;
#define new_Token new Token
#define new_Token new (token_allocator.Allocate(sizeof(Token))) Token
/** Main FBX tokenizer function. Transform input buffer into a list of preprocessed tokens.
@@ -168,7 +169,7 @@ typedef std::vector< TokenPtr > TokenList;
* @param output_tokens Receives a list of all tokens in the input data.
* @param input_buffer Textual input buffer to be processed, 0-terminated.
* @throw DeadlyImportError if something goes wrong */
void Tokenize(TokenList& output_tokens, const char* input);
void Tokenize(TokenList &output_tokens, const char *input, StackAllocator &tokenAllocator);
/** Tokenizer function for binary FBX files.
@@ -179,7 +180,7 @@ void Tokenize(TokenList& output_tokens, const char* input);
* @param input_buffer Binary input buffer to be processed.
* @param length Length of input buffer, in bytes. There is no 0-terminal.
* @throw DeadlyImportError if something goes wrong */
void TokenizeBinary(TokenList& output_tokens, const char* input, size_t length);
void TokenizeBinary(TokenList &output_tokens, const char *input, size_t length, StackAllocator &tokenAllocator);
} // ! FBX

code/CMakeLists.txt

@@ -193,6 +193,8 @@ SET( Common_SRCS
Common/ScenePreprocessor.cpp
Common/ScenePreprocessor.h
Common/SkeletonMeshBuilder.cpp
Common/StackAllocator.h
Common/StackAllocator.cpp
Common/StandardShapes.cpp
Common/TargetAnimation.cpp
Common/TargetAnimation.h