Proper destruction of individual objects
parent 2b3c49cb93
commit d3646c3118

@@ -257,7 +257,18 @@ Document::Document(Parser& parser, const ImportSettings& settings) :
 }
 
 // ------------------------------------------------------------------------------------------------
-Document::~Document() {
+Document::~Document()
+{
+    // The document does not own the memory for the following objects, but we need to call their d'tor
+    // so they can properly free memory like string members:
+
+    for (ObjectMap::value_type &v : objects) {
+        delete_LazyObject(v.second);
+    }
+
+    for (ConnectionMap::value_type &v : src_connections) {
+        delete_Connection(v.second);
+    }
     // |dest_connections| contain the same Connection objects as the |src_connections|
 }
 
@@ -82,6 +82,8 @@ class Cluster;
 
 #define new_LazyObject new (allocator.Allocate(sizeof(LazyObject))) LazyObject
 #define new_Connection new (allocator.Allocate(sizeof(Connection))) Connection
+#define delete_LazyObject(_p) (_p)->~LazyObject()
+#define delete_Connection(_p) (_p)->~Connection()
 
 /** Represents a delay-parsed FBX objects. Many objects in the scene
  * are not needed by assimp, so it makes no sense to parse them
 
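The new_*/delete_* macro pairs above are the core of the commit: objects are placement-new'ed into a stack allocator's block, so freeing them with `delete` is impossible and their destructors must be invoked explicitly. A minimal, self-contained sketch of the pattern (the Arena type and all names below are illustrative stand-ins, not assimp's actual StackAllocator):

    #include <cstddef>
    #include <new>
    #include <string>
    #include <vector>

    // Illustrative bump allocator standing in for Assimp::StackAllocator.
    struct Arena {
        explicit Arena(size_t cap) : buf(cap) {}
        void *Allocate(size_t n) {
            void *p = &buf[used];
            used += n; // real code would align the pointer and grow in blocks
            return p;
        }
        std::vector<char> buf;
        size_t used = 0;
    };

    struct LazyObject {
        std::string name; // owns heap memory the arena knows nothing about
    };

    int main() {
        Arena allocator(1 << 16);
        LazyObject *obj = new (allocator.Allocate(sizeof(LazyObject))) LazyObject{"Geometry"};
        obj->~LazyObject(); // without this call, LazyObject::name's buffer would leak
    }   // the arena's block itself is released here, in one cheap deallocation
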
@@ -158,9 +158,8 @@ void FBXImporter::InternReadFile(const std::string &pFile, aiScene *pScene, IOSy
     // broadphase tokenizing pass in which we identify the core
     // syntax elements of FBX (brackets, commas, key:value mappings)
     TokenList tokens;
-    try {
-        Assimp::StackAllocator tempAllocator;
-
+    Assimp::StackAllocator tempAllocator;
+    try {
         bool is_binary = false;
         if (!strncmp(begin, "Kaydara FBX Binary", 18)) {
             is_binary = true;
 
@@ -190,8 +189,13 @@ void FBXImporter::InternReadFile(const std::string &pFile, aiScene *pScene, IOSy
         // Set FBX file scale is relative to CM must be converted to M for
         // assimp universal format (M)
         SetFileScale(size_relative_to_cm * 0.01f);
+
+        // This collection does not own the memory for the tokens, but we need to call their d'tor
+        std::for_each(tokens.begin(), tokens.end(), Util::destructor_fun<Token>());
+
     } catch (std::exception &) {
+        std::for_each(tokens.begin(), tokens.end(), Util::destructor_fun<Token>());
         throw;
     }
 }
 
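Two details in the importer hunks above are easy to miss: tempAllocator moves in front of the try block so it outlives the catch handler (the tokens destroyed there live in its memory), and the token destructors run on both the success and the failure path. The same guarantee could be expressed with a small RAII guard; a sketch under illustrative names (this is not what the patch does, the patch spells out both paths explicitly):

    #include <algorithm>
    #include <string>
    #include <vector>

    struct Token { std::string text; };     // stand-in for FBX::Token
    using TokenList = std::vector<Token *>; // stand-in for FBX::TokenList

    // Scope guard: runs the token destructors exactly once, on every exit path.
    struct TokenCleanup {
        explicit TokenCleanup(TokenList &t) : tokens(t) {}
        ~TokenCleanup() {
            std::for_each(tokens.begin(), tokens.end(),
                          [](Token *t) { if (t) t->~Token(); });
        }
        TokenList &tokens;
    };

    void InternReadFileSketch(TokenList &tokens) {
        TokenCleanup cleanup(tokens); // declared after the allocator, before any parsing
        // ... tokenize and parse; any throw still triggers ~TokenCleanup() ...
    }
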
@@ -115,7 +115,9 @@ namespace Assimp {
 namespace FBX {
 
 // ------------------------------------------------------------------------------------------------
-Element::Element(const Token& key_token, Parser& parser) : key_token(key_token) {
+Element::Element(const Token& key_token, Parser& parser) :
+        key_token(key_token), compound(nullptr)
+{
     TokenPtr n = nullptr;
     StackAllocator &allocator = parser.GetAllocator();
     do {
 
@@ -146,7 +148,7 @@ Element::Element(const Token& key_token, Parser& parser) : key_token(key_token)
     }
 
     if (n->Type() == TokenType_OPEN_BRACKET) {
-        compound.reset(new_Scope(parser));
+        compound = new_Scope(parser);
 
         // current token should be a TOK_CLOSE_BRACKET
         n = parser.CurrentToken();
 
@@ -166,6 +168,10 @@ Element::Element(const Token& key_token, Parser& parser) : key_token(key_token)
 // ------------------------------------------------------------------------------------------------
 Element::~Element()
 {
+    if (compound) {
+        delete_Scope(compound);
+    }
+
     // no need to delete tokens, they are owned by the parser
 }
 
@@ -212,6 +218,11 @@ Scope::Scope(Parser& parser,bool topLevel)
 // ------------------------------------------------------------------------------------------------
 Scope::~Scope()
 {
+    // This collection does not own the memory for the elements, but we need to call their d'tor:
+
+    for (ElementMap::value_type &v : elements) {
+        delete_Element(v.second);
+    }
 }
 
 // ------------------------------------------------------------------------------------------------
 
@@ -225,7 +236,7 @@ Parser::Parser(const TokenList &tokens, StackAllocator &allocator, bool is_binar
 // ------------------------------------------------------------------------------------------------
 Parser::~Parser()
 {
-    // empty
+    delete_Scope(root);
 }
 
 // ------------------------------------------------------------------------------------------------
 
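With Parser::~Parser() now calling delete_Scope(root), a single call tears down the whole element tree: Scope::~Scope() destroys each of its elements, and Element::~Element() destroys any nested compound scope, recursively. A compilable model of that teardown chain (simplified names and types, not the real FBX classes; alignment details omitted for brevity):

    #include <map>
    #include <new>
    #include <string>
    #include <vector>

    struct Scope;

    struct Element {
        std::string value;
        Scope *compound = nullptr; // arena-owned child scope, may be null
        ~Element();
    };

    struct Scope {
        std::map<std::string, Element *> elements;
        ~Scope() {
            for (auto &v : elements) v.second->~Element(); // delete_Element
        }
    };

    Element::~Element() {
        if (compound) compound->~Scope(); // delete_Scope
    }

    int main() {
        std::vector<char> arena(sizeof(Scope) + sizeof(Element));
        auto *root = new (arena.data()) Scope();
        auto *el = new (arena.data() + sizeof(Scope)) Element();
        root->elements["P"] = el;
        root->~Scope(); // the Parser::~Parser() equivalent: one call destroys the tree
    }
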
@@ -71,7 +71,8 @@ typedef std::pair<ElementMap::const_iterator,ElementMap::const_iterator> Element
 
 #define new_Scope new (allocator.Allocate(sizeof(Scope))) Scope
 #define new_Element new (allocator.Allocate(sizeof(Element))) Element
+#define delete_Scope(_p) (_p)->~Scope()
+#define delete_Element(_p) (_p)->~Element()
 
 /** FBX data entity that consists of a key:value tuple.
  *
 
@@ -91,7 +92,7 @@ public:
     ~Element();
 
     const Scope* Compound() const {
-        return compound.get();
+        return compound;
     }
 
     const Token& KeyToken() const {
 
@@ -105,7 +106,7 @@ public:
 private:
     const Token& key_token;
     TokenList tokens;
-    std::unique_ptr<Scope> compound;
+    Scope* compound;
 };
 
 /** FBX data entity that consists of a 'scope', a collection
 
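Replacing std::unique_ptr<Scope> with a raw Scope* is forced by the allocation scheme: unique_ptr's default deleter would call operator delete on memory that came from the StackAllocator, which is undefined behavior. A destructor-only deleter would have preserved smart-pointer ergonomics; a sketch of that alternative (not what the patch does):

    #include <memory>

    struct Scope { /* ... */ };

    // Deleter that destroys the object but never frees its storage:
    // the arena owns the bytes and releases them wholesale later.
    struct DtorOnly {
        void operator()(Scope *p) const noexcept {
            if (p) p->~Scope();
        }
    };

    using ScopePtr = std::unique_ptr<Scope, DtorOnly>; // would replace the raw Scope*
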
@@ -160,6 +160,7 @@ typedef const Token* TokenPtr;
 typedef std::vector< TokenPtr > TokenList;
 
 #define new_Token new (token_allocator.Allocate(sizeof(Token))) Token
+#define delete_Token(_p) (_p)->~Token()
 
 
 /** Main FBX tokenizer function. Transform input buffer into a list of preprocessed tokens.
 
@@ -66,6 +66,17 @@ struct delete_fun
     }
 };
 
+/** helper for std::for_each to call the destructor on all items in a container without freeing their heap */
+template <typename T>
+struct destructor_fun {
+    void operator()(const volatile T* del) {
+        if (del) {
+            del->~T();
+        }
+    }
+};
+
+
 /** Get a string representation for a #TokenType. */
 const char* TokenTypeString(TokenType t);
 
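destructor_fun mirrors the existing delete_fun helper just above it, except that it only runs the destructor and leaves the storage to the allocator; the const volatile T* parameter lets it accept a pointer of any cv-qualification. A usage sketch (self-contained, with the helper copied in; the arena layout is illustrative):

    #include <algorithm>
    #include <new>
    #include <string>
    #include <vector>

    template <typename T>
    struct destructor_fun { // copied from the hunk above
        void operator()(const volatile T *del) {
            if (del) {
                del->~T();
            }
        }
    };

    int main() {
        std::vector<char> arena(4 * sizeof(std::string)); // alignment details omitted
        std::vector<std::string *> items;
        for (int i = 0; i < 4; ++i)
            items.push_back(new (arena.data() + i * sizeof(std::string)) std::string("token"));
        // destroy every item; the arena's single buffer stays untouched
        std::for_each(items.begin(), items.end(), destructor_fun<std::string>());
    }
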
@@ -67,6 +67,10 @@ public:
     // Destructs the allocator and frees all memory
     ~StackAllocator();
 
+    // non copyable
+    StackAllocator(const StackAllocator &) = delete;
+    StackAllocator &operator=(const StackAllocator &) = delete;
+
     // Returns a pointer to byteSize bytes of heap memory that persists
     // for the lifetime of the allocator (or until FreeAll is called).
     void *Allocate(size_t byteSize);
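Deleting the copy operations is the right call for an allocator that frees its blocks in the destructor: the implicitly generated copy would be shallow, and two owners of the same block list means a double free. A minimal sketch of the class shape this header describes (illustrative, with a single fixed block instead of assimp's growing block list):

    #include <cstddef>

    class StackAllocatorSketch {
    public:
        StackAllocatorSketch() = default;
        ~StackAllocatorSketch() { delete[] block_; }

        // non copyable: a shallow copy would create two owners of block_,
        // and the second destructor to run would free it again
        StackAllocatorSketch(const StackAllocatorSketch &) = delete;
        StackAllocatorSketch &operator=(const StackAllocatorSketch &) = delete;

        void *Allocate(size_t byteSize) {
            if (!block_) block_ = new char[kCapacity];
            void *p = block_ + used_;
            used_ += byteSize; // real code would align and check capacity
            return p;
        }

    private:
        static constexpr size_t kCapacity = 1 << 20;
        char *block_ = nullptr;
        size_t used_ = 0;
    };
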