Merge branch 'SmallAllocationFixInFBXLoader' of https://github.com/FlorianBorn71/assimp into FlorianBorn71-SmallAllocationFixInFBXLoader

pull/5096/head
Kim Kulling 2023-05-15 09:30:57 +02:00
commit e627f691a8
12 changed files with 294 additions and 63 deletions

View File: code/AssetLib/FBX/FBXBinaryTokenizer.cpp

@ -341,8 +341,7 @@ void ReadData(const char*& sbegin_out, const char*& send_out, const char* input,
// ------------------------------------------------------------------------------------------------
bool ReadScope(TokenList& output_tokens, const char* input, const char*& cursor, const char* end, bool const is64bits)
{
bool ReadScope(TokenList &output_tokens, StackAllocator &token_allocator, const char *input, const char *&cursor, const char *end, bool const is64bits) {
// the first word contains the offset at which this block ends
const uint64_t end_offset = is64bits ? ReadDoubleWord(input, cursor, end) : ReadWord(input, cursor, end);
@ -408,7 +407,7 @@ bool ReadScope(TokenList& output_tokens, const char* input, const char*& cursor,
// XXX this is vulnerable to stack overflowing ..
while(Offset(input, cursor) < end_offset - sentinel_block_length) {
ReadScope(output_tokens, input, cursor, input + end_offset - sentinel_block_length, is64bits);
ReadScope(output_tokens, token_allocator, input, cursor, input + end_offset - sentinel_block_length, is64bits);
}
output_tokens.push_back(new_Token(cursor, cursor + 1, TokenType_CLOSE_BRACKET, Offset(input, cursor) ));
@ -431,8 +430,7 @@ bool ReadScope(TokenList& output_tokens, const char* input, const char*& cursor,
// ------------------------------------------------------------------------------------------------
// TODO: Test FBX Binary files newer than the 7500 version to check if the 64 bits address behaviour is consistent
void TokenizeBinary(TokenList& output_tokens, const char* input, size_t length)
{
void TokenizeBinary(TokenList &output_tokens, const char *input, size_t length, StackAllocator &token_allocator) {
ai_assert(input);
ASSIMP_LOG_DEBUG("Tokenizing binary FBX file");
@ -465,7 +463,7 @@ void TokenizeBinary(TokenList& output_tokens, const char* input, size_t length)
try
{
while (cursor < end ) {
if (!ReadScope(output_tokens, input, cursor, input + length, is64bits)) {
if (!ReadScope(output_tokens, token_allocator, input, cursor, input + length, is64bits)) {
break;
}
}

View File: code/AssetLib/FBX/FBXDocument.cpp

@ -243,7 +243,7 @@ FileGlobalSettings::FileGlobalSettings(const Document &doc, std::shared_ptr<cons
}
// ------------------------------------------------------------------------------------------------
Document::Document(const Parser& parser, const ImportSettings& settings) :
Document::Document(Parser& parser, const ImportSettings& settings) :
settings(settings), parser(parser) {
ASSIMP_LOG_DEBUG("Creating FBX Document");
@ -265,13 +265,17 @@ Document::Document(const Parser& parser, const ImportSettings& settings) :
}
// ------------------------------------------------------------------------------------------------
Document::~Document() {
for(ObjectMap::value_type& v : objects) {
delete v.second;
Document::~Document()
{
// The document does not own the memory for the following objects, but we need to call their d'tor
// so they can properly free memory like string members:
for (ObjectMap::value_type &v : objects) {
delete_LazyObject(v.second);
}
for(ConnectionMap::value_type& v : src_connections) {
delete v.second;
for (ConnectionMap::value_type &v : src_connections) {
delete_Connection(v.second);
}
// |dest_connections| contain the same Connection objects as the |src_connections|
}
@ -356,9 +360,11 @@ void Document::ReadObjects() {
DOMError("no Objects dictionary found");
}
StackAllocator &allocator = parser.GetAllocator();
// add a dummy entry to represent the Model::RootNode object (id 0),
// which is only indirectly defined in the input file
objects[0] = new LazyObject(0L, *eobjects, *this);
objects[0] = new_LazyObject(0L, *eobjects, *this);
const Scope& sobjects = *eobjects->Compound();
for(const ElementMap::value_type& el : sobjects.Elements()) {
@ -387,7 +393,7 @@ void Document::ReadObjects() {
delete foundObject->second;
}
objects[id] = new LazyObject(id, *el.second, *this);
objects[id] = new_LazyObject(id, *el.second, *this);
// grab all animation stacks upfront since there is no listing of them
if(!strcmp(el.first.c_str(),"AnimationStack")) {
@ -454,8 +460,10 @@ void Document::ReadPropertyTemplates() {
}
// ------------------------------------------------------------------------------------------------
void Document::ReadConnections() {
const Scope& sc = parser.GetRootScope();
void Document::ReadConnections()
{
StackAllocator &allocator = parser.GetAllocator();
const Scope &sc = parser.GetRootScope();
// read the connection entries from the "Connections" section
const Element* const econns = sc["Connections"];
if(!econns || !econns->Compound()) {
@ -494,7 +502,7 @@ void Document::ReadConnections() {
}
// add new connection
const Connection* const c = new Connection(insertionOrder++,src,dest,prop,*this);
const Connection* const c = new_Connection(insertionOrder++,src,dest,prop,*this);
src_connections.insert(ConnectionMap::value_type(src,c));
dest_connections.insert(ConnectionMap::value_type(dest,c));
}
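
Editor's note: the destructor change above is the crux of the ownership model. LazyObject and Connection instances now live in allocator-owned memory, so the Document may only run their destructors (releasing string members and other heap resources) and must never call delete on them. A minimal sketch of the pattern, with an illustrative Widget type standing in for LazyObject:

#include <new> // placement new
#include <string>

struct Widget {
    std::string name; // owns heap memory the arena knows nothing about
    explicit Widget(std::string n) : name(std::move(n)) {}
};

int main() {
    // stand-in for a chunk handed out by StackAllocator::Allocate()
    alignas(Widget) unsigned char storage[sizeof(Widget)];
    Widget *w = new (storage) Widget("RootNode"); // construct in pre-allocated bytes
    w->~Widget(); // run the destructor by hand: the string's heap block is freed
    // no 'delete w': the bytes belong to the arena, not to operator new
}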

View File: code/AssetLib/FBX/FBXDocument.h

@ -81,6 +81,10 @@ class BlendShape;
class Skin;
class Cluster;
#define new_LazyObject new (allocator.Allocate(sizeof(LazyObject))) LazyObject
#define new_Connection new (allocator.Allocate(sizeof(Connection))) Connection
#define delete_LazyObject(_p) (_p)->~LazyObject()
#define delete_Connection(_p) (_p)->~Connection()
/** Represents a delay-parsed FBX object. Many objects in the scene
* are not needed by assimp, so it makes no sense to parse them
@ -1073,7 +1077,7 @@ private:
/** DOM root for a FBX file */
class Document {
public:
Document(const Parser& parser, const ImportSettings& settings);
Document(Parser& parser, const ImportSettings& settings);
~Document();
@ -1157,7 +1161,7 @@ private:
const ImportSettings& settings;
ObjectMap objects;
const Parser& parser;
Parser& parser;
PropertyTemplateMap templates;
ConnectionMap src_connections;
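
Editor's note: the new_*/delete_* pairs above are deliberately plain token-pasting macros. They expand to a placement-new expression against whatever variable named allocator is visible at the call site, which is why ReadObjects and ReadConnections each fetch a local StackAllocator &allocator first. Roughly (a sketch of the expansion, not compiler output):

// objects[0] = new_LazyObject(0L, *eobjects, *this);
// expands to:
objects[0] = new (allocator.Allocate(sizeof(LazyObject))) LazyObject(0L, *eobjects, *this);

// delete_LazyObject(v.second);
// expands to:
(v.second)->~LazyObject(); // destructor only; the bytes stay with the allocator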

View File: code/AssetLib/FBX/FBXImporter.cpp

@ -152,19 +152,19 @@ void FBXImporter::InternReadFile(const std::string &pFile, aiScene *pScene, IOSy
// broad-phase tokenized pass in which we identify the core
// syntax elements of FBX (brackets, commas, key:value mappings)
TokenList tokens;
Assimp::StackAllocator tempAllocator;
try {
bool is_binary = false;
if (!strncmp(begin, "Kaydara FBX Binary", 18)) {
is_binary = true;
TokenizeBinary(tokens, begin, contents.size());
TokenizeBinary(tokens, begin, contents.size(), tempAllocator);
} else {
Tokenize(tokens, begin);
Tokenize(tokens, begin, tempAllocator);
}
// use this information to construct a very rudimentary
// parse-tree representing the FBX scope structure
Parser parser(tokens, is_binary);
Parser parser(tokens, tempAllocator, is_binary);
// take the raw parse-tree and convert it to a FBX DOM
Document doc(parser, mSettings);
@ -183,9 +183,11 @@ void FBXImporter::InternReadFile(const std::string &pFile, aiScene *pScene, IOSy
// assimp universal format (M)
SetFileScale(size_relative_to_cm * 0.01f);
std::for_each(tokens.begin(), tokens.end(), Util::delete_fun<Token>());
// This collection does not own the memory for the tokens, but we need to call their d'tor
std::for_each(tokens.begin(), tokens.end(), Util::destructor_fun<Token>());
} catch (std::exception &) {
std::for_each(tokens.begin(), tokens.end(), Util::delete_fun<Token>());
std::for_each(tokens.begin(), tokens.end(), Util::destructor_fun<Token>());
throw;
}
}

View File: code/AssetLib/FBX/FBXParser.cpp

@ -115,8 +115,11 @@ namespace Assimp {
namespace FBX {
// ------------------------------------------------------------------------------------------------
Element::Element(const Token& key_token, Parser& parser) : key_token(key_token) {
Element::Element(const Token& key_token, Parser& parser) :
key_token(key_token), compound(nullptr)
{
TokenPtr n = nullptr;
StackAllocator &allocator = parser.GetAllocator();
do {
n = parser.AdvanceToNextToken();
if(!n) {
@ -145,7 +148,7 @@ Element::Element(const Token& key_token, Parser& parser) : key_token(key_token)
}
if (n->Type() == TokenType_OPEN_BRACKET) {
compound.reset(new Scope(parser));
compound = new_Scope(parser);
// current token should be a TOK_CLOSE_BRACKET
n = parser.CurrentToken();
@ -163,6 +166,15 @@ Element::Element(const Token& key_token, Parser& parser) : key_token(key_token)
}
// ------------------------------------------------------------------------------------------------
Element::~Element()
{
if (compound) {
delete_Scope(compound);
}
// no need to free the tokens: the importer runs their destructors and the allocator owns their memory
}
Scope::Scope(Parser& parser,bool topLevel)
{
if(!topLevel) {
@ -172,6 +184,7 @@ Scope::Scope(Parser& parser,bool topLevel)
}
}
StackAllocator &allocator = parser.GetAllocator();
TokenPtr n = parser.AdvanceToNextToken();
if (n == nullptr) {
ParseError("unexpected end of file");
@ -206,22 +219,27 @@ Scope::Scope(Parser& parser,bool topLevel)
}
// ------------------------------------------------------------------------------------------------
Scope::~Scope() {
for(ElementMap::value_type& v : elements) {
delete v.second;
Scope::~Scope()
{
// This collection does not own the memory for the elements, but we need to call their d'tor:
for (ElementMap::value_type &v : elements) {
delete_Element(v.second);
}
}
// ------------------------------------------------------------------------------------------------
Parser::Parser (const TokenList& tokens, bool is_binary)
: tokens(tokens)
, last()
, current()
, cursor(tokens.begin())
, is_binary(is_binary)
Parser::Parser(const TokenList &tokens, StackAllocator &allocator, bool is_binary) :
tokens(tokens), allocator(allocator), last(), current(), cursor(tokens.begin()), is_binary(is_binary)
{
ASSIMP_LOG_DEBUG("Parsing FBX tokens");
root.reset(new Scope(*this,true));
root = new_Scope(*this, true);
}
// ------------------------------------------------------------------------------------------------
Parser::~Parser()
{
delete_Scope(root);
}
// ------------------------------------------------------------------------------------------------

View File: code/AssetLib/FBX/FBXParser.h

@ -52,6 +52,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <assimp/LogAux.h>
#include <assimp/fast_atof.h>
#include "Common/StackAllocator.h"
#include "FBXCompileConfig.h"
#include "FBXTokenizer.h"
@ -68,9 +69,10 @@ typedef std::fbx_unordered_multimap< std::string, Element* > ElementMap;
typedef std::pair<ElementMap::const_iterator,ElementMap::const_iterator> ElementCollection;
# define new_Scope new Scope
# define new_Element new Element
#define new_Scope new (allocator.Allocate(sizeof(Scope))) Scope
#define new_Element new (allocator.Allocate(sizeof(Element))) Element
#define delete_Scope(_p) (_p)->~Scope()
#define delete_Element(_p) (_p)->~Element()
/** FBX data entity that consists of a key:value tuple.
*
@ -87,10 +89,10 @@ class Element
{
public:
Element(const Token& key_token, Parser& parser);
~Element() = default;
~Element();
const Scope* Compound() const {
return compound.get();
return compound;
}
const Token& KeyToken() const {
@ -104,7 +106,7 @@ public:
private:
const Token& key_token;
TokenList tokens;
std::unique_ptr<Scope> compound;
Scope* compound;
};
/** FBX data entity that consists of a 'scope', a collection
@ -159,8 +161,8 @@ class Parser
public:
/** Parse given a token list. Does not take ownership of the tokens -
* the objects must persist during the entire parser lifetime */
Parser (const TokenList& tokens,bool is_binary);
~Parser() = default;
Parser(const TokenList &tokens, StackAllocator &allocator, bool is_binary);
~Parser();
const Scope& GetRootScope() const {
return *root;
@ -170,6 +172,10 @@ public:
return is_binary;
}
StackAllocator &GetAllocator() {
return allocator;
}
private:
friend class Scope;
friend class Element;
@ -180,10 +186,10 @@ private:
private:
const TokenList& tokens;
StackAllocator &allocator;
TokenPtr last, current;
TokenList::const_iterator cursor;
std::unique_ptr<Scope> root;
Scope *root;
const bool is_binary;
};

View File: code/AssetLib/FBX/FBXTokenizer.cpp

@ -94,7 +94,8 @@ AI_WONT_RETURN void TokenizeError(const std::string& message, unsigned int line,
// process a potential data token up to 'cur', adding it to 'output_tokens'.
// ------------------------------------------------------------------------------------------------
void ProcessDataToken( TokenList& output_tokens, const char*& start, const char*& end,
void ProcessDataToken(TokenList &output_tokens, StackAllocator &token_allocator,
const char*& start, const char*& end,
unsigned int line,
unsigned int column,
TokenType type = TokenType_DATA,
@ -131,8 +132,7 @@ void ProcessDataToken( TokenList& output_tokens, const char*& start, const char*
}
// ------------------------------------------------------------------------------------------------
void Tokenize(TokenList& output_tokens, const char* input)
{
void Tokenize(TokenList &output_tokens, const char *input, StackAllocator &token_allocator) {
ai_assert(input);
ASSIMP_LOG_DEBUG("Tokenizing ASCII FBX file");
@ -164,7 +164,7 @@ void Tokenize(TokenList& output_tokens, const char* input)
in_double_quotes = false;
token_end = cur;
ProcessDataToken(output_tokens,token_begin,token_end,line,column);
ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column);
pending_data_token = false;
}
continue;
@ -181,30 +181,30 @@ void Tokenize(TokenList& output_tokens, const char* input)
continue;
case ';':
ProcessDataToken(output_tokens,token_begin,token_end,line,column);
ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column);
comment = true;
continue;
case '{':
ProcessDataToken(output_tokens,token_begin,token_end, line, column);
ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column);
output_tokens.push_back(new_Token(cur,cur+1,TokenType_OPEN_BRACKET,line,column));
continue;
case '}':
ProcessDataToken(output_tokens,token_begin,token_end,line,column);
ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column);
output_tokens.push_back(new_Token(cur,cur+1,TokenType_CLOSE_BRACKET,line,column));
continue;
case ',':
if (pending_data_token) {
ProcessDataToken(output_tokens,token_begin,token_end,line,column,TokenType_DATA,true);
ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column, TokenType_DATA, true);
}
output_tokens.push_back(new_Token(cur,cur+1,TokenType_COMMA,line,column));
continue;
case ':':
if (pending_data_token) {
ProcessDataToken(output_tokens,token_begin,token_end,line,column,TokenType_KEY,true);
ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column, TokenType_KEY, true);
}
else {
TokenizeError("unexpected colon", line, column);
@ -226,7 +226,7 @@ void Tokenize(TokenList& output_tokens, const char* input)
}
}
ProcessDataToken(output_tokens,token_begin,token_end,line,column,type);
ProcessDataToken(output_tokens, token_allocator, token_begin, token_end, line, column, type);
}
pending_data_token = false;

View File: code/AssetLib/FBX/FBXTokenizer.h

@ -47,6 +47,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#define INCLUDED_AI_FBX_TOKENIZER_H
#include "FBXCompileConfig.h"
#include "Common/StackAllocator.h"
#include <assimp/ai_assert.h>
#include <assimp/defs.h>
#include <vector>
@ -157,7 +158,8 @@ private:
typedef const Token* TokenPtr;
typedef std::vector< TokenPtr > TokenList;
#define new_Token new Token
#define new_Token new (token_allocator.Allocate(sizeof(Token))) Token
#define delete_Token(_p) (_p)->~Token()
/** Main FBX tokenizer function. Transform input buffer into a list of preprocessed tokens.
@ -167,7 +169,7 @@ typedef std::vector< TokenPtr > TokenList;
* @param output_tokens Receives a list of all tokens in the input data.
* @param input_buffer Textual input buffer to be processed, 0-terminated.
* @param tokenAllocator Allocator from which the tokens are placement-allocated.
* @throw DeadlyImportError if something goes wrong */
void Tokenize(TokenList& output_tokens, const char* input);
void Tokenize(TokenList &output_tokens, const char *input, StackAllocator &tokenAllocator);
/** Tokenizer function for binary FBX files.
@ -178,7 +180,7 @@ void Tokenize(TokenList& output_tokens, const char* input);
* @param input_buffer Binary input buffer to be processed.
* @param length Length of input buffer, in bytes. There is no 0-terminal.
* @param tokenAllocator Allocator from which the tokens are placement-allocated.
* @throw DeadlyImportError if something goes wrong */
void TokenizeBinary(TokenList& output_tokens, const char* input, size_t length);
void TokenizeBinary(TokenList &output_tokens, const char *input, size_t length, StackAllocator &tokenAllocator);
} // ! FBX
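
Editor's note: new_Token follows the same by-name convention, expecting a token_allocator variable in the enclosing scope; this is why the allocator reference is threaded through Tokenize, TokenizeBinary, ProcessDataToken, and ReadScope rather than stored globally. Sketch of one call site:

// output_tokens.push_back(new_Token(cur, cur + 1, TokenType_COMMA, line, column));
// expands to:
output_tokens.push_back(
    new (token_allocator.Allocate(sizeof(Token))) Token(cur, cur + 1, TokenType_COMMA, line, column));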

View File: code/AssetLib/FBX/FBXUtil.h

@ -66,6 +66,17 @@ struct delete_fun
}
};
/** Helper for std::for_each to call the destructor on all items in a container without freeing their heap memory. */
template <typename T>
struct destructor_fun {
void operator()(const volatile T* del) {
if (del) {
del->~T();
}
}
};
/** Get a string representation for a #TokenType. */
const char* TokenTypeString(TokenType t);
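
Editor's note: destructor_fun complements the existing delete_fun by ending each object's lifetime without releasing its bytes, which matches arena-allocated tokens. The const volatile T * parameter lets the functor accept a pointer of any cv-qualification, since C++ permits invoking a destructor on a const (or volatile) object. A self-contained sketch of the same shape (Item and destructor_fun_sketch are illustrative names, not from the patch):

#include <algorithm>
#include <string>
#include <vector>

struct Item {
    std::string payload; // freed by ~Item(), not by the arena
};

template <typename T>
struct destructor_fun_sketch { // same shape as Util::destructor_fun
    void operator()(const volatile T *del) {
        if (del) {
            del->~T();
        }
    }
};

int main() {
    std::vector<const Item *> items; // imagine these point into arena storage
    std::for_each(items.begin(), items.end(), destructor_fun_sketch<Item>());
}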

View File: code/CMakeLists.txt

@ -194,6 +194,8 @@ SET( Common_SRCS
Common/ScenePreprocessor.cpp
Common/ScenePreprocessor.h
Common/SkeletonMeshBuilder.cpp
Common/StackAllocator.h
Common/StackAllocator.inl
Common/StandardShapes.cpp
Common/TargetAnimation.cpp
Common/TargetAnimation.h

View File: code/Common/StackAllocator.h

@ -0,0 +1,96 @@
/*
Open Asset Import Library (assimp)
----------------------------------------------------------------------
Copyright (c) 2006-2022, assimp team
All rights reserved.
Redistribution and use of this software in source and binary forms,
with or without modification, are permitted provided that the
following conditions are met:
* Redistributions of source code must retain the above
copyright notice, this list of conditions and the
following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the
following disclaimer in the documentation and/or other
materials provided with the distribution.
* Neither the name of the assimp team, nor the names of its
contributors may be used to endorse or promote products
derived from this software without specific prior
written permission of the assimp team.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
----------------------------------------------------------------------
*/
/** @file StackAllocator.h
* @brief A very bare-bone allocator class that is suitable when
* allocating many small objects, e.g. during parsing.
* Individual objects are not freed, instead only the whole memory
* can be deallocated.
*/
#ifndef AI_STACK_ALLOCATOR_H_INC
#define AI_STACK_ALLOCATOR_H_INC
#include <deque>
#include <stdint.h>
namespace Assimp
{
/** @brief A very bare-bone allocator class that is suitable when
* allocating many small objects, e.g. during parsing.
* Individual objects are not freed, instead only the whole memory
* can be deallocated.
*/
class StackAllocator {
public:
// Constructs the allocator
inline StackAllocator();
// Destructs the allocator and frees all memory
inline ~StackAllocator();
// non copyable
StackAllocator(const StackAllocator &) = delete;
StackAllocator &operator=(const StackAllocator &) = delete;
// Returns a pointer to byteSize bytes of heap memory that persists
// for the lifetime of the allocator (or until FreeAll is called).
inline void *Allocate(size_t byteSize);
// Releases all the memory owned by this allocator.
// Memory obtained through Allocate is no longer valid once this function has been called.
inline void FreeAll();
private:
constexpr const static size_t g_maxBytesPerBlock = 64 * 1024 * 1024; // The maximum size (in bytes) of a block
constexpr const static size_t g_startBytesPerBlock = 16 * 1024; // Size of the first block. Next blocks will double in size until maximum size of g_maxBytesPerBlock
size_t m_blockAllocationSize = g_startBytesPerBlock; // Block size of the current block
size_t m_subIndex = g_maxBytesPerBlock; // Current byte offset in the current block; starts past any block size so the first Allocate opens a new block
std::deque<uint8_t *> m_storageBlocks; // A list of blocks
};
} // namespace Assimp
#include "StackAllocator.inl"
#endif // include guard
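
Editor's note: taken together, the intended usage is one StackAllocator per import: placement-new every small object into it, run destructors by hand when the DOM is torn down, and release all blocks in one sweep. A hedged end-to-end sketch (Node and demo are illustrative, not from the patch):

#include "Common/StackAllocator.h"
#include <new>
#include <string>

struct Node {
    std::string name;
    explicit Node(std::string n) : name(std::move(n)) {}
};

void demo() {
    Assimp::StackAllocator arena;             // one arena per import
    void *mem = arena.Allocate(sizeof(Node)); // raw bytes, no constructor run
    Node *node = new (mem) Node("Geometry");  // construct in place
    // ... use node ...
    node->~Node(); // explicit destructor, mirroring delete_Scope/delete_Element
    arena.FreeAll(); // every block is returned; 'node' is dangling from here on
} // ~StackAllocator() would call FreeAll() anyway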

View File: code/Common/StackAllocator.inl

@ -0,0 +1,84 @@
/*
Open Asset Import Library (assimp)
----------------------------------------------------------------------
Copyright (c) 2006-2022, assimp team
All rights reserved.
Redistribution and use of this software in source and binary forms,
with or without modification, are permitted provided that the
following conditions are met:
* Redistributions of source code must retain the above
copyright notice, this list of conditions and the
following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the
following disclaimer in the documentation and/or other
materials provided with the distribution.
* Neither the name of the assimp team, nor the names of its
contributors may be used to endorse or promote products
derived from this software without specific prior
written permission of the assimp team.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
----------------------------------------------------------------------
*/
#include "StackAllocator.h"
#include <assimp/ai_assert.h>
#include <algorithm> // std::min, std::max
using namespace Assimp;
inline StackAllocator::StackAllocator() {
}
inline StackAllocator::~StackAllocator() {
FreeAll();
}
inline void *StackAllocator::Allocate(size_t byteSize) {
if (m_subIndex + byteSize > m_blockAllocationSize) // start a new block
{
// double block size every time, up to maximum of g_maxBytesPerBlock.
// Block size must be at least as large as byteSize, but we want to use this for small allocations anyway.
m_blockAllocationSize = std::max(std::min(m_blockAllocationSize * 2, g_maxBytesPerBlock), byteSize);
uint8_t *data = new uint8_t[m_blockAllocationSize];
m_storageBlocks.push_back(data);
m_subIndex = byteSize;
return data;
}
uint8_t *data = m_storageBlocks.back();
data += m_subIndex;
m_subIndex += byteSize;
return data;
}
inline void StackAllocator::FreeAll() {
for (size_t i = 0; i < m_storageBlocks.size(); i++) {
delete [] m_storageBlocks[i];
}
std::deque<uint8_t *> empty;
m_storageBlocks.swap(empty);
// start over:
m_blockAllocationSize = g_startBytesPerBlock;
m_subIndex = g_maxBytesPerBlock;
}
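
Editor's note: the growth policy can be traced by hand. m_subIndex starts at g_maxBytesPerBlock, so the very first Allocate always fails the in-block test and reserves a fresh block; thereafter block sizes double up to the 64 MiB cap, and a single oversized request wins the max() and gets a block of exactly its own size. Two caveats, as written: the doubling happens before the first reservation, so the first real block comes out at 2 * g_startBytesPerBlock = 32 KiB, and offsets are byte-granular, so no per-allocation alignment is applied. A worked trace under the constants above:

// ctor:            m_blockAllocationSize = 16 KiB, m_subIndex = 64 MiB
// Allocate(40):    64 MiB + 40 > 16 KiB  -> new block
//                  size = max(min(2 * 16 KiB, 64 MiB), 40) = 32 KiB, m_subIndex = 40
// Allocate(24):    40 + 24 <= 32 KiB     -> same block, returns offset 40, m_subIndex = 64
// Allocate(96 MiB):                      -> new block of exactly 96 MiB (byteSize wins the max)
// without oversized requests, blocks double: 32 KiB, 64 KiB, 128 KiB, ... capped at 64 MiB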