From c58ed8da22c17ff1e180b28b39f2c8370eb8b6c3 Mon Sep 17 00:00:00 2001
From: Mike Samsonov
Date: Tue, 14 May 2019 15:24:23 +0100
Subject: [PATCH] Support for FBX files larger than 4 GB

---
 code/FBXBinaryTokenizer.cpp | 12 ++++++------
 code/FBXImporter.cpp        |  2 +-
 code/FBXTokenizer.h         | 12 ++++++------
 code/FBXUtil.cpp            |  2 +-
 code/FBXUtil.h              |  2 +-
 5 files changed, 15 insertions(+), 15 deletions(-)

diff --git a/code/FBXBinaryTokenizer.cpp b/code/FBXBinaryTokenizer.cpp
index 7138df431..a4a2bc8e7 100644
--- a/code/FBXBinaryTokenizer.cpp
+++ b/code/FBXBinaryTokenizer.cpp
@@ -98,7 +98,7 @@ namespace FBX {
 //    return (flags & to_check) != 0;
 //}
 // ------------------------------------------------------------------------------------------------
-Token::Token(const char* sbegin, const char* send, TokenType type, unsigned int offset)
+Token::Token(const char* sbegin, const char* send, TokenType type, size_t offset)
     :
 #ifdef DEBUG
     contents(sbegin, static_cast<size_t>(send-sbegin)),
@@ -122,18 +122,18 @@ namespace {

 // ------------------------------------------------------------------------------------------------
 // signal tokenization error, this is always unrecoverable. Throws DeadlyImportError.
-AI_WONT_RETURN void TokenizeError(const std::string& message, unsigned int offset) AI_WONT_RETURN_SUFFIX;
-AI_WONT_RETURN void TokenizeError(const std::string& message, unsigned int offset)
+AI_WONT_RETURN void TokenizeError(const std::string& message, size_t offset) AI_WONT_RETURN_SUFFIX;
+AI_WONT_RETURN void TokenizeError(const std::string& message, size_t offset)
 {
     throw DeadlyImportError(Util::AddOffset("FBX-Tokenize",message,offset));
 }


 // ------------------------------------------------------------------------------------------------
-uint32_t Offset(const char* begin, const char* cursor) {
+size_t Offset(const char* begin, const char* cursor) {
     ai_assert(begin <= cursor);

-    return static_cast<uint32_t>(cursor - begin);
+    return cursor - begin;
 }

 // ------------------------------------------------------------------------------------------------
@@ -424,7 +424,7 @@ bool ReadScope(TokenList& output_tokens, const char* input, const char*& cursor,

 // ------------------------------------------------------------------------------------------------
 // TODO: Test FBX Binary files newer than the 7500 version to check if the 64 bits address behaviour is consistent
-void TokenizeBinary(TokenList& output_tokens, const char* input, unsigned int length)
+void TokenizeBinary(TokenList& output_tokens, const char* input, size_t length)
 {
     ai_assert(input);

diff --git a/code/FBXImporter.cpp b/code/FBXImporter.cpp
index 988735a77..ec8bbd2b4 100644
--- a/code/FBXImporter.cpp
+++ b/code/FBXImporter.cpp
@@ -172,7 +172,7 @@ void FBXImporter::InternReadFile( const std::string& pFile, aiScene* pScene, IOS
     bool is_binary = false;
     if (!strncmp(begin,"Kaydara FBX Binary",18)) {
         is_binary = true;
-        TokenizeBinary(tokens,begin,static_cast<unsigned int>(contents.size()));
+        TokenizeBinary(tokens,begin,contents.size());
     }
     else {
         Tokenize(tokens,begin);

diff --git a/code/FBXTokenizer.h b/code/FBXTokenizer.h
index 2af29743f..afa588a47 100644
--- a/code/FBXTokenizer.h
+++ b/code/FBXTokenizer.h
@@ -93,7 +93,7 @@ public:
     Token(const char* sbegin, const char* send, TokenType type, unsigned int line, unsigned int column);

     /** construct a binary token */
-    Token(const char* sbegin, const char* send, TokenType type, unsigned int offset);
+    Token(const char* sbegin, const char* send, TokenType type, size_t offset);

     ~Token();

@@ -118,14 +118,14 @@ public:
         return type;
     }

-    unsigned int Offset() const {
+    size_t Offset() const {
         ai_assert(IsBinary());
         return offset;
     }

     unsigned int Line() const {
         ai_assert(!IsBinary());
-        return line;
+        return static_cast<unsigned int>(line);
     }

     unsigned int Column() const {
@@ -147,8 +147,8 @@ private:
     const TokenType type;

     union {
-        const unsigned int line;
-        unsigned int offset;
+        size_t line;
+        size_t offset;
     };
     const unsigned int column;
 };
@@ -178,7 +178,7 @@ void Tokenize(TokenList& output_tokens, const char* input);
 *  @param input_buffer Binary input buffer to be processed.
 *  @param length Length of input buffer, in bytes. There is no 0-terminal.
 *  @throw DeadlyImportError if something goes wrong */
-void TokenizeBinary(TokenList& output_tokens, const char* input, unsigned int length);
+void TokenizeBinary(TokenList& output_tokens, const char* input, size_t length);


 } // ! FBX

diff --git a/code/FBXUtil.cpp b/code/FBXUtil.cpp
index 557d0d843..1616d1531 100644
--- a/code/FBXUtil.cpp
+++ b/code/FBXUtil.cpp
@@ -86,7 +86,7 @@ const char* TokenTypeString(TokenType t)

 // ------------------------------------------------------------------------------------------------
-std::string AddOffset(const std::string& prefix, const std::string& text, unsigned int offset)
+std::string AddOffset(const std::string& prefix, const std::string& text, size_t offset)
 {
     return static_cast<std::string>( (Formatter::format() << prefix << " (offset 0x" << std::hex << offset << ") " << text) );
 }

diff --git a/code/FBXUtil.h b/code/FBXUtil.h
index b26eba5d5..9233b7482 100644
--- a/code/FBXUtil.h
+++ b/code/FBXUtil.h
@@ -78,7 +78,7 @@ const char* TokenTypeString(TokenType t);
 *  @param line Line index, 1-based
 *  @param column Column index, 1-based
 *  @return A string of the following format: {prefix} (offset 0x{offset}) {text}*/
-std::string AddOffset(const std::string& prefix, const std::string& text, unsigned int offset);
+std::string AddOffset(const std::string& prefix, const std::string& text, size_t offset);

 /** Format log/error messages using a given line location in the source file.
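
Aside on why the width change matters (this note and the sketch below are an
editorial addition, not part of the patch): the binary FBX tokenizer tracks
positions as byte offsets into the whole input buffer. With a 32-bit offset
type, any position past the 4 GiB mark wraps around, so tokens and the
"(offset 0x...)" part of DeadlyImportError messages point at the wrong byte.
A minimal standalone C++ sketch, using a made-up offset value:

    // overflow_demo.cpp -- illustration only, not part of the patch
    #include <cstdint>
    #include <cstdio>

    int main() {
        // Hypothetical byte offset just past the 4 GiB boundary, as the
        // tokenizer would compute while scanning a file larger than 4 GB.
        const size_t offset = 0x100000010ULL;   // 4 GiB + 16 bytes

        // Pre-patch behaviour: narrowing to 32 bits silently wraps.
        const uint32_t truncated = static_cast<uint32_t>(offset);

        std::printf("size_t   offset: 0x%zx\n", offset);    // prints 0x100000010
        std::printf("uint32_t offset: 0x%x\n", truncated);  // prints 0x10
        return 0;
    }

Switching Offset(), Token, TokenizeError() and AddOffset() to size_t keeps
offsets exact on 64-bit platforms. On 32-bit platforms size_t is still four
bytes wide, so inputs beyond 4 GB remain out of reach there either way.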