Merge pull request #2468 from muxanickms/topic/fbx_file_more_than_4gb

Support for FBX file sizes more than 4GB

commit 9787ac724f
@@ -98,7 +98,7 @@ namespace FBX {
 // return (flags & to_check) != 0;
 //}
 // ------------------------------------------------------------------------------------------------
-Token::Token(const char* sbegin, const char* send, TokenType type, unsigned int offset)
+Token::Token(const char* sbegin, const char* send, TokenType type, size_t offset)
     :
 #ifdef DEBUG
     contents(sbegin, static_cast<size_t>(send-sbegin)),
@@ -122,18 +122,18 @@ namespace {

 // ------------------------------------------------------------------------------------------------
 // signal tokenization error, this is always unrecoverable. Throws DeadlyImportError.
-AI_WONT_RETURN void TokenizeError(const std::string& message, unsigned int offset) AI_WONT_RETURN_SUFFIX;
-AI_WONT_RETURN void TokenizeError(const std::string& message, unsigned int offset)
+AI_WONT_RETURN void TokenizeError(const std::string& message, size_t offset) AI_WONT_RETURN_SUFFIX;
+AI_WONT_RETURN void TokenizeError(const std::string& message, size_t offset)
 {
     throw DeadlyImportError(Util::AddOffset("FBX-Tokenize",message,offset));
 }


 // ------------------------------------------------------------------------------------------------
-uint32_t Offset(const char* begin, const char* cursor) {
+size_t Offset(const char* begin, const char* cursor) {
     ai_assert(begin <= cursor);

-    return static_cast<unsigned int>(cursor - begin);
+    return cursor - begin;
 }

 // ------------------------------------------------------------------------------------------------
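The change to Offset() above is the heart of the 4 GB limit: unsigned int is 32 bits on all mainstream platforms, so the old static_cast silently wrapped any cursor more than 4 GiB past begin. A minimal standalone sketch of that truncation (illustration only, not part of the patch):

    #include <cstdio>

    int main() {
        // A byte offset just past the 4 GiB boundary, as occurs in a >4GB FBX file.
        const unsigned long long offset = (1ULL << 32) + 42;

        // The old Offset() narrowed its result exactly like this: everything
        // above bit 31 is discarded, so the offset wraps around to 42.
        const unsigned int truncated = static_cast<unsigned int>(offset);

        std::printf("full: %llu  truncated: %u\n", offset, truncated);
        return 0;
    }

On a 64-bit build both ptrdiff_t and size_t are 64 bits wide, so returning cursor - begin directly preserves the full offset.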
@@ -424,7 +424,7 @@ bool ReadScope(TokenList& output_tokens, const char* input, const char*& cursor,

 // ------------------------------------------------------------------------------------------------
 // TODO: Test FBX Binary files newer than the 7500 version to check if the 64 bits address behaviour is consistent
-void TokenizeBinary(TokenList& output_tokens, const char* input, unsigned int length)
+void TokenizeBinary(TokenList& output_tokens, const char* input, size_t length)
 {
     ai_assert(input);
@@ -172,7 +172,7 @@ void FBXImporter::InternReadFile( const std::string& pFile, aiScene* pScene, IOS
     bool is_binary = false;
     if (!strncmp(begin,"Kaydara FBX Binary",18)) {
         is_binary = true;
-        TokenizeBinary(tokens,begin,static_cast<unsigned int>(contents.size()));
+        TokenizeBinary(tokens,begin,contents.size());
     }
     else {
         Tokenize(tokens,begin);
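At the call site the fix is pure deletion: std::vector::size() already returns size_t, so the length now flows into TokenizeBinary() without a narrowing cast. For reference, a self-contained restatement of the header sniffing shown above (the explicit size guard is added here for illustration; the importer has validated the buffer before this point):

    #include <cstring>
    #include <vector>

    // Binary FBX files begin with this 18-byte magic string; anything else
    // is treated as the text (ASCII) format.
    bool IsBinaryFbx(const std::vector<char>& contents) {
        static const char magic[] = "Kaydara FBX Binary";
        return contents.size() >= 18 &&
               std::strncmp(contents.data(), magic, 18) == 0;
    }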
@@ -93,7 +93,7 @@ public:
     Token(const char* sbegin, const char* send, TokenType type, unsigned int line, unsigned int column);

     /** construct a binary token */
-    Token(const char* sbegin, const char* send, TokenType type, unsigned int offset);
+    Token(const char* sbegin, const char* send, TokenType type, size_t offset);

     ~Token();
@@ -118,14 +118,14 @@ public:
         return type;
     }

-    unsigned int Offset() const {
+    size_t Offset() const {
         ai_assert(IsBinary());
         return offset;
     }

     unsigned int Line() const {
         ai_assert(!IsBinary());
-        return line;
+        return static_cast<unsigned int>(line);
     }

     unsigned int Column() const {
@@ -147,8 +147,8 @@ private:
     const TokenType type;

     union {
-        const unsigned int line;
-        unsigned int offset;
+        size_t line;
+        size_t offset;
     };
     const unsigned int column;
 };
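A Token records either a 1-based line number (text FBX) or a byte offset (binary FBX) in the same union, discriminated by IsBinary(). Widening both members to size_t keeps binary offsets exact past 4 GiB, while Line() narrows back to unsigned int on the assumption that no text FBX file approaches 2^32 lines. A simplified sketch of that pattern (the class name and layout here are illustrative, not assimp's actual declaration):

    #include <cassert>
    #include <cstddef>

    class TokenSketch {
    public:
        // binary = true stores a byte offset, otherwise a 1-based line number
        TokenSketch(size_t value, bool binary) : binary_(binary) {
            if (binary_) { offset_ = value; } else { line_ = value; }
        }

        size_t Offset() const {               // byte position, full 64-bit range
            assert(binary_);
            return offset_;
        }

        unsigned int Line() const {           // assumed to fit in 32 bits
            assert(!binary_);
            return static_cast<unsigned int>(line_);
        }

    private:
        union {
            size_t line_;    // text tokens
            size_t offset_;  // binary tokens
        };
        const bool binary_;
    };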
@@ -178,7 +178,7 @@ void Tokenize(TokenList& output_tokens, const char* input);
 * @param input_buffer Binary input buffer to be processed.
 * @param length Length of input buffer, in bytes. There is no 0-terminal.
 * @throw DeadlyImportError if something goes wrong */
-void TokenizeBinary(TokenList& output_tokens, const char* input, unsigned int length);
+void TokenizeBinary(TokenList& output_tokens, const char* input, size_t length);


} // ! FBX
@@ -86,7 +86,7 @@ const char* TokenTypeString(TokenType t)


 // ------------------------------------------------------------------------------------------------
-std::string AddOffset(const std::string& prefix, const std::string& text, unsigned int offset)
+std::string AddOffset(const std::string& prefix, const std::string& text, size_t offset)
 {
     return static_cast<std::string>( (Formatter::format() << prefix << " (offset 0x" << std::hex << offset << ") " << text) );
 }
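AddOffset() needed nothing beyond the wider parameter: the value is streamed, and std::hex simply emits as many digits as the offset has. Formatter::format is assimp-internal, so here is an equivalent sketch of the same formatting with std::ostringstream:

    #include <sstream>
    #include <string>

    std::string AddOffsetSketch(const std::string& prefix,
                                const std::string& text, size_t offset) {
        std::ostringstream ss;
        ss << prefix << " (offset 0x" << std::hex << offset << ") " << text;
        return ss.str();
    }

    // With an offset beyond 4 GiB, e.g. (1ULL << 32) + 42:
    //   AddOffsetSketch("FBX-Tokenize", "unexpected EOF", 0x10000002a)
    // returns: "FBX-Tokenize (offset 0x10000002a) unexpected EOF"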
@@ -78,7 +78,7 @@ const char* TokenTypeString(TokenType t);
 * @param line Line index, 1-based
 * @param column Column index, 1-based
 * @return A string of the following format: {prefix} (offset 0x{offset}) {text}*/
-std::string AddOffset(const std::string& prefix, const std::string& text, unsigned int offset);
+std::string AddOffset(const std::string& prefix, const std::string& text, size_t offset);


/** Format log/error messages using a given line location in the source file.