Merge branch 'master' into remove_ctest
commit b2bfb40859
@@ -60,6 +60,7 @@ test/gtest/src/gtest-stamp/Debug/gtest-build
 *.lib
 test/gtest/src/gtest-stamp/Debug/
 tools/assimp_view/assimp_viewer.vcxproj.user
 *.pyc
 
 # Unix editor backups
 *~
@@ -129,30 +129,26 @@ AI_WONT_RETURN void TokenizeError(const std::string& message, unsigned int offse
 
 
 // ------------------------------------------------------------------------------------------------
-uint32_t Offset(const char* begin, const char* cursor)
-{
+uint32_t Offset(const char* begin, const char* cursor) {
     ai_assert(begin <= cursor);
 
     return static_cast<unsigned int>(cursor - begin);
 }
 
 
 // ------------------------------------------------------------------------------------------------
-void TokenizeError(const std::string& message, const char* begin, const char* cursor)
-{
+void TokenizeError(const std::string& message, const char* begin, const char* cursor) {
     TokenizeError(message, Offset(begin, cursor));
 }
 
 
 // ------------------------------------------------------------------------------------------------
-uint32_t ReadWord(const char* input, const char*& cursor, const char* end)
-{
+uint32_t ReadWord(const char* input, const char*& cursor, const char* end) {
     const size_t k_to_read = sizeof( uint32_t );
     if(Offset(cursor, end) < k_to_read ) {
         TokenizeError("cannot ReadWord, out of bounds",input, cursor);
     }
 
     uint32_t word;
-    memcpy(&word, cursor, 4);
+    ::memcpy(&word, cursor, 4);
     AI_SWAP4(word);
 
     cursor += k_to_read;
@@ -167,7 +163,8 @@ uint64_t ReadDoubleWord(const char* input, const char*& cursor, const char* end)
         TokenizeError("cannot ReadDoubleWord, out of bounds",input, cursor);
     }
 
-    uint64_t dword = *reinterpret_cast<const uint64_t*>(cursor);
+    uint64_t dword /*= *reinterpret_cast<const uint64_t*>(cursor)*/;
+    ::memcpy( &dword, cursor, sizeof( uint64_t ) );
     AI_SWAP8(dword);
 
     cursor += k_to_read;
@@ -176,24 +173,21 @@ uint64_t ReadDoubleWord(const char* input, const char*& cursor, const char* end)
 }
 
 // ------------------------------------------------------------------------------------------------
-uint8_t ReadByte(const char* input, const char*& cursor, const char* end)
-{
+uint8_t ReadByte(const char* input, const char*& cursor, const char* end) {
     if(Offset(cursor, end) < sizeof( uint8_t ) ) {
         TokenizeError("cannot ReadByte, out of bounds",input, cursor);
     }
 
-    uint8_t word = *reinterpret_cast<const uint8_t*>(cursor);
+    uint8_t word;/* = *reinterpret_cast< const uint8_t* >( cursor )*/
+    ::memcpy( &word, cursor, sizeof( uint8_t ) );
     ++cursor;
 
     return word;
 }
 
 
 // ------------------------------------------------------------------------------------------------
-unsigned int ReadString(const char*& sbegin_out, const char*& send_out, const char* input, const char*& cursor, const char* end,
-    bool long_length = false,
-    bool allow_null = false)
-{
+unsigned int ReadString(const char*& sbegin_out, const char*& send_out, const char* input,
+        const char*& cursor, const char* end, bool long_length = false, bool allow_null = false) {
     const uint32_t len_len = long_length ? 4 : 1;
     if(Offset(cursor, end) < len_len) {
         TokenizeError("cannot ReadString, out of bounds reading length",input, cursor);
@@ -222,8 +216,7 @@ unsigned int ReadString(const char*& sbegin_out, const char*& send_out, const ch
 }
 
 // ------------------------------------------------------------------------------------------------
-void ReadData(const char*& sbegin_out, const char*& send_out, const char* input, const char*& cursor, const char* end)
-{
+void ReadData(const char*& sbegin_out, const char*& send_out, const char* input, const char*& cursor, const char* end) {
     if(Offset(cursor, end) < 1) {
         TokenizeError("cannot ReadData, out of bounds reading length",input, cursor);
     }
@@ -422,7 +415,7 @@ bool ReadScope(TokenList& output_tokens, const char* input, const char*& cursor,
     return true;
 }
 
-}
+} // anonymous namespace
 
 // ------------------------------------------------------------------------------------------------
 // TODO: Test FBX Binary files newer than the 7500 version to check if the 64 bits address behaviour is consistent
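
The ReadWord/ReadDoubleWord/ReadByte hunks above all apply the same pattern: copy the bytes into a properly typed local with ::memcpy instead of dereferencing a reinterpret_cast'ed pointer, which sidesteps unaligned-access and strict-aliasing undefined behaviour on the raw FBX buffer. A minimal standalone sketch of that pattern (illustrative only; ReadUnaligned is not an assimp function):

#include <cstddef>
#include <cstring>
#include <stdexcept>

// Copy sizeof(T) bytes from an arbitrarily aligned cursor into a local value,
// with the same kind of bounds check the tokenizer performs via Offset().
template <typename T>
T ReadUnaligned(const char*& cursor, const char* end) {
    if (static_cast<std::size_t>(end - cursor) < sizeof(T)) {
        throw std::runtime_error("read past end of buffer");
    }
    T value;
    std::memcpy(&value, cursor, sizeof(T));  // well-defined for any alignment
    cursor += sizeof(T);
    return value;                            // caller byte-swaps if needed (cf. AI_SWAP4/AI_SWAP8)
}
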
@@ -433,7 +433,11 @@ void ResolveVertexDataArray(std::vector<T>& data_out, const Scope& source,
     // deal with this more elegantly and with less redundancy, but right
     // now it seems unavoidable.
     if (MappingInformationType == "ByVertice" && ReferenceInformationType == "Direct") {
-        std::vector<T> tempData;
+        if ( !HasElement( source, indexDataElementName ) ) {
+            return;
+        }
+
+        std::vector<T> tempData;
         ParseVectorDataArray(tempData, GetRequiredElement(source, dataElementName));
 
         data_out.resize(vertex_count);
@@ -450,10 +454,12 @@ void ResolveVertexDataArray(std::vector<T>& data_out, const Scope& source,
         ParseVectorDataArray(tempData, GetRequiredElement(source, dataElementName));
 
         data_out.resize(vertex_count);
+        if ( !HasElement( source, indexDataElementName ) ) {
+            return;
+        }
 
         std::vector<int> uvIndices;
         ParseVectorDataArray(uvIndices,GetRequiredElement(source,indexDataElementName));
 
         for (size_t i = 0, e = uvIndices.size(); i < e; ++i) {
 
             const unsigned int istart = mapping_offsets[i], iend = istart + mapping_counts[i];
@@ -1197,6 +1197,14 @@ std::string ParseTokenAsString(const Token& t)
     return i;
 }
 
+bool HasElement( const Scope& sc, const std::string& index ) {
+    const Element* el = sc[ index ];
+    if ( nullptr == el ) {
+        return false;
+    }
+
+    return true;
+}
 
 // ------------------------------------------------------------------------------------------------
 // extract a required element from a scope, abort if the element cannot be found
@@ -218,6 +218,8 @@ void ParseVectorDataArray(std::vector<unsigned int>& out, const Element& el);
 void ParseVectorDataArray(std::vector<uint64_t>& out, const Element& e);
 void ParseVectorDataArray(std::vector<int64_t>& out, const Element& el);
 
+bool HasElement( const Scope& sc, const std::string& index );
+
 // extract a required element from a scope, abort if the element cannot be found
 const Element& GetRequiredElement(const Scope& sc, const std::string& index, const Element* element = NULL);
 
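
HasElement, declared here and defined in the FBXParser hunk above, lets callers probe a Scope before calling GetRequiredElement, which throws when the element is missing; the ResolveVertexDataArray hunks use it to treat the index array as optional. A hedged usage sketch, assuming the internal FBXParser.h declarations are in scope inside the Assimp::FBX namespace and using "UVIndex" purely as a placeholder element name:

// Illustrative only, not part of the patch: read an optional index element.
void ReadOptionalIndices( const Scope& source, std::vector<int>& indices ) {
    if ( !HasElement( source, "UVIndex" ) ) {
        return; // element absent: leave indices empty and let the caller fall back
    }
    ParseVectorDataArray( indices, GetRequiredElement( source, "UVIndex" ) );
}
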
@@ -156,7 +156,10 @@ namespace glTF2 {
     inline void Write(Value& obj, Buffer& b, AssetWriter& w)
     {
         obj.AddMember("byteLength", static_cast<uint64_t>(b.byteLength), w.mAl);
-        obj.AddMember("uri", Value(b.GetURI(), w.mAl).Move(), w.mAl);
+
+        const auto uri = b.GetURI();
+        const auto relativeUri = uri.substr(uri.find_last_of("/\\") + 1u);
+        obj.AddMember("uri", Value(relativeUri, w.mAl).Move(), w.mAl);
     }
 
     inline void Write(Value& obj, BufferView& bv, AssetWriter& w)
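
The buffer URI written into the JSON is now reduced to its basename, so the exported .gltf refers to the .bin file relative to its own location rather than by the full path it was saved under. The same string operation in isolation (standalone example, not assimp code):

#include <cstdio>
#include <string>

int main() {
    const std::string uri = "C:\\exports\\scene.bin";
    // find_last_of returns npos (and npos + 1 wraps to 0) when no separator exists,
    // so a bare file name passes through unchanged.
    const std::string relativeUri = uri.substr(uri.find_last_of("/\\") + 1u);
    std::printf("%s\n", relativeUri.c_str());   // prints: scene.bin
    return 0;
}
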
@@ -167,7 +170,9 @@ namespace glTF2 {
         if (bv.byteStride != 0) {
             obj.AddMember("byteStride", bv.byteStride, w.mAl);
         }
-        obj.AddMember("target", int(bv.target), w.mAl);
+        if (bv.target != 0) {
+            obj.AddMember("target", int(bv.target), w.mAl);
+        }
     }
 
     inline void Write(Value& /*obj*/, Camera& /*c*/, AssetWriter& /*w*/)
@@ -177,17 +182,23 @@ namespace glTF2 {
 
     inline void Write(Value& obj, Image& img, AssetWriter& w)
     {
-        std::string uri;
-        if (img.HasData()) {
-            uri = "data:" + (img.mimeType.empty() ? "application/octet-stream" : img.mimeType);
-            uri += ";base64,";
-            Util::EncodeBase64(img.GetData(), img.GetDataLength(), uri);
+        if (img.bufferView) {
+            obj.AddMember("bufferView", img.bufferView->index, w.mAl);
+            obj.AddMember("mimeType", Value(img.mimeType, w.mAl).Move(), w.mAl);
         }
         else {
-            uri = img.uri;
-        }
+            std::string uri;
+            if (img.HasData()) {
+                uri = "data:" + (img.mimeType.empty() ? "application/octet-stream" : img.mimeType);
+                uri += ";base64,";
+                Util::EncodeBase64(img.GetData(), img.GetDataLength(), uri);
+            }
+            else {
+                uri = img.uri;
+            }
 
-        obj.AddMember("uri", Value(uri, w.mAl).Move(), w.mAl);
+            obj.AddMember("uri", Value(uri, w.mAl).Move(), w.mAl);
+        }
     }
 
     namespace {
@@ -569,13 +580,17 @@ namespace glTF2 {
             throw DeadlyExportError("Could not open output file: " + std::string(path));
         }
 
+        Ref<Buffer> bodyBuffer = mAsset.GetBodyBuffer();
+        if (bodyBuffer->byteLength > 0) {
+            rapidjson::Value glbBodyBuffer;
+            glbBodyBuffer.SetObject();
+            glbBodyBuffer.AddMember("byteLength", bodyBuffer->byteLength, mAl);
+            mDoc["buffers"].PushBack(glbBodyBuffer, mAl);
+        }
+
         // Padding with spaces as required by the spec
         uint32_t padding = 0x20202020;
 
-        // Adapt JSON so that it is not pointing to an external file,
-        // as this is required by the GLB spec'.
-        mDoc["buffers"][0].RemoveMember("uri");
-
         //
         // JSON chunk
         //
@@ -608,28 +623,25 @@ namespace glTF2 {
         //
 
         uint32_t binaryChunkLength = 0;
-        if (mAsset.buffers.Size() > 0) {
-            Ref<Buffer> b = mAsset.buffers.Get(0u);
-            if (b->byteLength > 0) {
-                binaryChunkLength = (b->byteLength + 3) & ~3; // Round up to next multiple of 4
-                auto paddingLength = binaryChunkLength - b->byteLength;
+        if (bodyBuffer->byteLength > 0) {
+            binaryChunkLength = (bodyBuffer->byteLength + 3) & ~3; // Round up to next multiple of 4
+            auto paddingLength = binaryChunkLength - bodyBuffer->byteLength;
 
-                GLB_Chunk binaryChunk;
-                binaryChunk.chunkLength = binaryChunkLength;
-                binaryChunk.chunkType = ChunkType_BIN;
-                AI_SWAP4(binaryChunk.chunkLength);
+            GLB_Chunk binaryChunk;
+            binaryChunk.chunkLength = binaryChunkLength;
+            binaryChunk.chunkType = ChunkType_BIN;
+            AI_SWAP4(binaryChunk.chunkLength);
 
-                size_t bodyOffset = sizeof(GLB_Header) + sizeof(GLB_Chunk) + jsonChunk.chunkLength;
-                outfile->Seek(bodyOffset, aiOrigin_SET);
-                if (outfile->Write(&binaryChunk, 1, sizeof(GLB_Chunk)) != sizeof(GLB_Chunk)) {
-                    throw DeadlyExportError("Failed to write body data header!");
-                }
-                if (outfile->Write(b->GetPointer(), 1, b->byteLength) != b->byteLength) {
-                    throw DeadlyExportError("Failed to write body data!");
-                }
-                if (paddingLength && outfile->Write(&padding, 1, paddingLength) != paddingLength) {
-                    throw DeadlyExportError("Failed to write body data padding!");
-                }
-            }
+            size_t bodyOffset = sizeof(GLB_Header) + sizeof(GLB_Chunk) + jsonChunk.chunkLength;
+            outfile->Seek(bodyOffset, aiOrigin_SET);
+            if (outfile->Write(&binaryChunk, 1, sizeof(GLB_Chunk)) != sizeof(GLB_Chunk)) {
+                throw DeadlyExportError("Failed to write body data header!");
+            }
+            if (outfile->Write(bodyBuffer->GetPointer(), 1, bodyBuffer->byteLength) != bodyBuffer->byteLength) {
+                throw DeadlyExportError("Failed to write body data!");
+            }
+            if (paddingLength && outfile->Write(&padding, 1, paddingLength) != paddingLength) {
+                throw DeadlyExportError("Failed to write body data padding!");
+            }
         }
 
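
GLB chunks must be aligned to 4-byte boundaries, so the exporter rounds the chunk length up with (length + 3) & ~3 and appends paddingLength filler bytes taken from the padding word declared earlier. The rounding in isolation (standalone example, not assimp code):

#include <cinttypes>
#include <cstdint>
#include <cstdio>

int main() {
    const uint32_t byteLength    = 1021;                     // example payload size
    const uint32_t chunkLength   = (byteLength + 3u) & ~3u;  // round up to a multiple of 4 -> 1024
    const uint32_t paddingLength = chunkLength - byteLength; // filler bytes to append -> 3
    std::printf("chunkLength=%" PRIu32 " paddingLength=%" PRIu32 "\n", chunkLength, paddingLength);
    return 0;
}
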
@@ -109,6 +109,10 @@ glTF2Exporter::glTF2Exporter(const char* filename, IOSystem* pIOSystem, const ai
 
     mAsset.reset( new Asset( pIOSystem ) );
 
+    if (isBinary) {
+        mAsset->SetAsBinary();
+    }
+
     ExportMetadata();
 
     ExportMaterials();
@@ -110,8 +110,7 @@ extern "C" {
 
 /** Maximum dimension for strings, ASSIMP strings are zero terminated. */
 #ifdef __cplusplus
-    static
-    const size_t MAXLEN = 1024;
+    static const size_t MAXLEN = 1024;
 #else
 #   define MAXLEN 1024
 #endif
@@ -51,11 +51,8 @@ if os.name=='posix':
 elif os.name=='nt':
     ext_whitelist.append('.dll')
     path_dirs = os.environ['PATH'].split(';')
-    for dir_candidate in path_dirs:
-        if 'assimp' in dir_candidate.lower():
-            additional_dirs.append(dir_candidate)
+    additional_dirs.extend(path_dirs)
 
 #print(additional_dirs)
 def vec2tuple(x):
     """ Converts a VECTOR3D to a Tuple """
     return (x.x, x.y, x.z)
Binary file not shown.
@@ -61,3 +61,9 @@ public:
 TEST_F( utFBXImporterExporter, importXFromFileTest ) {
     EXPECT_TRUE( importerTest() );
 }
+
+TEST_F( utFBXImporterExporter, importBareBoxWithoutColorsAndTextureCoords ) {
+    Assimp::Importer importer;
+    const aiScene *scene = importer.ReadFile( ASSIMP_TEST_MODELS_DIR "/FBX/box.fbx", aiProcess_ValidateDataStructure );
+    EXPECT_NE( nullptr, scene );
+}