Merge branch 'master' into issue_1957

pull/2407/head
Matias Lavik 2019-04-06 22:22:05 +02:00 committed by GitHub
commit 015e960d0e
46 changed files with 6715 additions and 3914 deletions

View File

@ -70,6 +70,6 @@ addons:
project:
name: "assimp/assimp"
notification_email: kim.kulling@googlemail.com
-build_command_prepend: "cmake . -DASSIMP_ENABLE_BOOST_WORKAROUND=YES"
+build_command_prepend: "cmake ./"
build_command: "make -j4"
branch_pattern: coverity_scan

View File

@ -555,8 +555,6 @@ void WriteDump(const aiScene* scene, IOStream* io, bool shortened) {
mesh->mNormals[n].z);
}
}
-else {
-}
ioprintf(io,"\t\t</Normals>\n");
}

View File

@ -416,7 +416,7 @@ template <> struct Structure :: _defaultInitializer<ErrorPolicy_Fail> {
void operator ()(T& /*out*/,const char* = "") {
// obviously, it is crucial that _DefaultInitializer is used
// only from within a catch clause.
-throw;
+throw DeadlyImportError("Constructing BlenderDNA Structure encountered an error");
}
};

View File

@ -2,7 +2,7 @@
Open Asset Import Library (assimp)
----------------------------------------------------------------------
-Copyright (c) 2006-2012, assimp team
+Copyright (c) 2006-2019, assimp team
All rights reserved.
Redistribution and use of this software in source and binary forms,
@ -68,8 +68,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
using namespace melange;
// overload this function and fill in your own unique data
-void GetWriterInfo(int &id, String &appname)
-{
+void GetWriterInfo(int &id, String &appname) {
id = 2424226;
appname = "Open Asset Import Library";
}
@ -78,7 +77,10 @@ using namespace Assimp;
using namespace Assimp::Formatter;
namespace Assimp {
-template<> const std::string LogFunctions<C4DImporter>::log_prefix = "C4D: ";
+template<> const char* LogFunctions<C4DImporter>::Prefix() {
+static auto prefix = "C4D: ";
+return prefix;
+}
}
static const aiImporterDesc desc = {
@ -97,47 +99,44 @@ static const aiImporterDesc desc = {
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------
C4DImporter::C4DImporter() C4DImporter::C4DImporter()
{} : BaseImporter() {
// empty
}
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------
C4DImporter::~C4DImporter() C4DImporter::~C4DImporter() {
{} // empty
}
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------
bool C4DImporter::CanRead( const std::string& pFile, IOSystem* pIOHandler, bool checkSig) const bool C4DImporter::CanRead( const std::string& pFile, IOSystem* pIOHandler, bool checkSig) const {
{
const std::string& extension = GetExtension(pFile); const std::string& extension = GetExtension(pFile);
if (extension == "c4d") { if (extension == "c4d") {
return true; return true;
} } else if ((!extension.length() || checkSig) && pIOHandler) {
else if ((!extension.length() || checkSig) && pIOHandler) {
// TODO // TODO
} }
return false; return false;
} }
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------
const aiImporterDesc* C4DImporter::GetInfo () const const aiImporterDesc* C4DImporter::GetInfo () const {
{
return &desc; return &desc;
} }
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------
void C4DImporter::SetupProperties(const Importer* /*pImp*/) void C4DImporter::SetupProperties(const Importer* /*pImp*/) {
{
// nothing to be done for the moment // nothing to be done for the moment
} }
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------
// Imports the given file into the given scene structure. // Imports the given file into the given scene structure.
void C4DImporter::InternReadFile( const std::string& pFile, void C4DImporter::InternReadFile( const std::string& pFile, aiScene* pScene, IOSystem* pIOHandler) {
aiScene* pScene, IOSystem* pIOHandler)
{
std::unique_ptr<IOStream> file( pIOHandler->Open( pFile)); std::unique_ptr<IOStream> file( pIOHandler->Open( pFile));
if( file.get() == NULL) { if( file.get() == nullptr ) {
ThrowException("failed to open file " + pFile); ThrowException("failed to open file " + pFile);
} }
@ -151,7 +150,7 @@ void C4DImporter::InternReadFile( const std::string& pFile,
// open document first // open document first
BaseDocument* doc = LoadDocument(f, SCENEFILTER_OBJECTS | SCENEFILTER_MATERIALS); BaseDocument* doc = LoadDocument(f, SCENEFILTER_OBJECTS | SCENEFILTER_MATERIALS);
if(doc == NULL) { if(doc == nullptr ) {
ThrowException("failed to read document " + pFile); ThrowException("failed to read document " + pFile);
} }
@ -160,11 +159,10 @@ void C4DImporter::InternReadFile( const std::string& pFile,
// first convert all materials // first convert all materials
ReadMaterials(doc->GetFirstMaterial()); ReadMaterials(doc->GetFirstMaterial());
// process C4D scenegraph recursively // process C4D scene-graph recursively
try { try {
RecurseHierarchy(doc->GetFirstObject(), pScene->mRootNode); RecurseHierarchy(doc->GetFirstObject(), pScene->mRootNode);
} } catch(...) {
catch(...) {
for(aiMesh* mesh : meshes) { for(aiMesh* mesh : meshes) {
delete mesh; delete mesh;
} }
@ -201,8 +199,7 @@ void C4DImporter::InternReadFile( const std::string& pFile,
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------
bool C4DImporter::ReadShader(aiMaterial* out, melange::BaseShader* shader) bool C4DImporter::ReadShader(aiMaterial* out, melange::BaseShader* shader) {
{
// based on Melange sample code (C4DImportExport.cpp) // based on Melange sample code (C4DImportExport.cpp)
while(shader) { while(shader) {
if(shader->GetType() == Xlayer) { if(shader->GetType() == Xlayer) {
@ -220,15 +217,12 @@ bool C4DImporter::ReadShader(aiMaterial* out, melange::BaseShader* shader)
// Ignore the actual layer blending - models for real-time rendering should not // Ignore the actual layer blending - models for real-time rendering should not
// use them in a non-trivial way. Just try to find textures that we can apply // use them in a non-trivial way. Just try to find textures that we can apply
// to the model. // to the model.
while (lsl) while (lsl) {
{ if (lsl->GetType() == TypeFolder) {
if (lsl->GetType() == TypeFolder)
{
BlendFolder* const folder = dynamic_cast<BlendFolder*>(lsl); BlendFolder* const folder = dynamic_cast<BlendFolder*>(lsl);
LayerShaderLayer *subLsl = dynamic_cast<LayerShaderLayer*>(folder->m_Children.GetObject(0)); LayerShaderLayer *subLsl = dynamic_cast<LayerShaderLayer*>(folder->m_Children.GetObject(0));
while (subLsl) while (subLsl) {
{
if (subLsl->GetType() == TypeShader) { if (subLsl->GetType() == TypeShader) {
BlendShader* const shader = dynamic_cast<BlendShader*>(subLsl); BlendShader* const shader = dynamic_cast<BlendShader*>(subLsl);
if(ReadShader(out, static_cast<BaseShader*>(shader->m_pLink->GetLink()))) { if(ReadShader(out, static_cast<BaseShader*>(shader->m_pLink->GetLink()))) {
@ -238,8 +232,7 @@ bool C4DImporter::ReadShader(aiMaterial* out, melange::BaseShader* shader)
subLsl = subLsl->GetNext(); subLsl = subLsl->GetNext();
} }
} } else if (lsl->GetType() == TypeShader) {
else if (lsl->GetType() == TypeShader) {
BlendShader* const shader = dynamic_cast<BlendShader*>(lsl); BlendShader* const shader = dynamic_cast<BlendShader*>(lsl);
if(ReadShader(out, static_cast<BaseShader*>(shader->m_pLink->GetLink()))) { if(ReadShader(out, static_cast<BaseShader*>(shader->m_pLink->GetLink()))) {
return true; return true;
@ -248,33 +241,27 @@ bool C4DImporter::ReadShader(aiMaterial* out, melange::BaseShader* shader)
lsl = lsl->GetNext(); lsl = lsl->GetNext();
} }
} } else if ( shader->GetType() == Xbitmap ) {
else if ( shader->GetType() == Xbitmap )
{
aiString path; aiString path;
shader->GetFileName().GetString().GetCString(path.data, MAXLEN-1); shader->GetFileName().GetString().GetCString(path.data, MAXLEN-1);
path.length = ::strlen(path.data); path.length = ::strlen(path.data);
out->AddProperty(&path, AI_MATKEY_TEXTURE_DIFFUSE(0)); out->AddProperty(&path, AI_MATKEY_TEXTURE_DIFFUSE(0));
return true; return true;
} } else {
else {
LogWarn("ignoring shader type: " + std::string(GetObjectTypeName(shader->GetType()))); LogWarn("ignoring shader type: " + std::string(GetObjectTypeName(shader->GetType())));
} }
shader = shader->GetNext(); shader = shader->GetNext();
} }
return false; return false;
} }
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------
void C4DImporter::ReadMaterials(melange::BaseMaterial* mat) void C4DImporter::ReadMaterials(melange::BaseMaterial* mat) {
{
// based on Melange sample code // based on Melange sample code
while (mat) while (mat) {
{
const String& name = mat->GetName(); const String& name = mat->GetName();
if (mat->GetType() == Mmaterial) if (mat->GetType() == Mmaterial) {
{
aiMaterial* out = new aiMaterial(); aiMaterial* out = new aiMaterial();
material_mapping[mat] = static_cast<unsigned int>(materials.size()); material_mapping[mat] = static_cast<unsigned int>(materials.size());
materials.push_back(out); materials.push_back(out);
@ -286,8 +273,7 @@ void C4DImporter::ReadMaterials(melange::BaseMaterial* mat)
Material& m = dynamic_cast<Material&>(*mat); Material& m = dynamic_cast<Material&>(*mat);
if (m.GetChannelState(CHANNEL_COLOR)) if (m.GetChannelState(CHANNEL_COLOR)) {
{
GeData data; GeData data;
mat->GetParameter(MATERIAL_COLOR_COLOR, data); mat->GetParameter(MATERIAL_COLOR_COLOR, data);
Vector color = data.GetVector(); Vector color = data.GetVector();
@ -307,9 +293,7 @@ void C4DImporter::ReadMaterials(melange::BaseMaterial* mat)
if(shader) { if(shader) {
ReadShader(out, shader); ReadShader(out, shader);
} }
} } else {
else
{
LogWarn("ignoring plugin material: " + std::string(GetObjectTypeName(mat->GetType()))); LogWarn("ignoring plugin material: " + std::string(GetObjectTypeName(mat->GetType())));
} }
mat = mat->GetNext(); mat = mat->GetNext();
@ -317,14 +301,12 @@ void C4DImporter::ReadMaterials(melange::BaseMaterial* mat)
} }
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------
void C4DImporter::RecurseHierarchy(BaseObject* object, aiNode* parent) void C4DImporter::RecurseHierarchy(BaseObject* object, aiNode* parent) {
{ ai_assert(parent != nullptr );
ai_assert(parent != NULL);
std::vector<aiNode*> nodes; std::vector<aiNode*> nodes;
// based on Melange sample code // based on Melange sample code
while (object) while (object) {
{
const String& name = object->GetName(); const String& name = object->GetName();
const LONG type = object->GetType(); const LONG type = object->GetType();
const Matrix& ml = object->GetMl(); const Matrix& ml = object->GetMl();
@ -356,26 +338,20 @@ void C4DImporter::RecurseHierarchy(BaseObject* object, aiNode* parent)
nodes.push_back(nd); nodes.push_back(nd);
GeData data; GeData data;
if (type == Ocamera) if (type == Ocamera) {
{
object->GetParameter(CAMERAOBJECT_FOV, data); object->GetParameter(CAMERAOBJECT_FOV, data);
// TODO: read camera // TODO: read camera
} } else if (type == Olight) {
else if (type == Olight)
{
// TODO: read light // TODO: read light
} } else if (type == Opolygon) {
else if (type == Opolygon)
{
aiMesh* const mesh = ReadMesh(object); aiMesh* const mesh = ReadMesh(object);
if(mesh != NULL) { if(mesh != nullptr) {
nd->mNumMeshes = 1; nd->mNumMeshes = 1;
nd->mMeshes = new unsigned int[1]; nd->mMeshes = new unsigned int[1];
nd->mMeshes[0] = static_cast<unsigned int>(meshes.size()); nd->mMeshes[0] = static_cast<unsigned int>(meshes.size());
meshes.push_back(mesh); meshes.push_back(mesh);
} }
} } else {
else {
LogWarn("ignoring object: " + std::string(GetObjectTypeName(type))); LogWarn("ignoring object: " + std::string(GetObjectTypeName(type)));
} }
@ -389,28 +365,27 @@ void C4DImporter::RecurseHierarchy(BaseObject* object, aiNode* parent)
std::copy(nodes.begin(), nodes.end(), parent->mChildren); std::copy(nodes.begin(), nodes.end(), parent->mChildren);
} }
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------
aiMesh* C4DImporter::ReadMesh(BaseObject* object) aiMesh* C4DImporter::ReadMesh(BaseObject* object) {
{ ai_assert(object != nullptr);
ai_assert(object != NULL && object->GetType() == Opolygon); ai_assert( object->GetType() == Opolygon );
// based on Melange sample code // based on Melange sample code
PolygonObject* const polyObject = dynamic_cast<PolygonObject*>(object); PolygonObject* const polyObject = dynamic_cast<PolygonObject*>(object);
ai_assert(polyObject != NULL); ai_assert(polyObject != nullptr);
const LONG pointCount = polyObject->GetPointCount(); const LONG pointCount = polyObject->GetPointCount();
const LONG polyCount = polyObject->GetPolygonCount(); const LONG polyCount = polyObject->GetPolygonCount();
if(!polyObject || !pointCount) { if(!polyObject || !pointCount) {
LogWarn("ignoring mesh with zero vertices or faces"); LogWarn("ignoring mesh with zero vertices or faces");
return NULL; return nullptr;
} }
const Vector* points = polyObject->GetPointR(); const Vector* points = polyObject->GetPointR();
ai_assert(points != NULL); ai_assert(points != nullptr);
const CPolygon* polys = polyObject->GetPolygonR(); const CPolygon* polys = polyObject->GetPolygonR();
ai_assert(polys != NULL); ai_assert(polys != nullptr);
std::unique_ptr<aiMesh> mesh(new aiMesh()); std::unique_ptr<aiMesh> mesh(new aiMesh());
mesh->mNumFaces = static_cast<unsigned int>(polyCount); mesh->mNumFaces = static_cast<unsigned int>(polyCount);
@ -443,14 +418,14 @@ aiMesh* C4DImporter::ReadMesh(BaseObject* object)
// check if there are normals, tangents or UVW coordinates // check if there are normals, tangents or UVW coordinates
BaseTag* tag = object->GetTag(Tnormal); BaseTag* tag = object->GetTag(Tnormal);
NormalTag* normals_src = NULL; NormalTag* normals_src = nullptr;
if(tag) { if(tag) {
normals_src = dynamic_cast<NormalTag*>(tag); normals_src = dynamic_cast<NormalTag*>(tag);
normals = mesh->mNormals = new aiVector3D[mesh->mNumVertices](); normals = mesh->mNormals = new aiVector3D[mesh->mNumVertices]();
} }
tag = object->GetTag(Ttangent); tag = object->GetTag(Ttangent);
TangentTag* tangents_src = NULL; TangentTag* tangents_src = nullptr;
if(tag) { if(tag) {
tangents_src = dynamic_cast<TangentTag*>(tag); tangents_src = dynamic_cast<TangentTag*>(tag);
tangents = mesh->mTangents = new aiVector3D[mesh->mNumVertices](); tangents = mesh->mTangents = new aiVector3D[mesh->mNumVertices]();
@ -458,15 +433,14 @@ aiMesh* C4DImporter::ReadMesh(BaseObject* object)
} }
tag = object->GetTag(Tuvw); tag = object->GetTag(Tuvw);
UVWTag* uvs_src = NULL; UVWTag* uvs_src = nullptr;
if(tag) { if(tag) {
uvs_src = dynamic_cast<UVWTag*>(tag); uvs_src = dynamic_cast<UVWTag*>(tag);
uvs = mesh->mTextureCoords[0] = new aiVector3D[mesh->mNumVertices](); uvs = mesh->mTextureCoords[0] = new aiVector3D[mesh->mNumVertices]();
} }
// copy vertices and extra channels over and populate faces // copy vertices and extra channels over and populate faces
for (LONG i = 0; i < polyCount; ++i, ++face) for (LONG i = 0; i < polyCount; ++i, ++face) {
{
ai_assert(polys[i].a < pointCount && polys[i].a >= 0); ai_assert(polys[i].a < pointCount && polys[i].a >= 0);
const Vector& pointA = points[polys[i].a]; const Vector& pointA = points[polys[i].a];
verts->x = pointA.x; verts->x = pointA.x;
@ -489,8 +463,7 @@ aiMesh* C4DImporter::ReadMesh(BaseObject* object)
++verts; ++verts;
// TODO: do we also need to handle lines or points with similar checks? // TODO: do we also need to handle lines or points with similar checks?
if (polys[i].c != polys[i].d) if (polys[i].c != polys[i].d) {
{
ai_assert(polys[i].d < pointCount && polys[i].d >= 0); ai_assert(polys[i].d < pointCount && polys[i].d >= 0);
face->mNumIndices = 4; face->mNumIndices = 4;
@ -500,8 +473,7 @@ aiMesh* C4DImporter::ReadMesh(BaseObject* object)
verts->y = pointD.y; verts->y = pointD.y;
verts->z = pointD.z; verts->z = pointD.z;
++verts; ++verts;
} } else {
else {
face->mNumIndices = 3; face->mNumIndices = 3;
} }
face->mIndices = new unsigned int[face->mNumIndices]; face->mIndices = new unsigned int[face->mNumIndices];
@ -513,8 +485,7 @@ aiMesh* C4DImporter::ReadMesh(BaseObject* object)
if (normals_src) { if (normals_src) {
if(i >= normals_src->GetDataCount()) { if(i >= normals_src->GetDataCount()) {
LogError("unexpected number of normals, ignoring"); LogError("unexpected number of normals, ignoring");
} } else {
else {
ConstNormalHandle normal_handle = normals_src->GetDataAddressR(); ConstNormalHandle normal_handle = normals_src->GetDataAddressR();
NormalStruct nor; NormalStruct nor;
NormalTag::Get(normal_handle, i, nor); NormalTag::Get(normal_handle, i, nor);
@ -616,26 +587,25 @@ aiMesh* C4DImporter::ReadMesh(BaseObject* object)
} }
mesh->mMaterialIndex = ResolveMaterial(polyObject); mesh->mMaterialIndex = ResolveMaterial(polyObject);
return mesh.release(); return mesh.release();
} }
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------
unsigned int C4DImporter::ResolveMaterial(PolygonObject* obj) unsigned int C4DImporter::ResolveMaterial(PolygonObject* obj) {
{ ai_assert(obj != nullptr);
ai_assert(obj != NULL);
const unsigned int mat_count = static_cast<unsigned int>(materials.size()); const unsigned int mat_count = static_cast<unsigned int>(materials.size());
BaseTag* tag = obj->GetTag(Ttexture); BaseTag* tag = obj->GetTag(Ttexture);
if(tag == NULL) { if(tag == nullptr) {
return mat_count; return mat_count;
} }
TextureTag& ttag = dynamic_cast<TextureTag&>(*tag); TextureTag& ttag = dynamic_cast<TextureTag&>(*tag);
BaseMaterial* const mat = ttag.GetMaterial(); BaseMaterial* const mat = ttag.GetMaterial();
ai_assert(mat != NULL); ai_assert(mat != nullptr);
const MaterialMap::const_iterator it = material_mapping.find(mat); const MaterialMap::const_iterator it = material_mapping.find(mat);
if(it == material_mapping.end()) { if(it == material_mapping.end()) {
@ -643,6 +613,7 @@ unsigned int C4DImporter::ResolveMaterial(PolygonObject* obj)
} }
ai_assert((*it).second < mat_count); ai_assert((*it).second < mat_count);
return (*it).second; return (*it).second;
} }

View File

@ -2,7 +2,7 @@
Open Asset Import Library (assimp)
----------------------------------------------------------------------
-Copyright (c) 2006-2012, assimp team
+Copyright (c) 2006-2019, assimp team
All rights reserved.
Redistribution and use of this software in source and binary forms,
@ -48,6 +48,8 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <assimp/LogAux.h>
#include <map>
+// Forward declarations
struct aiNode;
struct aiMesh;
struct aiMaterial;
@ -61,8 +63,7 @@ namespace melange {
class BaseShader;
}
namespace Assimp {
// TinyFormatter.h
namespace Formatter {
template <typename T,typename TR, typename A> class basic_formatter;
@ -75,17 +76,10 @@ namespace Assimp {
*
* Note that Melange is not free software. */
// -------------------------------------------------------------------------------------------
-class C4DImporter : public BaseImporter, public LogFunctions<C4DImporter>
-{
+class C4DImporter : public BaseImporter, public LogFunctions<C4DImporter> {
public:
C4DImporter();
~C4DImporter();
-public:
-// --------------------
bool CanRead( const std::string& pFile, IOSystem* pIOHandler,
bool checkSig) const;
@ -119,5 +113,5 @@ private:
}; // !class C4DImporter
} // end of namespace Assimp
#endif // INCLUDED_AI_CINEMA_4D_LOADER_H

View File

@ -866,6 +866,13 @@ SET( ziplib_SRCS
../contrib/zip/src/zip.h
)
+# TODO if cmake required version has been updated to >3.12.0, collapse this to the second case only
+if(${CMAKE_VERSION} VERSION_LESS "3.12.0")
+add_definitions(-DMINIZ_USE_UNALIGNED_LOADS_AND_STORES=0)
+else()
+add_compile_definitions(MINIZ_USE_UNALIGNED_LOADS_AND_STORES=0)
+endif()
SOURCE_GROUP( ziplib FILES ${ziplib_SRCS} )
SET ( openddl_parser_SRCS

View File

@ -144,7 +144,7 @@ void COBImporter::InternReadFile( const std::string& pFile, aiScene* pScene, IOS
// check header
char head[32];
stream->CopyAndAdvance(head,32);
-if (strncmp(head,"Caligari ",9)) {
+if (strncmp(head,"Caligari ",9) != 0) {
ThrowException("Could not found magic id: `Caligari`");
}
@ -656,14 +656,14 @@ void COBImporter::ReadLght_Ascii(Scene& out, LineSplitter& splitter, const Chunk
ReadFloat3Tuple_Ascii(msh.color ,&rgb);
SkipSpaces(&rgb);
-if (strncmp(rgb,"cone angle",10)) {
+if (strncmp(rgb,"cone angle",10) != 0) {
ASSIMP_LOG_WARN_F( "Expected `cone angle` entity in `color` line in `Lght` chunk ", nfo.id );
}
SkipSpaces(rgb+10,&rgb);
msh.angle = fast_atof(&rgb);
SkipSpaces(&rgb);
-if (strncmp(rgb,"inner angle",11)) {
+if (strncmp(rgb,"inner angle",11) != 0) {
ASSIMP_LOG_WARN_F( "Expected `inner angle` entity in `color` line in `Lght` chunk ", nfo.id);
}
SkipSpaces(rgb+11,&rgb);
@ -903,7 +903,7 @@ public:
if(nfo.size != static_cast<unsigned int>(-1)) {
try {
reader.IncPtr( static_cast< int >( nfo.size ) - reader.GetCurrentPos() + cur );
-} catch ( DeadlyImportError e ) {
+} catch (const DeadlyImportError& e ) {
// out of limit so correct the value
reader.IncPtr( reader.GetReadLimit() );
}
@ -1214,7 +1214,7 @@ void COBImporter::ReadGrou_Binary(COB::Scene& out, StreamReaderLE& reader, const
const chunk_guard cn(nfo,reader);
-out.nodes.push_back(std::shared_ptr<Group>(new Group()));
+out.nodes.push_back(std::make_shared<Group>());
Group& msh = (Group&)(*out.nodes.back().get());
msh = nfo;

View File

@ -476,8 +476,11 @@ D3MFOpcPackage::D3MFOpcPackage(IOSystem* pIOHandler, const std::string& rFile)
mZipArchive->Close( fileStream );
} else if( file == D3MF::XmlTag::CONTENT_TYPES_ARCHIVE) {
+ASSIMP_LOG_WARN_F("Ignored file of unsupported type CONTENT_TYPES_ARCHIVES",file);
+} else {
+ASSIMP_LOG_WARN_F("Ignored file of unknown type: ",file);
}
}
}

View File

@ -78,6 +78,16 @@ namespace Assimp {
FBXConverter::FBXConverter(aiScene* out, const Document& doc)
: defaultMaterialIndex()
+, lights()
+, cameras()
+, textures()
+, materials_converted()
+, textures_converted()
+, meshes_converted()
+, node_anim_chain_bits()
+, mNodeNameInstances()
+, mNodeNames()
+, anim_fps()
, out(out)
, doc(doc) {
// animations need to be converted first since this will
@ -410,19 +420,24 @@
void FBXConverter::GetUniqueName(const std::string &name, std::string &uniqueName)
{
-int i = 0;
uniqueName = name;
-while (mNodeNames.find(uniqueName) != mNodeNames.end())
+int i = 0;
+auto it = mNodeNameInstances.find(name); // duplicate node name instance count
+if (it != mNodeNameInstances.end())
{
-++i;
-std::stringstream ext;
-ext << name << std::setfill('0') << std::setw(3) << i;
-uniqueName = ext.str();
+i = it->second;
+while (mNodeNames.find(uniqueName) != mNodeNames.end())
+{
+i++;
+std::stringstream ext;
+ext << name << std::setfill('0') << std::setw(3) << i;
+uniqueName = ext.str();
+}
}
+mNodeNameInstances[name] = i;
mNodeNames.insert(uniqueName);
}
const char* FBXConverter::NameTransformationComp(TransformationComp comp) {
switch (comp) {
case TransformationComp_Translation:
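For orientation, the GetUniqueName hunk above replaces a rescan-from-zero loop with a cached per-name counter: mNodeNameInstances remembers how many suffixed copies of a base name were already handed out, while mNodeNames still records every name in use. Below is a minimal, self-contained C++ sketch of that scheme, condensed rather than copied line for line; MakeUniqueName, instances and used are illustrative stand-ins, not assimp API.

#include <iomanip>
#include <sstream>
#include <string>
#include <unordered_map>
#include <unordered_set>

// Sketch of the caching scheme from the hunk above.
// instances: base name -> last suffix handed out; used: every name already taken.
std::string MakeUniqueName(const std::string &name,
                           std::unordered_map<std::string, unsigned int> &instances,
                           std::unordered_set<std::string> &used) {
    std::string uniqueName = name;
    unsigned int i = instances.count(name) ? instances[name] : 0u;
    while (used.find(uniqueName) != used.end()) {
        ++i;                                    // resume from the cached counter
        std::ostringstream ext;
        ext << name << std::setfill('0') << std::setw(3) << i; // e.g. "Cube001"
        uniqueName = ext.str();
    }
    instances[name] = i;                        // remember the counter for the next duplicate
    used.insert(uniqueName);
    return uniqueName;
}

Calling this three times with "Cube" yields "Cube", "Cube001", "Cube002" without re-testing earlier suffixes.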
@ -1712,22 +1727,22 @@ namespace Assimp {
if (!mesh) if (!mesh)
{ {
for (const MeshMap::value_type& v : meshes_converted) { for (const MeshMap::value_type& v : meshes_converted) {
const MeshGeometry* const mesh = dynamic_cast<const MeshGeometry*> (v.first); const MeshGeometry* const meshGeom = dynamic_cast<const MeshGeometry*> (v.first);
if (!mesh) { if (!meshGeom) {
continue; continue;
} }
const MatIndexArray& mats = mesh->GetMaterialIndices(); const MatIndexArray& mats = meshGeom->GetMaterialIndices();
if (std::find(mats.begin(), mats.end(), matIndex) == mats.end()) { if (std::find(mats.begin(), mats.end(), matIndex) == mats.end()) {
continue; continue;
} }
int index = -1; int index = -1;
for (unsigned int i = 0; i < AI_MAX_NUMBER_OF_TEXTURECOORDS; ++i) { for (unsigned int i = 0; i < AI_MAX_NUMBER_OF_TEXTURECOORDS; ++i) {
if (mesh->GetTextureCoords(i).empty()) { if (meshGeom->GetTextureCoords(i).empty()) {
break; break;
} }
const std::string& name = mesh->GetTextureCoordChannelName(i); const std::string& name = meshGeom->GetTextureCoordChannelName(i);
if (name == uvSet) { if (name == uvSet) {
index = static_cast<int>(i); index = static_cast<int>(i);
break; break;
@ -1835,22 +1850,22 @@ namespace Assimp {
if (!mesh) if (!mesh)
{ {
for (const MeshMap::value_type& v : meshes_converted) { for (const MeshMap::value_type& v : meshes_converted) {
const MeshGeometry* const mesh = dynamic_cast<const MeshGeometry*> (v.first); const MeshGeometry* const meshGeom = dynamic_cast<const MeshGeometry*> (v.first);
if (!mesh) { if (!meshGeom) {
continue; continue;
} }
const MatIndexArray& mats = mesh->GetMaterialIndices(); const MatIndexArray& mats = meshGeom->GetMaterialIndices();
if (std::find(mats.begin(), mats.end(), matIndex) == mats.end()) { if (std::find(mats.begin(), mats.end(), matIndex) == mats.end()) {
continue; continue;
} }
int index = -1; int index = -1;
for (unsigned int i = 0; i < AI_MAX_NUMBER_OF_TEXTURECOORDS; ++i) { for (unsigned int i = 0; i < AI_MAX_NUMBER_OF_TEXTURECOORDS; ++i) {
if (mesh->GetTextureCoords(i).empty()) { if (meshGeom->GetTextureCoords(i).empty()) {
break; break;
} }
const std::string& name = mesh->GetTextureCoordChannelName(i); const std::string& name = meshGeom->GetTextureCoordChannelName(i);
if (name == uvSet) { if (name == uvSet) {
index = static_cast<int>(i); index = static_cast<int>(i);
break; break;
@ -2041,6 +2056,12 @@ namespace Assimp {
CalculatedOpacity = 1.0f - ((Transparent.r + Transparent.g + Transparent.b) / 3.0f);
}
+// try to get the transparency factor
+const float TransparencyFactor = PropertyGet<float>(props, "TransparencyFactor", ok);
+if (ok) {
+out_mat->AddProperty(&TransparencyFactor, 1, AI_MATKEY_TRANSPARENCYFACTOR);
+}
// use of TransparencyFactor is inconsistent.
// Maya always stores it as 1.0,
// so we can't use it to set AI_MATKEY_OPACITY.
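As a quick arithmetic check of the opacity formula in the context above (the Transparent color value is assumed purely for illustration, and main is just a throwaway driver):

#include <iostream>

int main() {
    // Assumed Transparent color of (0.25, 0.25, 0.25): the RGB average is 0.25,
    // so the derived opacity is 1.0 - 0.25 = 0.75. TransparencyFactor is exported
    // as its own material key instead, since Maya writes it as 1.0 regardless.
    const float r = 0.25f, g = 0.25f, b = 0.25f;
    const float calculatedOpacity = 1.0f - ((r + g + b) / 3.0f);
    std::cout << calculatedOpacity << "\n"; // prints 0.75
    return 0;
}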
@ -2191,22 +2212,22 @@ void FBXConverter::SetShadingPropertiesRaw(aiMaterial* out_mat, const PropertyTa
if (!mesh) if (!mesh)
{ {
for (const MeshMap::value_type& v : meshes_converted) { for (const MeshMap::value_type& v : meshes_converted) {
const MeshGeometry* const mesh = dynamic_cast<const MeshGeometry*>(v.first); const MeshGeometry* const meshGeom = dynamic_cast<const MeshGeometry*>(v.first);
if (!mesh) { if (!meshGeom) {
continue; continue;
} }
const MatIndexArray& mats = mesh->GetMaterialIndices(); const MatIndexArray& mats = meshGeom->GetMaterialIndices();
if (std::find(mats.begin(), mats.end(), matIndex) == mats.end()) { if (std::find(mats.begin(), mats.end(), matIndex) == mats.end()) {
continue; continue;
} }
int index = -1; int index = -1;
for (unsigned int i = 0; i < AI_MAX_NUMBER_OF_TEXTURECOORDS; ++i) { for (unsigned int i = 0; i < AI_MAX_NUMBER_OF_TEXTURECOORDS; ++i) {
if (mesh->GetTextureCoords(i).empty()) { if (meshGeom->GetTextureCoords(i).empty()) {
break; break;
} }
const std::string& name = mesh->GetTextureCoordChannelName(i); const std::string& name = meshGeom->GetTextureCoordChannelName(i);
if (name == uvSet) { if (name == uvSet) {
index = static_cast<int>(i); index = static_cast<int>(i);
break; break;

View File

@ -58,6 +58,8 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <assimp/texture.h>
#include <assimp/camera.h>
#include <assimp/StringComparison.h>
+#include <unordered_map>
+#include <unordered_set>
struct aiScene;
struct aiNode;
@ -74,8 +76,6 @@ namespace FBX {
class Document;
-using NodeNameCache = std::set<std::string>;
/**
* Convert a FBX #Document to #aiScene
* @param out Empty scene to be populated
@ -419,7 +419,6 @@ private:
void TransferDataToScene();
private:
// 0: not assigned yet, others: index is value - 1
unsigned int defaultMaterialIndex;
@ -429,22 +428,27 @@ private:
std::vector<aiLight*> lights;
std::vector<aiCamera*> cameras;
std::vector<aiTexture*> textures;
-typedef std::map<const Material*, unsigned int> MaterialMap;
+using MaterialMap = std::map<const Material*, unsigned int>;
MaterialMap materials_converted;
-typedef std::map<const Video*, unsigned int> VideoMap;
+using VideoMap = std::map<const Video*, unsigned int>;
VideoMap textures_converted;
-typedef std::map<const Geometry*, std::vector<unsigned int> > MeshMap;
+using MeshMap = std::map<const Geometry*, std::vector<unsigned int> >;
MeshMap meshes_converted;
// fixed node name -> which trafo chain components have animations?
-typedef std::map<std::string, unsigned int> NodeAnimBitMap;
+using NodeAnimBitMap = std::map<std::string, unsigned int> ;
NodeAnimBitMap node_anim_chain_bits;
+// number of nodes with the same name
+using NodeAnimNameMap = std::unordered_map<std::string, unsigned int>;
+NodeAnimNameMap mNodeNameInstances;
+using NodeNameCache = std::unordered_set<std::string>;
NodeNameCache mNodeNames;
double anim_fps;
aiScene* const out;

View File

@ -432,7 +432,7 @@ void FBX::Node::WritePropertyNodeAscii(
char buffer[32];
FBX::Node node(name);
node.Begin(s, false, indent);
-std::string vsize = std::to_string(v.size());
+std::string vsize = to_string(v.size());
// *<size> {
s.PutChar('*'); s.PutString(vsize); s.PutString(" {\n");
// indent + 1
@ -468,7 +468,7 @@ void FBX::Node::WritePropertyNodeAscii(
char buffer[32];
FBX::Node node(name);
node.Begin(s, false, indent);
-std::string vsize = std::to_string(v.size());
+std::string vsize = to_string(v.size());
// *<size> {
s.PutChar('*'); s.PutString(vsize); s.PutString(" {\n");
// indent + 1

View File

@ -207,6 +207,20 @@ Texture::Texture(uint64_t id, const Element& element, const Document& doc, const
props = GetPropertyTable(doc,"Texture.FbxFileTexture",element,sc);
+// 3DS Max and FBX SDK use "Scaling" and "Translation" instead of "ModelUVScaling" and "ModelUVTranslation". Use these properties if available.
+bool ok;
+const aiVector3D& scaling = PropertyGet<aiVector3D>(*props, "Scaling", ok);
+if (ok) {
+uvScaling.x = scaling.x;
+uvScaling.y = scaling.y;
+}
+const aiVector3D& trans = PropertyGet<aiVector3D>(*props, "Translation", ok);
+if (ok) {
+uvTrans.x = trans.x;
+uvTrans.y = trans.y;
+}
// resolve video links
if(doc.Settings().readTextures) {
const std::vector<const Connection*>& conns = doc.GetConnectionsByDestinationSequenced(ID());
@ -342,8 +356,11 @@ Video::Video(uint64_t id, const Element& element, const Document& doc, const std
content = new uint8_t[len];
::memcpy(content, data + 5, len);
}
-} catch (runtime_error runtimeError) {
+} catch (const runtime_error& runtimeError)
+{
//we don't need the content data for contents that has already been loaded
+ASSIMP_LOG_DEBUG_F("Caught exception in FBXMaterial (likely because content was already loaded): ",
+runtimeError.what());
}
}

View File

@ -300,13 +300,10 @@ int ClampSpline(int idx, int size) {
// ------------------------------------------------------------------------------------------------
inline void FindSuitableMultiple(int& angle)
{
-if (angle < 3)angle = 3;
+if (angle < 3) angle = 3;
else if (angle < 10) angle = 10;
else if (angle < 20) angle = 20;
else if (angle < 30) angle = 30;
-else
-{
-}
}
// ------------------------------------------------------------------------------------------------
@ -317,6 +314,8 @@ void IRRImporter::ComputeAnimations(Node* root, aiNode* real, std::vector<aiNode
// XXX totally WIP - doesn't produce proper results, need to evaluate
// whether there's any use for Irrlicht's proprietary scene format
// outside Irrlicht ...
+// This also applies to the above function of FindSuitableMultiple and ClampSpline which are
+// solely used in this function
if (root->animators.empty()) {
return;
@ -674,38 +673,38 @@ void IRRImporter::GenerateGraph(Node* root,aiNode* rootOut ,aiScene* scene,
// Get the loaded mesh from the scene and add it to
// the list of all scenes to be attached to the
// graph we're currently building
-aiScene* scene = batch.GetImport(root->id);
+aiScene* localScene = batch.GetImport(root->id);
-if (!scene) {
+if (!localScene) {
ASSIMP_LOG_ERROR("IRR: Unable to load external file: " + root->meshPath);
break;
}
-attach.push_back(AttachmentInfo(scene,rootOut));
+attach.push_back(AttachmentInfo(localScene,rootOut));
// Now combine the material we've loaded for this mesh
// with the real materials we got from the file. As we
// don't execute any pp-steps on the file, the numbers
// should be equal. If they are not, we can impossibly
// do this ...
-if (root->materials.size() != (unsigned int)scene->mNumMaterials) {
+if (root->materials.size() != (unsigned int)localScene->mNumMaterials) {
ASSIMP_LOG_WARN("IRR: Failed to match imported materials "
"with the materials found in the IRR scene file");
break;
}
-for (unsigned int i = 0; i < scene->mNumMaterials;++i) {
+for (unsigned int i = 0; i < localScene->mNumMaterials;++i) {
// Delete the old material, we don't need it anymore
-delete scene->mMaterials[i];
+delete localScene->mMaterials[i];
std::pair<aiMaterial*, unsigned int>& src = root->materials[i];
-scene->mMaterials[i] = src.first;
+localScene->mMaterials[i] = src.first;
}
// NOTE: Each mesh should have exactly one material assigned,
// but we do it in a separate loop if this behaviour changes
// in future.
-for (unsigned int i = 0; i < scene->mNumMeshes;++i) {
+for (unsigned int i = 0; i < localScene->mNumMeshes;++i) {
// Process material flags
-aiMesh* mesh = scene->mMeshes[i];
+aiMesh* mesh = localScene->mMeshes[i];
// If "trans_vertex_alpha" mode is enabled, search all vertex colors

View File

@ -278,10 +278,10 @@ void STEP::ReadFile(DB& db,const EXPRESS::ConversionSchema& scheme,
std::transform( type.begin(), type.end(), type.begin(), &Assimp::ToLower<char> );
const char* sz = scheme.GetStaticStringForToken(type);
if(sz) {
-const std::string::size_type len = n2-n1+1;
+const std::string::size_type szLen = n2-n1+1;
-char* const copysz = new char[len+1];
+char* const copysz = new char[szLen+1];
std::copy(s.c_str()+n1,s.c_str()+n2+1,copysz);
-copysz[len] = '\0';
+copysz[szLen] = '\0';
db.InternInsert(new LazyObject(db,id,line,sz,copysz));
}
if(!has_next) {

View File

@ -443,10 +443,10 @@ void MD5Importer::LoadMD5MeshFile ()
for (MD5::VertexList::const_iterator iter = meshSrc.mVertices.begin();iter != meshSrc.mVertices.end();++iter,++pv) {
for (unsigned int jub = (*iter).mFirstWeight, w = jub; w < jub + (*iter).mNumWeights;++w)
{
-MD5::WeightDesc& desc = meshSrc.mWeights[w];
+MD5::WeightDesc& weightDesc = meshSrc.mWeights[w];
/* FIX for some invalid exporters */
-if (!(desc.mWeight < AI_MD5_WEIGHT_EPSILON && desc.mWeight >= -AI_MD5_WEIGHT_EPSILON ))
+if (!(weightDesc.mWeight < AI_MD5_WEIGHT_EPSILON && weightDesc.mWeight >= -AI_MD5_WEIGHT_EPSILON ))
-++piCount[desc.mBone];
+++piCount[weightDesc.mBone];
}
}
@ -493,20 +493,20 @@ void MD5Importer::LoadMD5MeshFile ()
if (w >= meshSrc.mWeights.size())
throw DeadlyImportError("MD5MESH: Invalid weight index");
-MD5::WeightDesc& desc = meshSrc.mWeights[w];
+MD5::WeightDesc& weightDesc = meshSrc.mWeights[w];
-if ( desc.mWeight < AI_MD5_WEIGHT_EPSILON && desc.mWeight >= -AI_MD5_WEIGHT_EPSILON) {
+if ( weightDesc.mWeight < AI_MD5_WEIGHT_EPSILON && weightDesc.mWeight >= -AI_MD5_WEIGHT_EPSILON) {
continue;
}
-const ai_real fNewWeight = desc.mWeight / fSum;
+const ai_real fNewWeight = weightDesc.mWeight / fSum;
// transform the local position into worldspace
-MD5::BoneDesc& boneSrc = meshParser.mJoints[desc.mBone];
+MD5::BoneDesc& boneSrc = meshParser.mJoints[weightDesc.mBone];
-const aiVector3D v = boneSrc.mRotationQuatConverted.Rotate (desc.vOffsetPosition);
+const aiVector3D v = boneSrc.mRotationQuatConverted.Rotate (weightDesc.vOffsetPosition);
// use the original weight to compute the vertex position
// (some MD5s seem to depend on the invalid weight values ...)
-*pv += ((boneSrc.mPositionXYZ+v)* (ai_real)desc.mWeight);
+*pv += ((boneSrc.mPositionXYZ+v)* (ai_real)weightDesc.mWeight);
aiBone* bone = mesh->mBones[boneSrc.mMap];
*bone->mWeights++ = aiVertexWeight((unsigned int)(pv-mesh->mVertices),fNewWeight);

View File

@ -127,7 +127,7 @@ STLExporter::STLExporter(const char* _filename, const aiScene* pScene, bool expo
mOutput.write((char *)&meshnum, 4);
if (exportPointClouds) {
+throw DeadlyExportError("This functionality is not yet implemented for binary output.");
}
for(unsigned int i = 0; i < pScene->mNumMeshes; ++i) {

View File

@ -294,17 +294,17 @@ namespace glTF {
// filling object "compressedData"
json_comp_data.SetObject();
json_comp_data.AddMember("buffer", ptr_ext_comp->Buffer, w.mAl);
-json_comp_data.AddMember("byteOffset", ptr_ext_comp->Offset, w.mAl);
+json_comp_data.AddMember("byteOffset", static_cast<uint64_t>(ptr_ext_comp->Offset), w.mAl);
json_comp_data.AddMember("componentType", 5121, w.mAl);
json_comp_data.AddMember("type", "SCALAR", w.mAl);
-json_comp_data.AddMember("count", ptr_ext_comp->Count, w.mAl);
+json_comp_data.AddMember("count", static_cast<uint64_t>(ptr_ext_comp->Count), w.mAl);
if(ptr_ext_comp->Binary)
json_comp_data.AddMember("mode", "binary", w.mAl);
else
json_comp_data.AddMember("mode", "ascii", w.mAl);
-json_comp_data.AddMember("indicesCount", ptr_ext_comp->IndicesCount, w.mAl);
+json_comp_data.AddMember("indicesCount", static_cast<uint64_t>(ptr_ext_comp->IndicesCount), w.mAl);
-json_comp_data.AddMember("verticesCount", ptr_ext_comp->VerticesCount, w.mAl);
+json_comp_data.AddMember("verticesCount", static_cast<uint64_t>(ptr_ext_comp->VerticesCount), w.mAl);
// filling object "Open3DGC-compression"
Value json_o3dgc;

View File

@ -245,7 +245,7 @@ inline Ref<Accessor> ExportData(Asset& a, std::string& meshName, Ref<Buffer>& bu
namespace {
void GetMatScalar(const aiMaterial* mat, float& val, const char* propName, int type, int idx) {
-if (mat->Get(propName, type, idx, val) == AI_SUCCESS) {}
+ai_assert(mat->Get(propName, type, idx, val) == AI_SUCCESS);
}
}

View File

@ -36,3 +36,21 @@
# Temporary
*.swp
.DS_Store
+# CMake
+CMakeScripts
+*.cmake
+# Xcode
+*.build
+*.xcodeproj
+zip.sln
+zip.vcxproj.filters
+zip.vcxproj
+ALL_BUILD.vcxproj.filters
+ALL_BUILD.vcxproj
+CMakeFiles/
+zip.dir/
+test/test.exe.vcxproj.filters
+test/test.exe.vcxproj
+test/test.exe.dir/

View File

@ -0,0 +1,18 @@
#!/bin/bash
#
# Build script for travis-ci.org builds.
#
if [ $ANALYZE = "true" ] && [ "$CC" = "clang" ]; then
# scan-build -h
scan-build cmake -G "Unix Makefiles"
scan-build -enable-checker security.FloatLoopCounter \
-enable-checker security.insecureAPI.UncheckedReturn \
--status-bugs -v \
make -j 8 \
make -j 8 test
else
cmake -DCMAKE_BUILD_TYPE=Debug -DSANITIZE_ADDRESS=On -DCMAKE_INSTALL_PREFIX=_install
make -j 8
make install
ASAN_OPTIONS=detect_leaks=0 LSAN_OPTIONS=verbosity=1:log_threads=1 ctest -V
fi

View File

@ -1,10 +1,22 @@
language: c
+addons:
+  apt:
+    packages: &1
+      - lcov
# Compiler selection
compiler:
  - clang
  - gcc
+env:
+  - ANALYZE=false
+  - ANALYZE=true
# Build steps
script:
-  - mkdir build
-  - cd build
-  - cmake -DCMAKE_BUILD_TYPE=Debug .. && make && make test
+  - ./.travis.sh
+after_success:
+  # Creating report
+  - cmake -DENABLE_COVERAGE=ON
+  - make
+  - make test
+  # Uploading report to CodeCov
+  - bash <(curl -s https://codecov.io/bash)

View File

@ -1,18 +1,47 @@
cmake_minimum_required(VERSION 2.8)
project(zip)
+enable_language(C)
+set(CMAKE_MODULE_PATH "${PROJECT_SOURCE_DIR}/cmake" ${CMAKE_MODULE_PATH})
if (MSVC)
# Use secure functions by defaualt and suppress warnings about "deprecated" functions
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /D _CRT_SECURE_CPP_OVERLOAD_STANDARD_NAMES=1")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /D _CRT_SECURE_CPP_OVERLOAD_STANDARD_NAMES_COUNT=1")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /D _CRT_NONSTDC_NO_WARNINGS=1 /D _CRT_SECURE_NO_WARNINGS=1")
+elseif ("${CMAKE_C_COMPILER_ID}" STREQUAL "GNU" OR
+"${CMAKE_C_COMPILER_ID}" STREQUAL "Clang" OR
+"${CMAKE_C_COMPILER_ID}" STREQUAL "AppleClang")
+set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -std=c99 -Wall -Wextra -Werror -pedantic")
endif (MSVC)
# zip
set(SRC src/miniz.h src/zip.h src/zip.c)
-add_library(${CMAKE_PROJECT_NAME} ${SRC})
+add_library(${PROJECT_NAME} ${SRC})
+target_include_directories(${PROJECT_NAME} INTERFACE src)
# test
-enable_testing()
-add_subdirectory(test)
+if (NOT CMAKE_DISABLE_TESTING)
+enable_testing()
+add_subdirectory(test)
+find_package(Sanitizers)
+add_sanitizers(${PROJECT_NAME} test.exe)
+add_sanitizers(${PROJECT_NAME} test_miniz.exe)
+endif()
+install(TARGETS ${PROJECT_NAME}
+RUNTIME DESTINATION bin
+ARCHIVE DESTINATION lib
+LIBRARY DESTINATION lib
+COMPONENT library)
+install(FILES ${PROJECT_SOURCE_DIR}/src/zip.h DESTINATION include)
+# uninstall target (https://gitlab.kitware.com/cmake/community/wikis/FAQ#can-i-do-make-uninstall-with-cmake)
+if(NOT TARGET uninstall)
+configure_file(
+"${CMAKE_CURRENT_SOURCE_DIR}/cmake/cmake_uninstall.cmake.in"
+"${CMAKE_CURRENT_BINARY_DIR}/cmake/cmake_uninstall.cmake"
+IMMEDIATE @ONLY)
+add_custom_target(uninstall
+COMMAND ${CMAKE_COMMAND} -P ${CMAKE_CURRENT_BINARY_DIR}/cmake/cmake_uninstall.cmake)
+endif()

View File

@ -1,12 +1,11 @@
### A portable (OSX/Linux/Windows), simple zip library written in C
This is done by hacking awesome [miniz](https://code.google.com/p/miniz) library and layering functions on top of the miniz v1.15 API.
-[![Windows][win-badge]][win-link] [![OS X][osx-linux-badge]][osx-linux-link]
+[![Windows](https://ci.appveyor.com/api/projects/status/bph8dr3jacgmjv32/branch/master?svg=true&label=windows)](https://ci.appveyor.com/project/kuba--/zip)
+[![Linux](https://travis-ci.org/kuba--/zip.svg?branch=master&label=linux%2fosx)](https://travis-ci.org/kuba--/zip)
+[![Version](https://badge.fury.io/gh/kuba--%2Fzip.svg)](https://github.com/kuba--/zip/releases)
+[![Codecov](https://codecov.io/gh/kuba--/zip/branch/master/graph/badge.svg)](https://codecov.io/gh/kuba--/zip)
-[win-badge]: https://img.shields.io/appveyor/ci/kuba--/zip/master.svg?label=windows "AppVeyor build status"
-[win-link]: https://ci.appveyor.com/project/kuba--/zip "AppVeyor build status"
-[osx-linux-badge]: https://img.shields.io/travis/kuba--/zip/master.svg?label=linux/osx "Travis CI build status"
-[osx-linux-link]: https://travis-ci.org/kuba--/zip "Travis CI build status"
# The Idea
<img src="zip.png" name="zip" />
@ -23,117 +22,288 @@ It was the reason, why I decided to write zip module on top of the miniz. It req
* Create a new zip archive with default compression level. * Create a new zip archive with default compression level.
```c ```c
struct zip_t *zip = zip_open("foo.zip", ZIP_DEFAULT_COMPRESSION_LEVEL, 'w'); struct zip_t *zip = zip_open("foo.zip", ZIP_DEFAULT_COMPRESSION_LEVEL, 'w');
{
zip_entry_open(zip, "foo-1.txt");
{ {
zip_entry_open(zip, "foo-1.txt"); const char *buf = "Some data here...\0";
{ zip_entry_write(zip, buf, strlen(buf));
char *buf = "Some data here...";
zip_entry_write(zip, buf, strlen(buf));
}
zip_entry_close(zip);
zip_entry_open(zip, "foo-2.txt");
{
// merge 3 files into one entry and compress them on-the-fly.
zip_entry_fwrite(zip, "foo-2.1.txt");
zip_entry_fwrite(zip, "foo-2.2.txt");
zip_entry_fwrite(zip, "foo-2.3.txt");
}
zip_entry_close(zip);
} }
zip_close(zip); zip_entry_close(zip);
zip_entry_open(zip, "foo-2.txt");
{
// merge 3 files into one entry and compress them on-the-fly.
zip_entry_fwrite(zip, "foo-2.1.txt");
zip_entry_fwrite(zip, "foo-2.2.txt");
zip_entry_fwrite(zip, "foo-2.3.txt");
}
zip_entry_close(zip);
}
zip_close(zip);
``` ```
* Append to the existing zip archive. * Append to the existing zip archive.
```c ```c
struct zip_t *zip = zip_open("foo.zip", ZIP_DEFAULT_COMPRESSION_LEVEL, 'a'); struct zip_t *zip = zip_open("foo.zip", ZIP_DEFAULT_COMPRESSION_LEVEL, 'a');
{
zip_entry_open(zip, "foo-3.txt");
{ {
zip_entry_open(zip, "foo-3.txt"); const char *buf = "Append some data here...\0";
{ zip_entry_write(zip, buf, strlen(buf));
char *buf = "Append some data here...";
zip_entry_write(zip, buf, strlen(buf));
}
zip_entry_close(zip);
} }
zip_close(zip); zip_entry_close(zip);
}
zip_close(zip);
``` ```
* Extract a zip archive into a folder. * Extract a zip archive into a folder.
```c ```c
int on_extract_entry(const char *filename, void *arg) { int on_extract_entry(const char *filename, void *arg) {
static int i = 0; static int i = 0;
int n = *(int *)arg; int n = *(int *)arg;
printf("Extracted: %s (%d of %d)\n", filename, ++i, n); printf("Extracted: %s (%d of %d)\n", filename, ++i, n);
return 0; return 0;
} }
int arg = 2; int arg = 2;
zip_extract("foo.zip", "/tmp", on_extract_entry, &arg); zip_extract("foo.zip", "/tmp", on_extract_entry, &arg);
``` ```
* Extract a zip entry into memory. * Extract a zip entry into memory.
```c ```c
void *buf = NULL; void *buf = NULL;
size_t bufsize; size_t bufsize;
struct zip_t *zip = zip_open("foo.zip", 0, 'r'); struct zip_t *zip = zip_open("foo.zip", 0, 'r');
{
zip_entry_open(zip, "foo-1.txt");
{ {
zip_entry_open(zip, "foo-1.txt"); zip_entry_read(zip, &buf, &bufsize);
{
zip_entry_read(zip, &buf, &bufsize);
}
zip_entry_close(zip);
} }
zip_close(zip); zip_entry_close(zip);
}
zip_close(zip);
free(buf); free(buf);
``` ```
* Extract a zip entry into memory using callback. * Extract a zip entry into memory (no internal allocation).
```c ```c
struct buffer_t { unsigned char *buf;
char *data; size_t bufsize;
size_t size;
};
static size_t on_extract(void *arg, unsigned long long offset, const void *data, size_t size) { struct zip_t *zip = zip_open("foo.zip", 0, 'r');
struct buffer_t *buf = (struct buffer_t *)arg; {
buf->data = realloc(buf->data, buf->size + size + 1); zip_entry_open(zip, "foo-1.txt");
assert(NULL != buf->data);
memcpy(&(buf->data[buf->size]), data, size);
buf->size += size;
buf->data[buf->size] = 0;
return size;
}
struct buffer_t buf = {0};
struct zip_t *zip = zip_open("foo.zip", 0, 'r');
{ {
zip_entry_open(zip, "foo-1.txt"); bufsize = zip_entry_size(zip);
{ buf = calloc(sizeof(unsigned char), bufsize);
zip_entry_extract(zip, on_extract, &buf);
}
zip_entry_close(zip);
}
zip_close(zip);
free(buf.data); zip_entry_noallocread(zip, (void *)buf, bufsize);
}
zip_entry_close(zip);
}
zip_close(zip);
free(buf);
``` ```
* Extract a zip entry into memory using callback.
* Extract a zip entry into a file.
```c ```c
struct zip_t *zip = zip_open("foo.zip", 0, 'r'); struct buffer_t {
char *data;
size_t size;
};
static size_t on_extract(void *arg, unsigned long long offset, const void *data, size_t size) {
struct buffer_t *buf = (struct buffer_t *)arg;
buf->data = realloc(buf->data, buf->size + size + 1);
assert(NULL != buf->data);
memcpy(&(buf->data[buf->size]), data, size);
buf->size += size;
buf->data[buf->size] = 0;
return size;
}
struct buffer_t buf = {0};
struct zip_t *zip = zip_open("foo.zip", 0, 'r');
{
zip_entry_open(zip, "foo-1.txt");
{ {
zip_entry_open(zip, "foo-2.txt"); zip_entry_extract(zip, on_extract, &buf);
{
zip_entry_fread(zip, "foo-2.txt");
}
zip_entry_close(zip);
} }
zip_close(zip); zip_entry_close(zip);
}
zip_close(zip);
free(buf.data);
```
* Extract a zip entry into a file.
```c
struct zip_t *zip = zip_open("foo.zip", 0, 'r');
{
zip_entry_open(zip, "foo-2.txt");
{
zip_entry_fread(zip, "foo-2.txt");
}
zip_entry_close(zip);
}
zip_close(zip);
```
* List of all zip entries
```c
struct zip_t *zip = zip_open("foo.zip", 0, 'r');
int i, n = zip_total_entries(zip);
for (i = 0; i < n; ++i) {
zip_entry_openbyindex(zip, i);
{
const char *name = zip_entry_name(zip);
int isdir = zip_entry_isdir(zip);
unsigned long long size = zip_entry_size(zip);
unsigned int crc32 = zip_entry_crc32(zip);
}
zip_entry_close(zip);
}
zip_close(zip);
```
## Bindings
Compile zip library as a dynamic library.
```shell
$ mkdir build
$ cd build
$ cmake -DBUILD_SHARED_LIBS=true ..
$ make
```
### Go (cgo)
```go
package main
/*
#cgo CFLAGS: -I../src
#cgo LDFLAGS: -L. -lzip
#include <zip.h>
*/
import "C"
import "unsafe"
func main() {
path := C.CString("/tmp/go.zip")
zip := C.zip_open(path, 6, 'w')
entryname := C.CString("test")
C.zip_entry_open(zip, entryname)
content := "test content"
buf := unsafe.Pointer(C.CString(content))
bufsize := C.size_t(len(content))
C.zip_entry_write(zip, buf, bufsize)
C.zip_entry_close(zip)
C.zip_close(zip)
}
```
### Ruby (ffi)
Install _ffi_ gem.
```shell
$ gem install ffi
```
Bind in your module.
```ruby
require 'ffi'
module Zip
extend FFI::Library
ffi_lib "./libzip.#{::FFI::Platform::LIBSUFFIX}"
attach_function :zip_open, [:string, :int, :char], :pointer
attach_function :zip_close, [:pointer], :void
attach_function :zip_entry_open, [:pointer, :string], :int
attach_function :zip_entry_close, [:pointer], :void
attach_function :zip_entry_write, [:pointer, :string, :int], :int
end
ptr = Zip.zip_open("/tmp/ruby.zip", 6, "w".bytes()[0])
status = Zip.zip_entry_open(ptr, "test")
content = "test content"
status = Zip.zip_entry_write(ptr, content, content.size())
Zip.zip_entry_close(ptr)
Zip.zip_close(ptr)
```
### Python (cffi)
Install _cffi_ package
```shell
$ pip install cffi
```
Bind in your package.
```python
import ctypes.util
from cffi import FFI
ffi = FFI()
ffi.cdef("""
struct zip_t *zip_open(const char *zipname, int level, char mode);
void zip_close(struct zip_t *zip);
int zip_entry_open(struct zip_t *zip, const char *entryname);
int zip_entry_close(struct zip_t *zip);
int zip_entry_write(struct zip_t *zip, const void *buf, size_t bufsize);
""")
Zip = ffi.dlopen(ctypes.util.find_library("zip"))
ptr = Zip.zip_open("/tmp/python.zip", 6, 'w')
status = Zip.zip_entry_open(ptr, "test")
content = "test content"
status = Zip.zip_entry_write(ptr, content, len(content))
Zip.zip_entry_close(ptr)
Zip.zip_close(ptr)
```
### Ring
The language comes with RingZip, which is based on this library.
```ring
load "ziplib.ring"
new Zip {
setFileName("myfile.zip")
open("w")
newEntry() {
open("test.c")
writefile("test.c")
close()
}
close()
}
```
# Contribution Rules/Coding Standards
No need to throw away your coding style; just do your best to follow the default clang-format style.
Apply `clang-format` to the source files before committing:
```sh
for file in $(git ls-files | \grep -E '\.(c|h)$' | \grep -v -- '#')
do
clang-format -i $file
done
``` ```

View File

@ -1,4 +1,4 @@
version: 1.0.{build} version: zip-0.1.9.{build}
build_script: build_script:
- cmd: >- - cmd: >-
cd c:\projects\zip cd c:\projects\zip

View File

@ -0,0 +1,55 @@
#!/bin/sh
# The MIT License (MIT)
#
# Copyright (c)
# 2013 Matthew Arsenault
# 2015-2016 RWTH Aachen University, Federal Republic of Germany
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# This script is a wrapper for AddressSanitizer. In some special cases you need
# to preload AddressSanitizer to avoid error messages - e.g. if you're
# preloading another library into your application. At the moment this script
# only does something if we're running on a Linux platform. OSX might not be
# affected.
# Exit immediately if the platform is not Linux.
if [ "$(uname)" != "Linux" ]
then
exec $@
fi
# Get the used libasan of the application ($1). If a libasan was found, it will
# be prepended to LD_PRELOAD.
libasan=$(ldd $1 | grep libasan | sed "s/^[[:space:]]//" | cut -d' ' -f1)
if [ -n "$libasan" ]
then
if [ -n "$LD_PRELOAD" ]
then
export LD_PRELOAD="$libasan:$LD_PRELOAD"
else
export LD_PRELOAD="$libasan"
fi
fi
# Execute the application.
exec $@

View File

@ -0,0 +1,23 @@
# copied from https://gitlab.kitware.com/cmake/community/wikis/FAQ#can-i-do-make-uninstall-with-cmake
if(NOT EXISTS "@CMAKE_BINARY_DIR@/install_manifest.txt")
message(FATAL_ERROR "Cannot find install manifest: @CMAKE_BINARY_DIR@/install_manifest.txt")
endif(NOT EXISTS "@CMAKE_BINARY_DIR@/install_manifest.txt")
file(READ "@CMAKE_BINARY_DIR@/install_manifest.txt" files)
string(REGEX REPLACE "\n" ";" files "${files}")
foreach(file ${files})
message(STATUS "Uninstalling $ENV{DESTDIR}${file}")
if(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}")
exec_program(
"@CMAKE_COMMAND@" ARGS "-E remove \"$ENV{DESTDIR}${file}\""
OUTPUT_VARIABLE rm_out
RETURN_VALUE rm_retval
)
if(NOT "${rm_retval}" STREQUAL 0)
message(FATAL_ERROR "Problem when removing $ENV{DESTDIR}${file}")
endif(NOT "${rm_retval}" STREQUAL 0)
else(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}")
message(STATUS "File $ENV{DESTDIR}${file} does not exist.")
endif(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}")
endforeach(file)

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -13,11 +13,24 @@
#define ZIP_H #define ZIP_H
#include <string.h> #include <string.h>
#include <sys/types.h>
#ifdef __cplusplus #ifdef __cplusplus
extern "C" { extern "C" {
#endif #endif
#if !defined(_SSIZE_T_DEFINED) && !defined(_SSIZE_T_DEFINED_) && \
!defined(_SSIZE_T) && !defined(_SSIZE_T_) && !defined(__ssize_t_defined)
#define _SSIZE_T
// 64-bit Windows is the only mainstream platform
// where sizeof(long) != sizeof(void*)
#ifdef _WIN64
typedef long long ssize_t; /* byte count or error */
#else
typedef long ssize_t; /* byte count or error */
#endif
#endif
#ifndef MAX_PATH #ifndef MAX_PATH
#define MAX_PATH 32767 /* # chars in a path name including NULL */ #define MAX_PATH 32767 /* # chars in a path name including NULL */
#endif #endif
@ -47,7 +60,7 @@ struct zip_t;
extern struct zip_t *zip_open(const char *zipname, int level, char mode); extern struct zip_t *zip_open(const char *zipname, int level, char mode);
/* /*
Closes zip archive, releases resources - always finalize. Closes the zip archive, releases resources - always finalize.
Args: Args:
zip: zip archive handler. zip: zip archive handler.
@ -55,7 +68,10 @@ extern struct zip_t *zip_open(const char *zipname, int level, char mode);
extern void zip_close(struct zip_t *zip); extern void zip_close(struct zip_t *zip);
/* /*
Opens a new entry for writing in a zip archive. Opens an entry by name in the zip archive.
For zip archive opened in 'w' or 'a' mode the function will append
a new entry. In readonly mode the function tries to locate the entry
in global dictionary.
Args: Args:
zip: zip archive handler. zip: zip archive handler.
@ -66,6 +82,19 @@ extern void zip_close(struct zip_t *zip);
*/ */
extern int zip_entry_open(struct zip_t *zip, const char *entryname); extern int zip_entry_open(struct zip_t *zip, const char *entryname);
/*
Opens a new entry by index in the zip archive.
This function is only valid if zip archive was opened in 'r' (readonly) mode.
Args:
zip: zip archive handler.
index: index in local dictionary.
Returns:
The return code - 0 on success, negative number (< 0) on error.
*/
extern int zip_entry_openbyindex(struct zip_t *zip, int index);
/* /*
Closes a zip entry, flushes buffer and releases resources. Closes a zip entry, flushes buffer and releases resources.
@ -77,6 +106,67 @@ extern int zip_entry_open(struct zip_t *zip, const char *entryname);
*/ */
extern int zip_entry_close(struct zip_t *zip); extern int zip_entry_close(struct zip_t *zip);
/*
Returns a local name of the current zip entry.
The main difference between user's entry name and local entry name
is optional relative path.
Following .ZIP File Format Specification - the path stored MUST not contain
a drive or device letter, or a leading slash.
All slashes MUST be forward slashes '/' as opposed to backwards slashes '\'
for compatibility with Amiga and UNIX file systems etc.
Args:
zip: zip archive handler.
Returns:
The pointer to the current zip entry name, or NULL on error.
*/
extern const char *zip_entry_name(struct zip_t *zip);
/*
Returns an index of the current zip entry.
Args:
zip: zip archive handler.
Returns:
The index on success, negative number (< 0) on error.
*/
extern int zip_entry_index(struct zip_t *zip);
/*
Determines if the current zip entry is a directory entry.
Args:
zip: zip archive handler.
Returns:
The return code - 1 (true), 0 (false), negative number (< 0) on error.
*/
extern int zip_entry_isdir(struct zip_t *zip);
/*
Returns an uncompressed size of the current zip entry.
Args:
zip: zip archive handler.
Returns:
The uncompressed size in bytes.
*/
extern unsigned long long zip_entry_size(struct zip_t *zip);
/*
Returns CRC-32 checksum of the current zip entry.
Args:
zip: zip archive handler.
Returns:
The CRC-32 checksum.
*/
extern unsigned int zip_entry_crc32(struct zip_t *zip);
/* /*
Compresses an input buffer for the current zip entry. Compresses an input buffer for the current zip entry.
@ -116,9 +206,31 @@ extern int zip_entry_fwrite(struct zip_t *zip, const char *filename);
- for large entries, please take a look at zip_entry_extract function. - for large entries, please take a look at zip_entry_extract function.
Returns: Returns:
The return code - 0 on success, negative number (< 0) on error. The return code - the number of bytes actually read on success.
Otherwise a -1 on error.
*/ */
extern int zip_entry_read(struct zip_t *zip, void **buf, size_t *bufsize); extern ssize_t zip_entry_read(struct zip_t *zip, void **buf, size_t *bufsize);
/*
Extracts the current zip entry into a memory buffer using no memory
allocation.
Args:
zip: zip archive handler.
buf: preallocated output buffer.
bufsize: output buffer size (in bytes).
Note:
- ensure supplied output buffer is large enough.
- zip_entry_size function (returns uncompressed size for the current entry)
can be handy to estimate how big buffer is needed.
- for large entries, please take a look at zip_entry_extract function.
Returns:
The return code - the number of bytes actually read on success.
Otherwise a -1 on error (e.g. bufsize is not large enough).
*/
extern ssize_t zip_entry_noallocread(struct zip_t *zip, void *buf, size_t bufsize);
/* /*
Extracts the current zip entry into output file. Extracts the current zip entry into output file.
@ -133,9 +245,9 @@ extern int zip_entry_read(struct zip_t *zip, void **buf, size_t *bufsize);
extern int zip_entry_fread(struct zip_t *zip, const char *filename); extern int zip_entry_fread(struct zip_t *zip, const char *filename);
/* /*
Extract the current zip entry using a callback function (on_extract). Extracts the current zip entry using a callback function (on_extract).
Args: Args:
zip: zip archive handler. zip: zip archive handler.
on_extract: callback function. on_extract: callback function.
arg: opaque pointer (optional argument, arg: opaque pointer (optional argument,
@ -144,12 +256,23 @@ extern int zip_entry_fread(struct zip_t *zip, const char *filename);
Returns: Returns:
The return code - 0 on success, negative number (< 0) on error. The return code - 0 on success, negative number (< 0) on error.
*/ */
extern int zip_entry_extract(struct zip_t *zip, extern int
size_t (*on_extract)(void *arg, zip_entry_extract(struct zip_t *zip,
unsigned long long offset, size_t (*on_extract)(void *arg, unsigned long long offset,
const void *data, const void *data, size_t size),
size_t size), void *arg);
void *arg);
/*
Returns the number of all entries (files and directories) in the zip archive.
Args:
zip: zip archive handler.
Returns:
The return code - the number of entries on success,
negative number (< 0) on error.
*/
extern int zip_total_entries(struct zip_t *zip);
/* /*
Creates a new archive and puts files into a single zip archive. Creates a new archive and puts files into a single zip archive.
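Taken together, the new read-side declarations above (zip_total_entries, zip_entry_openbyindex, zip_entry_isdir, zip_entry_size, zip_entry_name and zip_entry_noallocread) allow walking an archive and reading each entry into a caller-owned buffer. The following sketch is not part of the patch; it only combines the functions declared in this header, with minimal error handling:
```c
#include <stdio.h>
#include <stdlib.h>
#include <zip.h>

int main(void) {
    struct zip_t *zip = zip_open("foo.zip", 0, 'r');
    if (zip == NULL) {
        return 1;
    }

    int i, n = zip_total_entries(zip);
    for (i = 0; i < n; ++i) {
        if (zip_entry_openbyindex(zip, i) < 0) {
            continue;
        }
        if (!zip_entry_isdir(zip)) {
            /* zip_entry_size reports the uncompressed size, so the buffer can
               be allocated up front and filled with zip_entry_noallocread. */
            size_t bufsize = (size_t)zip_entry_size(zip);
            void *buf = malloc(bufsize);
            if (buf != NULL) {
                ssize_t nread = zip_entry_noallocread(zip, buf, bufsize);
                printf("%s: read %ld of %lu bytes\n", zip_entry_name(zip),
                       (long)nread, (unsigned long)bufsize);
                free(buf);
            }
        }
        zip_entry_close(zip);
    }
    zip_close(zip);

    return 0;
}
```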

View File

@ -1,7 +1,19 @@
cmake_minimum_required(VERSION 2.8) cmake_minimum_required(VERSION 2.8)
if ("${CMAKE_C_COMPILER_ID}" STREQUAL "GNU" OR "${CMAKE_C_COMPILER_ID}" STREQUAL "Clang" OR "${CMAKE_C_COMPILER_ID}" STREQUAL "AppleClang")
if(ENABLE_COVERAGE)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g ")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -O0")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fprofile-arcs")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -ftest-coverage")
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} --coverage")
endif()
endif ()
# test # test
include_directories(../src) include_directories(../src)
add_executable(test.exe test.c ../src/zip.c) add_executable(test.exe test.c ../src/zip.c)
add_executable(test_miniz.exe test_miniz.c)
add_test(NAME test COMMAND test.exe) add_test(NAME test COMMAND test.exe)
add_test(NAME test_miniz COMMAND test_miniz.exe)

View File

@ -4,102 +4,456 @@
#include <stdio.h> #include <stdio.h>
#include <stdlib.h> #include <stdlib.h>
#include <string.h> #include <string.h>
#include <sys/stat.h>
#define ZIPNAME "test.zip" #if defined(_MSC_VER) || defined(__MINGW64__) || defined(__MINGW32__)
#define MZ_FILE_STAT_STRUCT _stat
#define MZ_FILE_STAT _stat
#else
#define MZ_FILE_STAT_STRUCT stat
#define MZ_FILE_STAT stat
#endif
#define ZIPNAME "test.zip\0"
#define TESTDATA1 "Some test data 1...\0" #define TESTDATA1 "Some test data 1...\0"
#define CRC32DATA1 2220805626
#define TESTDATA2 "Some test data 2...\0" #define TESTDATA2 "Some test data 2...\0"
#define CRC32DATA2 2532008468
#define RFILE "4.txt\0"
#define RMODE 0100444
#define WFILE "6.txt\0"
#define WMODE 0100666
#define XFILE "7.txt\0"
#define XMODE 0100777
#define UNUSED(x) (void)x
static int total_entries = 0;
static void test_write(void) { static void test_write(void) {
struct zip_t *zip = zip_open(ZIPNAME, ZIP_DEFAULT_COMPRESSION_LEVEL, 'w'); struct zip_t *zip = zip_open(ZIPNAME, ZIP_DEFAULT_COMPRESSION_LEVEL, 'w');
assert(zip != NULL); assert(zip != NULL);
assert(0 == zip_entry_open(zip, "test/test-1.txt")); assert(0 == zip_entry_open(zip, "test/test-1.txt"));
assert(0 == zip_entry_write(zip, TESTDATA1, strlen(TESTDATA1))); assert(0 == zip_entry_write(zip, TESTDATA1, strlen(TESTDATA1)));
assert(0 == zip_entry_close(zip)); assert(0 == strcmp(zip_entry_name(zip), "test/test-1.txt"));
assert(total_entries == zip_entry_index(zip));
assert(strlen(TESTDATA1) == zip_entry_size(zip));
assert(CRC32DATA1 == zip_entry_crc32(zip));
++total_entries;
assert(0 == zip_entry_close(zip));
zip_close(zip); zip_close(zip);
} }
static void test_append(void) { static void test_append(void) {
struct zip_t *zip = zip_open(ZIPNAME, ZIP_DEFAULT_COMPRESSION_LEVEL, 'a'); struct zip_t *zip = zip_open(ZIPNAME, ZIP_DEFAULT_COMPRESSION_LEVEL, 'a');
assert(zip != NULL); assert(zip != NULL);
assert(0 == zip_entry_open(zip, "test\\test-2.txt")); assert(0 == zip_entry_open(zip, "test\\test-2.txt"));
assert(0 == zip_entry_write(zip, TESTDATA2, strlen(TESTDATA2))); assert(0 == strcmp(zip_entry_name(zip), "test/test-2.txt"));
assert(0 == zip_entry_close(zip)); assert(total_entries == zip_entry_index(zip));
assert(0 == zip_entry_write(zip, TESTDATA2, strlen(TESTDATA2)));
assert(strlen(TESTDATA2) == zip_entry_size(zip));
assert(CRC32DATA2 == zip_entry_crc32(zip));
zip_close(zip); ++total_entries;
assert(0 == zip_entry_close(zip));
assert(0 == zip_entry_open(zip, "test\\empty/"));
assert(0 == strcmp(zip_entry_name(zip), "test/empty/"));
assert(0 == zip_entry_size(zip));
assert(0 == zip_entry_crc32(zip));
assert(total_entries == zip_entry_index(zip));
++total_entries;
assert(0 == zip_entry_close(zip));
assert(0 == zip_entry_open(zip, "empty/"));
assert(0 == strcmp(zip_entry_name(zip), "empty/"));
assert(0 == zip_entry_size(zip));
assert(0 == zip_entry_crc32(zip));
assert(total_entries == zip_entry_index(zip));
++total_entries;
assert(0 == zip_entry_close(zip));
zip_close(zip);
} }
static void test_read(void) { static void test_read(void) {
char *buf = NULL; char *buf = NULL;
size_t bufsize; ssize_t bufsize;
struct zip_t *zip = zip_open(ZIPNAME, 0, 'r'); size_t buftmp;
assert(zip != NULL); struct zip_t *zip = zip_open(ZIPNAME, 0, 'r');
assert(zip != NULL);
assert(0 == zip_entry_open(zip, "test\\test-1.txt")); assert(0 == zip_entry_open(zip, "test\\test-1.txt"));
assert(0 == zip_entry_read(zip, (void **)&buf, &bufsize)); assert(strlen(TESTDATA1) == zip_entry_size(zip));
assert(bufsize == strlen(TESTDATA1)); assert(CRC32DATA1 == zip_entry_crc32(zip));
assert(0 == strncmp(buf, TESTDATA1, bufsize));
assert(0 == zip_entry_close(zip));
free(buf);
buf = NULL;
bufsize = 0;
assert(0 == zip_entry_open(zip, "test/test-2.txt")); bufsize = zip_entry_read(zip, (void **)&buf, &buftmp);
assert(0 == zip_entry_read(zip, (void **)&buf, &bufsize)); assert(bufsize == strlen(TESTDATA1));
assert(bufsize == strlen(TESTDATA2)); assert((size_t)bufsize == buftmp);
assert(0 == strncmp(buf, TESTDATA2, bufsize)); assert(0 == strncmp(buf, TESTDATA1, bufsize));
assert(0 == zip_entry_close(zip)); assert(0 == zip_entry_close(zip));
free(buf); free(buf);
buf = NULL; buf = NULL;
bufsize = 0;
assert(0 == zip_entry_open(zip, "test/test-2.txt"));
assert(strlen(TESTDATA2) == zip_entry_size(zip));
assert(CRC32DATA2 == zip_entry_crc32(zip));
zip_close(zip); bufsize = zip_entry_read(zip, (void **)&buf, NULL);
assert((size_t)bufsize == strlen(TESTDATA2));
assert(0 == strncmp(buf, TESTDATA2, (size_t)bufsize));
assert(0 == zip_entry_close(zip));
free(buf);
buf = NULL;
bufsize = 0;
assert(0 == zip_entry_open(zip, "test\\empty/"));
assert(0 == strcmp(zip_entry_name(zip), "test/empty/"));
assert(0 == zip_entry_size(zip));
assert(0 == zip_entry_crc32(zip));
assert(0 == zip_entry_close(zip));
buftmp = strlen(TESTDATA2);
buf = calloc(buftmp, sizeof(char));
assert(0 == zip_entry_open(zip, "test/test-2.txt"));
bufsize = zip_entry_noallocread(zip, (void *)buf, buftmp);
assert(buftmp == (size_t)bufsize);
assert(0 == strncmp(buf, TESTDATA2, buftmp));
assert(0 == zip_entry_close(zip));
free(buf);
buf = NULL;
buftmp = strlen(TESTDATA1);
buf = calloc(buftmp, sizeof(char));
assert(0 == zip_entry_open(zip, "test/test-1.txt"));
bufsize = zip_entry_noallocread(zip, (void *)buf, buftmp);
assert(buftmp == (size_t)bufsize);
assert(0 == strncmp(buf, TESTDATA1, buftmp));
assert(0 == zip_entry_close(zip));
free(buf);
buf = NULL;
bufsize = 0;
zip_close(zip);
} }
struct buffer_t { struct buffer_t {
char *data; char *data;
size_t size; size_t size;
}; };
static size_t on_extract(void *arg, unsigned long long offset, const void *data, static size_t on_extract(void *arg, unsigned long long offset, const void *data,
size_t size) { size_t size) {
struct buffer_t *buf = (struct buffer_t *)arg; UNUSED(offset);
buf->data = realloc(buf->data, buf->size + size + 1);
assert(NULL != buf->data);
memcpy(&(buf->data[buf->size]), data, size); struct buffer_t *buf = (struct buffer_t *)arg;
buf->size += size; buf->data = realloc(buf->data, buf->size + size + 1);
buf->data[buf->size] = 0; assert(NULL != buf->data);
return size; memcpy(&(buf->data[buf->size]), data, size);
buf->size += size;
buf->data[buf->size] = 0;
return size;
} }
static void test_extract(void) { static void test_extract(void) {
struct buffer_t buf = {0}; struct buffer_t buf;
struct zip_t *zip = zip_open(ZIPNAME, 0, 'r'); struct zip_t *zip = zip_open(ZIPNAME, 0, 'r');
assert(zip != NULL); assert(zip != NULL);
memset((void *)&buf, 0, sizeof(struct buffer_t));
assert(0 == zip_entry_open(zip, "test/test-1.txt")); assert(0 == zip_entry_open(zip, "test/test-1.txt"));
assert(0 == zip_entry_extract(zip, on_extract, &buf)); assert(0 == zip_entry_extract(zip, on_extract, &buf));
assert(buf.size == strlen(TESTDATA1)); assert(buf.size == strlen(TESTDATA1));
assert(0 == strncmp(buf.data, TESTDATA1, buf.size)); assert(0 == strncmp(buf.data, TESTDATA1, buf.size));
assert(0 == zip_entry_close(zip));
free(buf.data);
buf.data = NULL;
buf.size = 0;
zip_close(zip);
}
static void test_total_entries(void) {
struct zip_t *zip = zip_open(ZIPNAME, 0, 'r');
assert(zip != NULL);
int n = zip_total_entries(zip);
zip_close(zip);
assert(n == total_entries);
}
static void test_entry_name(void) {
struct zip_t *zip = zip_open(ZIPNAME, 0, 'r');
assert(zip != NULL);
assert(zip_entry_name(zip) == NULL);
assert(0 == zip_entry_open(zip, "test\\test-1.txt"));
assert(NULL != zip_entry_name(zip));
assert(0 == strcmp(zip_entry_name(zip), "test/test-1.txt"));
assert(strlen(TESTDATA1) == zip_entry_size(zip));
assert(CRC32DATA1 == zip_entry_crc32(zip));
assert(0 == zip_entry_index(zip));
assert(0 == zip_entry_close(zip));
assert(0 == zip_entry_open(zip, "test/test-2.txt"));
assert(NULL != zip_entry_name(zip));
assert(0 == strcmp(zip_entry_name(zip), "test/test-2.txt"));
assert(strlen(TESTDATA2) == zip_entry_size(zip));
assert(CRC32DATA2 == zip_entry_crc32(zip));
assert(1 == zip_entry_index(zip));
assert(0 == zip_entry_close(zip));
zip_close(zip);
}
static void test_entry_index(void) {
struct zip_t *zip = zip_open(ZIPNAME, 0, 'r');
assert(zip != NULL);
assert(0 == zip_entry_open(zip, "test\\test-1.txt"));
assert(0 == zip_entry_index(zip));
assert(0 == strcmp(zip_entry_name(zip), "test/test-1.txt"));
assert(strlen(TESTDATA1) == zip_entry_size(zip));
assert(CRC32DATA1 == zip_entry_crc32(zip));
assert(0 == zip_entry_close(zip));
assert(0 == zip_entry_open(zip, "test/test-2.txt"));
assert(1 == zip_entry_index(zip));
assert(0 == strcmp(zip_entry_name(zip), "test/test-2.txt"));
assert(strlen(TESTDATA2) == zip_entry_size(zip));
assert(CRC32DATA2 == zip_entry_crc32(zip));
assert(0 == zip_entry_close(zip));
zip_close(zip);
}
static void test_entry_openbyindex(void) {
struct zip_t *zip = zip_open(ZIPNAME, 0, 'r');
assert(zip != NULL);
assert(0 == zip_entry_openbyindex(zip, 1));
assert(1 == zip_entry_index(zip));
assert(strlen(TESTDATA2) == zip_entry_size(zip));
assert(CRC32DATA2 == zip_entry_crc32(zip));
assert(0 == strcmp(zip_entry_name(zip), "test/test-2.txt"));
assert(0 == zip_entry_close(zip));
assert(0 == zip_entry_openbyindex(zip, 0));
assert(0 == zip_entry_index(zip));
assert(strlen(TESTDATA1) == zip_entry_size(zip));
assert(CRC32DATA1 == zip_entry_crc32(zip));
assert(0 == strcmp(zip_entry_name(zip), "test/test-1.txt"));
assert(0 == zip_entry_close(zip));
zip_close(zip);
}
static void test_list_entries(void) {
struct zip_t *zip = zip_open(ZIPNAME, 0, 'r');
assert(zip != NULL);
int i = 0, n = zip_total_entries(zip);
for (; i < n; ++i) {
assert(0 == zip_entry_openbyindex(zip, i));
fprintf(stdout, "[%d]: %s", i, zip_entry_name(zip));
if (zip_entry_isdir(zip)) {
fprintf(stdout, " (DIR)");
}
fprintf(stdout, "\n");
assert(0 == zip_entry_close(zip)); assert(0 == zip_entry_close(zip));
free(buf.data); }
buf.data = NULL;
buf.size = 0;
zip_close(zip); zip_close(zip);
}
static void test_fwrite(void) {
const char *filename = WFILE;
FILE *stream = NULL;
struct zip_t *zip = NULL;
#if defined(_MSC_VER)
if (0 != fopen_s(&stream, filename, "w+"))
#else
if (!(stream = fopen(filename, "w+")))
#endif
{
// Cannot open filename
fprintf(stdout, "Cannot open filename\n");
assert(0 == -1);
}
fwrite(TESTDATA1, sizeof(char), strlen(TESTDATA1), stream);
assert(0 == fclose(stream));
zip = zip_open(ZIPNAME, 9, 'w');
assert(zip != NULL);
assert(0 == zip_entry_open(zip, WFILE));
assert(0 == zip_entry_fwrite(zip, WFILE));
assert(0 == zip_entry_close(zip));
zip_close(zip);
remove(WFILE);
remove(ZIPNAME);
}
static void test_exe_permissions(void) {
#if defined(_WIN32) || defined(__WIN32__)
#else
struct MZ_FILE_STAT_STRUCT file_stats;
const char *filenames[] = {XFILE};
FILE *f = fopen(XFILE, "w");
fclose(f);
chmod(XFILE, XMODE);
remove(ZIPNAME);
assert(0 == zip_create(ZIPNAME, filenames, 1));
remove(XFILE);
assert(0 == zip_extract(ZIPNAME, ".", NULL, NULL));
assert(0 == MZ_FILE_STAT(XFILE, &file_stats));
assert(XMODE == file_stats.st_mode);
remove(XFILE);
remove(ZIPNAME);
#endif
}
static void test_read_permissions(void) {
#if defined(_MSC_VER)
#else
struct MZ_FILE_STAT_STRUCT file_stats;
const char *filenames[] = {RFILE};
FILE *f = fopen(RFILE, "w");
fclose(f);
chmod(RFILE, RMODE);
remove(ZIPNAME);
assert(0 == zip_create(ZIPNAME, filenames, 1));
// chmod from 444 to 666 to be able delete the file on windows
chmod(RFILE, WMODE);
remove(RFILE);
assert(0 == zip_extract(ZIPNAME, ".", NULL, NULL));
assert(0 == MZ_FILE_STAT(RFILE, &file_stats));
assert(RMODE == file_stats.st_mode);
chmod(RFILE, WMODE);
remove(RFILE);
remove(ZIPNAME);
#endif
}
static void test_write_permissions(void) {
#if defined(_MSC_VER)
#else
struct MZ_FILE_STAT_STRUCT file_stats;
const char *filenames[] = {WFILE};
FILE *f = fopen(WFILE, "w");
fclose(f);
chmod(WFILE, WMODE);
remove(ZIPNAME);
assert(0 == zip_create(ZIPNAME, filenames, 1));
remove(WFILE);
assert(0 == zip_extract(ZIPNAME, ".", NULL, NULL));
assert(0 == MZ_FILE_STAT(WFILE, &file_stats));
assert(WMODE == file_stats.st_mode);
remove(WFILE);
remove(ZIPNAME);
#endif
}
static void test_mtime(void) {
struct MZ_FILE_STAT_STRUCT file_stat1, file_stat2;
const char *filename = WFILE;
FILE *stream = NULL;
struct zip_t *zip = NULL;
#if defined(_MSC_VER)
if (0 != fopen_s(&stream, filename, "w+"))
#else
if (!(stream = fopen(filename, "w+")))
#endif
{
// Cannot open filename
fprintf(stdout, "Cannot open filename\n");
assert(0 == -1);
}
fwrite(TESTDATA1, sizeof(char), strlen(TESTDATA1), stream);
assert(0 == fclose(stream));
memset(&file_stat1, 0, sizeof(file_stat1));
memset(&file_stat2, 0, sizeof(file_stat2));
zip = zip_open(ZIPNAME, ZIP_DEFAULT_COMPRESSION_LEVEL, 'w');
assert(zip != NULL);
assert(0 == zip_entry_open(zip, filename));
assert(0 == zip_entry_fwrite(zip, filename));
assert(0 == zip_entry_close(zip));
zip_close(zip);
assert(0 == MZ_FILE_STAT(filename, &file_stat1));
remove(filename);
assert(0 == zip_extract(ZIPNAME, ".", NULL, NULL));
assert(0 == MZ_FILE_STAT(filename, &file_stat2));
fprintf(stdout, "file_stat1.st_mtime: %lu\n", file_stat1.st_mtime);
fprintf(stdout, "file_stat2.st_mtime: %lu\n", file_stat2.st_mtime);
assert(labs(file_stat1.st_mtime - file_stat2.st_mtime) <= 1);
remove(filename);
remove(ZIPNAME);
} }
int main(int argc, char *argv[]) { int main(int argc, char *argv[]) {
test_write(); UNUSED(argc);
test_append(); UNUSED(argv);
test_read();
test_extract();
return remove(ZIPNAME); remove(ZIPNAME);
test_write();
test_append();
test_read();
test_extract();
test_total_entries();
test_entry_name();
test_entry_index();
test_entry_openbyindex();
test_list_entries();
test_fwrite();
test_read_permissions();
test_write_permissions();
test_exe_permissions();
test_mtime();
remove(ZIPNAME);
return 0;
} }

View File

@ -0,0 +1,104 @@
// Demonstrates miniz.c's compress() and uncompress() functions
// (same as zlib's). Public domain, May 15 2011, Rich Geldreich,
// richgel99@gmail.com. See "unlicense" statement at the end of tinfl.c.
#include <miniz.h>
#include <stdio.h>
typedef unsigned char uint8;
typedef unsigned short uint16;
typedef unsigned int uint;
// The string to compress.
static const char *s_pStr =
"Good morning Dr. Chandra. This is Hal. I am ready for my first lesson."
"Good morning Dr. Chandra. This is Hal. I am ready for my first lesson."
"Good morning Dr. Chandra. This is Hal. I am ready for my first lesson."
"Good morning Dr. Chandra. This is Hal. I am ready for my first lesson."
"Good morning Dr. Chandra. This is Hal. I am ready for my first lesson."
"Good morning Dr. Chandra. This is Hal. I am ready for my first lesson."
"Good morning Dr. Chandra. This is Hal. I am ready for my first lesson.";
int main(int argc, char *argv[]) {
uint step = 0;
int cmp_status;
uLong src_len = (uLong)strlen(s_pStr);
uLong cmp_len = compressBound(src_len);
uLong uncomp_len = src_len;
uint8 *pCmp, *pUncomp;
uint total_succeeded = 0;
(void)argc, (void)argv;
printf("miniz.c version: %s\n", MZ_VERSION);
do {
// Allocate buffers to hold compressed and uncompressed data.
pCmp = (mz_uint8 *)malloc((size_t)cmp_len);
pUncomp = (mz_uint8 *)malloc((size_t)src_len);
if ((!pCmp) || (!pUncomp)) {
printf("Out of memory!\n");
return EXIT_FAILURE;
}
// Compress the string.
cmp_status =
compress(pCmp, &cmp_len, (const unsigned char *)s_pStr, src_len);
if (cmp_status != Z_OK) {
printf("compress() failed!\n");
free(pCmp);
free(pUncomp);
return EXIT_FAILURE;
}
printf("Compressed from %u to %u bytes\n", (mz_uint32)src_len,
(mz_uint32)cmp_len);
if (step) {
// Purposely corrupt the compressed data if fuzzy testing (this is a
// very crude fuzzy test).
uint n = 1 + (rand() % 3);
while (n--) {
uint i = rand() % cmp_len;
pCmp[i] ^= (rand() & 0xFF);
}
}
// Decompress.
cmp_status = uncompress(pUncomp, &uncomp_len, pCmp, cmp_len);
total_succeeded += (cmp_status == Z_OK);
if (step) {
printf("Simple fuzzy test: step %u total_succeeded: %u\n", step,
total_succeeded);
} else {
if (cmp_status != Z_OK) {
printf("uncompress failed!\n");
free(pCmp);
free(pUncomp);
return EXIT_FAILURE;
}
printf("Decompressed from %u to %u bytes\n", (mz_uint32)cmp_len,
(mz_uint32)uncomp_len);
// Ensure uncompress() returned the expected data.
if ((uncomp_len != src_len) ||
(memcmp(pUncomp, s_pStr, (size_t)src_len))) {
printf("Decompression failed!\n");
free(pCmp);
free(pUncomp);
return EXIT_FAILURE;
}
}
free(pCmp);
free(pUncomp);
step++;
// Keep on fuzzy testing if there's a non-empty command line.
} while (argc >= 2);
printf("Success.\n");
return EXIT_SUCCESS;
}

View File

@ -900,6 +900,7 @@ extern "C" {
#define AI_MATKEY_ENABLE_WIREFRAME "$mat.wireframe",0,0 #define AI_MATKEY_ENABLE_WIREFRAME "$mat.wireframe",0,0
#define AI_MATKEY_BLEND_FUNC "$mat.blend",0,0 #define AI_MATKEY_BLEND_FUNC "$mat.blend",0,0
#define AI_MATKEY_OPACITY "$mat.opacity",0,0 #define AI_MATKEY_OPACITY "$mat.opacity",0,0
#define AI_MATKEY_TRANSPARENCYFACTOR "$mat.transparencyfactor",0,0
#define AI_MATKEY_BUMPSCALING "$mat.bumpscaling",0,0 #define AI_MATKEY_BUMPSCALING "$mat.bumpscaling",0,0
#define AI_MATKEY_SHININESS "$mat.shininess",0,0 #define AI_MATKEY_SHININESS "$mat.shininess",0,0
#define AI_MATKEY_REFLECTIVITY "$mat.reflectivity",0,0 #define AI_MATKEY_REFLECTIVITY "$mat.reflectivity",0,0
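The new AI_MATKEY_TRANSPARENCYFACTOR key can be queried like any other float material property. A minimal sketch, assuming assimp's existing C accessor aiGetMaterialFloat (not part of this hunk) and an already imported scene:
```c
#include <assimp/scene.h>
#include <assimp/material.h>
#include <stdio.h>

/* Prints $mat.transparencyfactor for every material that carries the key.
   The scene is assumed to come from a regular aiImportFile() call. */
static void print_transparency(const struct aiScene *scene) {
    unsigned int i;
    for (i = 0; i < scene->mNumMaterials; ++i) {
        float transparency = 1.0f; /* stays untouched if the key is absent */
        if (AI_SUCCESS == aiGetMaterialFloat(scene->mMaterials[i],
                                             AI_MATKEY_TRANSPARENCYFACTOR,
                                             &transparency)) {
            printf("material %u: transparency factor = %f\n", i, transparency);
        }
    }
}
```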

File diff suppressed because it is too large Load Diff

View File

@ -21,7 +21,7 @@ FORMATS = ["CSM",
"STL", "STL",
"IRR", "IRR",
"Q3O", "Q3O",
"Q3D" "Q3D",
"MS3D", "MS3D",
"Q3S", "Q3S",
"ZGL", "ZGL",

View File

@ -1,280 +1,279 @@
#-*- coding: UTF-8 -*- #-*- coding: UTF-8 -*-
""" """
Some fancy helper functions. Some fancy helper functions.
""" """
import os import os
import ctypes import ctypes
from ctypes import POINTER import operator
import operator
from distutils.sysconfig import get_python_lib
from distutils.sysconfig import get_python_lib import re
import re import sys
import sys
try: import numpy
try: import numpy except ImportError: numpy = None
except: numpy = None
import logging;logger = logging.getLogger("pyassimp")
import logging;logger = logging.getLogger("pyassimp")
from .errors import AssimpError
from .errors import AssimpError
additional_dirs, ext_whitelist = [],[]
additional_dirs, ext_whitelist = [],[]
# populate search directories and lists of allowed file extensions
# populate search directories and lists of allowed file extensions # depending on the platform we're running on.
# depending on the platform we're running on. if os.name=='posix':
if os.name=='posix': additional_dirs.append('./')
additional_dirs.append('./') additional_dirs.append('/usr/lib/')
additional_dirs.append('/usr/lib/') additional_dirs.append('/usr/lib/x86_64-linux-gnu/')
additional_dirs.append('/usr/lib/x86_64-linux-gnu/') additional_dirs.append('/usr/local/lib/')
additional_dirs.append('/usr/local/lib/')
if 'LD_LIBRARY_PATH' in os.environ:
if 'LD_LIBRARY_PATH' in os.environ: additional_dirs.extend([item for item in os.environ['LD_LIBRARY_PATH'].split(':') if item])
additional_dirs.extend([item for item in os.environ['LD_LIBRARY_PATH'].split(':') if item])
# check if running from anaconda.
# check if running from anaconda. if "conda" or "continuum" in sys.version.lower():
if "conda" or "continuum" in sys.version.lower(): cur_path = get_python_lib()
cur_path = get_python_lib() pattern = re.compile('.*\/lib\/')
pattern = re.compile('.*\/lib\/') conda_lib = pattern.match(cur_path).group()
conda_lib = pattern.match(cur_path).group() logger.info("Adding Anaconda lib path:"+ conda_lib)
logger.info("Adding Anaconda lib path:"+ conda_lib) additional_dirs.append(conda_lib)
additional_dirs.append(conda_lib)
# note - this won't catch libassimp.so.N.n, but
# note - this won't catch libassimp.so.N.n, but # currently there's always a symlink called
# currently there's always a symlink called # libassimp.so in /usr/local/lib.
# libassimp.so in /usr/local/lib. ext_whitelist.append('.so')
ext_whitelist.append('.so') # libassimp.dylib in /usr/local/lib
# libassimp.dylib in /usr/local/lib ext_whitelist.append('.dylib')
ext_whitelist.append('.dylib')
elif os.name=='nt':
elif os.name=='nt': ext_whitelist.append('.dll')
ext_whitelist.append('.dll') path_dirs = os.environ['PATH'].split(';')
path_dirs = os.environ['PATH'].split(';') additional_dirs.extend(path_dirs)
additional_dirs.extend(path_dirs)
def vec2tuple(x):
def vec2tuple(x): """ Converts a VECTOR3D to a Tuple """
""" Converts a VECTOR3D to a Tuple """ return (x.x, x.y, x.z)
return (x.x, x.y, x.z)
def transform(vector3, matrix4x4):
def transform(vector3, matrix4x4): """ Apply a transformation matrix on a 3D vector.
""" Apply a transformation matrix on a 3D vector.
:param vector3: array with 3 elements
:param vector3: array with 3 elements :param matrix4x4: 4x4 matrix
:param matrix4x4: 4x4 matrix """
""" if numpy:
if numpy: return numpy.dot(matrix4x4, numpy.append(vector3, 1.))
return numpy.dot(matrix4x4, numpy.append(vector3, 1.)) else:
else: m0,m1,m2,m3 = matrix4x4; x,y,z = vector3
m0,m1,m2,m3 = matrix4x4; x,y,z = vector3 return [
return [ m0[0]*x + m0[1]*y + m0[2]*z + m0[3],
m0[0]*x + m0[1]*y + m0[2]*z + m0[3], m1[0]*x + m1[1]*y + m1[2]*z + m1[3],
m1[0]*x + m1[1]*y + m1[2]*z + m1[3], m2[0]*x + m2[1]*y + m2[2]*z + m2[3],
m2[0]*x + m2[1]*y + m2[2]*z + m2[3], m3[0]*x + m3[1]*y + m3[2]*z + m3[3]
m3[0]*x + m3[1]*y + m3[2]*z + m3[3] ]
]
def _inv(matrix4x4):
def _inv(matrix4x4): m0,m1,m2,m3 = matrix4x4
m0,m1,m2,m3 = matrix4x4
det = m0[3]*m1[2]*m2[1]*m3[0] - m0[2]*m1[3]*m2[1]*m3[0] - \
det = m0[3]*m1[2]*m2[1]*m3[0] - m0[2]*m1[3]*m2[1]*m3[0] - \ m0[3]*m1[1]*m2[2]*m3[0] + m0[1]*m1[3]*m2[2]*m3[0] + \
m0[3]*m1[1]*m2[2]*m3[0] + m0[1]*m1[3]*m2[2]*m3[0] + \ m0[2]*m1[1]*m2[3]*m3[0] - m0[1]*m1[2]*m2[3]*m3[0] - \
m0[2]*m1[1]*m2[3]*m3[0] - m0[1]*m1[2]*m2[3]*m3[0] - \ m0[3]*m1[2]*m2[0]*m3[1] + m0[2]*m1[3]*m2[0]*m3[1] + \
m0[3]*m1[2]*m2[0]*m3[1] + m0[2]*m1[3]*m2[0]*m3[1] + \ m0[3]*m1[0]*m2[2]*m3[1] - m0[0]*m1[3]*m2[2]*m3[1] - \
m0[3]*m1[0]*m2[2]*m3[1] - m0[0]*m1[3]*m2[2]*m3[1] - \ m0[2]*m1[0]*m2[3]*m3[1] + m0[0]*m1[2]*m2[3]*m3[1] + \
m0[2]*m1[0]*m2[3]*m3[1] + m0[0]*m1[2]*m2[3]*m3[1] + \ m0[3]*m1[1]*m2[0]*m3[2] - m0[1]*m1[3]*m2[0]*m3[2] - \
m0[3]*m1[1]*m2[0]*m3[2] - m0[1]*m1[3]*m2[0]*m3[2] - \ m0[3]*m1[0]*m2[1]*m3[2] + m0[0]*m1[3]*m2[1]*m3[2] + \
m0[3]*m1[0]*m2[1]*m3[2] + m0[0]*m1[3]*m2[1]*m3[2] + \ m0[1]*m1[0]*m2[3]*m3[2] - m0[0]*m1[1]*m2[3]*m3[2] - \
m0[1]*m1[0]*m2[3]*m3[2] - m0[0]*m1[1]*m2[3]*m3[2] - \ m0[2]*m1[1]*m2[0]*m3[3] + m0[1]*m1[2]*m2[0]*m3[3] + \
m0[2]*m1[1]*m2[0]*m3[3] + m0[1]*m1[2]*m2[0]*m3[3] + \ m0[2]*m1[0]*m2[1]*m3[3] - m0[0]*m1[2]*m2[1]*m3[3] - \
m0[2]*m1[0]*m2[1]*m3[3] - m0[0]*m1[2]*m2[1]*m3[3] - \ m0[1]*m1[0]*m2[2]*m3[3] + m0[0]*m1[1]*m2[2]*m3[3]
m0[1]*m1[0]*m2[2]*m3[3] + m0[0]*m1[1]*m2[2]*m3[3]
return[[( m1[2]*m2[3]*m3[1] - m1[3]*m2[2]*m3[1] + m1[3]*m2[1]*m3[2] - m1[1]*m2[3]*m3[2] - m1[2]*m2[1]*m3[3] + m1[1]*m2[2]*m3[3]) /det,
return[[( m1[2]*m2[3]*m3[1] - m1[3]*m2[2]*m3[1] + m1[3]*m2[1]*m3[2] - m1[1]*m2[3]*m3[2] - m1[2]*m2[1]*m3[3] + m1[1]*m2[2]*m3[3]) /det, ( m0[3]*m2[2]*m3[1] - m0[2]*m2[3]*m3[1] - m0[3]*m2[1]*m3[2] + m0[1]*m2[3]*m3[2] + m0[2]*m2[1]*m3[3] - m0[1]*m2[2]*m3[3]) /det,
( m0[3]*m2[2]*m3[1] - m0[2]*m2[3]*m3[1] - m0[3]*m2[1]*m3[2] + m0[1]*m2[3]*m3[2] + m0[2]*m2[1]*m3[3] - m0[1]*m2[2]*m3[3]) /det, ( m0[2]*m1[3]*m3[1] - m0[3]*m1[2]*m3[1] + m0[3]*m1[1]*m3[2] - m0[1]*m1[3]*m3[2] - m0[2]*m1[1]*m3[3] + m0[1]*m1[2]*m3[3]) /det,
( m0[2]*m1[3]*m3[1] - m0[3]*m1[2]*m3[1] + m0[3]*m1[1]*m3[2] - m0[1]*m1[3]*m3[2] - m0[2]*m1[1]*m3[3] + m0[1]*m1[2]*m3[3]) /det, ( m0[3]*m1[2]*m2[1] - m0[2]*m1[3]*m2[1] - m0[3]*m1[1]*m2[2] + m0[1]*m1[3]*m2[2] + m0[2]*m1[1]*m2[3] - m0[1]*m1[2]*m2[3]) /det],
( m0[3]*m1[2]*m2[1] - m0[2]*m1[3]*m2[1] - m0[3]*m1[1]*m2[2] + m0[1]*m1[3]*m2[2] + m0[2]*m1[1]*m2[3] - m0[1]*m1[2]*m2[3]) /det], [( m1[3]*m2[2]*m3[0] - m1[2]*m2[3]*m3[0] - m1[3]*m2[0]*m3[2] + m1[0]*m2[3]*m3[2] + m1[2]*m2[0]*m3[3] - m1[0]*m2[2]*m3[3]) /det,
[( m1[3]*m2[2]*m3[0] - m1[2]*m2[3]*m3[0] - m1[3]*m2[0]*m3[2] + m1[0]*m2[3]*m3[2] + m1[2]*m2[0]*m3[3] - m1[0]*m2[2]*m3[3]) /det, ( m0[2]*m2[3]*m3[0] - m0[3]*m2[2]*m3[0] + m0[3]*m2[0]*m3[2] - m0[0]*m2[3]*m3[2] - m0[2]*m2[0]*m3[3] + m0[0]*m2[2]*m3[3]) /det,
( m0[2]*m2[3]*m3[0] - m0[3]*m2[2]*m3[0] + m0[3]*m2[0]*m3[2] - m0[0]*m2[3]*m3[2] - m0[2]*m2[0]*m3[3] + m0[0]*m2[2]*m3[3]) /det, ( m0[3]*m1[2]*m3[0] - m0[2]*m1[3]*m3[0] - m0[3]*m1[0]*m3[2] + m0[0]*m1[3]*m3[2] + m0[2]*m1[0]*m3[3] - m0[0]*m1[2]*m3[3]) /det,
( m0[3]*m1[2]*m3[0] - m0[2]*m1[3]*m3[0] - m0[3]*m1[0]*m3[2] + m0[0]*m1[3]*m3[2] + m0[2]*m1[0]*m3[3] - m0[0]*m1[2]*m3[3]) /det, ( m0[2]*m1[3]*m2[0] - m0[3]*m1[2]*m2[0] + m0[3]*m1[0]*m2[2] - m0[0]*m1[3]*m2[2] - m0[2]*m1[0]*m2[3] + m0[0]*m1[2]*m2[3]) /det],
( m0[2]*m1[3]*m2[0] - m0[3]*m1[2]*m2[0] + m0[3]*m1[0]*m2[2] - m0[0]*m1[3]*m2[2] - m0[2]*m1[0]*m2[3] + m0[0]*m1[2]*m2[3]) /det], [( m1[1]*m2[3]*m3[0] - m1[3]*m2[1]*m3[0] + m1[3]*m2[0]*m3[1] - m1[0]*m2[3]*m3[1] - m1[1]*m2[0]*m3[3] + m1[0]*m2[1]*m3[3]) /det,
[( m1[1]*m2[3]*m3[0] - m1[3]*m2[1]*m3[0] + m1[3]*m2[0]*m3[1] - m1[0]*m2[3]*m3[1] - m1[1]*m2[0]*m3[3] + m1[0]*m2[1]*m3[3]) /det, ( m0[3]*m2[1]*m3[0] - m0[1]*m2[3]*m3[0] - m0[3]*m2[0]*m3[1] + m0[0]*m2[3]*m3[1] + m0[1]*m2[0]*m3[3] - m0[0]*m2[1]*m3[3]) /det,
( m0[3]*m2[1]*m3[0] - m0[1]*m2[3]*m3[0] - m0[3]*m2[0]*m3[1] + m0[0]*m2[3]*m3[1] + m0[1]*m2[0]*m3[3] - m0[0]*m2[1]*m3[3]) /det, ( m0[1]*m1[3]*m3[0] - m0[3]*m1[1]*m3[0] + m0[3]*m1[0]*m3[1] - m0[0]*m1[3]*m3[1] - m0[1]*m1[0]*m3[3] + m0[0]*m1[1]*m3[3]) /det,
( m0[1]*m1[3]*m3[0] - m0[3]*m1[1]*m3[0] + m0[3]*m1[0]*m3[1] - m0[0]*m1[3]*m3[1] - m0[1]*m1[0]*m3[3] + m0[0]*m1[1]*m3[3]) /det, ( m0[3]*m1[1]*m2[0] - m0[1]*m1[3]*m2[0] - m0[3]*m1[0]*m2[1] + m0[0]*m1[3]*m2[1] + m0[1]*m1[0]*m2[3] - m0[0]*m1[1]*m2[3]) /det],
( m0[3]*m1[1]*m2[0] - m0[1]*m1[3]*m2[0] - m0[3]*m1[0]*m2[1] + m0[0]*m1[3]*m2[1] + m0[1]*m1[0]*m2[3] - m0[0]*m1[1]*m2[3]) /det], [( m1[2]*m2[1]*m3[0] - m1[1]*m2[2]*m3[0] - m1[2]*m2[0]*m3[1] + m1[0]*m2[2]*m3[1] + m1[1]*m2[0]*m3[2] - m1[0]*m2[1]*m3[2]) /det,
[( m1[2]*m2[1]*m3[0] - m1[1]*m2[2]*m3[0] - m1[2]*m2[0]*m3[1] + m1[0]*m2[2]*m3[1] + m1[1]*m2[0]*m3[2] - m1[0]*m2[1]*m3[2]) /det, ( m0[1]*m2[2]*m3[0] - m0[2]*m2[1]*m3[0] + m0[2]*m2[0]*m3[1] - m0[0]*m2[2]*m3[1] - m0[1]*m2[0]*m3[2] + m0[0]*m2[1]*m3[2]) /det,
( m0[1]*m2[2]*m3[0] - m0[2]*m2[1]*m3[0] + m0[2]*m2[0]*m3[1] - m0[0]*m2[2]*m3[1] - m0[1]*m2[0]*m3[2] + m0[0]*m2[1]*m3[2]) /det, ( m0[2]*m1[1]*m3[0] - m0[1]*m1[2]*m3[0] - m0[2]*m1[0]*m3[1] + m0[0]*m1[2]*m3[1] + m0[1]*m1[0]*m3[2] - m0[0]*m1[1]*m3[2]) /det,
( m0[2]*m1[1]*m3[0] - m0[1]*m1[2]*m3[0] - m0[2]*m1[0]*m3[1] + m0[0]*m1[2]*m3[1] + m0[1]*m1[0]*m3[2] - m0[0]*m1[1]*m3[2]) /det, ( m0[1]*m1[2]*m2[0] - m0[2]*m1[1]*m2[0] + m0[2]*m1[0]*m2[1] - m0[0]*m1[2]*m2[1] - m0[1]*m1[0]*m2[2] + m0[0]*m1[1]*m2[2]) /det]]
( m0[1]*m1[2]*m2[0] - m0[2]*m1[1]*m2[0] + m0[2]*m1[0]*m2[1] - m0[0]*m1[2]*m2[1] - m0[1]*m1[0]*m2[2] + m0[0]*m1[1]*m2[2]) /det]]
def get_bounding_box(scene):
def get_bounding_box(scene): bb_min = [1e10, 1e10, 1e10] # x,y,z
bb_min = [1e10, 1e10, 1e10] # x,y,z bb_max = [-1e10, -1e10, -1e10] # x,y,z
bb_max = [-1e10, -1e10, -1e10] # x,y,z inv = numpy.linalg.inv if numpy else _inv
inv = numpy.linalg.inv if numpy else _inv return get_bounding_box_for_node(scene.rootnode, bb_min, bb_max, inv(scene.rootnode.transformation))
return get_bounding_box_for_node(scene.rootnode, bb_min, bb_max, inv(scene.rootnode.transformation))
def get_bounding_box_for_node(node, bb_min, bb_max, transformation):
def get_bounding_box_for_node(node, bb_min, bb_max, transformation):
if numpy:
if numpy: transformation = numpy.dot(transformation, node.transformation)
transformation = numpy.dot(transformation, node.transformation) else:
else: t0,t1,t2,t3 = transformation
t0,t1,t2,t3 = transformation T0,T1,T2,T3 = node.transformation
T0,T1,T2,T3 = node.transformation transformation = [ [
transformation = [ [ t0[0]*T0[0] + t0[1]*T1[0] + t0[2]*T2[0] + t0[3]*T3[0],
t0[0]*T0[0] + t0[1]*T1[0] + t0[2]*T2[0] + t0[3]*T3[0], t0[0]*T0[1] + t0[1]*T1[1] + t0[2]*T2[1] + t0[3]*T3[1],
t0[0]*T0[1] + t0[1]*T1[1] + t0[2]*T2[1] + t0[3]*T3[1], t0[0]*T0[2] + t0[1]*T1[2] + t0[2]*T2[2] + t0[3]*T3[2],
t0[0]*T0[2] + t0[1]*T1[2] + t0[2]*T2[2] + t0[3]*T3[2], t0[0]*T0[3] + t0[1]*T1[3] + t0[2]*T2[3] + t0[3]*T3[3]
t0[0]*T0[3] + t0[1]*T1[3] + t0[2]*T2[3] + t0[3]*T3[3] ],[
],[ t1[0]*T0[0] + t1[1]*T1[0] + t1[2]*T2[0] + t1[3]*T3[0],
t1[0]*T0[0] + t1[1]*T1[0] + t1[2]*T2[0] + t1[3]*T3[0], t1[0]*T0[1] + t1[1]*T1[1] + t1[2]*T2[1] + t1[3]*T3[1],
t1[0]*T0[1] + t1[1]*T1[1] + t1[2]*T2[1] + t1[3]*T3[1], t1[0]*T0[2] + t1[1]*T1[2] + t1[2]*T2[2] + t1[3]*T3[2],
t1[0]*T0[2] + t1[1]*T1[2] + t1[2]*T2[2] + t1[3]*T3[2], t1[0]*T0[3] + t1[1]*T1[3] + t1[2]*T2[3] + t1[3]*T3[3]
t1[0]*T0[3] + t1[1]*T1[3] + t1[2]*T2[3] + t1[3]*T3[3] ],[
],[ t2[0]*T0[0] + t2[1]*T1[0] + t2[2]*T2[0] + t2[3]*T3[0],
t2[0]*T0[0] + t2[1]*T1[0] + t2[2]*T2[0] + t2[3]*T3[0], t2[0]*T0[1] + t2[1]*T1[1] + t2[2]*T2[1] + t2[3]*T3[1],
t2[0]*T0[1] + t2[1]*T1[1] + t2[2]*T2[1] + t2[3]*T3[1], t2[0]*T0[2] + t2[1]*T1[2] + t2[2]*T2[2] + t2[3]*T3[2],
t2[0]*T0[2] + t2[1]*T1[2] + t2[2]*T2[2] + t2[3]*T3[2], t2[0]*T0[3] + t2[1]*T1[3] + t2[2]*T2[3] + t2[3]*T3[3]
t2[0]*T0[3] + t2[1]*T1[3] + t2[2]*T2[3] + t2[3]*T3[3] ],[
],[ t3[0]*T0[0] + t3[1]*T1[0] + t3[2]*T2[0] + t3[3]*T3[0],
t3[0]*T0[0] + t3[1]*T1[0] + t3[2]*T2[0] + t3[3]*T3[0], t3[0]*T0[1] + t3[1]*T1[1] + t3[2]*T2[1] + t3[3]*T3[1],
t3[0]*T0[1] + t3[1]*T1[1] + t3[2]*T2[1] + t3[3]*T3[1], t3[0]*T0[2] + t3[1]*T1[2] + t3[2]*T2[2] + t3[3]*T3[2],
t3[0]*T0[2] + t3[1]*T1[2] + t3[2]*T2[2] + t3[3]*T3[2], t3[0]*T0[3] + t3[1]*T1[3] + t3[2]*T2[3] + t3[3]*T3[3]
t3[0]*T0[3] + t3[1]*T1[3] + t3[2]*T2[3] + t3[3]*T3[3] ] ]
] ]
for mesh in node.meshes:
for mesh in node.meshes: for v in mesh.vertices:
for v in mesh.vertices: v = transform(v, transformation)
v = transform(v, transformation) bb_min[0] = min(bb_min[0], v[0])
bb_min[0] = min(bb_min[0], v[0]) bb_min[1] = min(bb_min[1], v[1])
bb_min[1] = min(bb_min[1], v[1]) bb_min[2] = min(bb_min[2], v[2])
bb_min[2] = min(bb_min[2], v[2]) bb_max[0] = max(bb_max[0], v[0])
bb_max[0] = max(bb_max[0], v[0]) bb_max[1] = max(bb_max[1], v[1])
bb_max[1] = max(bb_max[1], v[1]) bb_max[2] = max(bb_max[2], v[2])
bb_max[2] = max(bb_max[2], v[2])
for child in node.children:
for child in node.children: bb_min, bb_max = get_bounding_box_for_node(child, bb_min, bb_max, transformation)
bb_min, bb_max = get_bounding_box_for_node(child, bb_min, bb_max, transformation)
return bb_min, bb_max
return bb_min, bb_max
def try_load_functions(library_path, dll):
def try_load_functions(library_path, dll): '''
''' Try to bind to aiImportFile and aiReleaseImport
Try to bind to aiImportFile and aiReleaseImport
Arguments
Arguments ---------
--------- library_path: path to current lib
library_path: path to current lib dll: ctypes handle to library
dll: ctypes handle to library
Returns
Returns ---------
--------- If unsuccessful:
If unsuccessful: None
None If successful:
If successful: Tuple containing (library_path,
Tuple containing (library_path, load from filename function,
load from filename function, load from memory function,
load from memory function, export to filename function,
export to filename function, export to blob function,
export to blob function, release function,
release function, ctypes handle to assimp library)
ctypes handle to assimp library) '''
'''
try:
try: load = dll.aiImportFile
load = dll.aiImportFile release = dll.aiReleaseImport
release = dll.aiReleaseImport load_mem = dll.aiImportFileFromMemory
load_mem = dll.aiImportFileFromMemory export = dll.aiExportScene
export = dll.aiExportScene export2blob = dll.aiExportSceneToBlob
export2blob = dll.aiExportSceneToBlob except AttributeError:
except AttributeError: #OK, this is a library, but it doesn't have the functions we need
#OK, this is a library, but it doesn't have the functions we need return None
return None
# library found!
# library found! from .structs import Scene, ExportDataBlob
from .structs import Scene, ExportDataBlob load.restype = ctype.POINTER(Scene)
load.restype = POINTER(Scene) load_mem.restype = ctype.POINTER(Scene)
load_mem.restype = POINTER(Scene) export2blob.restype = ctype.POINTER(ExportDataBlob)
export2blob.restype = POINTER(ExportDataBlob) return (library_path, load, load_mem, export, export2blob, release, dll)
return (library_path, load, load_mem, export, export2blob, release, dll)
def search_library():
def search_library(): '''
''' Loads the assimp library.
Loads the assimp library. Throws exception AssimpError if no library_path is found
Throws exception AssimpError if no library_path is found
Returns: tuple, (load from filename function,
Returns: tuple, (load from filename function, load from memory function,
load from memory function, export to filename function,
export to filename function, export to blob function,
export to blob function, release function,
release function, dll)
dll) '''
''' #this path
#this path folder = os.path.dirname(__file__)
folder = os.path.dirname(__file__)
# silence 'DLL not found' message boxes on win
# silence 'DLL not found' message boxes on win try:
try: ctypes.windll.kernel32.SetErrorMode(0x8007)
ctypes.windll.kernel32.SetErrorMode(0x8007) except AttributeError:
except AttributeError: pass
pass
candidates = []
candidates = [] # test every file
# test every file for curfolder in [folder]+additional_dirs:
for curfolder in [folder]+additional_dirs: if os.path.isdir(curfolder):
if os.path.isdir(curfolder): for filename in os.listdir(curfolder):
for filename in os.listdir(curfolder): # our minimum requirement for candidates is that
# our minimum requirement for candidates is that # they should contain 'assimp' somewhere in
# they should contain 'assimp' somewhere in # their name
# their name if filename.lower().find('assimp')==-1 :
if filename.lower().find('assimp')==-1 : continue
continue is_out=1
is_out=1 for et in ext_whitelist:
for et in ext_whitelist: if et in filename.lower():
if et in filename.lower(): is_out=0
is_out=0 break
break if is_out:
if is_out: continue
continue
library_path = os.path.join(curfolder, filename)
library_path = os.path.join(curfolder, filename) logger.debug('Try ' + library_path)
logger.debug('Try ' + library_path) try:
try: dll = ctypes.cdll.LoadLibrary(library_path)
dll = ctypes.cdll.LoadLibrary(library_path) except Exception as e:
except Exception as e: logger.warning(str(e))
logger.warning(str(e)) # OK, this except is evil. But different OSs will throw different
# OK, this except is evil. But different OSs will throw different # errors. So just ignore any errors.
# errors. So just ignore any errors. continue
continue # see if the functions we need are in the dll
# see if the functions we need are in the dll loaded = try_load_functions(library_path, dll)
loaded = try_load_functions(library_path, dll) if loaded: candidates.append(loaded)
if loaded: candidates.append(loaded)
if not candidates:
if not candidates: # no library found
# no library found raise AssimpError("assimp library not found")
raise AssimpError("assimp library not found") else:
else: # get the newest library_path
# get the newest library_path candidates = map(lambda x: (os.lstat(x[0])[-2], x), candidates)
candidates = map(lambda x: (os.lstat(x[0])[-2], x), candidates) res = max(candidates, key=operator.itemgetter(0))[1]
res = max(candidates, key=operator.itemgetter(0))[1] logger.debug('Using assimp library located at ' + res[0])
logger.debug('Using assimp library located at ' + res[0])
# XXX: if there are 1000 dll/so files containing 'assimp'
# XXX: if there are 1000 dll/so files containing 'assimp' # in their name, do we have all of them in our address
# in their name, do we have all of them in our address # space now until gc kicks in?
# space now until gc kicks in?
# XXX: take version postfix of the .so on linux?
# XXX: take version postfix of the .so on linux? return res[1:]
return res[1:]
def hasattr_silent(object, name):
def hasattr_silent(object, name): """
""" Calls hasttr() with the given parameters and preserves the legacy (pre-Python 3.2)
Calls hasttr() with the given parameters and preserves the legacy (pre-Python 3.2) functionality of silently catching exceptions.
functionality of silently catching exceptions.
Returns the result of hasatter() or False if an exception was raised.
Returns the result of hasatter() or False if an exception was raised. """
"""
try:
try: return hasattr(object, name)
return hasattr(object, name) except AttributeError:
except: return False
return False

View File

@ -435,6 +435,7 @@ aiProcess_Debone = 0x4000000
aiProcess_GenEntityMeshes = 0x100000 aiProcess_GenEntityMeshes = 0x100000
aiProcess_OptimizeAnimations = 0x200000 aiProcess_OptimizeAnimations = 0x200000
aiProcess_FixTexturePaths = 0x200000 aiProcess_FixTexturePaths = 0x200000
aiProcess_EmbedTextures = 0x10000000,
## @def aiProcess_ConvertToLeftHanded ## @def aiProcess_ConvertToLeftHanded
# @brief Shortcut flag for Direct3D-based applications. # @brief Shortcut flag for Direct3D-based applications.

View File

@ -1,6 +1,6 @@
#-*- coding: UTF-8 -*- #-*- coding: UTF-8 -*-
from ctypes import POINTER, c_void_p, c_int, c_uint, c_char, c_float, Structure, c_char_p, c_double, c_ubyte, c_size_t, c_uint32 from ctypes import POINTER, c_void_p, c_uint, c_char, c_float, Structure, c_char_p, c_double, c_ubyte, c_size_t, c_uint32
class Vector2D(Structure): class Vector2D(Structure):
@ -70,7 +70,7 @@ class String(Structure):
See 'types.h' for details. See 'types.h' for details.
""" """
MAXLEN = 1024 MAXLEN = 1024
_fields_ = [ _fields_ = [
# Binary length of the string excluding the terminal 0. This is NOT the # Binary length of the string excluding the terminal 0. This is NOT the

View File

@ -24,12 +24,13 @@ This sample is based on several sources, including:
- ASSIMP's C++ SimpleOpenGL viewer - ASSIMP's C++ SimpleOpenGL viewer
""" """
import os, sys import sys
from OpenGL.GLUT import * from OpenGL.GLUT import *
from OpenGL.GLU import * from OpenGL.GLU import *
from OpenGL.GL import * from OpenGL.GL import *
import logging;logger = logging.getLogger("pyassimp_opengl") import logging
logger = logging.getLogger("pyassimp_opengl")
logging.basicConfig(level=logging.INFO) logging.basicConfig(level=logging.INFO)
import math import math

View File

@ -5,7 +5,7 @@
This module demonstrates the functionality of PyAssimp. This module demonstrates the functionality of PyAssimp.
""" """
import os, sys import sys
import logging import logging
logging.basicConfig(level=logging.INFO) logging.basicConfig(level=logging.INFO)
@ -50,8 +50,8 @@ def main(filename=None):
print(" colors:" + str(len(mesh.colors))) print(" colors:" + str(len(mesh.colors)))
tcs = mesh.texturecoords tcs = mesh.texturecoords
if tcs.any(): if tcs.any():
for index, tc in enumerate(tcs): for tc_index, tc in enumerate(tcs):
print(" texture-coords "+ str(index) + ":" + str(len(tcs[index])) + "first3:" + str(tcs[index][:3])) print(" texture-coords "+ str(tc_index) + ":" + str(len(tcs[tc_index])) + "first3:" + str(tcs[tc_index][:3]))
else: else:
print(" no texture coordinates") print(" no texture coordinates")

View File

@ -291,7 +291,9 @@ def main():
#s += "#endif\n" #s += "#endif\n"
output.write(templt.replace("<HERE>",s)) output.write(templt.replace("<HERE>",s))
# we got here, so no error
return 0
if __name__ == "__main__": if __name__ == "__main__":
sys.exit(main()) sys.exit(main())

View File

@ -52,8 +52,8 @@ use_ifc_template = False
input_step_template_h = 'StepReaderGen.h.template' input_step_template_h = 'StepReaderGen.h.template'
input_step_template_cpp = 'StepReaderGen.cpp.template' input_step_template_cpp = 'StepReaderGen.cpp.template'
input_ifc_template_h = 'IFCReaderGen.h.template' input_ifc_template_h = 'IFCReaderGen.h.template'
input_ifc_template_cpp = 'IFCReaderGen.cpp.template' input_ifc_template_cpp = 'IFCReaderGen.cpp.template'
cpp_keywords = "class" cpp_keywords = "class"
@ -87,7 +87,7 @@ template_type = r"""
template_stub_decl = '\tDECL_CONV_STUB({type});\n' template_stub_decl = '\tDECL_CONV_STUB({type});\n'
template_schema = '\t\tSchemaEntry("{normalized_name}",&STEP::ObjectHelper<{type},{argcnt}>::Construct )\n' template_schema = '\t\tSchemaEntry("{normalized_name}",&STEP::ObjectHelper<{type},{argcnt}>::Construct )\n'
template_schema_type = '\t\tSchemaEntry("{normalized_name}",NULL )\n' template_schema_type = '\t\tSchemaEntry("{normalized_name}",nullptr )\n'
template_converter = r""" template_converter = r"""
// ----------------------------------------------------------------------------------------------------------- // -----------------------------------------------------------------------------------------------------------
template <> size_t GenericFill<{type}>(const DB& db, const LIST& params, {type}* in) template <> size_t GenericFill<{type}>(const DB& db, const LIST& params, {type}* in)
@ -99,7 +99,7 @@ template_converter_prologue_a = '\tsize_t base = GenericFill(db,params,static_ca
template_converter_prologue_b = '\tsize_t base = 0;\n' template_converter_prologue_b = '\tsize_t base = 0;\n'
template_converter_check_argcnt = '\tif (params.GetSize() < {max_arg}) {{ throw STEP::TypeError("expected {max_arg} arguments to {name}"); }}' template_converter_check_argcnt = '\tif (params.GetSize() < {max_arg}) {{ throw STEP::TypeError("expected {max_arg} arguments to {name}"); }}'
template_converter_code_per_field = r""" do {{ // convert the '{fieldname}' argument template_converter_code_per_field = r""" do {{ // convert the '{fieldname}' argument
boost::shared_ptr<const DataType> arg = params[base++];{handle_unset}{convert} std::shared_ptr<const DataType> arg = params[base++];{handle_unset}{convert}
}} while(0); }} while(0);
""" """
template_allow_optional = r""" template_allow_optional = r"""
@ -151,11 +151,8 @@ def handle_unset_args(field,entity,schema,argnum):
return n+template_allow_optional.format() return n+template_allow_optional.format()
def get_single_conversion(field,schema,argnum=0,classname='?'): def get_single_conversion(field,schema,argnum=0,classname='?'):
typen = field.type
name = field.name name = field.name
if field.collection: return template_convert_single.format(name=name,argnum=argnum,classname=classname,full_type=field.fullspec)
typen = 'LIST'
return template_convert_single.format(type=typen,name=name,argnum=argnum,classname=classname,full_type=field.fullspec)
def count_args_up(entity,schema): def count_args_up(entity,schema):
return len(entity.members) + (count_args_up(schema.entities[entity.parent],schema) if entity.parent else 0) return len(entity.members) + (count_args_up(schema.entities[entity.parent],schema) if entity.parent else 0)
@ -218,7 +215,7 @@ def get_derived(e,schema):
return res return res
def get_hierarchy(e,schema): def get_hierarchy(e,schema):
return get_derived(e.schema)+[e.name]+get_base_classes(e,schema) return get_derived(e, schema)+[e.name]+get_base_classes(e,schema)
def sort_entity_list(schema): def sort_entity_list(schema):
deps = [] deps = []
@ -300,5 +297,8 @@ def work(filename):
with open(output_file_cpp,'wt') as outp: with open(output_file_cpp,'wt') as outp:
outp.write(inp.read().replace('{schema-static-table}',schema_table).replace('{converter-impl}',converters)) outp.write(inp.read().replace('{schema-static-table}',schema_table).replace('{converter-impl}',converters))
# Finished without error, so return 0
return 0
if __name__ == "__main__": if __name__ == "__main__":
sys.exit(work(sys.argv[1] if len(sys.argv)>1 else 'schema.exp')) sys.exit(work(sys.argv[1] if len(sys.argv)>1 else 'schema.exp'))

View File

@ -43,7 +43,8 @@
"""Parse an EXPRESS file and extract basic information on all """Parse an EXPRESS file and extract basic information on all
entities and data types contained""" entities and data types contained"""
import sys, os, re import sys
import re
from collections import OrderedDict from collections import OrderedDict
re_match_entity = re.compile(r""" re_match_entity = re.compile(r"""
View File
@ -2,7 +2,7 @@
Open Asset Import Library (ASSIMP) Open Asset Import Library (ASSIMP)
---------------------------------------------------------------------- ----------------------------------------------------------------------
Copyright (c) 2006-2018, ASSIMP Development Team Copyright (c) 2006-2019, ASSIMP Development Team
All rights reserved. All rights reserved.
Redistribution and use of this software in source and binary forms, Redistribution and use of this software in source and binary forms,
@ -66,7 +66,7 @@ namespace STEP {
// ----------------------------------------------------------------------------------------------------------- // -----------------------------------------------------------------------------------------------------------
template <> size_t GenericFill<NotImplemented>(const STEP::DB& db, const LIST& params, NotImplemented* in) template <> size_t GenericFill<NotImplemented>(const STEP::DB& db, const LIST& params, NotImplemented* in)
{ {
return 0; return 0u;
} }
View File
@ -2,7 +2,7 @@
Open Asset Import Library (ASSIMP) Open Asset Import Library (ASSIMP)
---------------------------------------------------------------------- ----------------------------------------------------------------------
Copyright (c) 2006-2018, ASSIMP Development Team Copyright (c) 2006-2019, ASSIMP Development Team
All rights reserved. All rights reserved.
Redistribution and use of this software in source and binary forms, Redistribution and use of this software in source and binary forms,
@ -47,25 +47,23 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
namespace Assimp { namespace Assimp {
namespace StepFile { namespace StepFile {
using namespace STEP; using namespace STEP;
using namespace STEP::EXPRESS; using namespace STEP::EXPRESS;
struct NotImplemented : public ObjectHelper<NotImplemented,0> { struct NotImplemented : public ObjectHelper<NotImplemented,0> {
}; };
// ****************************************************************************** // ******************************************************************************
// StepFile Custom data types // StepFile Custom data types
// ****************************************************************************** // ******************************************************************************
{types} {types}
// ****************************************************************************** // ******************************************************************************
// StepFile Entities // StepFile Entities
// ****************************************************************************** // ******************************************************************************
{predefs} {predefs}
{entities} {entities}
@ -73,11 +71,12 @@ namespace StepFile {
void GetSchema(EXPRESS::ConversionSchema& out); void GetSchema(EXPRESS::ConversionSchema& out);
} //! StepFile } //! StepFile
namespace STEP { namespace STEP {
// ****************************************************************************** // ******************************************************************************
// Converter stubs // Converter stubs
// ****************************************************************************** // ******************************************************************************
#define DECL_CONV_STUB(type) template <> size_t GenericFill<IFC::type>(const STEP::DB& db, const EXPRESS::LIST& params, IFC::type* in) #define DECL_CONV_STUB(type) template <> size_t GenericFill<IFC::type>(const STEP::DB& db, const EXPRESS::LIST& params, IFC::type* in)
View File
@ -228,7 +228,8 @@ int DoExport(const aiTexture* tx, FILE* p, const std::string& extension,
// Implementation of the assimp extract utility // Implementation of the assimp extract utility
int Assimp_Extract (const char* const* params, unsigned int num) int Assimp_Extract (const char* const* params, unsigned int num)
{ {
const char* const invalid = "assimp extract: Invalid number of arguments. See \'assimp extract --help\'\n"; const char* const invalid = "assimp extract: Invalid number of arguments. See \'assimp extract --help\'\n";
// assimp extract in out [options]
if (num < 1) { if (num < 1) {
printf(invalid); printf(invalid);
return 1; return 1;
@ -240,11 +241,7 @@ int Assimp_Extract (const char* const* params, unsigned int num)
return 0; return 0;
} }
// asssimp extract in out [options]
if (num < 1) {
printf(invalid);
return 1;
}
std::string in = std::string(params[0]); std::string in = std::string(params[0]);
std::string out = (num > 1 ? std::string(params[1]) : "-"); std::string out = (num > 1 ? std::string(params[1]) : "-");