Merge branch 'master' into issue_2390

commit fa37018e08
@@ -555,8 +555,6 @@ void WriteDump(const aiScene* scene, IOStream* io, bool shortened) {
                 mesh->mNormals[n].z);
         }
     }
-    else {
-    }
     ioprintf(io,"\t\t</Normals>\n");
 }

@@ -416,7 +416,7 @@ template <> struct Structure :: _defaultInitializer<ErrorPolicy_Fail> {
     void operator ()(T& /*out*/,const char* = "") {
         // obviously, it is crucial that _DefaultInitializer is used
         // only from within a catch clause.
-        throw;
+        throw DeadlyImportError("Constructing BlenderDNA Structure encountered an error");
     }
 };

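A bare `throw;` rethrows the exception that is currently being handled; if it executes while no exception is active, the runtime calls std::terminate(). Throwing a named DeadlyImportError instead keeps the failure catchable no matter where the initializer is invoked. A minimal standalone sketch of that behaviour (illustrative only, not assimp code):

    #include <iostream>
    #include <stdexcept>

    // Rethrows whatever exception is currently in flight. Only safe inside a
    // catch handler; with no active exception, std::terminate() is called.
    void rethrow_current() {
        throw;
    }

    int main() {
        try {
            try {
                throw std::runtime_error("original failure");
            } catch (...) {
                rethrow_current();          // ok: an exception is active
            }
        } catch (const std::exception& e) {
            std::cout << "recovered: " << e.what() << '\n';
        }
        // Calling rethrow_current() here, outside any handler,
        // would terminate the program instead of throwing.
        return 0;
    }
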
@@ -144,7 +144,7 @@ void COBImporter::InternReadFile( const std::string& pFile, aiScene* pScene, IOS
     // check header
     char head[32];
     stream->CopyAndAdvance(head,32);
-    if (strncmp(head,"Caligari ",9)) {
+    if (strncmp(head,"Caligari ",9) != 0) {
         ThrowException("Could not found magic id: `Caligari`");
     }

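strncmp() returns 0 when the two prefixes compare equal, so the bare `if (strncmp(...))` branch fires on a mismatch; writing the comparison as `!= 0` states that intent explicitly without changing behaviour. A small self-contained example (the header string is made up for illustration):

    #include <cstdio>
    #include <cstring>

    int main() {
        const char head[] = "Caligari example-header";   // assumed sample input
        // strncmp yields 0 on a match, so test the result against 0 explicitly.
        if (std::strncmp(head, "Caligari ", 9) != 0) {
            std::printf("not a Caligari file\n");
        } else {
            std::printf("Caligari magic id found\n");
        }
        return 0;
    }
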
@@ -656,14 +656,14 @@ void COBImporter::ReadLght_Ascii(Scene& out, LineSplitter& splitter, const Chunk
     ReadFloat3Tuple_Ascii(msh.color ,&rgb);

     SkipSpaces(&rgb);
-    if (strncmp(rgb,"cone angle",10)) {
+    if (strncmp(rgb,"cone angle",10) != 0) {
         ASSIMP_LOG_WARN_F( "Expected `cone angle` entity in `color` line in `Lght` chunk ", nfo.id );
     }
     SkipSpaces(rgb+10,&rgb);
     msh.angle = fast_atof(&rgb);

     SkipSpaces(&rgb);
-    if (strncmp(rgb,"inner angle",11)) {
+    if (strncmp(rgb,"inner angle",11) != 0) {
         ASSIMP_LOG_WARN_F( "Expected `inner angle` entity in `color` line in `Lght` chunk ", nfo.id);
     }
     SkipSpaces(rgb+11,&rgb);
@@ -903,7 +903,7 @@ public:
     if(nfo.size != static_cast<unsigned int>(-1)) {
         try {
             reader.IncPtr( static_cast< int >( nfo.size ) - reader.GetCurrentPos() + cur );
-        } catch ( DeadlyImportError e ) {
+        } catch (const DeadlyImportError& e ) {
             // out of limit so correct the value
             reader.IncPtr( reader.GetReadLimit() );
         }
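Catching DeadlyImportError by value copies the exception object on entry to the handler and would slice any derived type down to the base; catching by const reference binds directly to the thrown object. A minimal sketch of the difference, using stand-in types rather than assimp's:

    #include <iostream>
    #include <stdexcept>

    // Stand-in for an importer-specific exception type.
    struct ImportError : std::runtime_error {
        using std::runtime_error::runtime_error;
    };

    int main() {
        try {
            throw ImportError("read past the end of the chunk");
        } catch (const std::runtime_error& e) {   // no copy, no slicing
            std::cout << e.what() << '\n';
        }
        return 0;
    }
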
@@ -1214,7 +1214,7 @@ void COBImporter::ReadGrou_Binary(COB::Scene& out, StreamReaderLE& reader, const

     const chunk_guard cn(nfo,reader);

-    out.nodes.push_back(std::shared_ptr<Group>(new Group()));
+    out.nodes.push_back(std::make_shared<Group>());
     Group& msh = (Group&)(*out.nodes.back().get());
     msh = nfo;

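std::make_shared constructs the object and its shared_ptr control block in a single allocation, whereas `std::shared_ptr<Group>(new Group())` performs two. A small sketch with a placeholder Group type:

    #include <memory>
    #include <vector>

    struct Group {                 // placeholder for the importer's node type
        int id = 0;
    };

    int main() {
        std::vector<std::shared_ptr<Group>> nodes;
        nodes.push_back(std::make_shared<Group>());   // one allocation
        nodes.back()->id = 42;
        return 0;
    }
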
@@ -476,8 +476,11 @@ D3MFOpcPackage::D3MFOpcPackage(IOSystem* pIOHandler, const std::string& rFile)
             mZipArchive->Close( fileStream );

         } else if( file == D3MF::XmlTag::CONTENT_TYPES_ARCHIVE) {
+            ASSIMP_LOG_WARN_F("Ignored file of unsupported type CONTENT_TYPES_ARCHIVES",file);
+        } else {
+            ASSIMP_LOG_WARN_F("Ignored file of unknown type: ",file);
         }

     }
 }

@@ -1717,22 +1717,22 @@ namespace Assimp {
         if (!mesh)
         {
             for (const MeshMap::value_type& v : meshes_converted) {
-                const MeshGeometry* const mesh = dynamic_cast<const MeshGeometry*> (v.first);
-                if (!mesh) {
+                const MeshGeometry* const meshGeom = dynamic_cast<const MeshGeometry*> (v.first);
+                if (!meshGeom) {
                     continue;
                 }

-                const MatIndexArray& mats = mesh->GetMaterialIndices();
+                const MatIndexArray& mats = meshGeom->GetMaterialIndices();
                 if (std::find(mats.begin(), mats.end(), matIndex) == mats.end()) {
                     continue;
                 }

                 int index = -1;
                 for (unsigned int i = 0; i < AI_MAX_NUMBER_OF_TEXTURECOORDS; ++i) {
-                    if (mesh->GetTextureCoords(i).empty()) {
+                    if (meshGeom->GetTextureCoords(i).empty()) {
                         break;
                     }
-                    const std::string& name = mesh->GetTextureCoordChannelName(i);
+                    const std::string& name = meshGeom->GetTextureCoordChannelName(i);
                     if (name == uvSet) {
                         index = static_cast<int>(i);
                         break;
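Inside the loop the old code declared another variable named `mesh`, shadowing the outer `mesh` that the surrounding `if (!mesh)` had just tested; renaming the inner one to meshGeom removes the shadowing and the compiler warnings that come with it. A compact illustration of the hazard (names are generic, not the converter's):

    #include <iostream>
    #include <vector>

    int main() {
        const int* mesh = nullptr;                    // outer variable
        std::vector<int> meshes_converted{1, 2, 3};

        if (!mesh) {
            for (int& v : meshes_converted) {
                // A distinct name keeps it obvious which object is meant;
                // reusing `mesh` here would silently hide the outer pointer.
                const int* meshGeom = &v;
                if (*meshGeom == 2) {
                    std::cout << "found candidate 2\n";
                }
            }
        }
        return 0;
    }
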
@@ -1840,22 +1840,22 @@ namespace Assimp {
         if (!mesh)
         {
             for (const MeshMap::value_type& v : meshes_converted) {
-                const MeshGeometry* const mesh = dynamic_cast<const MeshGeometry*> (v.first);
-                if (!mesh) {
+                const MeshGeometry* const meshGeom = dynamic_cast<const MeshGeometry*> (v.first);
+                if (!meshGeom) {
                     continue;
                 }

-                const MatIndexArray& mats = mesh->GetMaterialIndices();
+                const MatIndexArray& mats = meshGeom->GetMaterialIndices();
                 if (std::find(mats.begin(), mats.end(), matIndex) == mats.end()) {
                     continue;
                 }

                 int index = -1;
                 for (unsigned int i = 0; i < AI_MAX_NUMBER_OF_TEXTURECOORDS; ++i) {
-                    if (mesh->GetTextureCoords(i).empty()) {
+                    if (meshGeom->GetTextureCoords(i).empty()) {
                         break;
                     }
-                    const std::string& name = mesh->GetTextureCoordChannelName(i);
+                    const std::string& name = meshGeom->GetTextureCoordChannelName(i);
                     if (name == uvSet) {
                         index = static_cast<int>(i);
                         break;
@@ -2196,22 +2196,22 @@ void FBXConverter::SetShadingPropertiesRaw(aiMaterial* out_mat, const PropertyTa
         if (!mesh)
         {
             for (const MeshMap::value_type& v : meshes_converted) {
-                const MeshGeometry* const mesh = dynamic_cast<const MeshGeometry*>(v.first);
-                if (!mesh) {
+                const MeshGeometry* const meshGeom = dynamic_cast<const MeshGeometry*>(v.first);
+                if (!meshGeom) {
                     continue;
                 }

-                const MatIndexArray& mats = mesh->GetMaterialIndices();
+                const MatIndexArray& mats = meshGeom->GetMaterialIndices();
                 if (std::find(mats.begin(), mats.end(), matIndex) == mats.end()) {
                     continue;
                 }

                 int index = -1;
                 for (unsigned int i = 0; i < AI_MAX_NUMBER_OF_TEXTURECOORDS; ++i) {
-                    if (mesh->GetTextureCoords(i).empty()) {
+                    if (meshGeom->GetTextureCoords(i).empty()) {
                         break;
                     }
-                    const std::string& name = mesh->GetTextureCoordChannelName(i);
+                    const std::string& name = meshGeom->GetTextureCoordChannelName(i);
                     if (name == uvSet) {
                         index = static_cast<int>(i);
                         break;
@@ -432,7 +432,7 @@ void FBX::Node::WritePropertyNodeAscii(
     char buffer[32];
     FBX::Node node(name);
     node.Begin(s, false, indent);
-    std::string vsize = std::to_string(v.size());
+    std::string vsize = to_string(v.size());
     // *<size> {
     s.PutChar('*'); s.PutString(vsize); s.PutString(" {\n");
     // indent + 1
@@ -468,7 +468,7 @@ void FBX::Node::WritePropertyNodeAscii(
     char buffer[32];
     FBX::Node node(name);
     node.Begin(s, false, indent);
-    std::string vsize = std::to_string(v.size());
+    std::string vsize = to_string(v.size());
     // *<size> {
     s.PutChar('*'); s.PutString(vsize); s.PutString(" {\n");
     // indent + 1
@@ -326,8 +326,11 @@ Video::Video(uint64_t id, const Element& element, const Document& doc, const std
                 content = new uint8_t[len];
                 ::memcpy(content, data + 5, len);
             }
-        } catch (runtime_error runtimeError) {
+        } catch (const runtime_error& runtimeError)
+        {
             //we don't need the content data for contents that has already been loaded
+            ASSIMP_LOG_DEBUG_F("Caught exception in FBXMaterial (likely because content was already loaded): ",
+                runtimeError.what());
         }
     }

@@ -300,13 +300,10 @@ int ClampSpline(int idx, int size) {
 // ------------------------------------------------------------------------------------------------
 inline void FindSuitableMultiple(int& angle)
 {
-    if (angle < 3)angle = 3;
+    if (angle < 3) angle = 3;
     else if (angle < 10) angle = 10;
     else if (angle < 20) angle = 20;
     else if (angle < 30) angle = 30;
-    else
-    {
-    }
 }

 // ------------------------------------------------------------------------------------------------
@@ -317,6 +314,8 @@ void IRRImporter::ComputeAnimations(Node* root, aiNode* real, std::vector<aiNode
     // XXX totally WIP - doesn't produce proper results, need to evaluate
     // whether there's any use for Irrlicht's proprietary scene format
     // outside Irrlicht ...
+    // This also applies to the above function of FindSuitableMultiple and ClampSpline which are
+    // solely used in this function

     if (root->animators.empty()) {
         return;
@@ -674,38 +673,38 @@ void IRRImporter::GenerateGraph(Node* root,aiNode* rootOut ,aiScene* scene,
             // Get the loaded mesh from the scene and add it to
             // the list of all scenes to be attached to the
             // graph we're currently building
-            aiScene* scene = batch.GetImport(root->id);
-            if (!scene) {
+            aiScene* localScene = batch.GetImport(root->id);
+            if (!localScene) {
                 ASSIMP_LOG_ERROR("IRR: Unable to load external file: " + root->meshPath);
                 break;
             }
-            attach.push_back(AttachmentInfo(scene,rootOut));
+            attach.push_back(AttachmentInfo(localScene,rootOut));

             // Now combine the material we've loaded for this mesh
             // with the real materials we got from the file. As we
             // don't execute any pp-steps on the file, the numbers
             // should be equal. If they are not, we can impossibly
             // do this ...
-            if (root->materials.size() != (unsigned int)scene->mNumMaterials) {
+            if (root->materials.size() != (unsigned int)localScene->mNumMaterials) {
                 ASSIMP_LOG_WARN("IRR: Failed to match imported materials "
                     "with the materials found in the IRR scene file");

                 break;
             }
-            for (unsigned int i = 0; i < scene->mNumMaterials;++i) {
+            for (unsigned int i = 0; i < localScene->mNumMaterials;++i) {
                 // Delete the old material, we don't need it anymore
-                delete scene->mMaterials[i];
+                delete localScene->mMaterials[i];

                 std::pair<aiMaterial*, unsigned int>& src = root->materials[i];
-                scene->mMaterials[i] = src.first;
+                localScene->mMaterials[i] = src.first;
             }

             // NOTE: Each mesh should have exactly one material assigned,
             // but we do it in a separate loop if this behaviour changes
             // in future.
-            for (unsigned int i = 0; i < scene->mNumMeshes;++i) {
+            for (unsigned int i = 0; i < localScene->mNumMeshes;++i) {
                 // Process material flags
-                aiMesh* mesh = scene->mMeshes[i];
+                aiMesh* mesh = localScene->mMeshes[i];


                 // If "trans_vertex_alpha" mode is enabled, search all vertex colors
@@ -278,10 +278,10 @@ void STEP::ReadFile(DB& db,const EXPRESS::ConversionSchema& scheme,
     std::transform( type.begin(), type.end(), type.begin(), &Assimp::ToLower<char> );
     const char* sz = scheme.GetStaticStringForToken(type);
     if(sz) {
-        const std::string::size_type len = n2-n1+1;
-        char* const copysz = new char[len+1];
+        const std::string::size_type szLen = n2-n1+1;
+        char* const copysz = new char[szLen+1];
         std::copy(s.c_str()+n1,s.c_str()+n2+1,copysz);
-        copysz[len] = '\0';
+        copysz[szLen] = '\0';
         db.InternInsert(new LazyObject(db,id,line,sz,copysz));
     }
     if(!has_next) {
@@ -443,10 +443,10 @@ void MD5Importer::LoadMD5MeshFile ()
     for (MD5::VertexList::const_iterator iter = meshSrc.mVertices.begin();iter != meshSrc.mVertices.end();++iter,++pv) {
         for (unsigned int jub = (*iter).mFirstWeight, w = jub; w < jub + (*iter).mNumWeights;++w)
         {
-            MD5::WeightDesc& desc = meshSrc.mWeights[w];
+            MD5::WeightDesc& weightDesc = meshSrc.mWeights[w];
             /* FIX for some invalid exporters */
-            if (!(desc.mWeight < AI_MD5_WEIGHT_EPSILON && desc.mWeight >= -AI_MD5_WEIGHT_EPSILON ))
-                ++piCount[desc.mBone];
+            if (!(weightDesc.mWeight < AI_MD5_WEIGHT_EPSILON && weightDesc.mWeight >= -AI_MD5_WEIGHT_EPSILON ))
+                ++piCount[weightDesc.mBone];
         }
     }

@@ -493,20 +493,20 @@ void MD5Importer::LoadMD5MeshFile ()
                 if (w >= meshSrc.mWeights.size())
                     throw DeadlyImportError("MD5MESH: Invalid weight index");

-                MD5::WeightDesc& desc = meshSrc.mWeights[w];
-                if ( desc.mWeight < AI_MD5_WEIGHT_EPSILON && desc.mWeight >= -AI_MD5_WEIGHT_EPSILON) {
+                MD5::WeightDesc& weightDesc = meshSrc.mWeights[w];
+                if ( weightDesc.mWeight < AI_MD5_WEIGHT_EPSILON && weightDesc.mWeight >= -AI_MD5_WEIGHT_EPSILON) {
                     continue;
                 }

-                const ai_real fNewWeight = desc.mWeight / fSum;
+                const ai_real fNewWeight = weightDesc.mWeight / fSum;

                 // transform the local position into worldspace
-                MD5::BoneDesc& boneSrc = meshParser.mJoints[desc.mBone];
-                const aiVector3D v = boneSrc.mRotationQuatConverted.Rotate (desc.vOffsetPosition);
+                MD5::BoneDesc& boneSrc = meshParser.mJoints[weightDesc.mBone];
+                const aiVector3D v = boneSrc.mRotationQuatConverted.Rotate (weightDesc.vOffsetPosition);

                 // use the original weight to compute the vertex position
                 // (some MD5s seem to depend on the invalid weight values ...)
-                *pv += ((boneSrc.mPositionXYZ+v)* (ai_real)desc.mWeight);
+                *pv += ((boneSrc.mPositionXYZ+v)* (ai_real)weightDesc.mWeight);

                 aiBone* bone = mesh->mBones[boneSrc.mMap];
                 *bone->mWeights++ = aiVertexWeight((unsigned int)(pv-mesh->mVertices),fNewWeight);
@@ -127,7 +127,7 @@ STLExporter::STLExporter(const char* _filename, const aiScene* pScene, bool expo
     mOutput.write((char *)&meshnum, 4);

     if (exportPointClouds) {
-
+        throw DeadlyExportError("This functionality is not yet implemented for binary output.");
     }

     for(unsigned int i = 0; i < pScene->mNumMeshes; ++i) {
@@ -294,17 +294,17 @@ namespace glTF {
     // filling object "compressedData"
     json_comp_data.SetObject();
     json_comp_data.AddMember("buffer", ptr_ext_comp->Buffer, w.mAl);
-    json_comp_data.AddMember("byteOffset", ptr_ext_comp->Offset, w.mAl);
+    json_comp_data.AddMember("byteOffset", static_cast<uint64_t>(ptr_ext_comp->Offset), w.mAl);
     json_comp_data.AddMember("componentType", 5121, w.mAl);
     json_comp_data.AddMember("type", "SCALAR", w.mAl);
-    json_comp_data.AddMember("count", ptr_ext_comp->Count, w.mAl);
+    json_comp_data.AddMember("count", static_cast<uint64_t>(ptr_ext_comp->Count), w.mAl);
     if(ptr_ext_comp->Binary)
         json_comp_data.AddMember("mode", "binary", w.mAl);
     else
         json_comp_data.AddMember("mode", "ascii", w.mAl);

-    json_comp_data.AddMember("indicesCount", ptr_ext_comp->IndicesCount, w.mAl);
-    json_comp_data.AddMember("verticesCount", ptr_ext_comp->VerticesCount, w.mAl);
+    json_comp_data.AddMember("indicesCount", static_cast<uint64_t>(ptr_ext_comp->IndicesCount), w.mAl);
+    json_comp_data.AddMember("verticesCount", static_cast<uint64_t>(ptr_ext_comp->VerticesCount), w.mAl);
     // filling object "Open3DGC-compression"
     Value json_o3dgc;

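The counters in ptr_ext_comp are size_t values whose width differs between platforms; casting them to a fixed uint64_t selects one unambiguous rapidjson AddMember overload and avoids implicit-conversion warnings. A minimal sketch of the idea against rapidjson directly (assumed include paths; this is not the exporter's code):

    #include <cstdint>
    #include <iostream>
    #include "rapidjson/document.h"
    #include "rapidjson/stringbuffer.h"
    #include "rapidjson/writer.h"

    int main() {
        rapidjson::Document doc;
        doc.SetObject();
        auto& al = doc.GetAllocator();

        std::size_t verticesCount = 1024;             // platform-dependent width
        doc.AddMember("verticesCount",
                      static_cast<uint64_t>(verticesCount), al);

        rapidjson::StringBuffer sb;
        rapidjson::Writer<rapidjson::StringBuffer> writer(sb);
        doc.Accept(writer);
        std::cout << sb.GetString() << '\n';          // {"verticesCount":1024}
        return 0;
    }
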
@@ -245,7 +245,7 @@ inline Ref<Accessor> ExportData(Asset& a, std::string& meshName, Ref<Buffer>& bu

 namespace {
     void GetMatScalar(const aiMaterial* mat, float& val, const char* propName, int type, int idx) {
-        if (mat->Get(propName, type, idx, val) == AI_SUCCESS) {}
+        ai_assert(mat->Get(propName, type, idx, val) == AI_SUCCESS);
     }
 }

File diff suppressed because it is too large
@@ -21,7 +21,7 @@ FORMATS = ["CSM",
            "STL",
            "IRR",
            "Q3O",
-           "Q3D"
+           "Q3D",
            "MS3D",
            "Q3S",
            "ZGL",
@@ -1,280 +1,279 @@
 #-*- coding: UTF-8 -*-

 """
 Some fancy helper functions.
 """

 import os
 import ctypes
-from ctypes import POINTER
 import operator

 from distutils.sysconfig import get_python_lib
 import re
 import sys

 try: import numpy
-except: numpy = None
+except ImportError: numpy = None

 import logging;logger = logging.getLogger("pyassimp")

 from .errors import AssimpError

 additional_dirs, ext_whitelist = [],[]

 # populate search directories and lists of allowed file extensions
 # depending on the platform we're running on.
 if os.name=='posix':
     additional_dirs.append('./')
     additional_dirs.append('/usr/lib/')
     additional_dirs.append('/usr/lib/x86_64-linux-gnu/')
     additional_dirs.append('/usr/local/lib/')

     if 'LD_LIBRARY_PATH' in os.environ:
         additional_dirs.extend([item for item in os.environ['LD_LIBRARY_PATH'].split(':') if item])

     # check if running from anaconda.
     if "conda" or "continuum" in sys.version.lower():
         cur_path = get_python_lib()
         pattern = re.compile('.*\/lib\/')
         conda_lib = pattern.match(cur_path).group()
         logger.info("Adding Anaconda lib path:"+ conda_lib)
         additional_dirs.append(conda_lib)

     # note - this won't catch libassimp.so.N.n, but
     # currently there's always a symlink called
     # libassimp.so in /usr/local/lib.
     ext_whitelist.append('.so')
     # libassimp.dylib in /usr/local/lib
     ext_whitelist.append('.dylib')

 elif os.name=='nt':
     ext_whitelist.append('.dll')
     path_dirs = os.environ['PATH'].split(';')
     additional_dirs.extend(path_dirs)

 def vec2tuple(x):
     """ Converts a VECTOR3D to a Tuple """
     return (x.x, x.y, x.z)

 def transform(vector3, matrix4x4):
     """ Apply a transformation matrix on a 3D vector.

     :param vector3: array with 3 elements
     :param matrix4x4: 4x4 matrix
     """
     if numpy:
         return numpy.dot(matrix4x4, numpy.append(vector3, 1.))
     else:
         m0,m1,m2,m3 = matrix4x4; x,y,z = vector3
         return [
                 m0[0]*x + m0[1]*y + m0[2]*z + m0[3],
                 m1[0]*x + m1[1]*y + m1[2]*z + m1[3],
                 m2[0]*x + m2[1]*y + m2[2]*z + m2[3],
                 m3[0]*x + m3[1]*y + m3[2]*z + m3[3]
                 ]

 def _inv(matrix4x4):
     m0,m1,m2,m3 = matrix4x4

     det = m0[3]*m1[2]*m2[1]*m3[0] - m0[2]*m1[3]*m2[1]*m3[0] - \
           m0[3]*m1[1]*m2[2]*m3[0] + m0[1]*m1[3]*m2[2]*m3[0] + \
           m0[2]*m1[1]*m2[3]*m3[0] - m0[1]*m1[2]*m2[3]*m3[0] - \
           m0[3]*m1[2]*m2[0]*m3[1] + m0[2]*m1[3]*m2[0]*m3[1] + \
           m0[3]*m1[0]*m2[2]*m3[1] - m0[0]*m1[3]*m2[2]*m3[1] - \
           m0[2]*m1[0]*m2[3]*m3[1] + m0[0]*m1[2]*m2[3]*m3[1] + \
           m0[3]*m1[1]*m2[0]*m3[2] - m0[1]*m1[3]*m2[0]*m3[2] - \
           m0[3]*m1[0]*m2[1]*m3[2] + m0[0]*m1[3]*m2[1]*m3[2] + \
           m0[1]*m1[0]*m2[3]*m3[2] - m0[0]*m1[1]*m2[3]*m3[2] - \
           m0[2]*m1[1]*m2[0]*m3[3] + m0[1]*m1[2]*m2[0]*m3[3] + \
           m0[2]*m1[0]*m2[1]*m3[3] - m0[0]*m1[2]*m2[1]*m3[3] - \
           m0[1]*m1[0]*m2[2]*m3[3] + m0[0]*m1[1]*m2[2]*m3[3]

     return[[( m1[2]*m2[3]*m3[1] - m1[3]*m2[2]*m3[1] + m1[3]*m2[1]*m3[2] - m1[1]*m2[3]*m3[2] - m1[2]*m2[1]*m3[3] + m1[1]*m2[2]*m3[3]) /det,
             ( m0[3]*m2[2]*m3[1] - m0[2]*m2[3]*m3[1] - m0[3]*m2[1]*m3[2] + m0[1]*m2[3]*m3[2] + m0[2]*m2[1]*m3[3] - m0[1]*m2[2]*m3[3]) /det,
             ( m0[2]*m1[3]*m3[1] - m0[3]*m1[2]*m3[1] + m0[3]*m1[1]*m3[2] - m0[1]*m1[3]*m3[2] - m0[2]*m1[1]*m3[3] + m0[1]*m1[2]*m3[3]) /det,
             ( m0[3]*m1[2]*m2[1] - m0[2]*m1[3]*m2[1] - m0[3]*m1[1]*m2[2] + m0[1]*m1[3]*m2[2] + m0[2]*m1[1]*m2[3] - m0[1]*m1[2]*m2[3]) /det],
            [( m1[3]*m2[2]*m3[0] - m1[2]*m2[3]*m3[0] - m1[3]*m2[0]*m3[2] + m1[0]*m2[3]*m3[2] + m1[2]*m2[0]*m3[3] - m1[0]*m2[2]*m3[3]) /det,
             ( m0[2]*m2[3]*m3[0] - m0[3]*m2[2]*m3[0] + m0[3]*m2[0]*m3[2] - m0[0]*m2[3]*m3[2] - m0[2]*m2[0]*m3[3] + m0[0]*m2[2]*m3[3]) /det,
             ( m0[3]*m1[2]*m3[0] - m0[2]*m1[3]*m3[0] - m0[3]*m1[0]*m3[2] + m0[0]*m1[3]*m3[2] + m0[2]*m1[0]*m3[3] - m0[0]*m1[2]*m3[3]) /det,
             ( m0[2]*m1[3]*m2[0] - m0[3]*m1[2]*m2[0] + m0[3]*m1[0]*m2[2] - m0[0]*m1[3]*m2[2] - m0[2]*m1[0]*m2[3] + m0[0]*m1[2]*m2[3]) /det],
            [( m1[1]*m2[3]*m3[0] - m1[3]*m2[1]*m3[0] + m1[3]*m2[0]*m3[1] - m1[0]*m2[3]*m3[1] - m1[1]*m2[0]*m3[3] + m1[0]*m2[1]*m3[3]) /det,
             ( m0[3]*m2[1]*m3[0] - m0[1]*m2[3]*m3[0] - m0[3]*m2[0]*m3[1] + m0[0]*m2[3]*m3[1] + m0[1]*m2[0]*m3[3] - m0[0]*m2[1]*m3[3]) /det,
             ( m0[1]*m1[3]*m3[0] - m0[3]*m1[1]*m3[0] + m0[3]*m1[0]*m3[1] - m0[0]*m1[3]*m3[1] - m0[1]*m1[0]*m3[3] + m0[0]*m1[1]*m3[3]) /det,
             ( m0[3]*m1[1]*m2[0] - m0[1]*m1[3]*m2[0] - m0[3]*m1[0]*m2[1] + m0[0]*m1[3]*m2[1] + m0[1]*m1[0]*m2[3] - m0[0]*m1[1]*m2[3]) /det],
            [( m1[2]*m2[1]*m3[0] - m1[1]*m2[2]*m3[0] - m1[2]*m2[0]*m3[1] + m1[0]*m2[2]*m3[1] + m1[1]*m2[0]*m3[2] - m1[0]*m2[1]*m3[2]) /det,
             ( m0[1]*m2[2]*m3[0] - m0[2]*m2[1]*m3[0] + m0[2]*m2[0]*m3[1] - m0[0]*m2[2]*m3[1] - m0[1]*m2[0]*m3[2] + m0[0]*m2[1]*m3[2]) /det,
             ( m0[2]*m1[1]*m3[0] - m0[1]*m1[2]*m3[0] - m0[2]*m1[0]*m3[1] + m0[0]*m1[2]*m3[1] + m0[1]*m1[0]*m3[2] - m0[0]*m1[1]*m3[2]) /det,
             ( m0[1]*m1[2]*m2[0] - m0[2]*m1[1]*m2[0] + m0[2]*m1[0]*m2[1] - m0[0]*m1[2]*m2[1] - m0[1]*m1[0]*m2[2] + m0[0]*m1[1]*m2[2]) /det]]

 def get_bounding_box(scene):
     bb_min = [1e10, 1e10, 1e10] # x,y,z
     bb_max = [-1e10, -1e10, -1e10] # x,y,z
     inv = numpy.linalg.inv if numpy else _inv
     return get_bounding_box_for_node(scene.rootnode, bb_min, bb_max, inv(scene.rootnode.transformation))

 def get_bounding_box_for_node(node, bb_min, bb_max, transformation):

     if numpy:
         transformation = numpy.dot(transformation, node.transformation)
     else:
         t0,t1,t2,t3 = transformation
         T0,T1,T2,T3 = node.transformation
         transformation = [ [
             t0[0]*T0[0] + t0[1]*T1[0] + t0[2]*T2[0] + t0[3]*T3[0],
             t0[0]*T0[1] + t0[1]*T1[1] + t0[2]*T2[1] + t0[3]*T3[1],
             t0[0]*T0[2] + t0[1]*T1[2] + t0[2]*T2[2] + t0[3]*T3[2],
             t0[0]*T0[3] + t0[1]*T1[3] + t0[2]*T2[3] + t0[3]*T3[3]
             ],[
             t1[0]*T0[0] + t1[1]*T1[0] + t1[2]*T2[0] + t1[3]*T3[0],
             t1[0]*T0[1] + t1[1]*T1[1] + t1[2]*T2[1] + t1[3]*T3[1],
             t1[0]*T0[2] + t1[1]*T1[2] + t1[2]*T2[2] + t1[3]*T3[2],
             t1[0]*T0[3] + t1[1]*T1[3] + t1[2]*T2[3] + t1[3]*T3[3]
             ],[
             t2[0]*T0[0] + t2[1]*T1[0] + t2[2]*T2[0] + t2[3]*T3[0],
             t2[0]*T0[1] + t2[1]*T1[1] + t2[2]*T2[1] + t2[3]*T3[1],
             t2[0]*T0[2] + t2[1]*T1[2] + t2[2]*T2[2] + t2[3]*T3[2],
             t2[0]*T0[3] + t2[1]*T1[3] + t2[2]*T2[3] + t2[3]*T3[3]
             ],[
             t3[0]*T0[0] + t3[1]*T1[0] + t3[2]*T2[0] + t3[3]*T3[0],
             t3[0]*T0[1] + t3[1]*T1[1] + t3[2]*T2[1] + t3[3]*T3[1],
             t3[0]*T0[2] + t3[1]*T1[2] + t3[2]*T2[2] + t3[3]*T3[2],
             t3[0]*T0[3] + t3[1]*T1[3] + t3[2]*T2[3] + t3[3]*T3[3]
             ] ]

     for mesh in node.meshes:
         for v in mesh.vertices:
             v = transform(v, transformation)
             bb_min[0] = min(bb_min[0], v[0])
             bb_min[1] = min(bb_min[1], v[1])
             bb_min[2] = min(bb_min[2], v[2])
             bb_max[0] = max(bb_max[0], v[0])
             bb_max[1] = max(bb_max[1], v[1])
             bb_max[2] = max(bb_max[2], v[2])


     for child in node.children:
         bb_min, bb_max = get_bounding_box_for_node(child, bb_min, bb_max, transformation)

     return bb_min, bb_max

 def try_load_functions(library_path, dll):
     '''
     Try to bind to aiImportFile and aiReleaseImport

     Arguments
     ---------
     library_path: path to current lib
     dll: ctypes handle to library

     Returns
     ---------
     If unsuccessful:
         None
     If successful:
         Tuple containing (library_path,
                           load from filename function,
                           load from memory function,
                           export to filename function,
                           export to blob function,
                           release function,
                           ctypes handle to assimp library)
     '''

     try:
         load = dll.aiImportFile
         release = dll.aiReleaseImport
         load_mem = dll.aiImportFileFromMemory
         export = dll.aiExportScene
         export2blob = dll.aiExportSceneToBlob
     except AttributeError:
         #OK, this is a library, but it doesn't have the functions we need
         return None

     # library found!
     from .structs import Scene, ExportDataBlob
-    load.restype = POINTER(Scene)
-    load_mem.restype = POINTER(Scene)
-    export2blob.restype = POINTER(ExportDataBlob)
+    load.restype = ctype.POINTER(Scene)
+    load_mem.restype = ctype.POINTER(Scene)
+    export2blob.restype = ctype.POINTER(ExportDataBlob)
     return (library_path, load, load_mem, export, export2blob, release, dll)

 def search_library():
     '''
     Loads the assimp library.
     Throws exception AssimpError if no library_path is found

     Returns: tuple, (load from filename function,
                      load from memory function,
                      export to filename function,
                      export to blob function,
                      release function,
                      dll)
     '''
     #this path
     folder = os.path.dirname(__file__)

     # silence 'DLL not found' message boxes on win
     try:
         ctypes.windll.kernel32.SetErrorMode(0x8007)
     except AttributeError:
         pass

     candidates = []
     # test every file
     for curfolder in [folder]+additional_dirs:
         if os.path.isdir(curfolder):
             for filename in os.listdir(curfolder):
                 # our minimum requirement for candidates is that
                 # they should contain 'assimp' somewhere in
                 # their name
                 if filename.lower().find('assimp')==-1 :
                     continue
                 is_out=1
                 for et in ext_whitelist:
                     if et in filename.lower():
                         is_out=0
                         break
                 if is_out:
                     continue

                 library_path = os.path.join(curfolder, filename)
                 logger.debug('Try ' + library_path)
                 try:
                     dll = ctypes.cdll.LoadLibrary(library_path)
                 except Exception as e:
                     logger.warning(str(e))
                     # OK, this except is evil. But different OSs will throw different
                     # errors. So just ignore any errors.
                     continue
                 # see if the functions we need are in the dll
                 loaded = try_load_functions(library_path, dll)
                 if loaded: candidates.append(loaded)

     if not candidates:
         # no library found
         raise AssimpError("assimp library not found")
     else:
         # get the newest library_path
         candidates = map(lambda x: (os.lstat(x[0])[-2], x), candidates)
         res = max(candidates, key=operator.itemgetter(0))[1]
         logger.debug('Using assimp library located at ' + res[0])

         # XXX: if there are 1000 dll/so files containing 'assimp'
         # in their name, do we have all of them in our address
         # space now until gc kicks in?

         # XXX: take version postfix of the .so on linux?
         return res[1:]

 def hasattr_silent(object, name):
     """
     Calls hasttr() with the given parameters and preserves the legacy (pre-Python 3.2)
     functionality of silently catching exceptions.

     Returns the result of hasatter() or False if an exception was raised.
     """

     try:
         return hasattr(object, name)
-    except:
+    except AttributeError:
         return False

@@ -435,6 +435,7 @@ aiProcess_Debone = 0x4000000
 aiProcess_GenEntityMeshes = 0x100000
 aiProcess_OptimizeAnimations = 0x200000
 aiProcess_FixTexturePaths = 0x200000
+aiProcess_EmbedTextures = 0x10000000,

 ## @def aiProcess_ConvertToLeftHanded
 # @brief Shortcut flag for Direct3D-based applications.
@@ -1,6 +1,6 @@
 #-*- coding: UTF-8 -*-

-from ctypes import POINTER, c_void_p, c_int, c_uint, c_char, c_float, Structure, c_char_p, c_double, c_ubyte, c_size_t, c_uint32
+from ctypes import POINTER, c_void_p, c_uint, c_char, c_float, Structure, c_char_p, c_double, c_ubyte, c_size_t, c_uint32


 class Vector2D(Structure):
@@ -70,7 +70,7 @@ class String(Structure):
     See 'types.h' for details.
     """

     MAXLEN = 1024

     _fields_ = [
             # Binary length of the string excluding the terminal 0. This is NOT the
@@ -24,12 +24,13 @@ This sample is based on several sources, including:
 - ASSIMP's C++ SimpleOpenGL viewer
 """

-import os, sys
+import sys
 from OpenGL.GLUT import *
 from OpenGL.GLU import *
 from OpenGL.GL import *

-import logging;logger = logging.getLogger("pyassimp_opengl")
+import logging
+logger = logging.getLogger("pyassimp_opengl")
 logging.basicConfig(level=logging.INFO)

 import math

@@ -5,7 +5,7 @@
 This module demonstrates the functionality of PyAssimp.
 """

-import os, sys
+import sys
 import logging
 logging.basicConfig(level=logging.INFO)

@@ -50,8 +50,8 @@ def main(filename=None):
     print(" colors:" + str(len(mesh.colors)))
     tcs = mesh.texturecoords
     if tcs.any():
-        for index, tc in enumerate(tcs):
-            print(" texture-coords "+ str(index) + ":" + str(len(tcs[index])) + "first3:" + str(tcs[index][:3]))
+        for tc_index, tc in enumerate(tcs):
+            print(" texture-coords "+ str(tc_index) + ":" + str(len(tcs[tc_index])) + "first3:" + str(tcs[tc_index][:3]))

     else:
         print(" no texture coordinates")
@@ -291,7 +291,9 @@ def main():
     #s += "#endif\n"

     output.write(templt.replace("<HERE>",s))

+    # we got here, so no error
+    return 0

 if __name__ == "__main__":
     sys.exit(main())
@@ -151,11 +151,8 @@ def handle_unset_args(field,entity,schema,argnum):
     return n+template_allow_optional.format()

 def get_single_conversion(field,schema,argnum=0,classname='?'):
-    typen = field.type
     name = field.name
-    if field.collection:
-        typen = 'LIST'
-    return template_convert_single.format(type=typen,name=name,argnum=argnum,classname=classname,full_type=field.fullspec)
+    return template_convert_single.format(name=name,argnum=argnum,classname=classname,full_type=field.fullspec)

 def count_args_up(entity,schema):
     return len(entity.members) + (count_args_up(schema.entities[entity.parent],schema) if entity.parent else 0)
@@ -218,7 +215,7 @@ def get_derived(e,schema):
     return res

 def get_hierarchy(e,schema):
-    return get_derived(e.schema)+[e.name]+get_base_classes(e,schema)
+    return get_derived(e, schema)+[e.name]+get_base_classes(e,schema)

 def sort_entity_list(schema):
     deps = []
@@ -300,5 +297,8 @@ def work(filename):
     with open(output_file_cpp,'wt') as outp:
         outp.write(inp.read().replace('{schema-static-table}',schema_table).replace('{converter-impl}',converters))

+    # Finished without error, so return 0
+    return 0
+
 if __name__ == "__main__":
     sys.exit(work(sys.argv[1] if len(sys.argv)>1 else 'schema.exp'))
@@ -43,7 +43,8 @@
 """Parse an EXPRESS file and extract basic information on all
     entities and data types contained"""

-import sys, os, re
+import sys
+import re
 from collections import OrderedDict

 re_match_entity = re.compile(r"""
@@ -228,7 +228,8 @@ int DoExport(const aiTexture* tx, FILE* p, const std::string& extension,
 // Implementation of the assimp extract utility
 int Assimp_Extract (const char* const* params, unsigned int num)
 {
     const char* const invalid = "assimp extract: Invalid number of arguments. See \'assimp extract --help\'\n";
+    // assimp extract in out [options]
     if (num < 1) {
         printf(invalid);
         return 1;
@@ -240,11 +241,7 @@ int Assimp_Extract (const char* const* params, unsigned int num)
         return 0;
     }

-    // asssimp extract in out [options]
-    if (num < 1) {
-        printf(invalid);
-        return 1;
-    }

     std::string in = std::string(params[0]);
     std::string out = (num > 1 ? std::string(params[1]) : "-");
