Merge branch 'master' into issue_2390
commit
fa37018e08
|
@ -555,8 +555,6 @@ void WriteDump(const aiScene* scene, IOStream* io, bool shortened) {
|
|||
mesh->mNormals[n].z);
|
||||
}
|
||||
}
|
||||
else {
|
||||
}
|
||||
ioprintf(io,"\t\t</Normals>\n");
|
||||
}
|
||||
|
||||
|
|
|
@ -416,7 +416,7 @@ template <> struct Structure :: _defaultInitializer<ErrorPolicy_Fail> {
|
|||
void operator ()(T& /*out*/,const char* = "") {
|
||||
// obviously, it is crucial that _DefaultInitializer is used
|
||||
// only from within a catch clause.
|
||||
throw;
|
||||
throw DeadlyImportError("Constructing BlenderDNA Structure encountered an error");
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
@ -144,7 +144,7 @@ void COBImporter::InternReadFile( const std::string& pFile, aiScene* pScene, IOS
|
|||
// check header
|
||||
char head[32];
|
||||
stream->CopyAndAdvance(head,32);
|
||||
if (strncmp(head,"Caligari ",9)) {
|
||||
if (strncmp(head,"Caligari ",9) != 0) {
|
||||
ThrowException("Could not found magic id: `Caligari`");
|
||||
}
|
||||
|
||||
|
@ -656,14 +656,14 @@ void COBImporter::ReadLght_Ascii(Scene& out, LineSplitter& splitter, const Chunk
|
|||
ReadFloat3Tuple_Ascii(msh.color ,&rgb);
|
||||
|
||||
SkipSpaces(&rgb);
|
||||
if (strncmp(rgb,"cone angle",10)) {
|
||||
if (strncmp(rgb,"cone angle",10) != 0) {
|
||||
ASSIMP_LOG_WARN_F( "Expected `cone angle` entity in `color` line in `Lght` chunk ", nfo.id );
|
||||
}
|
||||
SkipSpaces(rgb+10,&rgb);
|
||||
msh.angle = fast_atof(&rgb);
|
||||
|
||||
SkipSpaces(&rgb);
|
||||
if (strncmp(rgb,"inner angle",11)) {
|
||||
if (strncmp(rgb,"inner angle",11) != 0) {
|
||||
ASSIMP_LOG_WARN_F( "Expected `inner angle` entity in `color` line in `Lght` chunk ", nfo.id);
|
||||
}
|
||||
SkipSpaces(rgb+11,&rgb);
|
||||
|
@ -903,7 +903,7 @@ public:
|
|||
if(nfo.size != static_cast<unsigned int>(-1)) {
|
||||
try {
|
||||
reader.IncPtr( static_cast< int >( nfo.size ) - reader.GetCurrentPos() + cur );
|
||||
} catch ( DeadlyImportError e ) {
|
||||
} catch (const DeadlyImportError& e ) {
|
||||
// out of limit so correct the value
|
||||
reader.IncPtr( reader.GetReadLimit() );
|
||||
}
|
||||
|
@ -1214,7 +1214,7 @@ void COBImporter::ReadGrou_Binary(COB::Scene& out, StreamReaderLE& reader, const
|
|||
|
||||
const chunk_guard cn(nfo,reader);
|
||||
|
||||
out.nodes.push_back(std::shared_ptr<Group>(new Group()));
|
||||
out.nodes.push_back(std::make_shared<Group>());
|
||||
Group& msh = (Group&)(*out.nodes.back().get());
|
||||
msh = nfo;
|
||||
|
||||
|
|
|
@ -476,8 +476,11 @@ D3MFOpcPackage::D3MFOpcPackage(IOSystem* pIOHandler, const std::string& rFile)
|
|||
mZipArchive->Close( fileStream );
|
||||
|
||||
} else if( file == D3MF::XmlTag::CONTENT_TYPES_ARCHIVE) {
|
||||
|
||||
ASSIMP_LOG_WARN_F("Ignored file of unsupported type CONTENT_TYPES_ARCHIVES",file);
|
||||
} else {
|
||||
ASSIMP_LOG_WARN_F("Ignored file of unknown type: ",file);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1717,22 +1717,22 @@ namespace Assimp {
|
|||
if (!mesh)
|
||||
{
|
||||
for (const MeshMap::value_type& v : meshes_converted) {
|
||||
const MeshGeometry* const mesh = dynamic_cast<const MeshGeometry*> (v.first);
|
||||
if (!mesh) {
|
||||
const MeshGeometry* const meshGeom = dynamic_cast<const MeshGeometry*> (v.first);
|
||||
if (!meshGeom) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const MatIndexArray& mats = mesh->GetMaterialIndices();
|
||||
const MatIndexArray& mats = meshGeom->GetMaterialIndices();
|
||||
if (std::find(mats.begin(), mats.end(), matIndex) == mats.end()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
int index = -1;
|
||||
for (unsigned int i = 0; i < AI_MAX_NUMBER_OF_TEXTURECOORDS; ++i) {
|
||||
if (mesh->GetTextureCoords(i).empty()) {
|
||||
if (meshGeom->GetTextureCoords(i).empty()) {
|
||||
break;
|
||||
}
|
||||
const std::string& name = mesh->GetTextureCoordChannelName(i);
|
||||
const std::string& name = meshGeom->GetTextureCoordChannelName(i);
|
||||
if (name == uvSet) {
|
||||
index = static_cast<int>(i);
|
||||
break;
|
||||
|
@ -1840,22 +1840,22 @@ namespace Assimp {
|
|||
if (!mesh)
|
||||
{
|
||||
for (const MeshMap::value_type& v : meshes_converted) {
|
||||
const MeshGeometry* const mesh = dynamic_cast<const MeshGeometry*> (v.first);
|
||||
if (!mesh) {
|
||||
const MeshGeometry* const meshGeom = dynamic_cast<const MeshGeometry*> (v.first);
|
||||
if (!meshGeom) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const MatIndexArray& mats = mesh->GetMaterialIndices();
|
||||
const MatIndexArray& mats = meshGeom->GetMaterialIndices();
|
||||
if (std::find(mats.begin(), mats.end(), matIndex) == mats.end()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
int index = -1;
|
||||
for (unsigned int i = 0; i < AI_MAX_NUMBER_OF_TEXTURECOORDS; ++i) {
|
||||
if (mesh->GetTextureCoords(i).empty()) {
|
||||
if (meshGeom->GetTextureCoords(i).empty()) {
|
||||
break;
|
||||
}
|
||||
const std::string& name = mesh->GetTextureCoordChannelName(i);
|
||||
const std::string& name = meshGeom->GetTextureCoordChannelName(i);
|
||||
if (name == uvSet) {
|
||||
index = static_cast<int>(i);
|
||||
break;
|
||||
|
@ -2196,22 +2196,22 @@ void FBXConverter::SetShadingPropertiesRaw(aiMaterial* out_mat, const PropertyTa
|
|||
if (!mesh)
|
||||
{
|
||||
for (const MeshMap::value_type& v : meshes_converted) {
|
||||
const MeshGeometry* const mesh = dynamic_cast<const MeshGeometry*>(v.first);
|
||||
if (!mesh) {
|
||||
const MeshGeometry* const meshGeom = dynamic_cast<const MeshGeometry*>(v.first);
|
||||
if (!meshGeom) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const MatIndexArray& mats = mesh->GetMaterialIndices();
|
||||
const MatIndexArray& mats = meshGeom->GetMaterialIndices();
|
||||
if (std::find(mats.begin(), mats.end(), matIndex) == mats.end()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
int index = -1;
|
||||
for (unsigned int i = 0; i < AI_MAX_NUMBER_OF_TEXTURECOORDS; ++i) {
|
||||
if (mesh->GetTextureCoords(i).empty()) {
|
||||
if (meshGeom->GetTextureCoords(i).empty()) {
|
||||
break;
|
||||
}
|
||||
const std::string& name = mesh->GetTextureCoordChannelName(i);
|
||||
const std::string& name = meshGeom->GetTextureCoordChannelName(i);
|
||||
if (name == uvSet) {
|
||||
index = static_cast<int>(i);
|
||||
break;
|
||||
|
|
|
@ -432,7 +432,7 @@ void FBX::Node::WritePropertyNodeAscii(
|
|||
char buffer[32];
|
||||
FBX::Node node(name);
|
||||
node.Begin(s, false, indent);
|
||||
std::string vsize = std::to_string(v.size());
|
||||
std::string vsize = to_string(v.size());
|
||||
// *<size> {
|
||||
s.PutChar('*'); s.PutString(vsize); s.PutString(" {\n");
|
||||
// indent + 1
|
||||
|
@ -468,7 +468,7 @@ void FBX::Node::WritePropertyNodeAscii(
|
|||
char buffer[32];
|
||||
FBX::Node node(name);
|
||||
node.Begin(s, false, indent);
|
||||
std::string vsize = std::to_string(v.size());
|
||||
std::string vsize = to_string(v.size());
|
||||
// *<size> {
|
||||
s.PutChar('*'); s.PutString(vsize); s.PutString(" {\n");
|
||||
// indent + 1
|
||||
|
|
|
@ -326,8 +326,11 @@ Video::Video(uint64_t id, const Element& element, const Document& doc, const std
|
|||
content = new uint8_t[len];
|
||||
::memcpy(content, data + 5, len);
|
||||
}
|
||||
} catch (runtime_error runtimeError) {
|
||||
} catch (const runtime_error& runtimeError)
|
||||
{
|
||||
//we don't need the content data for contents that has already been loaded
|
||||
ASSIMP_LOG_DEBUG_F("Caught exception in FBXMaterial (likely because content was already loaded): ",
|
||||
runtimeError.what());
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -300,13 +300,10 @@ int ClampSpline(int idx, int size) {
|
|||
// ------------------------------------------------------------------------------------------------
|
||||
inline void FindSuitableMultiple(int& angle)
|
||||
{
|
||||
if (angle < 3)angle = 3;
|
||||
if (angle < 3) angle = 3;
|
||||
else if (angle < 10) angle = 10;
|
||||
else if (angle < 20) angle = 20;
|
||||
else if (angle < 30) angle = 30;
|
||||
else
|
||||
{
|
||||
}
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
@ -317,6 +314,8 @@ void IRRImporter::ComputeAnimations(Node* root, aiNode* real, std::vector<aiNode
|
|||
// XXX totally WIP - doesn't produce proper results, need to evaluate
|
||||
// whether there's any use for Irrlicht's proprietary scene format
|
||||
// outside Irrlicht ...
|
||||
// This also applies to the above function of FindSuitableMultiple and ClampSpline which are
|
||||
// solely used in this function
|
||||
|
||||
if (root->animators.empty()) {
|
||||
return;
|
||||
|
@ -674,38 +673,38 @@ void IRRImporter::GenerateGraph(Node* root,aiNode* rootOut ,aiScene* scene,
|
|||
// Get the loaded mesh from the scene and add it to
|
||||
// the list of all scenes to be attached to the
|
||||
// graph we're currently building
|
||||
aiScene* scene = batch.GetImport(root->id);
|
||||
if (!scene) {
|
||||
aiScene* localScene = batch.GetImport(root->id);
|
||||
if (!localScene) {
|
||||
ASSIMP_LOG_ERROR("IRR: Unable to load external file: " + root->meshPath);
|
||||
break;
|
||||
}
|
||||
attach.push_back(AttachmentInfo(scene,rootOut));
|
||||
attach.push_back(AttachmentInfo(localScene,rootOut));
|
||||
|
||||
// Now combine the material we've loaded for this mesh
|
||||
// with the real materials we got from the file. As we
|
||||
// don't execute any pp-steps on the file, the numbers
|
||||
// should be equal. If they are not, we can impossibly
|
||||
// do this ...
|
||||
if (root->materials.size() != (unsigned int)scene->mNumMaterials) {
|
||||
if (root->materials.size() != (unsigned int)localScene->mNumMaterials) {
|
||||
ASSIMP_LOG_WARN("IRR: Failed to match imported materials "
|
||||
"with the materials found in the IRR scene file");
|
||||
|
||||
break;
|
||||
}
|
||||
for (unsigned int i = 0; i < scene->mNumMaterials;++i) {
|
||||
for (unsigned int i = 0; i < localScene->mNumMaterials;++i) {
|
||||
// Delete the old material, we don't need it anymore
|
||||
delete scene->mMaterials[i];
|
||||
delete localScene->mMaterials[i];
|
||||
|
||||
std::pair<aiMaterial*, unsigned int>& src = root->materials[i];
|
||||
scene->mMaterials[i] = src.first;
|
||||
localScene->mMaterials[i] = src.first;
|
||||
}
|
||||
|
||||
// NOTE: Each mesh should have exactly one material assigned,
|
||||
// but we do it in a separate loop if this behaviour changes
|
||||
// in future.
|
||||
for (unsigned int i = 0; i < scene->mNumMeshes;++i) {
|
||||
for (unsigned int i = 0; i < localScene->mNumMeshes;++i) {
|
||||
// Process material flags
|
||||
aiMesh* mesh = scene->mMeshes[i];
|
||||
aiMesh* mesh = localScene->mMeshes[i];
|
||||
|
||||
|
||||
// If "trans_vertex_alpha" mode is enabled, search all vertex colors
|
||||
|
|
|
@ -278,10 +278,10 @@ void STEP::ReadFile(DB& db,const EXPRESS::ConversionSchema& scheme,
|
|||
std::transform( type.begin(), type.end(), type.begin(), &Assimp::ToLower<char> );
|
||||
const char* sz = scheme.GetStaticStringForToken(type);
|
||||
if(sz) {
|
||||
const std::string::size_type len = n2-n1+1;
|
||||
char* const copysz = new char[len+1];
|
||||
const std::string::size_type szLen = n2-n1+1;
|
||||
char* const copysz = new char[szLen+1];
|
||||
std::copy(s.c_str()+n1,s.c_str()+n2+1,copysz);
|
||||
copysz[len] = '\0';
|
||||
copysz[szLen] = '\0';
|
||||
db.InternInsert(new LazyObject(db,id,line,sz,copysz));
|
||||
}
|
||||
if(!has_next) {
|
||||
|
|
|
@ -443,10 +443,10 @@ void MD5Importer::LoadMD5MeshFile ()
|
|||
for (MD5::VertexList::const_iterator iter = meshSrc.mVertices.begin();iter != meshSrc.mVertices.end();++iter,++pv) {
|
||||
for (unsigned int jub = (*iter).mFirstWeight, w = jub; w < jub + (*iter).mNumWeights;++w)
|
||||
{
|
||||
MD5::WeightDesc& desc = meshSrc.mWeights[w];
|
||||
MD5::WeightDesc& weightDesc = meshSrc.mWeights[w];
|
||||
/* FIX for some invalid exporters */
|
||||
if (!(desc.mWeight < AI_MD5_WEIGHT_EPSILON && desc.mWeight >= -AI_MD5_WEIGHT_EPSILON ))
|
||||
++piCount[desc.mBone];
|
||||
if (!(weightDesc.mWeight < AI_MD5_WEIGHT_EPSILON && weightDesc.mWeight >= -AI_MD5_WEIGHT_EPSILON ))
|
||||
++piCount[weightDesc.mBone];
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -493,20 +493,20 @@ void MD5Importer::LoadMD5MeshFile ()
|
|||
if (w >= meshSrc.mWeights.size())
|
||||
throw DeadlyImportError("MD5MESH: Invalid weight index");
|
||||
|
||||
MD5::WeightDesc& desc = meshSrc.mWeights[w];
|
||||
if ( desc.mWeight < AI_MD5_WEIGHT_EPSILON && desc.mWeight >= -AI_MD5_WEIGHT_EPSILON) {
|
||||
MD5::WeightDesc& weightDesc = meshSrc.mWeights[w];
|
||||
if ( weightDesc.mWeight < AI_MD5_WEIGHT_EPSILON && weightDesc.mWeight >= -AI_MD5_WEIGHT_EPSILON) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const ai_real fNewWeight = desc.mWeight / fSum;
|
||||
const ai_real fNewWeight = weightDesc.mWeight / fSum;
|
||||
|
||||
// transform the local position into worldspace
|
||||
MD5::BoneDesc& boneSrc = meshParser.mJoints[desc.mBone];
|
||||
const aiVector3D v = boneSrc.mRotationQuatConverted.Rotate (desc.vOffsetPosition);
|
||||
MD5::BoneDesc& boneSrc = meshParser.mJoints[weightDesc.mBone];
|
||||
const aiVector3D v = boneSrc.mRotationQuatConverted.Rotate (weightDesc.vOffsetPosition);
|
||||
|
||||
// use the original weight to compute the vertex position
|
||||
// (some MD5s seem to depend on the invalid weight values ...)
|
||||
*pv += ((boneSrc.mPositionXYZ+v)* (ai_real)desc.mWeight);
|
||||
*pv += ((boneSrc.mPositionXYZ+v)* (ai_real)weightDesc.mWeight);
|
||||
|
||||
aiBone* bone = mesh->mBones[boneSrc.mMap];
|
||||
*bone->mWeights++ = aiVertexWeight((unsigned int)(pv-mesh->mVertices),fNewWeight);
|
||||
|
|
|
@ -127,7 +127,7 @@ STLExporter::STLExporter(const char* _filename, const aiScene* pScene, bool expo
|
|||
mOutput.write((char *)&meshnum, 4);
|
||||
|
||||
if (exportPointClouds) {
|
||||
|
||||
throw DeadlyExportError("This functionality is not yet implemented for binary output.");
|
||||
}
|
||||
|
||||
for(unsigned int i = 0; i < pScene->mNumMeshes; ++i) {
|
||||
|
|
|
@ -294,17 +294,17 @@ namespace glTF {
|
|||
// filling object "compressedData"
|
||||
json_comp_data.SetObject();
|
||||
json_comp_data.AddMember("buffer", ptr_ext_comp->Buffer, w.mAl);
|
||||
json_comp_data.AddMember("byteOffset", ptr_ext_comp->Offset, w.mAl);
|
||||
json_comp_data.AddMember("byteOffset", static_cast<uint64_t>(ptr_ext_comp->Offset), w.mAl);
|
||||
json_comp_data.AddMember("componentType", 5121, w.mAl);
|
||||
json_comp_data.AddMember("type", "SCALAR", w.mAl);
|
||||
json_comp_data.AddMember("count", ptr_ext_comp->Count, w.mAl);
|
||||
json_comp_data.AddMember("count", static_cast<uint64_t>(ptr_ext_comp->Count), w.mAl);
|
||||
if(ptr_ext_comp->Binary)
|
||||
json_comp_data.AddMember("mode", "binary", w.mAl);
|
||||
else
|
||||
json_comp_data.AddMember("mode", "ascii", w.mAl);
|
||||
|
||||
json_comp_data.AddMember("indicesCount", ptr_ext_comp->IndicesCount, w.mAl);
|
||||
json_comp_data.AddMember("verticesCount", ptr_ext_comp->VerticesCount, w.mAl);
|
||||
json_comp_data.AddMember("indicesCount", static_cast<uint64_t>(ptr_ext_comp->IndicesCount), w.mAl);
|
||||
json_comp_data.AddMember("verticesCount", static_cast<uint64_t>(ptr_ext_comp->VerticesCount), w.mAl);
|
||||
// filling object "Open3DGC-compression"
|
||||
Value json_o3dgc;
|
||||
|
||||
|
|
|
@ -245,7 +245,7 @@ inline Ref<Accessor> ExportData(Asset& a, std::string& meshName, Ref<Buffer>& bu
|
|||
|
||||
namespace {
|
||||
void GetMatScalar(const aiMaterial* mat, float& val, const char* propName, int type, int idx) {
|
||||
if (mat->Get(propName, type, idx, val) == AI_SUCCESS) {}
|
||||
ai_assert(mat->Get(propName, type, idx, val) == AI_SUCCESS);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -21,7 +21,7 @@ FORMATS = ["CSM",
|
|||
"STL",
|
||||
"IRR",
|
||||
"Q3O",
|
||||
"Q3D"
|
||||
"Q3D",
|
||||
"MS3D",
|
||||
"Q3S",
|
||||
"ZGL",
|
||||
|
|
|
@ -1,280 +1,279 @@
|
|||
#-*- coding: UTF-8 -*-
|
||||
|
||||
"""
|
||||
Some fancy helper functions.
|
||||
"""
|
||||
|
||||
import os
|
||||
import ctypes
|
||||
from ctypes import POINTER
|
||||
import operator
|
||||
|
||||
from distutils.sysconfig import get_python_lib
|
||||
import re
|
||||
import sys
|
||||
|
||||
try: import numpy
|
||||
except: numpy = None
|
||||
|
||||
import logging;logger = logging.getLogger("pyassimp")
|
||||
|
||||
from .errors import AssimpError
|
||||
|
||||
additional_dirs, ext_whitelist = [],[]
|
||||
|
||||
# populate search directories and lists of allowed file extensions
|
||||
# depending on the platform we're running on.
|
||||
if os.name=='posix':
|
||||
additional_dirs.append('./')
|
||||
additional_dirs.append('/usr/lib/')
|
||||
additional_dirs.append('/usr/lib/x86_64-linux-gnu/')
|
||||
additional_dirs.append('/usr/local/lib/')
|
||||
|
||||
if 'LD_LIBRARY_PATH' in os.environ:
|
||||
additional_dirs.extend([item for item in os.environ['LD_LIBRARY_PATH'].split(':') if item])
|
||||
|
||||
# check if running from anaconda.
|
||||
if "conda" or "continuum" in sys.version.lower():
|
||||
cur_path = get_python_lib()
|
||||
pattern = re.compile('.*\/lib\/')
|
||||
conda_lib = pattern.match(cur_path).group()
|
||||
logger.info("Adding Anaconda lib path:"+ conda_lib)
|
||||
additional_dirs.append(conda_lib)
|
||||
|
||||
# note - this won't catch libassimp.so.N.n, but
|
||||
# currently there's always a symlink called
|
||||
# libassimp.so in /usr/local/lib.
|
||||
ext_whitelist.append('.so')
|
||||
# libassimp.dylib in /usr/local/lib
|
||||
ext_whitelist.append('.dylib')
|
||||
|
||||
elif os.name=='nt':
|
||||
ext_whitelist.append('.dll')
|
||||
path_dirs = os.environ['PATH'].split(';')
|
||||
additional_dirs.extend(path_dirs)
|
||||
|
||||
def vec2tuple(x):
|
||||
""" Converts a VECTOR3D to a Tuple """
|
||||
return (x.x, x.y, x.z)
|
||||
|
||||
def transform(vector3, matrix4x4):
|
||||
""" Apply a transformation matrix on a 3D vector.
|
||||
|
||||
:param vector3: array with 3 elements
|
||||
:param matrix4x4: 4x4 matrix
|
||||
"""
|
||||
if numpy:
|
||||
return numpy.dot(matrix4x4, numpy.append(vector3, 1.))
|
||||
else:
|
||||
m0,m1,m2,m3 = matrix4x4; x,y,z = vector3
|
||||
return [
|
||||
m0[0]*x + m0[1]*y + m0[2]*z + m0[3],
|
||||
m1[0]*x + m1[1]*y + m1[2]*z + m1[3],
|
||||
m2[0]*x + m2[1]*y + m2[2]*z + m2[3],
|
||||
m3[0]*x + m3[1]*y + m3[2]*z + m3[3]
|
||||
]
|
||||
|
||||
def _inv(matrix4x4):
|
||||
m0,m1,m2,m3 = matrix4x4
|
||||
|
||||
det = m0[3]*m1[2]*m2[1]*m3[0] - m0[2]*m1[3]*m2[1]*m3[0] - \
|
||||
m0[3]*m1[1]*m2[2]*m3[0] + m0[1]*m1[3]*m2[2]*m3[0] + \
|
||||
m0[2]*m1[1]*m2[3]*m3[0] - m0[1]*m1[2]*m2[3]*m3[0] - \
|
||||
m0[3]*m1[2]*m2[0]*m3[1] + m0[2]*m1[3]*m2[0]*m3[1] + \
|
||||
m0[3]*m1[0]*m2[2]*m3[1] - m0[0]*m1[3]*m2[2]*m3[1] - \
|
||||
m0[2]*m1[0]*m2[3]*m3[1] + m0[0]*m1[2]*m2[3]*m3[1] + \
|
||||
m0[3]*m1[1]*m2[0]*m3[2] - m0[1]*m1[3]*m2[0]*m3[2] - \
|
||||
m0[3]*m1[0]*m2[1]*m3[2] + m0[0]*m1[3]*m2[1]*m3[2] + \
|
||||
m0[1]*m1[0]*m2[3]*m3[2] - m0[0]*m1[1]*m2[3]*m3[2] - \
|
||||
m0[2]*m1[1]*m2[0]*m3[3] + m0[1]*m1[2]*m2[0]*m3[3] + \
|
||||
m0[2]*m1[0]*m2[1]*m3[3] - m0[0]*m1[2]*m2[1]*m3[3] - \
|
||||
m0[1]*m1[0]*m2[2]*m3[3] + m0[0]*m1[1]*m2[2]*m3[3]
|
||||
|
||||
return[[( m1[2]*m2[3]*m3[1] - m1[3]*m2[2]*m3[1] + m1[3]*m2[1]*m3[2] - m1[1]*m2[3]*m3[2] - m1[2]*m2[1]*m3[3] + m1[1]*m2[2]*m3[3]) /det,
|
||||
( m0[3]*m2[2]*m3[1] - m0[2]*m2[3]*m3[1] - m0[3]*m2[1]*m3[2] + m0[1]*m2[3]*m3[2] + m0[2]*m2[1]*m3[3] - m0[1]*m2[2]*m3[3]) /det,
|
||||
( m0[2]*m1[3]*m3[1] - m0[3]*m1[2]*m3[1] + m0[3]*m1[1]*m3[2] - m0[1]*m1[3]*m3[2] - m0[2]*m1[1]*m3[3] + m0[1]*m1[2]*m3[3]) /det,
|
||||
( m0[3]*m1[2]*m2[1] - m0[2]*m1[3]*m2[1] - m0[3]*m1[1]*m2[2] + m0[1]*m1[3]*m2[2] + m0[2]*m1[1]*m2[3] - m0[1]*m1[2]*m2[3]) /det],
|
||||
[( m1[3]*m2[2]*m3[0] - m1[2]*m2[3]*m3[0] - m1[3]*m2[0]*m3[2] + m1[0]*m2[3]*m3[2] + m1[2]*m2[0]*m3[3] - m1[0]*m2[2]*m3[3]) /det,
|
||||
( m0[2]*m2[3]*m3[0] - m0[3]*m2[2]*m3[0] + m0[3]*m2[0]*m3[2] - m0[0]*m2[3]*m3[2] - m0[2]*m2[0]*m3[3] + m0[0]*m2[2]*m3[3]) /det,
|
||||
( m0[3]*m1[2]*m3[0] - m0[2]*m1[3]*m3[0] - m0[3]*m1[0]*m3[2] + m0[0]*m1[3]*m3[2] + m0[2]*m1[0]*m3[3] - m0[0]*m1[2]*m3[3]) /det,
|
||||
( m0[2]*m1[3]*m2[0] - m0[3]*m1[2]*m2[0] + m0[3]*m1[0]*m2[2] - m0[0]*m1[3]*m2[2] - m0[2]*m1[0]*m2[3] + m0[0]*m1[2]*m2[3]) /det],
|
||||
[( m1[1]*m2[3]*m3[0] - m1[3]*m2[1]*m3[0] + m1[3]*m2[0]*m3[1] - m1[0]*m2[3]*m3[1] - m1[1]*m2[0]*m3[3] + m1[0]*m2[1]*m3[3]) /det,
|
||||
( m0[3]*m2[1]*m3[0] - m0[1]*m2[3]*m3[0] - m0[3]*m2[0]*m3[1] + m0[0]*m2[3]*m3[1] + m0[1]*m2[0]*m3[3] - m0[0]*m2[1]*m3[3]) /det,
|
||||
( m0[1]*m1[3]*m3[0] - m0[3]*m1[1]*m3[0] + m0[3]*m1[0]*m3[1] - m0[0]*m1[3]*m3[1] - m0[1]*m1[0]*m3[3] + m0[0]*m1[1]*m3[3]) /det,
|
||||
( m0[3]*m1[1]*m2[0] - m0[1]*m1[3]*m2[0] - m0[3]*m1[0]*m2[1] + m0[0]*m1[3]*m2[1] + m0[1]*m1[0]*m2[3] - m0[0]*m1[1]*m2[3]) /det],
|
||||
[( m1[2]*m2[1]*m3[0] - m1[1]*m2[2]*m3[0] - m1[2]*m2[0]*m3[1] + m1[0]*m2[2]*m3[1] + m1[1]*m2[0]*m3[2] - m1[0]*m2[1]*m3[2]) /det,
|
||||
( m0[1]*m2[2]*m3[0] - m0[2]*m2[1]*m3[0] + m0[2]*m2[0]*m3[1] - m0[0]*m2[2]*m3[1] - m0[1]*m2[0]*m3[2] + m0[0]*m2[1]*m3[2]) /det,
|
||||
( m0[2]*m1[1]*m3[0] - m0[1]*m1[2]*m3[0] - m0[2]*m1[0]*m3[1] + m0[0]*m1[2]*m3[1] + m0[1]*m1[0]*m3[2] - m0[0]*m1[1]*m3[2]) /det,
|
||||
( m0[1]*m1[2]*m2[0] - m0[2]*m1[1]*m2[0] + m0[2]*m1[0]*m2[1] - m0[0]*m1[2]*m2[1] - m0[1]*m1[0]*m2[2] + m0[0]*m1[1]*m2[2]) /det]]
|
||||
|
||||
def get_bounding_box(scene):
|
||||
bb_min = [1e10, 1e10, 1e10] # x,y,z
|
||||
bb_max = [-1e10, -1e10, -1e10] # x,y,z
|
||||
inv = numpy.linalg.inv if numpy else _inv
|
||||
return get_bounding_box_for_node(scene.rootnode, bb_min, bb_max, inv(scene.rootnode.transformation))
|
||||
|
||||
def get_bounding_box_for_node(node, bb_min, bb_max, transformation):
|
||||
|
||||
if numpy:
|
||||
transformation = numpy.dot(transformation, node.transformation)
|
||||
else:
|
||||
t0,t1,t2,t3 = transformation
|
||||
T0,T1,T2,T3 = node.transformation
|
||||
transformation = [ [
|
||||
t0[0]*T0[0] + t0[1]*T1[0] + t0[2]*T2[0] + t0[3]*T3[0],
|
||||
t0[0]*T0[1] + t0[1]*T1[1] + t0[2]*T2[1] + t0[3]*T3[1],
|
||||
t0[0]*T0[2] + t0[1]*T1[2] + t0[2]*T2[2] + t0[3]*T3[2],
|
||||
t0[0]*T0[3] + t0[1]*T1[3] + t0[2]*T2[3] + t0[3]*T3[3]
|
||||
],[
|
||||
t1[0]*T0[0] + t1[1]*T1[0] + t1[2]*T2[0] + t1[3]*T3[0],
|
||||
t1[0]*T0[1] + t1[1]*T1[1] + t1[2]*T2[1] + t1[3]*T3[1],
|
||||
t1[0]*T0[2] + t1[1]*T1[2] + t1[2]*T2[2] + t1[3]*T3[2],
|
||||
t1[0]*T0[3] + t1[1]*T1[3] + t1[2]*T2[3] + t1[3]*T3[3]
|
||||
],[
|
||||
t2[0]*T0[0] + t2[1]*T1[0] + t2[2]*T2[0] + t2[3]*T3[0],
|
||||
t2[0]*T0[1] + t2[1]*T1[1] + t2[2]*T2[1] + t2[3]*T3[1],
|
||||
t2[0]*T0[2] + t2[1]*T1[2] + t2[2]*T2[2] + t2[3]*T3[2],
|
||||
t2[0]*T0[3] + t2[1]*T1[3] + t2[2]*T2[3] + t2[3]*T3[3]
|
||||
],[
|
||||
t3[0]*T0[0] + t3[1]*T1[0] + t3[2]*T2[0] + t3[3]*T3[0],
|
||||
t3[0]*T0[1] + t3[1]*T1[1] + t3[2]*T2[1] + t3[3]*T3[1],
|
||||
t3[0]*T0[2] + t3[1]*T1[2] + t3[2]*T2[2] + t3[3]*T3[2],
|
||||
t3[0]*T0[3] + t3[1]*T1[3] + t3[2]*T2[3] + t3[3]*T3[3]
|
||||
] ]
|
||||
|
||||
for mesh in node.meshes:
|
||||
for v in mesh.vertices:
|
||||
v = transform(v, transformation)
|
||||
bb_min[0] = min(bb_min[0], v[0])
|
||||
bb_min[1] = min(bb_min[1], v[1])
|
||||
bb_min[2] = min(bb_min[2], v[2])
|
||||
bb_max[0] = max(bb_max[0], v[0])
|
||||
bb_max[1] = max(bb_max[1], v[1])
|
||||
bb_max[2] = max(bb_max[2], v[2])
|
||||
|
||||
|
||||
for child in node.children:
|
||||
bb_min, bb_max = get_bounding_box_for_node(child, bb_min, bb_max, transformation)
|
||||
|
||||
return bb_min, bb_max
|
||||
|
||||
def try_load_functions(library_path, dll):
|
||||
'''
|
||||
Try to bind to aiImportFile and aiReleaseImport
|
||||
|
||||
Arguments
|
||||
---------
|
||||
library_path: path to current lib
|
||||
dll: ctypes handle to library
|
||||
|
||||
Returns
|
||||
---------
|
||||
If unsuccessful:
|
||||
None
|
||||
If successful:
|
||||
Tuple containing (library_path,
|
||||
load from filename function,
|
||||
load from memory function,
|
||||
export to filename function,
|
||||
export to blob function,
|
||||
release function,
|
||||
ctypes handle to assimp library)
|
||||
'''
|
||||
|
||||
try:
|
||||
load = dll.aiImportFile
|
||||
release = dll.aiReleaseImport
|
||||
load_mem = dll.aiImportFileFromMemory
|
||||
export = dll.aiExportScene
|
||||
export2blob = dll.aiExportSceneToBlob
|
||||
except AttributeError:
|
||||
#OK, this is a library, but it doesn't have the functions we need
|
||||
return None
|
||||
|
||||
# library found!
|
||||
from .structs import Scene, ExportDataBlob
|
||||
load.restype = POINTER(Scene)
|
||||
load_mem.restype = POINTER(Scene)
|
||||
export2blob.restype = POINTER(ExportDataBlob)
|
||||
return (library_path, load, load_mem, export, export2blob, release, dll)
|
||||
|
||||
def search_library():
|
||||
'''
|
||||
Loads the assimp library.
|
||||
Throws exception AssimpError if no library_path is found
|
||||
|
||||
Returns: tuple, (load from filename function,
|
||||
load from memory function,
|
||||
export to filename function,
|
||||
export to blob function,
|
||||
release function,
|
||||
dll)
|
||||
'''
|
||||
#this path
|
||||
folder = os.path.dirname(__file__)
|
||||
|
||||
# silence 'DLL not found' message boxes on win
|
||||
try:
|
||||
ctypes.windll.kernel32.SetErrorMode(0x8007)
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
candidates = []
|
||||
# test every file
|
||||
for curfolder in [folder]+additional_dirs:
|
||||
if os.path.isdir(curfolder):
|
||||
for filename in os.listdir(curfolder):
|
||||
# our minimum requirement for candidates is that
|
||||
# they should contain 'assimp' somewhere in
|
||||
# their name
|
||||
if filename.lower().find('assimp')==-1 :
|
||||
continue
|
||||
is_out=1
|
||||
for et in ext_whitelist:
|
||||
if et in filename.lower():
|
||||
is_out=0
|
||||
break
|
||||
if is_out:
|
||||
continue
|
||||
|
||||
library_path = os.path.join(curfolder, filename)
|
||||
logger.debug('Try ' + library_path)
|
||||
try:
|
||||
dll = ctypes.cdll.LoadLibrary(library_path)
|
||||
except Exception as e:
|
||||
logger.warning(str(e))
|
||||
# OK, this except is evil. But different OSs will throw different
|
||||
# errors. So just ignore any errors.
|
||||
continue
|
||||
# see if the functions we need are in the dll
|
||||
loaded = try_load_functions(library_path, dll)
|
||||
if loaded: candidates.append(loaded)
|
||||
|
||||
if not candidates:
|
||||
# no library found
|
||||
raise AssimpError("assimp library not found")
|
||||
else:
|
||||
# get the newest library_path
|
||||
candidates = map(lambda x: (os.lstat(x[0])[-2], x), candidates)
|
||||
res = max(candidates, key=operator.itemgetter(0))[1]
|
||||
logger.debug('Using assimp library located at ' + res[0])
|
||||
|
||||
# XXX: if there are 1000 dll/so files containing 'assimp'
|
||||
# in their name, do we have all of them in our address
|
||||
# space now until gc kicks in?
|
||||
|
||||
# XXX: take version postfix of the .so on linux?
|
||||
return res[1:]
|
||||
|
||||
def hasattr_silent(object, name):
|
||||
"""
|
||||
Calls hasttr() with the given parameters and preserves the legacy (pre-Python 3.2)
|
||||
functionality of silently catching exceptions.
|
||||
|
||||
Returns the result of hasatter() or False if an exception was raised.
|
||||
"""
|
||||
|
||||
try:
|
||||
return hasattr(object, name)
|
||||
except:
|
||||
return False
|
||||
#-*- coding: UTF-8 -*-
|
||||
|
||||
"""
|
||||
Some fancy helper functions.
|
||||
"""
|
||||
|
||||
import os
|
||||
import ctypes
|
||||
import operator
|
||||
|
||||
from distutils.sysconfig import get_python_lib
|
||||
import re
|
||||
import sys
|
||||
|
||||
try: import numpy
|
||||
except ImportError: numpy = None
|
||||
|
||||
import logging;logger = logging.getLogger("pyassimp")
|
||||
|
||||
from .errors import AssimpError
|
||||
|
||||
additional_dirs, ext_whitelist = [],[]
|
||||
|
||||
# populate search directories and lists of allowed file extensions
|
||||
# depending on the platform we're running on.
|
||||
if os.name=='posix':
|
||||
additional_dirs.append('./')
|
||||
additional_dirs.append('/usr/lib/')
|
||||
additional_dirs.append('/usr/lib/x86_64-linux-gnu/')
|
||||
additional_dirs.append('/usr/local/lib/')
|
||||
|
||||
if 'LD_LIBRARY_PATH' in os.environ:
|
||||
additional_dirs.extend([item for item in os.environ['LD_LIBRARY_PATH'].split(':') if item])
|
||||
|
||||
# check if running from anaconda.
|
||||
if "conda" or "continuum" in sys.version.lower():
|
||||
cur_path = get_python_lib()
|
||||
pattern = re.compile('.*\/lib\/')
|
||||
conda_lib = pattern.match(cur_path).group()
|
||||
logger.info("Adding Anaconda lib path:"+ conda_lib)
|
||||
additional_dirs.append(conda_lib)
|
||||
|
||||
# note - this won't catch libassimp.so.N.n, but
|
||||
# currently there's always a symlink called
|
||||
# libassimp.so in /usr/local/lib.
|
||||
ext_whitelist.append('.so')
|
||||
# libassimp.dylib in /usr/local/lib
|
||||
ext_whitelist.append('.dylib')
|
||||
|
||||
elif os.name=='nt':
|
||||
ext_whitelist.append('.dll')
|
||||
path_dirs = os.environ['PATH'].split(';')
|
||||
additional_dirs.extend(path_dirs)
|
||||
|
||||
def vec2tuple(x):
|
||||
""" Converts a VECTOR3D to a Tuple """
|
||||
return (x.x, x.y, x.z)
|
||||
|
||||
def transform(vector3, matrix4x4):
|
||||
""" Apply a transformation matrix on a 3D vector.
|
||||
|
||||
:param vector3: array with 3 elements
|
||||
:param matrix4x4: 4x4 matrix
|
||||
"""
|
||||
if numpy:
|
||||
return numpy.dot(matrix4x4, numpy.append(vector3, 1.))
|
||||
else:
|
||||
m0,m1,m2,m3 = matrix4x4; x,y,z = vector3
|
||||
return [
|
||||
m0[0]*x + m0[1]*y + m0[2]*z + m0[3],
|
||||
m1[0]*x + m1[1]*y + m1[2]*z + m1[3],
|
||||
m2[0]*x + m2[1]*y + m2[2]*z + m2[3],
|
||||
m3[0]*x + m3[1]*y + m3[2]*z + m3[3]
|
||||
]
|
||||
|
||||
def _inv(matrix4x4):
|
||||
m0,m1,m2,m3 = matrix4x4
|
||||
|
||||
det = m0[3]*m1[2]*m2[1]*m3[0] - m0[2]*m1[3]*m2[1]*m3[0] - \
|
||||
m0[3]*m1[1]*m2[2]*m3[0] + m0[1]*m1[3]*m2[2]*m3[0] + \
|
||||
m0[2]*m1[1]*m2[3]*m3[0] - m0[1]*m1[2]*m2[3]*m3[0] - \
|
||||
m0[3]*m1[2]*m2[0]*m3[1] + m0[2]*m1[3]*m2[0]*m3[1] + \
|
||||
m0[3]*m1[0]*m2[2]*m3[1] - m0[0]*m1[3]*m2[2]*m3[1] - \
|
||||
m0[2]*m1[0]*m2[3]*m3[1] + m0[0]*m1[2]*m2[3]*m3[1] + \
|
||||
m0[3]*m1[1]*m2[0]*m3[2] - m0[1]*m1[3]*m2[0]*m3[2] - \
|
||||
m0[3]*m1[0]*m2[1]*m3[2] + m0[0]*m1[3]*m2[1]*m3[2] + \
|
||||
m0[1]*m1[0]*m2[3]*m3[2] - m0[0]*m1[1]*m2[3]*m3[2] - \
|
||||
m0[2]*m1[1]*m2[0]*m3[3] + m0[1]*m1[2]*m2[0]*m3[3] + \
|
||||
m0[2]*m1[0]*m2[1]*m3[3] - m0[0]*m1[2]*m2[1]*m3[3] - \
|
||||
m0[1]*m1[0]*m2[2]*m3[3] + m0[0]*m1[1]*m2[2]*m3[3]
|
||||
|
||||
return[[( m1[2]*m2[3]*m3[1] - m1[3]*m2[2]*m3[1] + m1[3]*m2[1]*m3[2] - m1[1]*m2[3]*m3[2] - m1[2]*m2[1]*m3[3] + m1[1]*m2[2]*m3[3]) /det,
|
||||
( m0[3]*m2[2]*m3[1] - m0[2]*m2[3]*m3[1] - m0[3]*m2[1]*m3[2] + m0[1]*m2[3]*m3[2] + m0[2]*m2[1]*m3[3] - m0[1]*m2[2]*m3[3]) /det,
|
||||
( m0[2]*m1[3]*m3[1] - m0[3]*m1[2]*m3[1] + m0[3]*m1[1]*m3[2] - m0[1]*m1[3]*m3[2] - m0[2]*m1[1]*m3[3] + m0[1]*m1[2]*m3[3]) /det,
|
||||
( m0[3]*m1[2]*m2[1] - m0[2]*m1[3]*m2[1] - m0[3]*m1[1]*m2[2] + m0[1]*m1[3]*m2[2] + m0[2]*m1[1]*m2[3] - m0[1]*m1[2]*m2[3]) /det],
|
||||
[( m1[3]*m2[2]*m3[0] - m1[2]*m2[3]*m3[0] - m1[3]*m2[0]*m3[2] + m1[0]*m2[3]*m3[2] + m1[2]*m2[0]*m3[3] - m1[0]*m2[2]*m3[3]) /det,
|
||||
( m0[2]*m2[3]*m3[0] - m0[3]*m2[2]*m3[0] + m0[3]*m2[0]*m3[2] - m0[0]*m2[3]*m3[2] - m0[2]*m2[0]*m3[3] + m0[0]*m2[2]*m3[3]) /det,
|
||||
( m0[3]*m1[2]*m3[0] - m0[2]*m1[3]*m3[0] - m0[3]*m1[0]*m3[2] + m0[0]*m1[3]*m3[2] + m0[2]*m1[0]*m3[3] - m0[0]*m1[2]*m3[3]) /det,
|
||||
( m0[2]*m1[3]*m2[0] - m0[3]*m1[2]*m2[0] + m0[3]*m1[0]*m2[2] - m0[0]*m1[3]*m2[2] - m0[2]*m1[0]*m2[3] + m0[0]*m1[2]*m2[3]) /det],
|
||||
[( m1[1]*m2[3]*m3[0] - m1[3]*m2[1]*m3[0] + m1[3]*m2[0]*m3[1] - m1[0]*m2[3]*m3[1] - m1[1]*m2[0]*m3[3] + m1[0]*m2[1]*m3[3]) /det,
|
||||
( m0[3]*m2[1]*m3[0] - m0[1]*m2[3]*m3[0] - m0[3]*m2[0]*m3[1] + m0[0]*m2[3]*m3[1] + m0[1]*m2[0]*m3[3] - m0[0]*m2[1]*m3[3]) /det,
|
||||
( m0[1]*m1[3]*m3[0] - m0[3]*m1[1]*m3[0] + m0[3]*m1[0]*m3[1] - m0[0]*m1[3]*m3[1] - m0[1]*m1[0]*m3[3] + m0[0]*m1[1]*m3[3]) /det,
|
||||
( m0[3]*m1[1]*m2[0] - m0[1]*m1[3]*m2[0] - m0[3]*m1[0]*m2[1] + m0[0]*m1[3]*m2[1] + m0[1]*m1[0]*m2[3] - m0[0]*m1[1]*m2[3]) /det],
|
||||
[( m1[2]*m2[1]*m3[0] - m1[1]*m2[2]*m3[0] - m1[2]*m2[0]*m3[1] + m1[0]*m2[2]*m3[1] + m1[1]*m2[0]*m3[2] - m1[0]*m2[1]*m3[2]) /det,
|
||||
( m0[1]*m2[2]*m3[0] - m0[2]*m2[1]*m3[0] + m0[2]*m2[0]*m3[1] - m0[0]*m2[2]*m3[1] - m0[1]*m2[0]*m3[2] + m0[0]*m2[1]*m3[2]) /det,
|
||||
( m0[2]*m1[1]*m3[0] - m0[1]*m1[2]*m3[0] - m0[2]*m1[0]*m3[1] + m0[0]*m1[2]*m3[1] + m0[1]*m1[0]*m3[2] - m0[0]*m1[1]*m3[2]) /det,
|
||||
( m0[1]*m1[2]*m2[0] - m0[2]*m1[1]*m2[0] + m0[2]*m1[0]*m2[1] - m0[0]*m1[2]*m2[1] - m0[1]*m1[0]*m2[2] + m0[0]*m1[1]*m2[2]) /det]]
|
||||
|
||||
def get_bounding_box(scene):
    """Compute the axis-aligned bounding box of an entire scene.

    Arguments
    ---------
    scene: a loaded scene; must expose `rootnode` with a 4x4
           `transformation` matrix.

    Returns
    -------
    (bb_min, bb_max): two [x, y, z] lists holding the minimum and maximum
    corners of the bounding box enclosing every mesh vertex in the scene.
    """
    bb_min = [1e10, 1e10, 1e10]  # x,y,z
    bb_max = [-1e10, -1e10, -1e10]  # x,y,z
    # Use numpy's matrix inverse when numpy is available, otherwise fall
    # back to the pure-python implementation defined in this module.
    inv = numpy.linalg.inv if numpy else _inv
    # Seed the recursion with the inverse of the root transform so that the
    # root node's own transformation cancels out on the first step.
    return get_bounding_box_for_node(scene.rootnode, bb_min, bb_max, inv(scene.rootnode.transformation))
def get_bounding_box_for_node(node, bb_min, bb_max, transformation):
    """Recursively grow (bb_min, bb_max) to enclose all mesh vertices at
    or below `node`, transformed by the accumulated transform chain.

    Arguments
    ---------
    node:           current scene-graph node; must expose `transformation`
                    (4x4 row-major matrix), `meshes` and `children`.
    bb_min, bb_max: [x, y, z] lists; updated in place and returned.
    transformation: accumulated 4x4 transform of the node's ancestors.

    Returns
    -------
    (bb_min, bb_max)
    """
    # Accumulate this node's transform: parent_transform * node_transform.
    if numpy:
        transformation = numpy.dot(transformation, node.transformation)
    else:
        # Pure-python 4x4 row-major matrix product:
        #   result[i][j] = sum_k lhs[i][k] * rhs[k][j]
        # sum() folds left-to-right, matching the unrolled a+b+c+d order.
        lhs = transformation
        rhs = node.transformation
        transformation = [
            [sum(lhs[i][k] * rhs[k][j] for k in range(4)) for j in range(4)]
            for i in range(4)
        ]

    # Fold every vertex of every mesh attached to this node into the box.
    for mesh in node.meshes:
        for v in mesh.vertices:
            v = transform(v, transformation)
            for axis in range(3):
                bb_min[axis] = min(bb_min[axis], v[axis])
                bb_max[axis] = max(bb_max[axis], v[axis])

    # Recurse into children, carrying the accumulated transform down.
    for child in node.children:
        bb_min, bb_max = get_bounding_box_for_node(child, bb_min, bb_max, transformation)

    return bb_min, bb_max
def try_load_functions(library_path, dll):
    '''
    Try to bind to aiImportFile and aiReleaseImport

    Arguments
    ---------
    library_path: path to current lib
    dll: ctypes handle to library

    Returns
    ---------
    If unsuccessful:
        None
    If successful:
        Tuple containing (library_path,
                          load from filename function,
                          load from memory function,
                          export to filename function,
                          export to blob function,
                          release function,
                          ctypes handle to assimp library)
    '''
    try:
        load = dll.aiImportFile
        release = dll.aiReleaseImport
        load_mem = dll.aiImportFileFromMemory
        export = dll.aiExportScene
        export2blob = dll.aiExportSceneToBlob
    except AttributeError:
        # OK, this is a library, but it doesn't have the functions we need
        return None

    # library found!
    from .structs import Scene, ExportDataBlob
    # BUGFIX: the module is `ctypes` (as used everywhere else in this file),
    # not `ctype` -- the old spelling raised NameError as soon as a
    # candidate library was actually found.
    load.restype = ctypes.POINTER(Scene)
    load_mem.restype = ctypes.POINTER(Scene)
    export2blob.restype = ctypes.POINTER(ExportDataBlob)
    return (library_path, load, load_mem, export, export2blob, release, dll)
def search_library():
    '''
    Loads the assimp library.
    Throws exception AssimpError if no library_path is found

    Returns: tuple, (load from filename function,
                     load from memory function,
                     export to filename function,
                     export to blob function,
                     release function,
                     dll)
    '''
    # this path
    folder = os.path.dirname(__file__)

    # silence 'DLL not found' message boxes on win
    try:
        ctypes.windll.kernel32.SetErrorMode(0x8007)
    except AttributeError:
        pass  # not on Windows -- ctypes has no `windll` attribute there

    candidates = []
    # test every file
    for curfolder in [folder] + additional_dirs:
        if not os.path.isdir(curfolder):
            continue
        for filename in os.listdir(curfolder):
            # our minimum requirement for candidates is that
            # they should contain 'assimp' somewhere in
            # their name
            if filename.lower().find('assimp') == -1:
                continue
            # the extension must also be one of the platform's
            # shared-library extensions (ext_whitelist)
            if not any(et in filename.lower() for et in ext_whitelist):
                continue

            library_path = os.path.join(curfolder, filename)
            logger.debug('Try ' + library_path)
            try:
                dll = ctypes.cdll.LoadLibrary(library_path)
            except Exception as e:
                logger.warning(str(e))
                # OK, this except is evil. But different OSs will throw different
                # errors. So just ignore any errors.
                continue
            # see if the functions we need are in the dll
            loaded = try_load_functions(library_path, dll)
            if loaded:
                candidates.append(loaded)

    if not candidates:
        # no library found
        raise AssimpError("assimp library not found")
    else:
        # get the newest library_path (by modification time, os.lstat()[-2])
        candidates = map(lambda x: (os.lstat(x[0])[-2], x), candidates)
        res = max(candidates, key=operator.itemgetter(0))[1]
        logger.debug('Using assimp library located at ' + res[0])

        # XXX: if there are 1000 dll/so files containing 'assimp'
        # in their name, do we have all of them in our address
        # space now until gc kicks in?

        # XXX: take version postfix of the .so on linux?
        return res[1:]
def hasattr_silent(object, name):
    """
    Calls hasattr() with the given parameters and preserves the legacy (pre-Python 3.2)
    functionality of silently catching exceptions.

    Returns the result of hasattr() or False if an exception was raised.
    """
    try:
        return hasattr(object, name)
    except AttributeError:
        return False
|
|
|
@ -435,6 +435,7 @@ aiProcess_Debone = 0x4000000
|
|||
aiProcess_GenEntityMeshes = 0x100000
|
||||
aiProcess_OptimizeAnimations = 0x200000
|
||||
aiProcess_FixTexturePaths = 0x200000
|
||||
aiProcess_EmbedTextures = 0x10000000,
|
||||
|
||||
## @def aiProcess_ConvertToLeftHanded
|
||||
# @brief Shortcut flag for Direct3D-based applications.
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
#-*- coding: UTF-8 -*-
|
||||
|
||||
from ctypes import POINTER, c_void_p, c_int, c_uint, c_char, c_float, Structure, c_char_p, c_double, c_ubyte, c_size_t, c_uint32
|
||||
from ctypes import POINTER, c_void_p, c_uint, c_char, c_float, Structure, c_char_p, c_double, c_ubyte, c_size_t, c_uint32
|
||||
|
||||
|
||||
class Vector2D(Structure):
|
||||
|
@ -70,7 +70,7 @@ class String(Structure):
|
|||
See 'types.h' for details.
|
||||
"""
|
||||
|
||||
MAXLEN = 1024
|
||||
MAXLEN = 1024
|
||||
|
||||
_fields_ = [
|
||||
# Binary length of the string excluding the terminal 0. This is NOT the
|
||||
|
|
|
@ -24,12 +24,13 @@ This sample is based on several sources, including:
|
|||
- ASSIMP's C++ SimpleOpenGL viewer
|
||||
"""
|
||||
|
||||
import os, sys
|
||||
import sys
|
||||
from OpenGL.GLUT import *
|
||||
from OpenGL.GLU import *
|
||||
from OpenGL.GL import *
|
||||
|
||||
import logging;logger = logging.getLogger("pyassimp_opengl")
|
||||
import logging
|
||||
logger = logging.getLogger("pyassimp_opengl")
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
|
||||
import math
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
This module demonstrates the functionality of PyAssimp.
|
||||
"""
|
||||
|
||||
import os, sys
|
||||
import sys
|
||||
import logging
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
|
||||
|
@ -50,8 +50,8 @@ def main(filename=None):
|
|||
print(" colors:" + str(len(mesh.colors)))
|
||||
tcs = mesh.texturecoords
|
||||
if tcs.any():
|
||||
for index, tc in enumerate(tcs):
|
||||
print(" texture-coords "+ str(index) + ":" + str(len(tcs[index])) + "first3:" + str(tcs[index][:3]))
|
||||
for tc_index, tc in enumerate(tcs):
|
||||
print(" texture-coords "+ str(tc_index) + ":" + str(len(tcs[tc_index])) + "first3:" + str(tcs[tc_index][:3]))
|
||||
|
||||
else:
|
||||
print(" no texture coordinates")
|
||||
|
|
|
@ -291,7 +291,9 @@ def main():
|
|||
#s += "#endif\n"
|
||||
|
||||
output.write(templt.replace("<HERE>",s))
|
||||
|
||||
|
||||
# we got here, so no error
|
||||
return 0
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
|
|
|
@ -151,11 +151,8 @@ def handle_unset_args(field,entity,schema,argnum):
|
|||
return n+template_allow_optional.format()
|
||||
|
||||
def get_single_conversion(field,schema,argnum=0,classname='?'):
|
||||
typen = field.type
|
||||
name = field.name
|
||||
if field.collection:
|
||||
typen = 'LIST'
|
||||
return template_convert_single.format(type=typen,name=name,argnum=argnum,classname=classname,full_type=field.fullspec)
|
||||
return template_convert_single.format(name=name,argnum=argnum,classname=classname,full_type=field.fullspec)
|
||||
|
||||
def count_args_up(entity,schema):
|
||||
return len(entity.members) + (count_args_up(schema.entities[entity.parent],schema) if entity.parent else 0)
|
||||
|
@ -218,7 +215,7 @@ def get_derived(e,schema):
|
|||
return res
|
||||
|
||||
def get_hierarchy(e,schema):
|
||||
return get_derived(e.schema)+[e.name]+get_base_classes(e,schema)
|
||||
return get_derived(e, schema)+[e.name]+get_base_classes(e,schema)
|
||||
|
||||
def sort_entity_list(schema):
|
||||
deps = []
|
||||
|
@ -300,5 +297,8 @@ def work(filename):
|
|||
with open(output_file_cpp,'wt') as outp:
|
||||
outp.write(inp.read().replace('{schema-static-table}',schema_table).replace('{converter-impl}',converters))
|
||||
|
||||
# Finished without error, so return 0
|
||||
return 0
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(work(sys.argv[1] if len(sys.argv)>1 else 'schema.exp'))
|
||||
|
|
|
@ -43,7 +43,8 @@
|
|||
"""Parse an EXPRESS file and extract basic information on all
|
||||
entities and data types contained"""
|
||||
|
||||
import sys, os, re
|
||||
import sys
|
||||
import re
|
||||
from collections import OrderedDict
|
||||
|
||||
re_match_entity = re.compile(r"""
|
||||
|
|
|
@ -228,7 +228,8 @@ int DoExport(const aiTexture* tx, FILE* p, const std::string& extension,
|
|||
// Implementation of the assimp extract utility
|
||||
int Assimp_Extract (const char* const* params, unsigned int num)
|
||||
{
|
||||
const char* const invalid = "assimp extract: Invalid number of arguments. See \'assimp extract --help\'\n";
|
||||
const char* const invalid = "assimp extract: Invalid number of arguments. See \'assimp extract --help\'\n";
|
||||
// assimp extract in out [options]
|
||||
if (num < 1) {
|
||||
printf(invalid);
|
||||
return 1;
|
||||
|
@ -240,11 +241,7 @@ int Assimp_Extract (const char* const* params, unsigned int num)
|
|||
return 0;
|
||||
}
|
||||
|
||||
// asssimp extract in out [options]
|
||||
if (num < 1) {
|
||||
printf(invalid);
|
||||
return 1;
|
||||
}
|
||||
|
||||
|
||||
std::string in = std::string(params[0]);
|
||||
std::string out = (num > 1 ? std::string(params[1]) : "-");
|
||||
|
|
Loading…
Reference in New Issue