"""
PyAssimp
This is the main - module of PyAssimp .
"""
import sys
if sys . version_info < ( 2 , 6 ) :
raise ' pyassimp: need python 2.6 or newer '
2016-03-27 19:12:22 +00:00
# xrange was renamed range in Python 3 and the original range from Python 2 was removed.
# To keep compatibility with both Python 2 and 3, xrange is set to range for version 3.0 and up.
if sys . version_info > = ( 3 , 0 ) :
xrange = range
2012-11-10 16:01:55 +00:00
import ctypes
import os
2015-09-28 18:15:06 +00:00
try : import numpy
except : numpy = None

import logging
logger = logging.getLogger("pyassimp")
# attach default null handler to logger so it doesn't complain
# even if you don't attach another handler to logger
logger.addHandler(logging.NullHandler())

from . import structs
from . import helper
from . import postprocess
from .errors import AssimpError

class AssimpLib(object):
    """
    Assimp-Singleton
    """
    load, load_mem, export, export_blob, release, dll = helper.search_library()

_assimp_lib = AssimpLib()

def make_tuple(ai_obj, type=None):
    res = None

    # notes:
    # ai_obj._fields_ = [ ("attr", c_type), ... ]
    # getattr(ai_obj, e[0]).__class__ == float

    if isinstance(ai_obj, structs.Matrix4x4):
        if numpy:
            res = numpy.array([getattr(ai_obj, e[0]) for e in ai_obj._fields_]).reshape((4, 4))
        else:
            res = [getattr(ai_obj, e[0]) for e in ai_obj._fields_]
            res = [res[i:i+4] for i in xrange(0, 16, 4)]
    elif isinstance(ai_obj, structs.Matrix3x3):
        if numpy:
            res = numpy.array([getattr(ai_obj, e[0]) for e in ai_obj._fields_]).reshape((3, 3))
        else:
            res = [getattr(ai_obj, e[0]) for e in ai_obj._fields_]
            res = [res[i:i+3] for i in xrange(0, 9, 3)]
    else:
        if numpy:
            res = numpy.array([getattr(ai_obj, e[0]) for e in ai_obj._fields_])
        else:
            res = [getattr(ai_obj, e[0]) for e in ai_obj._fields_]

    return res
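
# Hedged usage sketch (not part of the upstream module): make_tuple turns a
# ctypes struct whose fields are plain numbers (vectors, colors, matrices) into
# a Python list or numpy array. 'some_node' below is a hypothetical structs.Node
# instance obtained elsewhere:
#
#   transform = make_tuple(some_node.mTransformation)  # 4x4 nested list or numpy array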

# Returns unicode object for Python 2, and str object for Python 3.
def _convert_assimp_string(assimp_string):
    try:
        return unicode(assimp_string.data, errors='ignore')
    except:
        return str(assimp_string.data, errors='ignore')

# It is faster and more correct to have an init function for each assimp class
def _init_face(aiFace):
    aiFace.indices = [aiFace.mIndices[i] for i in range(aiFace.mNumIndices)]

assimp_struct_inits = {structs.Face: _init_face}
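
# Hedged sketch (not part of the upstream module): further per-struct
# initializers could be registered the same way, assuming a matching
# hypothetical '_init_bone' helper were written for structs.Bone:
#
#   assimp_struct_inits[structs.Bone] = _init_bone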

def call_init(obj, caller=None):
    if helper.hasattr_silent(obj, 'contents'):  # pointer
        _init(obj.contents, obj, caller)
    else:
        _init(obj, parent=caller)

def _is_init_type(obj):
    if helper.hasattr_silent(obj, 'contents'):  # pointer
        return _is_init_type(obj[0])
    # null-pointer case that arises when we reach a mesh attribute
    # like mBitangents which use mNumVertices rather than mNumBitangents
    # so it breaks the 'is iterable' check.
    # Basically:
    # FIXME!
    elif not bool(obj):
        return False
    tname = obj.__class__.__name__
    return not (tname[:2] == 'c_' or tname == 'Structure'
                or tname == 'POINTER') and not isinstance(obj, int)

def _init(self, target=None, parent=None):
    """
    Custom initialize() for C structs, adds safely accessible member functionality.

    :param target: set the object which receives the added methods. Useful when
    manipulating pointers, to skip the intermediate 'contents' dereferencing.
    """
    if not target:
        target = self

    dirself = dir(self)
    for m in dirself:

        if m.startswith("_"):
            continue

        if m.startswith('mNum'):
            if 'm' + m[4:] in dirself:
                continue  # will be processed later on
            else:
                name = m[1:].lower()
                obj = getattr(self, m)
                setattr(target, name, obj)
                continue

        if m == 'mName':
            target.name = str(_convert_assimp_string(self.mName))
            target.__class__.__repr__ = lambda x: str(x.__class__) + "(" + getattr(x, 'name', '') + ")"
            target.__class__.__str__ = lambda x: getattr(x, 'name', '')
            continue

        name = m[1:].lower()
        obj = getattr(self, m)

        # Create tuples
        if isinstance(obj, structs.assimp_structs_as_tuple):
            setattr(target, name, make_tuple(obj))
            logger.debug(str(self) + ": Added array " + str(getattr(target, name)) + " as self." + name.lower())
            continue

        if m.startswith('m'):

            if name == "parent":
                setattr(target, name, parent)
                logger.debug("Added a parent as self." + name)
                continue

            if helper.hasattr_silent(self, 'mNum' + m[1:]):

                length = getattr(self, 'mNum' + m[1:])

                # -> special case: properties are
                # stored as a dict.
                if m == 'mProperties':
                    setattr(target, name, _get_properties(obj, length))
                    continue

                if not length:  # empty!
                    setattr(target, name, [])
                    logger.debug(str(self) + ": " + name + " is an empty list.")
                    continue

                try:
                    if obj._type_ in structs.assimp_structs_as_tuple:
                        if numpy:
                            setattr(target, name, numpy.array([make_tuple(obj[i]) for i in range(length)], dtype=numpy.float32))
                            logger.debug(str(self) + ": Added an array of numpy arrays (type " + str(type(obj)) + ") as self." + name)
                        else:
                            setattr(target, name, [make_tuple(obj[i]) for i in range(length)])
                            logger.debug(str(self) + ": Added a list of lists (type " + str(type(obj)) + ") as self." + name)
                    else:
                        setattr(target, name, [obj[i] for i in range(length)])  # TODO: maybe not necessary to recreate an array?
                        logger.debug(str(self) + ": Added list of " + str(obj) + " " + name + " as self." + name + " (type: " + str(type(obj)) + ")")

                        # initialize array elements
                        try:
                            init = assimp_struct_inits[type(obj[0])]
                        except KeyError:
                            if _is_init_type(obj[0]):
                                for e in getattr(target, name):
                                    call_init(e, target)
                        else:
                            for e in getattr(target, name):
                                init(e)

                except IndexError:
                    logger.error("in " + str(self) + " : mismatch between mNum" + name + " and the actual amount of data in m" + name + ". This may be due to version mismatch between libassimp and pyassimp. Quitting now.")
                    sys.exit(1)

                except ValueError as e:
                    logger.error("In " + str(self) + "->" + name + ": " + str(e) + ". Quitting now.")
                    if "setting an array element with a sequence" in str(e):
                        logger.error("Note that pyassimp does not currently "
                                     "support meshes with mixed triangles "
                                     "and quads. Try to load your mesh with "
                                     "a post-processing to triangulate your "
                                     "faces.")
                    raise e
2012-11-10 16:01:55 +00:00
else : # starts with 'm' but not iterable
setattr ( target , name , obj )
logger . debug ( " Added " + name + " as self. " + name + " (type: " + str ( type ( obj ) ) + " ) " )
2017-03-23 16:11:52 +00:00
2013-11-03 08:53:58 +00:00
if _is_init_type ( obj ) :
call_init ( obj , target )
2012-11-10 16:01:55 +00:00

    if isinstance(self, structs.Mesh):
        _finalize_mesh(self, target)

    if isinstance(self, structs.Texture):
        _finalize_texture(self, target)

    if isinstance(self, structs.Metadata):
        _finalize_metadata(self, target)

    return self
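
# Hedged illustration (not part of the upstream module): once _init has wrapped
# a C struct, paired mNumXXX/mXXX members show up as plain Python attributes.
# For a hypothetical scene obtained from load():
#
#   mesh = scene.meshes[0]
#   print(len(mesh.vertices))   # mNumVertices / mVertices exposed as 'vertices'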

def pythonize_assimp(type, obj, scene):
    """ This method modifies the Assimp data structures
    to make them easier to work with in Python.

    Supported operations:
     - MESH: replace a list of mesh IDs by references to these meshes
     - ADDTRANSFORMATION: add a reference to an object's transformation taken from its associated node.

    :param type: the type of modification to operate (cf above)
    :param obj: the input object to modify
    :param scene: a reference to the whole scene
    """
    if type == "MESH":
        meshes = []
        for i in obj:
            meshes.append(scene.meshes[i])
        return meshes

    if type == "ADDTRANSFORMATION":
        def getnode(node, name):
            if node.name == name:
                return node
            for child in node.children:
                n = getnode(child, name)
                if n:
                    return n

        node = getnode(scene.rootnode, obj.name)
        if not node:
            raise AssimpError("Object " + str(obj) + " has no associated node!")
        setattr(obj, "transformation", node.transformation)

def recur_pythonize(node, scene):
    '''
    Recursively calls pythonize_assimp on the node tree to apply several
    post-processing steps that pythonize the assimp data structures.
    '''
    node.meshes = pythonize_assimp("MESH", node.meshes, scene)
    for mesh in node.meshes:
        mesh.material = scene.materials[mesh.materialindex]
    for cam in scene.cameras:
        pythonize_assimp("ADDTRANSFORMATION", cam, scene)
    for c in node.children:
        recur_pythonize(c, scene)

def load(filename,
         file_type=None,
         processing=postprocess.aiProcess_Triangulate):
    '''
    Load a model into a scene. On failure throws AssimpError.

    Arguments
    ---------
    filename:   Either a filename or a file object to load the model from.
                If a file object is passed, file_type MUST be specified,
                otherwise Assimp has no idea which importer to use.
                This is named 'filename' so as to not break legacy code.
    processing: assimp postprocessing parameters. Verbose keywords are imported
                from postprocessing, and the parameters can be combined bitwise to
                generate the final processing value. Note that the default value will
                triangulate quad faces. Example of generating other possible values:
                processing = (pyassimp.postprocess.aiProcess_Triangulate |
                              pyassimp.postprocess.aiProcess_OptimizeMeshes)
    file_type:  string of file extension, such as 'stl'

    Returns
    ---------
    Scene object with model data
    '''
    if hasattr(filename, 'read'):
        # This is the case where a file object has been passed to load.
        # It is calling the following function:
        # const aiScene* aiImportFileFromMemory(const char* pBuffer,
        #                                       unsigned int pLength,
        #                                       unsigned int pFlags,
        #                                       const char* pHint)
        if file_type is None:
            raise AssimpError('File type must be specified when passing file objects!')
        data = filename.read()
        model = _assimp_lib.load_mem(data,
                                     len(data),
                                     processing,
                                     file_type)
    else:
        # a filename string has been passed
        model = _assimp_lib.load(filename.encode(sys.getfilesystemencoding()), processing)

    if not model:
        raise AssimpError('Could not import file!')

    scene = _init(model.contents)
    recur_pythonize(scene.rootnode, scene)

    return scene
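
# Hedged usage sketch (not part of the upstream module), assuming a model file
# at the hypothetical path 'model.obj':
#
#   import pyassimp
#   from pyassimp import postprocess
#
#   scene = pyassimp.load('model.obj',
#                         processing=postprocess.aiProcess_Triangulate)
#   try:
#       print('meshes:', len(scene.meshes), 'materials:', len(scene.materials))
#   finally:
#       pyassimp.release(scene)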

def export(scene,
           filename,
           file_type=None,
           processing=postprocess.aiProcess_Triangulate):
    '''
    Export a scene. On failure throws AssimpError.

    Arguments
    ---------
    scene: scene to export.
    filename: Filename that the scene should be exported to.
    file_type: string of file exporter to use. For example "collada".
    processing: assimp postprocessing parameters. Verbose keywords are imported
                from postprocessing, and the parameters can be combined bitwise to
                generate the final processing value. Note that the default value will
                triangulate quad faces. Example of generating other possible values:
                processing = (pyassimp.postprocess.aiProcess_Triangulate |
                              pyassimp.postprocess.aiProcess_OptimizeMeshes)
    '''
    from ctypes import pointer
    exportStatus = _assimp_lib.export(pointer(scene), file_type.encode("ascii"), filename.encode(sys.getfilesystemencoding()), processing)

    if exportStatus != 0:
        raise AssimpError('Could not export scene!')
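
# Hedged usage sketch (not part of the upstream module): exporting a previously
# loaded scene to the hypothetical path 'model.dae' with the "collada" exporter:
#
#   pyassimp.export(scene, 'model.dae', file_type='collada')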

def export_blob(scene,
                file_type=None,
                processing=postprocess.aiProcess_Triangulate):
    '''
    Export a scene and return a blob in the correct format. On failure throws AssimpError.

    Arguments
    ---------
    scene: scene to export.
    file_type: string of file exporter to use. For example "collada".
    processing: assimp postprocessing parameters. Verbose keywords are imported
                from postprocessing, and the parameters can be combined bitwise to
                generate the final processing value. Note that the default value will
                triangulate quad faces. Example of generating other possible values:
                processing = (pyassimp.postprocess.aiProcess_Triangulate |
                              pyassimp.postprocess.aiProcess_OptimizeMeshes)

    Returns
    ---------
    Pointer to structs.ExportDataBlob
    '''
    from ctypes import pointer
    exportBlobPtr = _assimp_lib.export_blob(pointer(scene), file_type.encode("ascii"), processing)

    if exportBlobPtr == 0:
        raise AssimpError('Could not export scene to blob!')
    return exportBlobPtr
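
# Hedged usage sketch (not part of the upstream module): the return value is a
# ctypes pointer to structs.ExportDataBlob, so the raw bytes are read out with
# ctypes; the 'data' and 'size' field names follow assimp's aiExportDataBlob and
# are an assumption here:
#
#   blob = pyassimp.export_blob(scene, file_type='obj')
#   raw = ctypes.string_at(blob.contents.data, blob.contents.size)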

def release(scene):
    from ctypes import pointer
    _assimp_lib.release(pointer(scene))

def _finalize_texture(tex, target):
    setattr(target, "achformathint", tex.achFormatHint)
    if numpy:
        data = numpy.array([make_tuple(getattr(tex, "pcData")[i]) for i in range(tex.mWidth * tex.mHeight)])
    else:
        data = [make_tuple(getattr(tex, "pcData")[i]) for i in range(tex.mWidth * tex.mHeight)]
    setattr(target, "data", data)

def _finalize_mesh(mesh, target):
    """ Building of meshes is a bit specific.

    We override here the various datasets that cannot
    be processed as regular fields.

    For instance, the length of the normals array is
    mNumVertices (no mNumNormals is available).
    """
    nb_vertices = getattr(mesh, "mNumVertices")

    def fill(name):
        mAttr = getattr(mesh, name)
        if numpy:
            if mAttr:
                data = numpy.array([make_tuple(getattr(mesh, name)[i]) for i in range(nb_vertices)], dtype=numpy.float32)
                setattr(target, name[1:].lower(), data)
            else:
                setattr(target, name[1:].lower(), numpy.array([], dtype="float32"))
        else:
            if mAttr:
                data = [make_tuple(getattr(mesh, name)[i]) for i in range(nb_vertices)]
                setattr(target, name[1:].lower(), data)
            else:
                setattr(target, name[1:].lower(), [])

    def fillarray(name):
        mAttr = getattr(mesh, name)

        data = []
        for index, mSubAttr in enumerate(mAttr):
            if mSubAttr:
                data.append([make_tuple(getattr(mesh, name)[index][i]) for i in range(nb_vertices)])

        if numpy:
            setattr(target, name[1:].lower(), numpy.array(data, dtype=numpy.float32))
        else:
            setattr(target, name[1:].lower(), data)

    fill("mNormals")
    fill("mTangents")
    fill("mBitangents")

    fillarray("mColors")
    fillarray("mTextureCoords")

    # prepare faces
    if numpy:
        faces = numpy.array([f.indices for f in target.faces], dtype=numpy.int32)
    else:
        faces = [f.indices for f in target.faces]
    setattr(target, 'faces', faces)
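
# Hedged usage sketch (not part of the upstream module): after finalization a
# mesh exposes per-vertex arrays and an index list, e.g. for a hypothetical
# loaded scene:
#
#   mesh = scene.meshes[0]
#   print(mesh.normals[:3])   # first three normals
#   print(mesh.faces[:3])     # first three faces, as lists of vertex indices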

def _init_metadata_entry(entry):
    from ctypes import POINTER, c_bool, c_int32, c_uint64, c_float, c_double, cast

    entry.type = entry.mType
    if entry.type == structs.MetadataEntry.AI_BOOL:
        entry.data = cast(entry.mData, POINTER(c_bool)).contents.value
    elif entry.type == structs.MetadataEntry.AI_INT32:
        entry.data = cast(entry.mData, POINTER(c_int32)).contents.value
    elif entry.type == structs.MetadataEntry.AI_UINT64:
        entry.data = cast(entry.mData, POINTER(c_uint64)).contents.value
    elif entry.type == structs.MetadataEntry.AI_FLOAT:
        entry.data = cast(entry.mData, POINTER(c_float)).contents.value
    elif entry.type == structs.MetadataEntry.AI_DOUBLE:
        entry.data = cast(entry.mData, POINTER(c_double)).contents.value
    elif entry.type == structs.MetadataEntry.AI_AISTRING:
        assimp_string = cast(entry.mData, POINTER(structs.String)).contents
        entry.data = _convert_assimp_string(assimp_string)
    elif entry.type == structs.MetadataEntry.AI_AIVECTOR3D:
        assimp_vector = cast(entry.mData, POINTER(structs.Vector3D)).contents
        entry.data = make_tuple(assimp_vector)

    return entry

def _finalize_metadata(metadata, target):
    """ Building the metadata object is a bit specific.

    Firstly, there are two separate arrays: one with metadata keys and one
    with metadata values, and there are no corresponding mNum* attributes,
    so the C arrays are not converted to Python arrays using the generic
    code in the _init function.

    Secondly, a metadata entry value has to be cast according to the declared
    metadata entry type.
    """
    length = metadata.mNumProperties
    setattr(target, 'keys', [str(_convert_assimp_string(metadata.mKeys[i])) for i in range(length)])
    setattr(target, 'values', [_init_metadata_entry(metadata.mValues[i]) for i in range(length)])
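
# Hedged usage sketch (not part of the upstream module): the finalized metadata
# keeps parallel 'keys' and 'values' lists, so a plain dict can be built from it.
# 'meta' below is assumed to be a finalized metadata object (e.g. from a node):
#
#   as_dict = dict(zip(meta.keys, [v.data for v in meta.values]))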

class PropertyGetter(dict):
    def __getitem__(self, key):
        semantic = 0
        if isinstance(key, tuple):
            key, semantic = key

        return dict.__getitem__(self, (key, semantic))

    def keys(self):
        for k in dict.keys(self):
            yield k[0]

    def __iter__(self):
        return self.keys()

    def items(self):
        for k, v in dict.items(self):
            yield k[0], v

def _get_properties(properties, length):
    """
    Convenience function to get the material properties as a dict
    and values in a python format.
    """
    result = {}
    # read all properties
    for p in [properties[i] for i in range(length)]:
        # the name
        p = p.contents
        key = str(_convert_assimp_string(p.mKey))
        key = (key.split('.')[1], p.mSemantic)

        # the data
        from ctypes import POINTER, cast, c_int, c_float, sizeof
        if p.mType == 1:
            arr = cast(p.mData, POINTER(c_float * int(p.mDataLength / sizeof(c_float)))).contents
            value = [x for x in arr]
        elif p.mType == 3:  # string can't be an array
            value = _convert_assimp_string(cast(p.mData, POINTER(structs.MaterialPropertyString)).contents)
        elif p.mType == 4:
            arr = cast(p.mData, POINTER(c_int * int(p.mDataLength / sizeof(c_int)))).contents
            value = [x for x in arr]
        else:
            value = p.mData[:p.mDataLength]

        if len(value) == 1:
            [value] = value

        result[key] = value

    return PropertyGetter(result)
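
# Hedged usage sketch (not part of the upstream module): material properties are
# keyed by (name, texture semantic), with semantic 0 as the default, so both of
# the lookups below work on a hypothetical material:
#
#   material = scene.materials[0]
#   diffuse = material.properties['diffuse']     # semantic defaults to 0
#   texpath = material.properties[('file', 1)]   # e.g. a diffuse texture slot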

def decompose_matrix(matrix):
    if not isinstance(matrix, structs.Matrix4x4):
        raise AssimpError("pyassimp.decompose_matrix failed: Not a Matrix4x4!")

    scaling = structs.Vector3D()
    rotation = structs.Quaternion()
    position = structs.Vector3D()

    from ctypes import byref, pointer
    _assimp_lib.dll.aiDecomposeMatrix(pointer(matrix), byref(scaling), byref(rotation), byref(position))

    return scaling._init(), rotation._init(), position._init()
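
# Hedged usage sketch (not part of the upstream module): decomposing a node's raw
# ctypes transformation (the mTransformation struct, not the pythonized
# 'transformation' attribute, which may be a numpy array). 'node_struct' is a
# hypothetical structs.Node instance:
#
#   scaling, rotation, position = pyassimp.decompose_matrix(node_struct.mTransformation)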