Merge branch 'master' into Coverity-findings
commit 96032ca341
@@ -27,7 +27,6 @@ compiler:
env:
global:
# COVERITY_SCAN_TOKEN
- secure: "lZ7pHQvl5dpZWzBQAaIMf0wqrvtcZ4wiZKeIZjf83TEsflW8+z0uTpIuN30ZV6Glth/Sq1OhLnTP5+N57fZU/1ebA5twHdvP4bS5CIUUg71/CXQZNl36xeaqvxsG/xRrdpKOsPdjAOsQ9KPTQulsX43XDLS7CasMiLvYOpqKcPc="
- PV=r8e PLATF=linux-x86_64 NDK_HOME=${TRAVIS_BUILD_DIR}/android-ndk-${PV} PATH=${PATH}:${NDK_HOME}

@@ -56,8 +55,7 @@ install:
- if [ $ANDROID ]; then wget -c http://dl.google.com/android/ndk/android-ndk-${PV}-${PLATF}.tar.bz2 && tar xf android-ndk-${PV}-${PLATF}.tar.bz2 ; fi

before_script:
# init coverage to 0 (optional)
- if [ "$TRAVIS_OS_NAME" = "linux" ]; then cd ${TRAVIS_BUILD_DIR} && lcov --directory . --zerocounters ; fi
cmake . -DASSIMP_ENABLE_BOOST_WORKAROUND=YES

script:
- export COVERALLS_SERVICE_NAME=travis-ci

@@ -72,6 +70,6 @@ addons:
project:
name: "assimp/assimp"
notification_email: kim.kulling@googlemail.com
build_command_prepend: "cmake"
build_command: "make"
build_command_prepend: "cmake . -DASSIMP_ENABLE_BOOST_WORKAROUND=YES"
build_command: "make -j4"
branch_pattern: coverity_scan

@@ -89,14 +89,16 @@ static const aiImporterDesc desc = {
// ------------------------------------------------------------------------------------------------
// Constructor to be privately used by Importer
IRRImporter::IRRImporter()
: fps(),
configSpeedFlag()
{}
: fps()
, configSpeedFlag(){
// empty
}

// ------------------------------------------------------------------------------------------------
// Destructor, private as well
IRRImporter::~IRRImporter()
{}
IRRImporter::~IRRImporter() {
// empty
}

// ------------------------------------------------------------------------------------------------
// Returns whether the class can handle the format of the given file.

@@ -107,7 +109,7 @@ bool IRRImporter::CanRead( const std::string& pFile, IOSystem* pIOHandler, bool
} else if (extension == "xml" || checkSig) {
/* If CanRead() is called in order to check whether we
* support a specific file extension in general pIOHandler
* might be NULL and it's our duty to return true here.
* might be nullptr and it's our duty to return true here.
*/
if (nullptr == pIOHandler ) {
return true;

@@ -290,8 +292,8 @@ void IRRImporter::CopyMaterial(std::vector<aiMaterial*>& materials,

// ------------------------------------------------------------------------------------------------
inline int ClampSpline(int idx, int size)
{
inline
int ClampSpline(int idx, int size) {
return ( idx<0 ? size+idx : ( idx>=size ? idx-size : idx ) );
}

@@ -310,7 +312,7 @@ inline void FindSuitableMultiple(int& angle)
// ------------------------------------------------------------------------------------------------
void IRRImporter::ComputeAnimations(Node* root, aiNode* real, std::vector<aiNodeAnim*>& anims)
{
ai_assert(NULL != root && NULL != real);
ai_assert(nullptr != root && nullptr != real);

// XXX totally WIP - doesn't produce proper results, need to evaluate
// whether there's any use for Irrlicht's proprietary scene format

@@ -521,7 +523,8 @@ void IRRImporter::ComputeAnimations(Node* root, aiNode* real, std::vector<aiNode
// We have no point in the spline. That's bad. Really bad.
ASSIMP_LOG_WARN("IRR: Spline animators with no points defined");

delete anim;anim = nullptr;
delete anim;
anim = nullptr;
break;
}
else if (size == 1) {

@@ -905,8 +908,9 @@ void IRRImporter::InternReadFile( const std::string& pFile,
std::unique_ptr<IOStream> file( pIOHandler->Open( pFile));

// Check whether we can read from the file
if( file.get() == NULL)
if (file.get() == nullptr) {
throw DeadlyImportError("Failed to open IRR file " + pFile + "");
}

// Construct the irrXML parser
CIrrXML_IOStreamReader st(file.get());

@@ -914,14 +918,14 @@ void IRRImporter::InternReadFile( const std::string& pFile,

// The root node of the scene
Node* root = new Node(Node::DUMMY);
root->parent = NULL;
root->parent = nullptr;
root->name = "<IRRSceneRoot>";

// Current node parent
Node* curParent = root;

// Scenegraph node we're currently working on
Node* curNode = NULL;
Node* curNode = nullptr;

// List of output cameras
std::vector<aiCamera*> cameras;

@@ -1048,7 +1052,7 @@ void IRRImporter::InternReadFile( const std::string& pFile,
continue;
}

Animator* curAnim = NULL;
Animator* curAnim = nullptr;

// Materials can occur for nearly any type of node
if (inMaterials && curNode->type != Node::DUMMY) {

@@ -1353,7 +1357,7 @@ void IRRImporter::InternReadFile( const std::string& pFile,
}
else curParent = curParent->parent;
}
else curNode = NULL;
else curNode = nullptr;
}
// clear all flags
else if (!ASSIMP_stricmp(reader->getNodeName(),"materials")) {

@@ -1479,7 +1483,8 @@ void IRRImporter::InternReadFile( const std::string& pFile,
/* Finished ... everything destructs automatically and all
* temporary scenes have already been deleted by MergeScenes()
*/
return;

delete root;
}

#endif // !! ASSIMP_BUILD_NO_IRR_IMPORTER

@@ -659,8 +659,8 @@ void ProcessMetadata(uint64_t relDefinesByPropertiesID, ConversionData& conv, Me
}

// ------------------------------------------------------------------------------------------------
aiNode* ProcessSpatialStructure(aiNode* parent, const Schema_2x3::IfcProduct& el, ConversionData& conv, std::vector<TempOpening>* collect_openings = NULL)
{
aiNode* ProcessSpatialStructure(aiNode* parent, const Schema_2x3::IfcProduct& el, ConversionData& conv,
std::vector<TempOpening>* collect_openings = nullptr ) {
const STEP::DB::RefMap& refs = conv.db.GetRefs();

// skip over space and annotation nodes - usually, these have no meaning in Assimp's context

@@ -675,12 +675,12 @@ aiNode* ProcessSpatialStructure(aiNode* parent, const Schema_2x3::IfcProduct& el
if(conv.settings.skipAnnotations) {
if(el.ToPtr<Schema_2x3::IfcAnnotation>()) {
IFCImporter::LogDebug("skipping IfcAnnotation entity due to importer settings");
return NULL;
return nullptr;
}
}

// add an output node for this spatial structure
std::unique_ptr<aiNode> nd(new aiNode());
aiNode *nd(new aiNode );
nd->mName.Set(el.GetClassName()+"_"+(el.Name?el.Name.Get():"Unnamed")+"_"+el.GlobalId);
nd->mParent = parent;

@@ -693,8 +693,7 @@ aiNode* ProcessSpatialStructure(aiNode* parent, const Schema_2x3::IfcProduct& el
if (children.first==children.second) {
// handles single property set
ProcessMetadata((*children.first).second, conv, properties);
}
else {
} else {
// handles multiple property sets (currently all property sets are merged,
// which may not be the best solution in the long run)
for (STEP::DB::RefMap::const_iterator it=children.first; it!=children.second; ++it) {

@@ -751,7 +750,7 @@ aiNode* ProcessSpatialStructure(aiNode* parent, const Schema_2x3::IfcProduct& el
continue;
}

aiNode* const ndnew = ProcessSpatialStructure(nd.get(),pro,conv,NULL);
aiNode* const ndnew = ProcessSpatialStructure(nd,pro,conv,nullptr);
if(ndnew) {
subnodes.push_back( ndnew );
}

@@ -765,7 +764,7 @@ aiNode* ProcessSpatialStructure(aiNode* parent, const Schema_2x3::IfcProduct& el
// move opening elements to a separate node since they are semantically different than elements that are just 'contained'
std::unique_ptr<aiNode> nd_aggr(new aiNode());
nd_aggr->mName.Set("$RelVoidsElement");
nd_aggr->mParent = nd.get();
nd_aggr->mParent = nd;

nd_aggr->mTransformation = nd->mTransformation;

@@ -810,7 +809,7 @@ aiNode* ProcessSpatialStructure(aiNode* parent, const Schema_2x3::IfcProduct& el
// move aggregate elements to a separate node since they are semantically different than elements that are just 'contained'
std::unique_ptr<aiNode> nd_aggr(new aiNode());
nd_aggr->mName.Set("$RelAggregates");
nd_aggr->mParent = nd.get();
nd_aggr->mParent = nd;

nd_aggr->mTransformation = nd->mTransformation;

@@ -835,19 +834,18 @@ aiNode* ProcessSpatialStructure(aiNode* parent, const Schema_2x3::IfcProduct& el
}

if (!skipGeometry) {
ProcessProductRepresentation(el,nd.get(),subnodes,conv);
conv.apply_openings = conv.collect_openings = NULL;
ProcessProductRepresentation(el, nd, subnodes, conv);
conv.apply_openings = conv.collect_openings = nullptr;
}

if (subnodes.size()) {
nd->mChildren = new aiNode*[subnodes.size()]();
for(aiNode* nd2 : subnodes) {
nd->mChildren[nd->mNumChildren++] = nd2;
nd2->mParent = nd.get();
nd2->mParent = nd;
}
}
}
catch(...) {
} catch(...) {
// it hurts, but I don't want to pull boost::ptr_vector into -noboost only for these few spots here
std::for_each(subnodes.begin(),subnodes.end(),delete_fun<aiNode>());
throw;

@@ -855,7 +853,7 @@ aiNode* ProcessSpatialStructure(aiNode* parent, const Schema_2x3::IfcProduct& el

ai_assert(conv.already_processed.find(el.GetID()) != conv.already_processed.end());
conv.already_processed.erase(conv.already_processed.find(el.GetID()));
return nd.release();
return nd;
}

// ------------------------------------------------------------------------------------------------

@@ -302,13 +302,14 @@ void Q3DImporter::InternReadFile( const std::string& pFile,
case 't':

pScene->mNumTextures = numTextures;
if (!numTextures)break;
if (!numTextures) {
break;
}
pScene->mTextures = new aiTexture*[pScene->mNumTextures];
// to make sure we won't crash if we leave through an exception
::memset(pScene->mTextures,0,sizeof(void*)*pScene->mNumTextures);
for (unsigned int i = 0; i < pScene->mNumTextures; ++i)
{
aiTexture* tex = pScene->mTextures[i] = new aiTexture();
for (unsigned int i = 0; i < pScene->mNumTextures; ++i) {
aiTexture* tex = pScene->mTextures[i] = new aiTexture;

// skip the texture name
while (stream.GetI1());

@@ -317,15 +318,16 @@ void Q3DImporter::InternReadFile( const std::string& pFile,
tex->mWidth = (unsigned int)stream.GetI4();
tex->mHeight = (unsigned int)stream.GetI4();

if (!tex->mWidth || !tex->mHeight)
if (!tex->mWidth || !tex->mHeight) {
throw DeadlyImportError("Quick3D: Invalid texture. Width or height is zero");
}

unsigned int mul = tex->mWidth * tex->mHeight;
aiTexel* begin = tex->pcData = new aiTexel[mul];
aiTexel* const end = & begin [mul];
aiTexel* const end = & begin[mul-1] +1;

for (;begin != end; ++begin)
{

for (;begin != end; ++begin) {
begin->r = stream.GetI1();
begin->g = stream.GetI1();
begin->b = stream.GetI1();

@@ -182,7 +182,7 @@ void STLImporter::InternReadFile( const std::string& pFile, aiScene* pScene, IOS
std::unique_ptr<IOStream> file( pIOHandler->Open( pFile, "rb"));

// Check whether we can read from the file
if( file.get() == NULL) {
if( file.get() == nullptr) {
throw DeadlyImportError( "Failed to open STL file " + pFile + ".");
}

@@ -190,11 +190,11 @@ void STLImporter::InternReadFile( const std::string& pFile, aiScene* pScene, IOS

// allocate storage and copy the contents of the file to a memory buffer
// (terminate it with zero)
std::vector<char> mBuffer2;
TextFileToBuffer(file.get(),mBuffer2);
std::vector<char> buffer2;
TextFileToBuffer(file.get(),buffer2);

this->pScene = pScene;
this->mBuffer = &mBuffer2[0];
this->mBuffer = &buffer2[0];

// the default vertex color is light gray.
clrColorDefault.r = clrColorDefault.g = clrColorDefault.b = clrColorDefault.a = (ai_real) 0.6;

@@ -231,6 +231,8 @@ void STLImporter::InternReadFile( const std::string& pFile, aiScene* pScene, IOS
pScene->mNumMaterials = 1;
pScene->mMaterials = new aiMaterial*[1];
pScene->mMaterials[0] = pcMat;

mBuffer = nullptr;
}

// ------------------------------------------------------------------------------------------------

@@ -643,14 +643,11 @@ void ExportSkin(Asset& mAsset, const aiMesh* aimesh, Ref<Mesh>& meshRef, Ref<Buf
Ref<Buffer> buf = vertexJointAccessor->bufferView->buffer;
uint8_t* arrys = new uint8_t[bytesLen];
unsigned int i = 0;
uint8_t* data = new uint8_t[s_bytesPerComp];
for ( unsigned int j = 0; j <= bytesLen; j += bytesPerComp ){
size_t len_p = offset + j;
float f_value = *(float *)&buf->GetPointer()[len_p];
unsigned short c = static_cast<unsigned short>(f_value);
::memset(data, 0, s_bytesPerComp * sizeof(uint8_t));
data = (uint8_t*)&c;
memcpy(&arrys[i*s_bytesPerComp], data, s_bytesPerComp);
memcpy(&arrys[i*s_bytesPerComp], &c, s_bytesPerComp);
++i;
}
buf->ReplaceData_joint(offset, bytesLen, arrys, bytesLen);

@@ -659,10 +656,10 @@ void ExportSkin(Asset& mAsset, const aiMesh* aimesh, Ref<Mesh>& meshRef, Ref<Buf

p.attributes.joint.push_back( vertexJointAccessor );
delete[] arrys;
delete[] data;
}

Ref<Accessor> vertexWeightAccessor = ExportData(mAsset, skinRef->id, bufferRef, aimesh->mNumVertices, vertexWeightData, AttribType::VEC4, AttribType::VEC4, ComponentType_FLOAT);
Ref<Accessor> vertexWeightAccessor = ExportData(mAsset, skinRef->id, bufferRef, aimesh->mNumVertices,
vertexWeightData, AttribType::VEC4, AttribType::VEC4, ComponentType_FLOAT);
if ( vertexWeightAccessor ) {
p.attributes.weight.push_back( vertexWeightAccessor );
}

@@ -751,8 +748,7 @@ void glTF2Exporter::ExportMeshes()
}

/*************** Vertex colors ****************/
for (unsigned int indexColorChannel = 0; indexColorChannel < aim->GetNumColorChannels(); ++indexColorChannel)
{
for (unsigned int indexColorChannel = 0; indexColorChannel < aim->GetNumColorChannels(); ++indexColorChannel) {
Ref<Accessor> c = ExportData(*mAsset, meshId, b, aim->mNumVertices, aim->mColors[indexColorChannel], AttribType::VEC4, AttribType::VEC4, ComponentType_FLOAT, false);
if (c)
p.attributes.color.push_back(c);

@@ -798,8 +794,12 @@ void glTF2Exporter::ExportMeshes()
CopyValue(inverseBindMatricesData[idx_joint], invBindMatrixData[idx_joint]);
}

Ref<Accessor> invBindMatrixAccessor = ExportData(*mAsset, skinName, b, static_cast<unsigned int>(inverseBindMatricesData.size()), invBindMatrixData, AttribType::MAT4, AttribType::MAT4, ComponentType_FLOAT);
if (invBindMatrixAccessor) skinRef->inverseBindMatrices = invBindMatrixAccessor;
Ref<Accessor> invBindMatrixAccessor = ExportData(*mAsset, skinName, b,
static_cast<unsigned int>(inverseBindMatricesData.size()),
invBindMatrixData, AttribType::MAT4, AttribType::MAT4, ComponentType_FLOAT);
if (invBindMatrixAccessor) {
skinRef->inverseBindMatrices = invBindMatrixAccessor;
}

// Identity Matrix =====> skinRef->bindShapeMatrix
// Temporary. Hard-coded identity matrix here

@@ -827,10 +827,11 @@ void glTF2Exporter::ExportMeshes()
meshNode->skeletons.push_back(rootJoint);
meshNode->skin = skinRef;
}
delete[] invBindMatrixData;
}
}

//merges a node's multiple meshes (with one primitive each) into one mesh with multiple primitives
// Merges a node's multiple meshes (with one primitive each) into one mesh with multiple primitives
void glTF2Exporter::MergeMeshes()
{
for (unsigned int n = 0; n < mAsset->nodes.Size(); ++n) {

@@ -101,17 +101,17 @@ glTFExporter::glTFExporter(const char* filename, IOSystem* pIOSystem, const aiSc
{
aiScene* sceneCopy_tmp;
SceneCombiner::CopyScene(&sceneCopy_tmp, pScene);
std::unique_ptr<aiScene> sceneCopy(sceneCopy_tmp);
aiScene *sceneCopy(sceneCopy_tmp);

SplitLargeMeshesProcess_Triangle tri_splitter;
tri_splitter.SetLimit(0xffff);
tri_splitter.Execute(sceneCopy.get());
tri_splitter.Execute(sceneCopy);

SplitLargeMeshesProcess_Vertex vert_splitter;
vert_splitter.SetLimit(0xffff);
vert_splitter.Execute(sceneCopy.get());
vert_splitter.Execute(sceneCopy);

mScene = sceneCopy.get();
mScene = sceneCopy;

mAsset.reset( new glTF::Asset( pIOSystem ) );

@@ -48,11 +48,12 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef AI_STREAMREADER_H_INCLUDED
#define AI_STREAMREADER_H_INCLUDED

#include <assimp/IOStream.hpp>
#include <assimp/Defines.h>

#include "ByteSwapper.h"
#include "Exceptional.h"
#include <memory>
#include <assimp/IOStream.hpp>
#include <assimp/Defines.h>

namespace Assimp {

@@ -314,7 +315,7 @@ private:
const size_t read = stream->Read(current,1,s);
// (read < s) can only happen if the stream was opened in text mode, in which case FileSize() is not reliable
ai_assert(read <= s);
end = limit = &buffer[read];
end = limit = &buffer[read-1] + 1;
}

private:

@@ -0,0 +1,91 @@
PyAssimp Readme
===============

A simple Python wrapper for Assimp using `ctypes` to access the library.
Requires Python >= 2.6.

Python 3 support is mostly here, but not well tested.

Note that pyassimp is not complete. Many ASSIMP features are missing.

USAGE
-----

### Complete example: 3D viewer

`pyassimp` comes with a simple 3D viewer that shows how to load and display a 3D
model using a shader-based OpenGL pipeline.

![Screenshot](3d_viewer_screenshot.png)

To use it, from within `/port/PyAssimp`:

```console
$ cd scripts
$ python ./3D-viewer <path to your model>
```

You can use this code as a starting point for your own applications.

### Writing your own code

To get started with `pyassimp`, examine the simpler `sample.py` script in `scripts/`,
which illustrates the basic usage. All Assimp data structures are wrapped using
`ctypes`. All the data+length fields in Assimp's data structures (such as
`aiMesh::mNumVertices`, `aiMesh::mVertices`) are replaced by simple python
lists, so you can call `len()` on them to get their respective size and access
members using `[]`.

For example, to load a file named `hello.3ds` and print the first
vertex of the first mesh, you would do (proper error handling
substituted by assertions ...):

```python
from pyassimp import *
scene = load('hello.3ds')

assert len(scene.meshes)
mesh = scene.meshes[0]

assert len(mesh.vertices)
print(mesh.vertices[0])

# don't forget this one, or you will leak!
release(scene)
```

Another example to list the 'top nodes' in a
scene:

```python
from pyassimp import *
scene = load('hello.3ds')

for c in scene.rootnode.children:
    print(str(c))

release(scene)
```

INSTALL
-------

Install `pyassimp` by running:

```console
$ python setup.py install
```

PyAssimp requires an Assimp dynamic library (a `DLL` on Windows,
a `.so` on Linux, a `.dylib` on macOS) in order to work. The default search directories are:
- the current directory
- on Linux additionally: `/usr/lib`, `/usr/local/lib`,
  `/usr/lib/x86_64-linux-gnu`

To build that library, refer to the Assimp master `INSTALL`
instructions. To look in more places, edit `./pyassimp/helper.py`.
There's an `additional_dirs` list waiting for your entries.
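
If your Assimp build lives somewhere else, appending its folder to that list is usually enough. A minimal sketch, assuming the stock `helper.py` layout; the path below is a hypothetical build-output directory, adjust it to your setup:

```python
# pyassimp/helper.py (sketch): extend the search list so the ctypes loader
# can also find your own build of the assimp shared library.
additional_dirs.append('/home/me/assimp/build/code/')  # hypothetical location
```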

@@ -468,7 +468,7 @@ class PyAssimp3DViewer:
try:
self.set_shaders_v130()
self.prepare_shaders()
except RuntimeError, message:
except RuntimeError as message:
sys.stderr.write("%s\n" % message)
sys.stdout.write("Could not compile shaders in version 1.30, trying version 1.20\n")
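
For reference, the hunk above swaps the Python-2-only comma form of `except` for the `as` form, which both Python 2.6+ and Python 3 accept. A minimal illustrative sketch, not taken from the viewer script; the raised error text is made up:

```python
# Old, Python-2-only spelling (a syntax error on Python 3):
#     except RuntimeError, message:
# Portable spelling, valid on Python 2.6+ and Python 3:
try:
    raise RuntimeError("shader compilation failed")  # illustrative error only
except RuntimeError as message:
    print("caught: %s" % message)
```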