pull/536/head
Kim Kulling 2015-04-14 12:04:47 +02:00
commit 56e8dc5a43
14 changed files with 17727 additions and 8097 deletions


@@ -15,12 +15,12 @@ env:
   matrix:
     - LINUX=1 TRAVIS_NO_EXPORT=YES
     - LINUX=1 TRAVIS_NO_EXPORT=NO
-    - LINUX=1 TRAVIS_STATIC_BUILD=ON
-    - LINUX=1 TRAVIS_STATIC_BUILD=OFF
+    - LINUX=1 SHARED_BUILD=ON
+    - LINUX=1 SHARED_BUILD=OFF
     - WINDOWS=1 TRAVIS_NO_EXPORT=YES
     - WINDOWS=1 TRAVIS_NO_EXPORT=NO
-    - WINDOWS=1 TRAVIS_STATIC_BUILD=ON
-    - WINDOWS=1 TRAVIS_STATIC_BUILD=OFF
+    - WINDOWS=1 SHARED_BUILD=ON
+    - WINDOWS=1 SHARED_BUILD=OFF
     - ANDROID=1

 language: cpp
@@ -35,10 +35,18 @@ install:
 script:
   - if [ $ANDROID ]; then
      ant -v -Dmy.dir=${TRAVIS_BUILD_DIR} -f ${TRAVIS_BUILD_DIR}/port/jassimp/build.xml ndk-jni ;
+    elif [ $WINDOWS -a $CC = "gcc" ]; then
+      sudo sh -c "wget http://source.winehq.org/git/wine.git/commitdiff_plain/86781a6a524fa336f893ffd0a87373ffd306913c?hp=076edfe9d4b6cd39b6cf41b9f1d3e18688cc8673 -O - | patch -p 1 -d /usr/x86_64-w64-mingw32" ;
+      sudo sh -c "wget https://www.winehq.org/pipermail/wine-patches/2012-February/111438.html -O - | patch -p 1 -d /usr/x86_64-w64-mingw32" ;
+      cmake -G "Unix Makefiles" -DASSIMP_NO_EXPORT=$TRAVIS_NO_EXPORT -DBUILD_SHARED_LIBS=$SHARED_BUILD -DCMAKE_TOOLCHAIN_FILE=cmake-modules/MinGW_x86_64.cmake ;
+      cmake --build . ;
+      make install ;
+    elif [ $WINDOWS ]; then
+      echo "Skip compile with non-gcc setting." ;
     elif [ $RESERVED ]; then
      echo "Reserved condition" ;
     else
-      cmake -G "Unix Makefiles" -DASSIMP_ENABLE_BOOST_WORKAROUND=YES -DASSIMP_NO_EXPORT=$TRAVIS_NO_EXPORT -STATIC_BUILD=$TRAVIS_STATIC_BUILD ;
+      cmake -G "Unix Makefiles" -DASSIMP_ENABLE_BOOST_WORKAROUND=YES -DASSIMP_NO_EXPORT=$TRAVIS_NO_EXPORT -DBUILD_SHARED_LIBS=$SHARED_BUILD ;
      make ;
      sudo make install ;
      sudo ldconfig ;
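
For reference, the renamed SHARED_BUILD matrix entries map onto ordinary CMake invocations. A minimal sketch of reproducing the LINUX=1 SHARED_BUILD=OFF entry locally (an illustration only, assuming a Linux host with the build prerequisites installed and the source root as working directory; ASSIMP_NO_EXPORT is left at its default here):

    cmake -G "Unix Makefiles" -DASSIMP_ENABLE_BOOST_WORKAROUND=YES -DBUILD_SHARED_LIBS=OFF
    make
    sudo make install
    sudo ldconfig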


@@ -1,6 +1,12 @@
 cmake_minimum_required( VERSION 2.8 )
 PROJECT( Assimp )

+option(BUILD_SHARED_LIBS "Build package with shared libraries." ON)
+if(NOT BUILD_SHARED_LIBS)
+  #set(CMAKE_EXE_LINKER_FLAGS "-static")
+  set(LINK_SEARCH_START_STATIC TRUE)
+endif(NOT BUILD_SHARED_LIBS)
+
 # Define here the needed parameters
 set (ASSIMP_VERSION_MAJOR 3)
 set (ASSIMP_VERSION_MINOR 1)
@@ -88,9 +94,6 @@ SET( ASSIMP_BIN_INSTALL_DIR "bin" CACHE PATH
 SET(CMAKE_DEBUG_POSTFIX "d" CACHE STRING "Debug Postfitx for lib, samples and tools")

-# Allow the user to build a shared or static library
-option ( BUILD_SHARED_LIBS "Build a shared version of the library" ON )
-
 # Only generate this target if no higher-level project already has
 IF (NOT TARGET uninstall)
   # add make uninstall capability
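
With the option hoisted to the top of CMakeLists.txt, shared libraries remain the default and a static build is selected the standard CMake way; a minimal sketch, run from the source root:

    cmake -DBUILD_SHARED_LIBS=OFF .
    make

The LINK_SEARCH_START_STATIC setting in the new branch is evidently intended to bias the linker towards static libraries once the shared default is switched off.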


@@ -0,0 +1,16 @@
+# this one sets internal to crosscompile (in theory)
+SET(CMAKE_SYSTEM_NAME Windows)
+
+# the minimalistic settings
+SET(CMAKE_C_COMPILER "/usr/bin/x86_64-w64-mingw32-gcc")
+SET(CMAKE_CXX_COMPILER "/usr/bin/x86_64-w64-mingw32-g++")
+SET(CMAKE_RC_COMPILER "/usr/bin/x86_64-w64-mingw32-windres")
+
+# where is the target (so called staging) environment
+SET(CMAKE_FIND_ROOT_PATH /usr/x86_64-w64-mingw32)
+
+# search for programs in the build host directories (default BOTH)
+#SET(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
+# for libraries and headers in the target directories
+SET(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
+SET(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
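
This appears to be the new cmake-modules/MinGW_x86_64.cmake toolchain file that the Travis script above passes to CMake. A minimal local cross-compile sketch, assuming the mingw-w64 packages install the compilers at the hardcoded paths:

    cmake -G "Unix Makefiles" -DCMAKE_TOOLCHAIN_FILE=cmake-modules/MinGW_x86_64.cmake .
    cmake --build .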


@@ -148,13 +148,17 @@ void COBImporter::InternReadFile( const std::string& pFile,
     }
     DefaultLogger::get()->info("File format tag: "+std::string(head+9,6));

-    void (COBImporter::* load)(Scene&,StreamReaderLE*)= head[15]=='A'?&COBImporter::ReadAsciiFile:&COBImporter::ReadBinaryFile;
     if (head[16]!='L') {
         ThrowException("File is big-endian, which is not supported");
     }

     // load data into intermediate structures
-    (this->*load)(scene,stream.get());
+    if (head[15]=='A') {
+        ReadAsciiFile(scene, stream.get());
+    }
+    else {
+        ReadBinaryFile(scene, stream.get());
+    }

     if(scene.nodes.empty()) {
         ThrowException("No nodes loaded");
     }


@@ -522,43 +522,23 @@ IfcMatrix3 DerivePlaneCoordinateSpace(const TempMesh& curmesh, bool& ok, IfcVect
     return m;
 }

 // ------------------------------------------------------------------------------------------------
-void ProcessExtrudedAreaSolid(const IfcExtrudedAreaSolid& solid, TempMesh& result,
-    ConversionData& conv, bool collect_openings)
+// Extrudes the given polygon along the direction, converts it into an opening or applies all openings as necessary.
+void ProcessExtrudedArea(const IfcExtrudedAreaSolid& solid, const TempMesh& curve,
+    const IfcVector3& extrusionDir, TempMesh& result, ConversionData &conv, bool collect_openings)
 {
-    TempMesh meshout;
-
-    // First read the profile description
-    if(!ProcessProfile(*solid.SweptArea,meshout,conv) || meshout.verts.size()<=1) {
-        return;
-    }
-
-    IfcVector3 dir;
-    ConvertDirection(dir,solid.ExtrudedDirection);
-
-    dir *= solid.Depth;
-    /*
-    if(conv.collect_openings && !conv.apply_openings) {
-        dir *= 1000.0;
-    } */
-
-    // Outline: assuming that `meshout.verts` is now a list of vertex points forming
-    // the underlying profile, extrude along the given axis, forming new
-    // triangles.
-
-    std::vector<IfcVector3>& in = meshout.verts;
-    const size_t size=in.size();
-
-    const bool has_area = solid.SweptArea->ProfileType == "AREA" && size>2;
-    if(solid.Depth < 1e-6) {
-        if(has_area) {
-            result = meshout;
+    // Outline: 'curve' is now a list of vertex points forming the underlying profile, extrude along the given axis,
+    // forming new triangles.
+    const bool has_area = solid.SweptArea->ProfileType == "AREA" && curve.verts.size() > 2;
+    if( solid.Depth < 1e-6 ) {
+        if( has_area ) {
+            result.Append(curve);
         }
         return;
     }

-    result.verts.reserve(size*(has_area?4:2));
-    result.vertcnt.reserve(meshout.vertcnt.size()+2);
+    result.verts.reserve(curve.verts.size()*(has_area ? 4 : 2));
+    result.vertcnt.reserve(curve.verts.size() + 2);
+
+    std::vector<IfcVector3> in = curve.verts;

     // First step: transform all vertices into the target coordinate space
     IfcMatrix4 trafo;
@@ -566,7 +546,7 @@ void ProcessExtrudedAreaSolid(const IfcExtrudedAreaSolid& solid, TempMesh& result,
     IfcVector3 vmin, vmax;
     MinMaxChooser<IfcVector3>()(vmin, vmax);
-    BOOST_FOREACH(IfcVector3& v,in) {
+    BOOST_FOREACH(IfcVector3& v, in) {
         v *= trafo;
         vmin = std::min(vmin, v);
@@ -575,93 +555,91 @@ void ProcessExtrudedAreaSolid(const IfcExtrudedAreaSolid& solid, TempMesh& result,
     vmax -= vmin;
     const IfcFloat diag = vmax.Length();

-    IfcVector3 min = in[0];
-    dir *= IfcMatrix3(trafo);
+    IfcVector3 dir = IfcMatrix3(trafo) * extrusionDir;

     // reverse profile polygon if it's winded in the wrong direction in relation to the extrusion direction
-    IfcVector3 profileNormal = TempMesh::ComputePolygonNormal( in.data(), in.size());
+    IfcVector3 profileNormal = TempMesh::ComputePolygonNormal(in.data(), in.size());
     if( profileNormal * dir < 0.0 )
-        std::reverse( in.begin(), in.end());
+        std::reverse(in.begin(), in.end());

     std::vector<IfcVector3> nors;
     const bool openings = !!conv.apply_openings && conv.apply_openings->size();

     // Compute the normal vectors for all opening polygons as a prerequisite
     // to TryAddOpenings_Poly2Tri()
     // XXX this belongs into the aforementioned function
-    if (openings) {
-        if (!conv.settings.useCustomTriangulation) {
+    if( openings ) {
+        if( !conv.settings.useCustomTriangulation ) {
             // it is essential to apply the openings in the correct spatial order. The direction
             // doesn't matter, but we would screw up if we started with e.g. a door in between
             // two windows.
-            std::sort(conv.apply_openings->begin(),conv.apply_openings->end(),
-                TempOpening::DistanceSorter(min));
+            std::sort(conv.apply_openings->begin(), conv.apply_openings->end(), TempOpening::DistanceSorter(in[0]));
         }
         nors.reserve(conv.apply_openings->size());
-        BOOST_FOREACH(TempOpening& t,*conv.apply_openings) {
+        BOOST_FOREACH(TempOpening& t, *conv.apply_openings) {
             TempMesh& bounds = *t.profileMesh.get();
-            if (bounds.verts.size() <= 2) {
+            if( bounds.verts.size() <= 2 ) {
                 nors.push_back(IfcVector3());
                 continue;
             }
-            nors.push_back(((bounds.verts[2]-bounds.verts[0])^(bounds.verts[1]-bounds.verts[0]) ).Normalize());
+            nors.push_back(((bounds.verts[2] - bounds.verts[0]) ^ (bounds.verts[1] - bounds.verts[0])).Normalize());
         }
     }

     TempMesh temp;
     TempMesh& curmesh = openings ? temp : result;
     std::vector<IfcVector3>& out = curmesh.verts;

     size_t sides_with_openings = 0;
-    for(size_t i = 0; i < size; ++i) {
-        const size_t next = (i+1)%size;
+    for( size_t i = 0; i < in.size(); ++i ) {
+        const size_t next = (i + 1) % in.size();

         curmesh.vertcnt.push_back(4);

         out.push_back(in[i]);
         out.push_back(in[next]);
-        out.push_back(in[next]+dir);
-        out.push_back(in[i]+dir);
+        out.push_back(in[next] + dir);
+        out.push_back(in[i] + dir);

-        if(openings) {
-            if((in[i]-in[next]).Length() > diag * 0.1 && GenerateOpenings(*conv.apply_openings,nors,temp,true, true, dir)) {
+        if( openings ) {
+            if( (in[i] - in[next]).Length() > diag * 0.1 && GenerateOpenings(*conv.apply_openings, nors, temp, true, true, dir) ) {
                 ++sides_with_openings;
             }

             result.Append(temp);
             temp.Clear();
         }
     }

-    if(openings) {
+    if( openings ) {
         BOOST_FOREACH(TempOpening& opening, *conv.apply_openings) {
-            if (!opening.wallPoints.empty()) {
+            if( !opening.wallPoints.empty() ) {
                 IFCImporter::LogError("failed to generate all window caps");
             }
             opening.wallPoints.clear();
         }
     }

     size_t sides_with_v_openings = 0;
-    if(has_area) {
-        for(size_t n = 0; n < 2; ++n) {
+    if( has_area ) {
+        for( size_t n = 0; n < 2; ++n ) {
             if( n > 0 ) {
-                for(size_t i = 0; i < size; ++i )
-                    out.push_back(in[i]+dir);
-            } else {
-                for(size_t i = size; i--; )
+                for( size_t i = 0; i < in.size(); ++i )
+                    out.push_back(in[i] + dir);
+            }
+            else {
+                for( size_t i = in.size(); i--; )
                     out.push_back(in[i]);
             }

-            curmesh.vertcnt.push_back(size);
-            if(openings && size > 2) {
-                if(GenerateOpenings(*conv.apply_openings,nors,temp,true, true, dir)) {
+            curmesh.vertcnt.push_back(in.size());
+            if( openings && in.size() > 2 ) {
+                if( GenerateOpenings(*conv.apply_openings, nors, temp, true, true, dir) ) {
                     ++sides_with_v_openings;
                 }
@@ -671,7 +649,7 @@ void ProcessExtrudedAreaSolid(const IfcExtrudedAreaSolid& solid, TempMesh& result,
             }
         }

-    if(openings && ((sides_with_openings == 1 && sides_with_openings) || (sides_with_v_openings == 2 && sides_with_v_openings))) {
+    if( openings && ((sides_with_openings == 1 && sides_with_openings) || (sides_with_v_openings == 2 && sides_with_v_openings)) ) {
         IFCImporter::LogWarn("failed to resolve all openings, presumably their topology is not supported by Assimp");
     }
@@ -679,17 +657,58 @@ void ProcessExtrudedAreaSolid(const IfcExtrudedAreaSolid& solid, TempMesh& result,
     // If this is an opening element, store both the extruded mesh and the 2D profile mesh
     // it was created from. Return an empty mesh to the caller.
-    if(collect_openings && !result.IsEmpty()) {
+    if( collect_openings && !result.IsEmpty() ) {
         ai_assert(conv.collect_openings);
         boost::shared_ptr<TempMesh> profile = boost::shared_ptr<TempMesh>(new TempMesh());
         profile->Swap(result);

         boost::shared_ptr<TempMesh> profile2D = boost::shared_ptr<TempMesh>(new TempMesh());
-        profile2D->Swap(meshout);
-        conv.collect_openings->push_back(TempOpening(&solid,dir,profile, profile2D));
+        profile2D->verts.insert(profile2D->verts.end(), in.begin(), in.end());
+        profile2D->vertcnt.push_back(in.size());
+        conv.collect_openings->push_back(TempOpening(&solid, dir, profile, profile2D));

         ai_assert(result.IsEmpty());
     }
+}
+
+// ------------------------------------------------------------------------------------------------
+void ProcessExtrudedAreaSolid(const IfcExtrudedAreaSolid& solid, TempMesh& result,
+    ConversionData& conv, bool collect_openings)
+{
+    TempMesh meshout;
+
+    // First read the profile description.
+    if(!ProcessProfile(*solid.SweptArea,meshout,conv) || meshout.verts.size()<=1) {
+        return;
+    }
+
+    IfcVector3 dir;
+    ConvertDirection(dir,solid.ExtrudedDirection);
+    dir *= solid.Depth;
+
+    // Some profiles bring their own holes, for which we need to provide a container. This all is somewhat backwards,
+    // and there's still so many corner cases uncovered - we really need a generic solution to all of this hole carving.
+    std::vector<TempOpening> fisherPriceMyFirstOpenings;
+    std::vector<TempOpening>* oldApplyOpenings = conv.apply_openings;
+    if( const IfcArbitraryProfileDefWithVoids* const cprofile = solid.SweptArea->ToPtr<IfcArbitraryProfileDefWithVoids>() ) {
+        if( !cprofile->InnerCurves.empty() ) {
+            // read all inner curves and extrude them to form proper openings.
+            std::vector<TempOpening>* oldCollectOpenings = conv.collect_openings;
+            conv.collect_openings = &fisherPriceMyFirstOpenings;
+
+            BOOST_FOREACH(const IfcCurve* curve, cprofile->InnerCurves) {
+                TempMesh curveMesh, tempMesh;
+                ProcessCurve(*curve, curveMesh, conv);
+                ProcessExtrudedArea(solid, curveMesh, dir, tempMesh, conv, true);
+            }
+            // and then apply those to the geometry we're about to generate
+            conv.apply_openings = conv.collect_openings;
+            conv.collect_openings = oldCollectOpenings;
+        }
+    }
+
+    ProcessExtrudedArea(solid, meshout, dir, result, conv, collect_openings);
+    conv.apply_openings = oldApplyOpenings;
 }

 // ------------------------------------------------------------------------------------------------
@@ -784,7 +803,7 @@ bool ProcessGeometricItem(const IfcRepresentationItem& geo, unsigned int matid,
     meshtmp->RemoveDegenerates();

     if(fix_orientation) {
-        meshtmp->FixupFaceOrientation();
+//      meshtmp->FixupFaceOrientation();
     }

     aiMesh* const mesh = meshtmp->ToMesh();

File diff suppressed because it is too large.

File diff suppressed because it is too large.


@@ -271,6 +271,7 @@ IfcFloat ConvertSIPrefix(const std::string& prefix);
 // IFCProfile.cpp
 bool ProcessProfile(const IfcProfileDef& prof, TempMesh& meshout, ConversionData& conv);
+bool ProcessCurve(const IfcCurve& curve, TempMesh& meshout, ConversionData& conv);

 // IFCMaterial.cpp
 unsigned int ProcessMaterials(uint64_t id, unsigned int prevMatId, ConversionData& conv, bool forceDefaultMat);


@@ -1023,7 +1023,9 @@ void SceneCombiner::CopyScene(aiScene** _dest,const aiScene* src,bool allocate)
     dest->mFlags = src->mFlags;

     // source private data might be NULL if the scene is user-allocated (i.e. for use with the export API)
-    ScenePriv(dest)->mPPStepsApplied = ScenePriv(src) ? ScenePriv(src)->mPPStepsApplied : 0;
+    if (dest->mPrivate != NULL) {
+        ScenePriv(dest)->mPPStepsApplied = ScenePriv(src) ? ScenePriv(src)->mPPStepsApplied : 0;
+    }
 }

 // ------------------------------------------------------------------------------------------------


@@ -44,6 +44,7 @@
    entities and data types contained"""

 import sys, os, re
+from collections import OrderedDict

 re_match_entity = re.compile(r"""
    ENTITY\s+(\w+)\s*                        # 'ENTITY foo'
@@ -68,8 +69,8 @@ re_match_field = re.compile(r"""

 class Schema:
     def __init__(self):
-        self.entities = {}
-        self.types = {}
+        self.entities = OrderedDict()
+        self.types = OrderedDict()

 class Entity:
     def __init__(self,name,parent,members):


@@ -1,16 +1,17 @@
 # ==============================================================================
 # List of IFC structures needed by Assimp
 # ==============================================================================
 # use genentitylist.sh to update this list

 # This machine-generated list is not complete, it lacks many intermediate
 # classes in the inheritance hierarchy. Those are magically augmented by the
 # code generator. Also, the names of all used entities need to be present
 # in the source code for this to work.

 IfcAnnotation
 IfcArbitraryClosedProfileDef
 IfcArbitraryOpenProfileDef
+IfcArbitraryProfileDefWithVoids
 IfcAxis1Placement
 IfcAxis2Placement
 IfcAxis2Placement2D

File diff suppressed because it is too large.


@@ -3,83 +3,81 @@ Assimp Regression Test Suite
 1) How does it work?
 ---------------------------------------------------------------------------------
-run.py checks all model in the <root>/test/models folder and compares the result
-against a regression database provided by us (db.zip). If the test passes
-successfully, Assimp definitely WORKS perfectly on your system. A few failures
-are totally fine as well (see sections 7+). You need to worry if a huge
-majority of all files in a particular format or post-processing configuration
-fails - this might be a sign of a recent regression in assimp's codebase.
+run.py checks all model in the <root>/test/models* folders and compares the result
+against a regression database provided with assimp (db.zip). A few failures
+are totally fine (see sections 7+). You need to worry if a huge
+majority of all files in a particular format (or post-processing configuration)
+fails as this might be a sign of a recent regression in assimp's codebase or
+gross incompatibility with your system or compiler.

 2) What do I need?
 ---------------------------------------------------------------------------------
-- you need Python installed - 3.x !!
-- you need to build tools/assimp_cmd as described in the INSTALL file (
-  make && make install on unixes,release-dll target with msvc).
+- You need Python installed (2.7+, 3.x). On Windows, run the scripts using "py".
+- You need to build the assimp command line tool (ASSIMP_BUILD_ASSIMP_TOOLS
+  CMake build flag). Both run.py and gen_db.py take the full path to the binary
+  as first command line parameter.

 3) How to add more test files?
 ---------------------------------------------------------------------------------
 Use the following procedure:
-- verify the correctness of your assimp build - run the regression suite.
-  DO NOT continue if one or more tests fail.
-- add your additional test files to <root>/test/models/<fileformat>, where
-  <fileformat> is the file type (typically the file extension)
-- rebuild the regression database using gen_db.py
-- run the regression suite again - all tests should pass, including
-  those for the new files.
-- contributors: commit the db.zip plus your additional test files to
-  the SVN repository.
+- Verify the correctness of your assimp build - run the regression suite.
+  DO NOT continue if more tests fail than usual.
+- Add your additional test files to <root>/test/models/<fileformat>, where
+  <fileformat> is the file type (typically the file extension).
+- If you test file does not meet the BSD license requirements, add it to
+  <root>/test/models-nonbsd/<fileformat> so people know to be careful with it.
+- Rebuild the regression database:
+  "gen_db.py <binary> -ixyz" where .xyz is the file extension of the new file.
+- Run the regression suite again. There should be no new failures and the new
+  file should not be among the failures.
+- Include the db.zip file with your Pull Request. Travis CI enforces a passing
+  regression suite (with offenders whitelisted as a last resort).

-4) I made a change/fix/.. to a loader, how to update the database?
+4) I made a change/fix/patch to a loader, how to update the database?
 ---------------------------------------------------------------------------------
-- rebuild the regression database using gen_db.py
-- run the suite - all tests should pass now. If they do not, don't continue
-- contributors: commit the db.zip to the SVN repository
+- Rebuild the regression database using "gen_db.py <binary> -ixyz"
+  where .xyz is the file extension for which the loader was patched.
+- Run the regression suite again. There should be no new failures and the new
+  file should not be among the failures.
+- Include the db.zip file with your Pull Request. Travis CI enforces a passing
+  regression suite (with offenders whitelisted as a last resort).

-5) How to add my whole model repository to the suite?
+5) How to add my whole model repository to the database?
 ---------------------------------------------------------------------------------
 Edit the reg_settings.py file and add the path to your repository to
-<<model_directories>>. Then, rebuild the suite.
+<<model_directories>>. Then, rebuild the database.

-6) So what is actually verified?
+6) So what is actually tested?
 ---------------------------------------------------------------------------------
-The regression database includes mini dumps of the aiScene data structure -
+The regression database includes mini dumps of the aiScene data structure, i.e.
 the scene hierarchy plus the sizes of all data arrays MUST match. Floating-point
-data buffers, such as vertex positions, are handled less strictly: min,max and
-average values are stored with lower precision. This takes hardware- or
+data buffers, such as vertex positions are handled less strictly: min, max and
+average values are stored with low precision. This takes hardware- or
 compiler-specific differences in floating-point computations into account.
 Generally, almost all significant regressions will be detected while the
 number of false positives is relatively low.

 7) The test suite fails, what do do?
 ---------------------------------------------------------------------------------
-Get back to ../results and check out regression_suite_failures.txt
-It contains a list of all files which failed the test ... they're copied to
-../results/tmp. Both an EXPECTED and an ACTUAL file is produced per test.
-The output of `assimp cmpdump` is written to regressions_suite_output.txt.
-To quickly find all all reports pertaining to tests which failed, I'd
-recommend grepping for 'but' because its a constituent of all error messages
-produced by assimp_cmd :) Error reports contain detailed information
-regarding the point of failure inside the data structure, the values of
-the two corresponding fields that were found to be different from EXPECTED
-and ACTUAL, respectively, ... this should offer you enough information to start
-debugging.
+Get back to <root>/test/results and look at regression_suite_failures.txt.
+It contains a list of all files which failed the test. Failing dumps are copied to
+<root>/test/results/tmp. Both an EXPECTED and an ACTUAL file is produced per test.
+The output of "assimp cmpdump" is written to regressions_suite_output.txt. Grep
+for the file name in question and locate the log for the failed comparison. It
+contains a full trace of which scene elements have been compared before, which
+makes it reasonably easy to locate the offending field.

 8) fp:fast vs fp:precise fails the test suite (same for gcc equivalents)
 ---------------------------------------------------------------------------------
 As mentioned above, floating-point inaccuracies between differently optimized
 builds are not considered regressions and all float comparisons done by the test
-suite involve an epsilon. Changing floating-point optimizations can, however,
-lead to *real* changes in the output data structure, such as different number
-of vertices or faces, ... this violates one of our primary targets, that is
-produce reliable and portable output. We're working hard on removing these
-issues, but at the moment you have to live with few of them.
-
-Currently, the regression database is build on Windows using MSVC8 with
-fp:precise. This leads to a small number of failures with fp:fast and
-virtally every build with gcc. Be patient, it will be fixed.
+suite involve an epsilon to accomodate. However compiler settings that allow
+compilers to perform non-IEEE754 compliant optimizations can cause arbitrary
+failures in the test suite. Even if the compiler is configured to be IEE754
+comformant, there is lots of code in assimp that leaves the compiler a choice
+and different compilers make different choices (for example the precision of
+float intermediaries is implementation-specified).
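
Taken together, sections 3, 4 and 7 of the new text describe an update cycle roughly like the following (a sketch only; the binary path, the .obj extension and the model name are placeholders, and the scripts are assumed to be run from the directory containing gen_db.py and run.py):

    # rebuild the database entries for a patched .obj loader
    python gen_db.py /path/to/assimp_cmd -iobj
    # re-run the suite against the updated db.zip
    python run.py /path/to/assimp_cmd
    # inspect a particular failure in the comparison log
    grep some_model.obj ../results/regressions_suite_output.txt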


@@ -69,6 +69,9 @@ configs for an IDE, make sure to build the assimp_cmd project.
 -i,--include: List of file extensions to update dumps for. If omitted,
          all file extensions are updated except those in `exclude`.
+         Example: -ixyz,abc
+                  -i.xyz,.abc
+                  --include=xyz,abc

 -e,--exclude: Merged with settings.exclude_extensions to produce a
          list of all file extensions to ignore. If dumps exist,
@@ -78,8 +81,6 @@ configs for an IDE, make sure to build the assimp_cmd project.
          Dont' change anything.

 -n,--nozip: Don't pack to ZIP archive. Keep all dumps in individual files.
-
-(lists of file extensions are comma delimited, i.e. `3ds,lwo,x`)
 """
# -------------------------------------------------------------------------------
@@ -172,30 +173,32 @@ def gen_db(ext_list,outfile):
 # -------------------------------------------------------------------------------
 if __name__ == "__main__":
-    assimp_bin_path = sys.argv[1] if len(sys.argv) > 1 else 'assimp'
     def clean(f):
         f = f.strip("* \'")
         return "."+f if f[:1] != '.' else f

-    if len(sys.argv)>1 and (sys.argv[1] == "--help" or sys.argv[1] == "-h"):
+    if len(sys.argv) <= 1 or sys.argv[1] == "--help" or sys.argv[1] == "-h":
         print(usage)
         sys.exit(0)

+    assimp_bin_path = sys.argv[1]
     ext_list, preview, nozip = None, False, False
-    for m in sys.argv[1:]:
+    for m in sys.argv[2:]:
         if m[:10]=="--exclude=":
             settings.exclude_extensions += map(clean, m[10:].split(","))
-        elif m[:3]=="-e=":
-            settings.exclude_extensions += map(clean, m[3:].split(","))
+        elif m[:2]=="-e":
+            settings.exclude_extensions += map(clean, m[2:].split(","))
         elif m[:10]=="--include=":
             ext_list = m[10:].split(",")
-        elif m[:3]=="-i=":
-            ext_list = m[3:].split(",")
+        elif m[:2]=="-i":
+            ext_list = m[2:].split(",")
         elif m=="-p" or m == "--preview":
             preview = True
         elif m=="-n" or m == "--nozip":
             nozip = True
+        else:
+            print("Unrecognized parameter: " + m)
+            sys.exit(-1)

     outfile = open(os.path.join("..", "results", "gen_regression_db_output.txt"), "w")
     if ext_list is None:
@@ -206,9 +209,8 @@ if __name__ == "__main__":
         # todo: Fix for multi dot extensions like .skeleton.xml
         ext_list = list(filter(lambda f: not f in settings.exclude_extensions,
             map(clean, ext_list)))

+    print('File extensions processed: ' + ', '.join(ext_list))
     if preview:
-        print(','.join(ext_list))
         sys.exit(1)

     extract_zip()
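
After this change, the short option prefixes are matched without a separating '=', in line with the usage examples added above, and the first argument must be the assimp binary. A few invocations the new parser accepts (sketches; the binary path is a placeholder):

    python gen_db.py /path/to/assimp_cmd -iobj,ply -n
    python gen_db.py /path/to/assimp_cmd --include=obj,ply --preview
    python gen_db.py /path/to/assimp_cmd -e3ds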
extract_zip() extract_zip()