pull/502/head
ulf 2015-03-16 11:35:33 +01:00
commit fdad4b51a1
12 changed files with 7638 additions and 162 deletions


@@ -188,8 +188,8 @@ Discreet3DSExporter:: Discreet3DSExporter(boost::shared_ptr<IOStream> outfile, c
     {
         ChunkWriter chunk(writer, Discreet3DS::CHUNK_OBJMESH);
-        WriteMeshes();
         WriteMaterials();
+        WriteMeshes();

         {
             ChunkWriter chunk(writer, Discreet3DS::CHUNK_MASTER_SCALE);
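The braces around each ChunkWriter matter here: 3DS chunks carry their own byte size in the header, and a scoped writer of this kind typically reserves the header on construction and back-patches the final size when the scope ends. A minimal sketch of that RAII pattern, using hypothetical names rather than assimp's actual ChunkWriter:

    #include <cstdint>
    #include <ostream>

    // Hypothetical scoped chunk writer (not assimp's ChunkWriter): emits the
    // chunk id plus a size placeholder, then patches the real size once the
    // scope ends and all nested chunks have been written.
    class ScopedChunk {
    public:
        ScopedChunk(std::ostream& out, uint16_t id)
            : out_(out), start_(out.tellp()) {
            const uint32_t placeholder = 0;
            out_.write(reinterpret_cast<const char*>(&id), sizeof(id));
            out_.write(reinterpret_cast<const char*>(&placeholder), sizeof(placeholder));
        }
        ~ScopedChunk() {
            const std::ostream::pos_type end = out_.tellp();
            const uint32_t size = static_cast<uint32_t>(end - start_);
            out_.seekp(start_ + std::streamoff(sizeof(uint16_t)));  // skip the id
            out_.write(reinterpret_cast<const char*>(&size), sizeof(size));
            out_.seekp(end);                                        // resume appending
        }
    private:
        std::ostream& out_;
        std::ostream::pos_type start_;
    };

Nesting falls out naturally: each inner scope patches its own size before the enclosing scope does.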


@@ -118,6 +118,16 @@ namespace Blender {
 #ifdef _MSC_VER
 #   pragma warning(disable:4351)
 #endif
+
+    struct ObjectCompare {
+        bool operator() (const Object* left, const Object* right) const {
+            return strcmp(left->id.name, right->id.name) == -1;
+        }
+    };
+
+    // When keeping objects in sets, sort them by their name.
+    typedef std::set<const Object*, ObjectCompare> ObjectSet;
+
     // --------------------------------------------------------------------
     /** ConversionData acts as intermediate storage location for
     *  the various ConvertXXX routines in BlenderImporter.*/

@@ -130,7 +140,13 @@ namespace Blender {
         , db(db)
     {}

-        std::set<const Object*> objects;
+        struct ObjectCompare {
+            bool operator() (const Object* left, const Object* right) const {
+                return strcmp(left->id.name, right->id.name) == -1;
+            }
+        };
+
+        ObjectSet objects;

         TempArray <std::vector, aiMesh> meshes;
         TempArray <std::vector, aiCamera> cameras;
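The point of the comparator is determinism: a plain std::set<const Object*> orders its elements by pointer value, so iteration order (and hence the generated scene) can differ from run to run. Sorting by object name pins the order down. A small standalone sketch of the idea, with a simplified stand-in for Blender::Object; note that strcmp is only guaranteed to return a negative, zero, or positive value, so a strictly portable comparator would test "< 0" rather than "== -1":

    #include <cstring>
    #include <set>

    struct Object {                     // simplified stand-in for Blender::Object
        char id_name[24];               // real Blender IDs carry a two-char type prefix
        const Object* parent;
    };

    struct ObjectCompare {
        // strict weak ordering by name: stable, address-independent iteration
        bool operator()(const Object* left, const Object* right) const {
            return std::strcmp(left->id_name, right->id_name) < 0;
        }
    };

    typedef std::set<const Object*, ObjectCompare> ObjectSet;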


@@ -559,24 +559,26 @@ void BlenderImporter::BuildMaterials(ConversionData& conv_data)
         if (mesh->mMaterialIndex == static_cast<unsigned int>( -1 )) {
             if (index == static_cast<unsigned int>( -1 )) {
-                // ok, we need to add a dedicated default material for some poor material-less meshes
+                // Setup a default material.
                 boost::shared_ptr<Material> p(new Material());
+                ai_assert(::strlen(AI_DEFAULT_MATERIAL_NAME) < sizeof(p->id.name)-2);
                 strcpy( p->id.name+2, AI_DEFAULT_MATERIAL_NAME );
+
+                // Note: MSVC11 does not zero-initialize Material here, although it should.
+                // Thus all relevant fields should be explicitly initialized. We cannot add
+                // a default constructor to Material since the DNA codegen does not support
+                // parsing it.
                 p->r = p->g = p->b = 0.6f;
                 p->specr = p->specg = p->specb = 0.6f;
                 p->ambr = p->ambg = p->ambb = 0.0f;
                 p->mirr = p->mirg = p->mirb = 0.0f;
                 p->emit = 0.f;
                 p->alpha = 0.f;
-                // XXX add more / or add default c'tor to Material
+                p->har = 0;

                 index = static_cast<unsigned int>( conv_data.materials_raw.size() );
                 conv_data.materials_raw.push_back(p);
-                LogInfo("Adding default material ...");
+                LogInfo("Adding default material");
             }
             mesh->mMaterialIndex = index;
         }
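Because the DNA-generated Material struct cannot be given a constructor, every default has to be spelled out by hand, and the reminder comment added in the next hunk asks future editors to keep that block in sync with any newly handled material field. One way to keep such defaults in a single place, sketched here with a simplified, hypothetical stand-in for the real struct and helper:

    #include <boost/shared_ptr.hpp>
    #include <cassert>
    #include <cstring>

    // Simplified stand-in for the DNA-generated Material; the real struct cannot
    // get a default constructor because the DNA code generator would not parse it.
    struct Material {
        char id_name[32];
        float r, g, b, specr, specg, specb;
        float ambr, ambg, ambb, mirr, mirg, mirb;
        float emit, alpha;
        int har;
    };

    // Hypothetical helper: builds the one default material used for meshes that
    // reference no material at all, keeping all defaults in one function.
    inline boost::shared_ptr<Material> MakeDefaultMaterial(const char* name) {
        boost::shared_ptr<Material> p(new Material());
        std::memset(p.get(), 0, sizeof(Material));   // guard against compilers that
                                                     // skip the zero-initialization
        assert(std::strlen(name) < sizeof(p->id_name));
        std::strcpy(p->id_name, name);
        p->r = p->g = p->b = 0.6f;
        p->specr = p->specg = p->specb = 0.6f;
        return p;
    }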
@@ -591,6 +593,7 @@ void BlenderImporter::BuildMaterials(ConversionData& conv_data)
         aiMaterial* mout = new aiMaterial();
         conv_data.materials->push_back(mout);
+        // For any new material field handled here, the default material above must be updated with an appropriate default value.

         // set material name
         aiString name = aiString(mat->id.name+2); // skip over the name prefix 'MA'
@@ -1044,7 +1047,7 @@ aiLight* BlenderImporter::ConvertLight(const Scene& /*in*/, const Object* obj, c
 aiNode* BlenderImporter::ConvertNode(const Scene& in, const Object* obj, ConversionData& conv_data, const aiMatrix4x4& parentTransform)
 {
     std::deque<const Object*> children;
-    for(std::set<const Object*>::iterator it = conv_data.objects.begin(); it != conv_data.objects.end() ;) {
+    for(ObjectSet::iterator it = conv_data.objects.begin(); it != conv_data.objects.end() ;) {
         const Object* object = *it;
         if (object->parent == obj) {
             children.push_back(object);
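ConvertNode scans the remaining object set for children of the current node; the loop header deliberately has no increment, which is the usual pre-C++11 form when the body may erase elements from an associative container while iterating. A sketch of such a collect-and-erase loop (an assumption about what the elided loop body does), reusing the simplified Object and ObjectSet types from the earlier sketch:

    #include <deque>

    // Collect all children of `obj` and drop them from the working set so they
    // are not converted twice. erase(it++) removes the current element after the
    // iterator has already advanced, so the iterator stays valid.
    std::deque<const Object*> CollectChildren(ObjectSet& objects, const Object* obj) {
        std::deque<const Object*> children;
        for (ObjectSet::iterator it = objects.begin(); it != objects.end(); ) {
            const Object* object = *it;
            if (object->parent == obj) {
                children.push_back(object);
                objects.erase(it++);
            } else {
                ++it;
            }
        }
        return children;
    }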


@@ -1982,9 +1982,11 @@ void ColladaParser::ReadIndexData( Mesh* pMesh)
         }
     }

-    // small sanity check
-    if (primType != Prim_TriFans && primType != Prim_TriStrips)
+#ifdef ASSIMP_BUILD_DEBUG
+    if (primType != Prim_TriFans && primType != Prim_TriStrips) {
         ai_assert(actualPrimitives == numPrimitives);
+    }
+#endif

     // only when we're done reading all <p> tags (and thus know the final vertex count) can we commit the submesh
     subgroup.mNumFaces = actualPrimitives;
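The guard exists because ai_assert compiles away outside debug builds, so in release the comparison of actualPrimitives against numPrimitives would be dead code and, depending on warning level, an unused-variable warning. Wrapping the whole sanity check in the same build flag keeps release builds clean. A generic sketch of the pattern, with a hypothetical assert macro rather than assimp's own:

    #include <cassert>

    // Hypothetical debug-only assert, mirroring how ai_assert behaves: it
    // expands to nothing unless the debug build flag is set.
    #ifdef MYLIB_BUILD_DEBUG
    #  define MYLIB_ASSERT(expr) assert(expr)
    #else
    #  define MYLIB_ASSERT(expr) ((void)0)
    #endif

    void commit_submesh(unsigned int actualPrimitives, unsigned int numPrimitives,
                        bool isFanOrStrip) {
    #ifdef MYLIB_BUILD_DEBUG
        // Tri-fans and tri-strips expand to a different face count, so the
        // one-to-one check only holds for the other primitive types.
        if (!isFanOrStrip) {
            MYLIB_ASSERT(actualPrimitives == numPrimitives);
        }
    #else
        (void)numPrimitives;   // not inspected in release builds
        (void)isFanOrStrip;
    #endif
        // ... commit `actualPrimitives` faces to the submesh ...
        (void)actualPrimitives;
    }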


@@ -1,33 +0,0 @@
rem ------------------------------------------------------------------------------
rem Tiny script to execute a single unit test suite.
rem
rem Usage:
rem SET OUTDIR=<directory_for_test_results>
rem SET BINDIR=<directory_where_binaries_are_stored>
rem
rem CALL RunSingleUnitTestSuite <name_of_test> <output_file>
rem
rem Post:
rem FIRSTUTNA - if the test wasn't found, receives the test name
rem FIRSTUTFAILURE - if the test failed, receives the test name
rem
rem ------------------------------------------------------------------------------
IF NOT EXIST %BINDIR%\%1\unit.exe (
echo NOT AVAILABLE. Please rebuild this configuration
echo Unable to find %BINDIR%\%1\unit.exe > %OUTDIR%%2
SET FIRSTUTNA=%2
) ELSE (
%BINDIR%\%1\unit.exe > %OUTDIR%%2
IF errorlevel == 0 (
echo SUCCESS
) ELSE (
echo FAILURE, check output file: %2
SET FIRSTUTFAILURE=%2
)
)
echo.
echo.


@@ -1,94 +0,0 @@
rem ------------------------------------------------------------------------------
rem Tiny script to execute Assimp's full unit test suite for all configurations
rem
rem Usage: call RunUnitTestSuite
rem ------------------------------------------------------------------------------
rem Setup the console environment
set errorlevel=0
color 4e
cls
@echo off
rem Setup target architecture
SET ARCHEXT=x64
IF %PROCESSOR_ARCHITECTURE% == x86 (
SET ARCHEXT=win32
)
rem Setup standard paths from here
SET OUTDIR=results\
SET BINDIR=..\bin\
SET FIRSTUTFAILURE=nil
SET FIRSTUTNA=nil
echo #=====================================================================
echo # Open Asset Import Library - Unittests
echo #=====================================================================
echo #
echo # Executes the Assimp library unit test suite for the following
echo # build configurations (if available):
echo #
echo # Release
echo # Release -st
echo # Release -noboost
echo # Release -dll
echo #
echo # Debug
echo # Debug -st
echo # Debug -noboost
echo # Debug -dll
echo ======================================================================
echo.
echo.
echo assimp-core release
echo **********************************************************************
call RunSingleUnitTestSuite unit_release_%ARCHEXT% release.txt
echo assimp-core release -st
echo **********************************************************************
call RunSingleUnitTestSuite unit_release-st_%ARCHEXT% release-st.txt
echo assimp-core release -noboost
echo **********************************************************************
call RunSingleUnitTestSuite unit_release-noboost-st_%ARCHEXT% release-st-noboost.txt
echo assimp-core release -dll
echo **********************************************************************
call RunSingleUnitTestSuite unit_release-dll_%ARCHEXT% release-dll.txt
echo assimp-core debug
echo **********************************************************************
call RunSingleUnitTestSuite unit_debug_%ARCHEXT% debug.txt
echo assimp-core debug -st
echo **********************************************************************
call RunSingleUnitTestSuite unit_debug-st_%ARCHEXT% debug-st.txt
echo assimp-core debug -noboost
echo **********************************************************************
call RunSingleUnitTestSuite unit_debug-noboost-st_%ARCHEXT% debug-st-noboost.txt
echo assimp-core debug -dll
echo **********************************************************************
call RunSingleUnitTestSuite unit_debug-dll_%ARCHEXT% debug-dll.txt
echo ======================================================================
IF %FIRSTUTNA% == nil (
echo All test configs have been found.
) ELSE (
echo One or more test configs are not available.
)
IF %FIRSTUTFAILURE% == nil (
echo All tests have been successful.
) ELSE (
echo One or more tests failed.
)
echo.
pause

Binary file not shown.

File diff suppressed because one or more lines are too long


@@ -167,11 +167,11 @@ def process_dir(d, outfile_results, zipin, result):
     print("Processing directory " + d)
     for f in sorted(os.listdir(d)):
         fullpath = os.path.join(d, f)
-        if os.path.isdir(fullpath) and not f == ".svn":
+        if os.path.isdir(fullpath) and not f[:1] == '.':
             process_dir(fullpath, outfile_results, zipin, result)
             continue

-        if f in settings.files_to_ignore:
+        if f in settings.files_to_ignore or os.path.splitext(f)[1] in settings.exclude_extensions:
             print("Ignoring " + f)
             continue
@@ -190,32 +190,26 @@ def process_dir(d, outfile_results, zipin, result):
                     "regression database? Use gen_db.zip to re-generate.")
                 continue

-            # Ignore extensions via settings.py configured list
-            # todo: Fix for multi dot extensions like .skeleton.xml
-            ext = os.path.splitext(fullpath)[1].lower()
-            if ext != "" and ext in settings.exclude_extensions:
-                continue

             print("-"*60 + "\n " + os.path.realpath(fullpath) + " pp: " + pppreset)

             outfile_actual = prepare_output_dir(fullpath, filehash, "ACTUAL")
             outfile_expect = prepare_output_dir(fullpath, filehash, "EXPECT")
             outfile_results.write("assimp dump "+"-"*80+"\n")
             outfile_results.flush()

             command = [assimp_bin_path,
                 "dump",
                 fullpath, outfile_actual, "-b", "-s", "-l" ] +\
                 pppreset.split()

             r = subprocess.call(command, **shellparams)
-            print(r)
+            outfile_results.flush()

             if r and not failure:
                 result.fail(fullpath, outfile_expect, pppreset, IMPORT_FAILURE, r)
+                outfile_results.write("Failed to import\n")
                 continue
             elif failure and not r:
                 result.fail(fullpath, outfile_expect, pppreset, EXPECTED_FAILURE_NOT_MET)
+                outfile_results.write("Expected import to fail\n")
                 continue

             with open(outfile_expect, "wb") as s:
@@ -227,21 +221,24 @@ def process_dir(d, outfile_results, zipin, result):
             except IOError:
                 continue

+            outfile_results.write("Expected data length: {0}\n".format(len(input_expected)))
+            outfile_results.write("Actual data length: {0}\n".format(len(input_actual)))
+            failed = False
             if len(input_expected) != len(input_actual):
                 result.fail(fullpath, outfile_expect, pppreset, DATABASE_LENGTH_MISMATCH,
                     len(input_expected), len(input_actual))
-                continue
+                # Still compare the dumps to see what the difference is
+                failed = True

             outfile_results.write("assimp cmpdump "+"-"*80+"\n")
             outfile_results.flush()

             command = [ assimp_bin_path, 'cmpdump', outfile_actual, outfile_expect ]
             if subprocess.call(command, **shellparams) != 0:
-                result.fail(fullpath, outfile_expect, pppreset, DATABASE_VALUE_MISMATCH)
+                if not failed:
+                    result.fail(fullpath, outfile_expect, pppreset, DATABASE_VALUE_MISMATCH)
                 continue

-            result.ok(fullpath, pppreset, COMPARE_SUCCESS,
-                len(input_expected))
+            result.ok(fullpath, pppreset, COMPARE_SUCCESS, len(input_expected))

 # -------------------------------------------------------------------------------
 def del_folder_with_contents(folder):


@@ -62,7 +62,7 @@ files_to_ignore = ["pond.0.ply"]
 exclude_extensions = [
     ".lws", ".assbin", ".assxml", ".txt", ".md",
     ".jpeg", ".jpg", ".png", ".gif", ".tga", ".bmp",
-    ".skeleton", ".skeleton.xml"
+    ".skeleton", ".skeleton.xml", ".license"
 ]

 # -------------------------------------------------------------------------------


@@ -257,16 +257,32 @@ private:
     /* read from both streams simult.*/
     template <typename T> void read(T& filla,T& fille) {
         if(1 != fread(&filla,sizeof(T),1,actual)) {
-            throw compare_fails_exception("Unexpected EOF reading ACTUAL");
+            EOFActual();
         }
         if(1 != fread(&fille,sizeof(T),1,expect)) {
-            throw compare_fails_exception("Unexpected EOF reading EXPECT");
+            EOFExpect();
         }
     }

 private:

+    void EOFActual() {
+        std::stringstream ss;
+        throw compare_fails_exception((ss
+            << "Unexpected EOF reading ACTUAL.\nCurrent position in scene hierarchy is "
+            << print_hierarchy(),ss.str().c_str()
+        ));
+    }
+
+    void EOFExpect() {
+        std::stringstream ss;
+        throw compare_fails_exception((ss
+            << "Unexpected EOF reading EXPECT.\nCurrent position in scene hierarchy is "
+            << print_hierarchy(),ss.str().c_str()
+        ));
+    }

     FILE *const actual, *const expect;

     typedef std::map<std::string,unsigned int> PerChunkCounter;
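The EOFActual/EOFExpect helpers centralize what used to be two bare throw statements and enrich the message with the current position in the scene hierarchy. The "(stream << ..., stream.str())" form relies on the comma operator: the insertions run first, then the built string is what the exception constructor receives. A self-contained sketch of the same idiom (the exception type and hierarchy string here are stand-ins, not the tool's own):

    #include <sstream>
    #include <stdexcept>
    #include <string>

    struct compare_fails : std::runtime_error {      // stand-in exception type
        explicit compare_fails(const std::string& msg) : std::runtime_error(msg) {}
    };

    void fail_on_eof(const char* which_stream, const std::string& hierarchy) {
        std::stringstream ss;
        // Comma operator: build the message first, then hand ss.str() to the
        // exception constructor, all inside the throw expression.
        throw compare_fails((ss << "Unexpected EOF reading " << which_stream
                                << ".\nCurrent position in scene hierarchy is "
                                << hierarchy,
                             ss.str()));
    }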
@@ -290,10 +306,10 @@ template <> void comparer_context :: read<aiString>(aiString& filla,aiString& fi
     read(lena,lene);

     if(lena && 1 != fread(&filla.data,lena,1,actual)) {
-        throw compare_fails_exception("Unexpected EOF reading ACTUAL");
+        EOFActual();
     }
     if(lene && 1 != fread(&fille.data,lene,1,expect)) {
-        throw compare_fails_exception("Unexpected EOF reading ACTUAL");
+        EOFExpect();
     }

     fille.data[fille.length=static_cast<unsigned int>(lene)] = '\0';
@@ -487,7 +503,7 @@ private:
     res|=fread(&actual.second,4,1,ctx.get_actual()) <<3u;

     if(res!=0xf) {
-        ctx.failure("I/OError reading chunk head, dumps are not well-defined","<ChunkHead>");
+        ctx.failure("IO Error reading chunk head, dumps are malformed","<ChunkHead>");
     }

     if (current.first != actual.first) {
@@ -504,7 +520,7 @@ private:
     if (current.first != actual.first) {
         std::stringstream ss;
         ctx.failure((ss
-            <<"Chunk lenghts do not match. EXPECT: "
+            <<"Chunk lengths do not match. EXPECT: "
            <<current.second
            <<" ACTUAL: "
            << actual.second,
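For context on the "res |= fread(...) << n" lines that precede these failure messages: fread returns the number of complete items read, so with an item count of 1 each call contributes exactly one bit, and res == 0xf means all four header fields (two per dump) arrived intact. A standalone sketch of the same check, with hypothetical names rather than the tool's comparer context:

    #include <cstdint>
    #include <cstdio>
    #include <utility>

    // Read a <chunk id, chunk length> pair from both the EXPECT and ACTUAL dump
    // files. Each fread() with an item count of 1 returns 1 on success, so the
    // four results are packed into separate bits and tested in one comparison.
    bool read_chunk_heads(std::FILE* expect, std::FILE* actual,
                          std::pair<uint32_t, uint32_t>& current,
                          std::pair<uint32_t, uint32_t>& found) {
        unsigned int res = 0;
        res |= std::fread(&current.first,  4, 1, expect) << 0u;
        res |= std::fread(&current.second, 4, 1, expect) << 1u;
        res |= std::fread(&found.first,    4, 1, actual) << 2u;
        res |= std::fread(&found.second,   4, 1, actual) << 3u;
        return res == 0xf;   // any short read leaves a zero bit: malformed dump
    }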