- added support for lights and cameras
- added support for tangents and bitangents
- added support for more than two UV components per texture coordinate set
- fixed node naming
- beta support for <instance_node> elements; works in most cases
- added support for more complex materials
- UV index is now set correctly (hopefully)

Material system:
- fixed potential problems regarding aiUVTransform
- added utility macros for the base material keys (see the usage sketch below)

git-svn-id: https://assimp.svn.sourceforge.net/svnroot/assimp/trunk@338 67173fc5-114c-0410-ac8e-9d2fd5bffc1f
aramis_acg 2009-02-08 22:55:51 +00:00
parent e54ef3944d
commit 7678b8e1be
7 changed files with 1559 additions and 288 deletions
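The new base-key macros let the loader pass the texture type and index as runtime arguments instead of hard-coding them into the key string. A minimal sketch of the intended usage, modelled on the AddTexture() helper added further down in this commit; mat, sampler, type and idx are placeholders assumed to exist in the surrounding code:

// Sketch only -- 'mat' is an Assimp::MaterialHelper, 'sampler' a Collada::Sampler,
// 'type' an aiTextureType and 'idx' the texture index within that type.
aiString file( sampler.mName);
mat.AddProperty( &file, _AI_MATKEY_TEXTURE_BASE, type, idx);          // equivalent to AI_MATKEY_TEXTURE(type, idx)
int mode = aiTextureMapMode_Wrap;
mat.AddProperty( &mode, 1, _AI_MATKEY_MAPPINGMODE_U_BASE, type, idx);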


@ -43,10 +43,8 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef AI_COLLADAHELPER_H_INC
#define AI_COLLADAHELPER_H_INC
-namespace Assimp
-{
-namespace Collada
-{
+namespace Assimp {
+namespace Collada {
/** Transformation types that can be applied to a node */
enum TransformType
@ -59,6 +57,19 @@ enum TransformType
TF_MATRIX
};
/** Different types of input data to a vertex or face */
enum InputType
{
IT_Invalid,
IT_Vertex, // special type for per-index data referring to the <vertices> element carrying the per-vertex data.
IT_Position,
IT_Normal,
IT_Texcoord,
IT_Color,
IT_Tangent,
IT_Bitangent,
};
/** Contains all data for one of the different transformation types */
struct Transform
{
@ -66,11 +77,135 @@ struct Transform
float f[16]; ///< Interpretation of data depends on the type of the transformation
};
/** A collada camera. */
struct Camera
{
Camera()
: mOrtho (false)
, mHorFov (10e10f)
, mVerFov (10e10f)
, mAspect (10e10f)
, mZNear (0.1f)
, mZFar (1000.f)
{}
// Name of camera
std::string mName;
// True if it is an orthographic camera
bool mOrtho;
//! Horizontal field of view in degrees
float mHorFov;
//! Vertical field of view in degrees
float mVerFov;
//! Screen aspect
float mAspect;
//! Near& far z
float mZNear, mZFar;
};
#define aiLightSource_AMBIENT 0xdeaddead
/** A collada light source. */
struct Light
{
Light()
: mAttConstant (1.f)
, mAttLinear (0.f)
, mAttQuadratic (0.f)
, mFalloffAngle (180.f)
, mFalloffExponent (0.f)
, mPenumbraAngle (10e10f)
, mOuterAngle (10e10f)
, mIntensity (1.f)
{}
//! Type of the light source aiLightSourceType + ambient
unsigned int mType;
//! Color of the light
aiColor3D mColor;
//! Light attenuation
float mAttConstant,mAttLinear,mAttQuadratic;
//! Spot light falloff
float mFalloffAngle;
float mFalloffExponent;
// -----------------------------------------------------
// FCOLLADA extension from here
//! ... related stuff from Maya and Max extensions
float mPenumbraAngle;
float mOuterAngle;
//! Common light intensity
float mIntensity;
};
/** Short vertex index description */
struct InputSemanticMapEntry
{
InputSemanticMapEntry()
: mSet (0)
{}
//! Index of set, optional
unsigned int mSet;
//! Name of referenced vertex input
InputType mType;
};
/** Table to map from effect to vertex input semantics */
struct SemanticMappingTable
{
//! Name of material
std::string mMatName;
//! List of semantic map commands, grouped by effect semantic name
std::map<std::string, InputSemanticMapEntry> mMap;
//! For std::find
bool operator == (const std::string& s) const {
return s == mMatName;
}
};
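The mapping table above carries the effect-semantic-to-vertex-input bindings read from <bind_vertex_input>; the loader uses it to resolve which mesh UV set a texture sampler refers to. A minimal sketch of that lookup, mirroring the ApplyVertexToEffectSemanticMapping() helper added in this commit (the channel name and set index are illustrative, not from the patch):

// Illustrative values only -- real entries come from the parsed <instance_material>.
Collada::SemanticMappingTable table;
Collada::InputSemanticMapEntry entry;
entry.mType = Collada::IT_Texcoord;
entry.mSet  = 1;                        // second UV set of the mesh
table.mMap["CHANNEL1"] = entry;         // semantic name used by the effect

Collada::Sampler sampler;
sampler.mUVChannel = "CHANNEL1";
std::map<std::string, Collada::InputSemanticMapEntry>::const_iterator it = table.mMap.find( sampler.mUVChannel);
if( it != table.mMap.end() && it->second.mType == Collada::IT_Texcoord)
    sampler.mUVId = it->second.mSet;    // texture now samples UV set 1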
/** A reference to a mesh inside a node, including materials assigned to the various subgroups */
struct MeshInstance
{
-std::string mMesh; ///< ID of the mesh
-std::map<std::string, std::string> mMaterials; ///< Map of materials by the subgroup ID they're applied to
+///< ID of the mesh
+std::string mMesh;
+///< Map of materials by the subgroup ID they're applied to
+std::map<std::string, SemanticMappingTable> mMaterials;
};
/** A reference to a camera inside a node*/
struct CameraInstance
{
///< ID of the camera
std::string mCamera;
};
/** A reference to a light inside a node*/
struct LightInstance
{
///< ID of the light
std::string mLight;
};
/** A reference to a node inside a node*/
struct NodeInstance
{
///< ID of the node
std::string mNode;
};
/** A node in a scene hierarchy */
@ -84,10 +219,31 @@ struct Node
/** Operations in order to calculate the resulting transformation to parent. */
std::vector<Transform> mTransforms;
-std::vector<MeshInstance> mMeshes; ///< Meshes at this node
-Node() { mParent = NULL; }
-~Node() { for( std::vector<Node*>::iterator it = mChildren.begin(); it != mChildren.end(); ++it) delete *it; }
+/** Meshes at this node */
+std::vector<MeshInstance> mMeshes;
+/** Lights at this node */
+std::vector<LightInstance> mLights;
/** Cameras at this node */
std::vector<CameraInstance> mCameras;
/** Node instances at this node */
std::vector<NodeInstance> mNodeInstances;
/** Rootnodes: Name of primary camera, if any */
std::string mPrimaryCamera;
//! Constructor. Begin with a zero parent
Node() {
mParent = NULL;
}
//! Destructor: delete all children subsequently
~Node() {
for( std::vector<Node*>::iterator it = mChildren.begin(); it != mChildren.end(); ++it)
delete *it;
}
};
/** Data source array */
@ -121,17 +277,6 @@ struct Face
std::vector<size_t> mIndices;
};
-/** Different types of input data to a vertex or face */
-enum InputType
-{
-IT_Invalid,
-IT_Vertex, // special type for per-index data referring to the <vertices> element carrying the per-vertex data.
-IT_Position,
-IT_Normal,
-IT_Texcoord,
-IT_Color
-};
/** An input channel for mesh data, referring to a single accessor */
struct InputChannel
{
@ -154,15 +299,25 @@ struct SubMesh
/** Contains data for a single mesh */
struct Mesh
{
Mesh()
{
for (unsigned int i = 0; i < AI_MAX_NUMBER_OF_TEXTURECOORDS;++i)
mNumUVComponents[i] = 2;
}
std::string mVertexID; // just to check if there's some sophisticated addressing involved... which we don't support, and therefore should warn about.
std::vector<InputChannel> mPerVertexData; // Vertex data addressed by vertex indices
// actual mesh data, assembled on encounter of a <p> element. Verbose format, not indexed
std::vector<aiVector3D> mPositions;
std::vector<aiVector3D> mNormals;
-std::vector<aiVector2D> mTexCoords[AI_MAX_NUMBER_OF_TEXTURECOORDS];
+std::vector<aiVector3D> mTangents;
+std::vector<aiVector3D> mBitangents;
+std::vector<aiVector3D> mTexCoords[AI_MAX_NUMBER_OF_TEXTURECOORDS];
std::vector<aiColor4D> mColors[AI_MAX_NUMBER_OF_COLOR_SETS];
+unsigned int mNumUVComponents[AI_MAX_NUMBER_OF_TEXTURECOORDS];
// Faces. Stored are only the number of vertices for each face. 1 == point, 2 == line, 3 == triangle, 4+ == poly
std::vector<size_t> mFaceSize;
@ -213,30 +368,98 @@ enum ShadeType
Shade_Blinn
};
/** Represents a texture sampler in collada */
struct Sampler
{
Sampler()
: mWrapU (true)
, mWrapV (true)
, mMirrorU (true)
, mMirrorV (true)
, mOp (aiTextureOp_Multiply)
, mUVId (0xffffffff)
, mWeighting (1.f)
, mMixWithPrevious (1.f)
{}
// Name of image reference
std::string mName;
// Wrap U?
bool mWrapU;
// Wrap V?
bool mWrapV;
// Mirror U?
bool mMirrorU;
// Mirror V?
bool mMirrorV;
// Blend mode
aiTextureOp mOp;
// UV transformation
aiUVTransform mTransform;
// Name of source UV channel
std::string mUVChannel;
// Resolved UV channel index or 0xffffffff if not known
unsigned int mUVId;
// OKINO/MAX3D extensions from here
// -------------------------------------------------------
// Weighting factor
float mWeighting;
// Mixing factor from OKINO
float mMixWithPrevious;
};
/** A collada effect. Can contain about anything according to the Collada spec, but we limit our version to a reasonable subset. */
struct Effect
{
+// Shading mode
ShadeType mShadeType;
+// Colors
aiColor4D mEmissive, mAmbient, mDiffuse, mSpecular;
-aiColor4D mReflective, mRefractive;
-std::string mTexEmissive, mTexAmbient, mTexDiffuse, mTexSpecular;
+aiColor4D mTransparent;
+// Textures
+Sampler mTexEmissive, mTexAmbient, mTexDiffuse, mTexSpecular,
+mTexTransparent, mTexBump;
+// Scalar factory
float mShininess, mRefractIndex;
-float mReflectivity, mRefractivity;
+float mTransparency;
// local params referring to each other by their SID
typedef std::map<std::string, Collada::EffectParam> ParamLibrary;
ParamLibrary mParams;
-Effect() : mEmissive( 0, 0, 0, 1), mAmbient( 0.1f, 0.1f, 0.1f, 1),
-mDiffuse( 0.6f, 0.6f, 0.6f, 1), mSpecular( 0.4f, 0.4f, 0.4f, 1),
-mReflective( 0, 0, 0, 0), mRefractive( 0, 0, 0, 0)
+// MAX3D extensions
+// ---------------------------------------------------------
+// Double-sided?
+bool mDoubleSided, mWireframe, mFaceted;
Effect()
: mShadeType (Shade_Phong)
, mEmissive ( 0, 0, 0, 1)
, mAmbient ( 0.1f, 0.1f, 0.1f, 1)
, mDiffuse ( 0.6f, 0.6f, 0.6f, 1)
, mSpecular ( 0.4f, 0.4f, 0.4f, 1)
, mTransparent ( 0, 0, 0, 1)
, mShininess (10.0f)
, mRefractIndex (1.f)
, mTransparency (0.f)
, mDoubleSided (false)
, mWireframe (false)
, mFaceted (false)
{
-mShadeType = Shade_Phong;
-mShininess = 10.0f;
-mRefractIndex = 1.0f;
-mReflectivity = 0.0f;
-mRefractivity = 0.0f;
}
};


@ -48,6 +48,11 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "ColladaLoader.h" #include "ColladaLoader.h"
#include "ColladaParser.h" #include "ColladaParser.h"
#include "fast_atof.h"
#include "ParsingUtils.h"
#include "time.h"
using namespace Assimp;
// ------------------------------------------------------------------------------------------------
@ -95,11 +100,19 @@ void ColladaLoader::InternReadFile( const std::string& pFile, aiScene* pScene, I
{
mFileName = pFile;
// clean all member arrays - just for safety, it should work even if we did not
mMeshIndexByID.clear();
mMaterialIndexByName.clear();
mMeshes.clear();
newMats.clear();
mLights.clear();
mCameras.clear();
// parse the input file
ColladaParser parser( pFile);
if( !parser.mRootNode)
-throw new ImportErrorException( "File came out empty. Something is wrong here.");
+throw new ImportErrorException( "Collada: File came out empty. Something is wrong here.");
// create the materials first, for the meshes to find
BuildMaterials( parser, pScene);
@ -107,6 +120,9 @@ void ColladaLoader::InternReadFile( const std::string& pFile, aiScene* pScene, I
// build the node hierarchy from it
pScene->mRootNode = BuildHierarchy( parser, parser.mRootNode);
// ... then fill the materials with the now adjusted settings
FillMaterials(parser, pScene);
// Convert to Z_UP, if different orientation
if( parser.mUpDirection == ColladaParser::UP_X)
pScene->mRootNode->mTransformation *= aiMatrix4x4(
@ -123,6 +139,25 @@ void ColladaLoader::InternReadFile( const std::string& pFile, aiScene* pScene, I
// store all meshes
StoreSceneMeshes( pScene);
// store all materials
StoreSceneMaterials( pScene);
// store all lights
StoreSceneLights( pScene);
// if we know which camera is the primary camera, copy it to index 0
if (0 != parser.mRootNode->mPrimaryCamera.length()) {
for (unsigned int i = 1; i < mCameras.size(); ++i) {
if (mCameras[i]->mName == parser.mRootNode->mPrimaryCamera) {
std::swap(mCameras[i],mCameras[0]);
break;
}
}
}
// store all cameras
StoreSceneCameras( pScene);
}
// ------------------------------------------------------------------------------------------------
@ -130,26 +165,229 @@ void ColladaLoader::InternReadFile( const std::string& pFile, aiScene* pScene, I
aiNode* ColladaLoader::BuildHierarchy( const ColladaParser& pParser, const Collada::Node* pNode)
{
// create a node for it
-aiNode* node = new aiNode( pNode->mName);
+aiNode* node = new aiNode();
// now setup the name of the node. We take the name if not empty, otherwise the collada ID
if (!pNode->mName.empty())
node->mName.Set(pNode->mName);
else if (!pNode->mID.empty())
node->mName.Set(pNode->mID);
else
{
// No need to worry. Unnamed nodes are no problem at all, except
// if cameras or lights need to be assigned to them.
if (!pNode->mLights.empty() || !pNode->mCameras.empty()) {
::strcpy(node->mName.data,"$ColladaAutoName$_");
node->mName.length = 18 + ASSIMP_itoa10(node->mName.data+18,MAXLEN-18,(uint32_t)clock());
}
}
// calculate the transformation matrix for it
node->mTransformation = pParser.CalculateResultTransform( pNode->mTransforms);
-// add children
-node->mNumChildren = pNode->mChildren.size();
+// now resolve node instances
+std::vector<Collada::Node*> instances;
+ResolveNodeInstances(pParser,pNode,instances);
+// add children. first the *real* ones
+node->mNumChildren = pNode->mChildren.size()+instances.size();
node->mChildren = new aiNode*[node->mNumChildren];
-for( unsigned int a = 0; a < pNode->mChildren.size(); a++)
+unsigned int a = 0;
+for(; a < pNode->mChildren.size(); a++)
{
node->mChildren[a] = BuildHierarchy( pParser, pNode->mChildren[a]);
node->mChildren[a]->mParent = node;
}
// ... and finally the resolved node instances
for(; a < node->mNumChildren; a++)
{
node->mChildren[a] = BuildHierarchy( pParser, instances[a-pNode->mChildren.size()]);
node->mChildren[a]->mParent = node;
}
// construct meshes
BuildMeshesForNode( pParser, pNode, node);
// construct cameras
BuildCamerasForNode(pParser, pNode, node);
// construct lights
BuildLightsForNode(pParser, pNode, node);
return node;
}
// ------------------------------------------------------------------------------------------------
// Resolve node instances
void ColladaLoader::ResolveNodeInstances( const ColladaParser& pParser, const Collada::Node* pNode,
std::vector<Collada::Node*>& resolved)
{
// reserve enough storage
resolved.reserve(pNode->mNodeInstances.size());
// ... and iterate through all nodes to be instanced as children of pNode
for (std::vector<Collada::NodeInstance>::const_iterator it = pNode->mNodeInstances.begin(),
end = pNode->mNodeInstances.end(); it != end; ++it)
{
// find the corresponding node in the library
ColladaParser::NodeLibrary::const_iterator fnd = pParser.mNodeLibrary.find((*it).mNode);
if (fnd == pParser.mNodeLibrary.end())
DefaultLogger::get()->error("Collada: Unable to resolve reference to instanced node " + (*it).mNode);
else {
// attach this node to the list of children
resolved.push_back((*fnd).second);
}
}
}
// ------------------------------------------------------------------------------------------------
// Resolve UV channels
void ColladaLoader::ApplyVertexToEffectSemanticMapping(Collada::Sampler& sampler,
const Collada::SemanticMappingTable& table)
{
std::map<std::string, Collada::InputSemanticMapEntry>::const_iterator it = table.mMap.find(sampler.mUVChannel);
if (it != table.mMap.end()) {
if (it->second.mType != Collada::IT_Texcoord)
DefaultLogger::get()->error("Collada: Unexpected effect input mapping");
sampler.mUVId = it->second.mSet;
}
}
// ------------------------------------------------------------------------------------------------
// Builds lights for the given node and references them
void ColladaLoader::BuildLightsForNode( const ColladaParser& pParser, const Collada::Node* pNode, aiNode* pTarget)
{
BOOST_FOREACH( const Collada::LightInstance& lid, pNode->mLights)
{
// find the referred light
ColladaParser::LightLibrary::const_iterator srcLightIt = pParser.mLightLibrary.find( lid.mLight);
if( srcLightIt == pParser.mLightLibrary.end())
{
DefaultLogger::get()->warn("Collada: Unable to find light for ID \"" + lid.mLight + "\". Skipping.");
continue;
}
const Collada::Light* srcLight = &srcLightIt->second;
if (srcLight->mType == aiLightSource_AMBIENT) {
DefaultLogger::get()->error("Collada: Skipping ambient light for the moment");
continue;
}
// now fill our ai data structure
aiLight* out = new aiLight();
out->mName = pTarget->mName;
out->mType = (aiLightSourceType)srcLight->mType;
// collada lights point in -Z by default, rest is specified in node transform
out->mDirection = aiVector3D(0.f,0.f,-1.f);
out->mAttenuationConstant = srcLight->mAttConstant;
out->mAttenuationLinear = srcLight->mAttLinear;
out->mAttenuationQuadratic = srcLight->mAttQuadratic;
// collada doesn't differentiate between these color types
out->mColorDiffuse = out->mColorSpecular = out->mColorAmbient = srcLight->mColor*srcLight->mIntensity;
// convert falloff angle and falloff exponent in our representation, if given
if (out->mType == aiLightSource_SPOT) {
out->mAngleInnerCone = AI_DEG_TO_RAD( srcLight->mFalloffAngle );
// ... some extension magic. FUCKING COLLADA.
if (srcLight->mOuterAngle == 10e10f)
{
// ... some deprecation magic. FUCKING FCOLLADA.
if (srcLight->mPenumbraAngle == 10e10f)
{
// Need to rely on falloff_exponent. I don't know how to interpret it, so I need to guess ....
// ci - inner cone angle
// co - outer cone angle
// fe - falloff exponent
// ld - spot direction - normalized
// rd - ray direction - normalized
//
// Formula is:
// 1. (cos(acos (ld dot rd) - ci))^fe == epsilon
// 2. (ld dot rd) == cos(acos(epsilon^(1/fe)) + ci)
// 3. co == acos (ld dot rd)
// 4. co == acos(epsilon^(1/fe)) + ci)
// epsilon chosen to be 0.1
out->mAngleOuterCone = AI_DEG_TO_RAD (acos(pow(0.1f,1.f/srcLight->mFalloffExponent))+
srcLight->mFalloffAngle);
}
else {
out->mAngleOuterCone = out->mAngleInnerCone + AI_DEG_TO_RAD( srcLight->mPenumbraAngle );
if (out->mAngleOuterCone < out->mAngleInnerCone)
std::swap(out->mAngleInnerCone,out->mAngleOuterCone);
}
}
else out->mAngleOuterCone = AI_DEG_TO_RAD( srcLight->mOuterAngle );
}
// add to light list
mLights.push_back(out);
}
}
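For reference, the falloff-exponent fallback above amounts to solving the comment's equation (1) for the outer cone angle at the chosen cutoff, a restatement of the same derivation rather than additional code:

\theta_{outer} = \arccos\left( \varepsilon^{1/f_e} \right) + \theta_{inner}, \qquad \varepsilon = 0.1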
// ------------------------------------------------------------------------------------------------
// Builds cameras for the given node and references them
void ColladaLoader::BuildCamerasForNode( const ColladaParser& pParser, const Collada::Node* pNode, aiNode* pTarget)
{
BOOST_FOREACH( const Collada::CameraInstance& cid, pNode->mCameras)
{
// find the referred camera
ColladaParser::CameraLibrary::const_iterator srcCameraIt = pParser.mCameraLibrary.find( cid.mCamera);
if( srcCameraIt == pParser.mCameraLibrary.end())
{
DefaultLogger::get()->warn("Collada: Unable to find camera for ID \"" + cid.mCamera + "\". Skipping.");
continue;
}
const Collada::Camera* srcCamera = &srcCameraIt->second;
// orthographic cameras not yet supported in Assimp
if (srcCamera->mOrtho) {
DefaultLogger::get()->warn("Collada: Orthographic cameras are not supported.");
}
// now fill our ai data structure
aiCamera* out = new aiCamera();
out->mName = pTarget->mName;
// collada cameras point in -Z by default, rest is specified in node transform
out->mLookAt = aiVector3D(0.f,0.f,-1.f);
// near/far z is already ok
out->mClipPlaneFar = srcCamera->mZFar;
out->mClipPlaneNear = srcCamera->mZNear;
// ... but for the rest some values are optional
// and we need to compute the others in any combination. FUCKING COLLADA.
if (srcCamera->mAspect != 10e10f)
out->mAspect = srcCamera->mAspect;
if (srcCamera->mHorFov != 10e10f) {
out->mHorizontalFOV = srcCamera->mHorFov;
if (srcCamera->mVerFov != 10e10f && srcCamera->mAspect == 10e10f) {
out->mAspect = srcCamera->mHorFov/srcCamera->mVerFov;
}
}
else if (srcCamera->mAspect != 10e10f && srcCamera->mVerFov != 10e10f) {
out->mHorizontalFOV = srcCamera->mAspect*srcCamera->mVerFov;
}
// Collada uses degrees, we use radians
out->mHorizontalFOV = AI_DEG_TO_RAD(out->mHorizontalFOV);
// add to camera list
mCameras.push_back(out);
}
}
// ------------------------------------------------------------------------------------------------
// Builds meshes for the given node and references them
void ColladaLoader::BuildMeshesForNode( const ColladaParser& pParser, const Collada::Node* pNode, aiNode* pTarget)
@ -164,7 +402,7 @@ void ColladaLoader::BuildMeshesForNode( const ColladaParser& pParser, const Coll
ColladaParser::MeshLibrary::const_iterator srcMeshIt = pParser.mMeshLibrary.find( mid.mMesh);
if( srcMeshIt == pParser.mMeshLibrary.end())
{
-DefaultLogger::get()->warn( boost::str( boost::format( "Unable to find geometry for ID \"%s\". Skipping.") % mid.mMesh));
+DefaultLogger::get()->warn( boost::str( boost::format( "Collada: Unable to find geometry for ID \"%s\". Skipping.") % mid.mMesh));
continue;
}
const Collada::Mesh* srcMesh = srcMeshIt->second;
@ -178,12 +416,38 @@ void ColladaLoader::BuildMeshesForNode( const ColladaParser& pParser, const Coll
continue;
// find material assigned to this submesh
-std::map<std::string, std::string>::const_iterator meshMatIt = mid.mMaterials.find( submesh.mMaterial);
-std::string meshMaterial;
+std::map<std::string, Collada::SemanticMappingTable >::const_iterator meshMatIt = mid.mMaterials.find( submesh.mMaterial);
+const Collada::SemanticMappingTable* table;
if( meshMatIt != mid.mMaterials.end())
-meshMaterial = meshMatIt->second;
+table = &meshMatIt->second;
+else {
+table = NULL;
+DefaultLogger::get()->warn( boost::str( boost::format( "Collada: No material specified for subgroup \"%s\" in geometry \"%s\".") % submesh.mMaterial % mid.mMesh));
+}
+const std::string& meshMaterial = table ? table->mMatName : "";
// OK ... here the *real* fun starts ... we have the vertex-input-to-effect-semantic-table
// given. The only mapping stuff which we do actually support is the UV channel.
std::map<std::string, size_t>::const_iterator matIt = mMaterialIndexByName.find( meshMaterial);
unsigned int matIdx;
if( matIt != mMaterialIndexByName.end())
matIdx = matIt->second;
else
-DefaultLogger::get()->warn( boost::str( boost::format( "No material specified for subgroup \"%s\" in geometry \"%s\".") % submesh.mMaterial % mid.mMesh));
+matIdx = 0;
if (table && !table->mMap.empty() ) {
std::pair<Collada::Effect*, aiMaterial*>& mat = newMats[matIdx];
// Iterate through all texture channels assigned to the effect and
// check whether we have mapping information for it.
ApplyVertexToEffectSemanticMapping(mat.first->mTexDiffuse, *table);
ApplyVertexToEffectSemanticMapping(mat.first->mTexAmbient, *table);
ApplyVertexToEffectSemanticMapping(mat.first->mTexSpecular, *table);
ApplyVertexToEffectSemanticMapping(mat.first->mTexEmissive, *table);
ApplyVertexToEffectSemanticMapping(mat.first->mTexTransparent,*table);
ApplyVertexToEffectSemanticMapping(mat.first->mTexBump, *table);
}
// built lookup index of the Mesh-Submesh-Material combination
ColladaMeshIndex index( mid.mMesh, sm, meshMaterial);
@ -199,20 +463,39 @@ void ColladaLoader::BuildMeshesForNode( const ColladaParser& pParser, const Coll
aiMesh* dstMesh = new aiMesh;
// count the vertices addressed by its faces
const size_t numVertices = std::accumulate( srcMesh->mFaceSize.begin() + faceStart,
srcMesh->mFaceSize.begin() + faceStart + submesh.mNumFaces, 0);
// copy positions
dstMesh->mNumVertices = numVertices;
dstMesh->mVertices = new aiVector3D[numVertices];
std::copy( srcMesh->mPositions.begin() + vertexStart, srcMesh->mPositions.begin() +
vertexStart + numVertices, dstMesh->mVertices);
// normals, if given. HACK: (thom) Due to the fucking Collada spec we never
// know if we have the same number of normals as there are positions. So we
// also ignore any vertex attribute if it has a different count
if( srcMesh->mNormals.size() == srcMesh->mPositions.size())
{
dstMesh->mNormals = new aiVector3D[numVertices];
std::copy( srcMesh->mNormals.begin() + vertexStart, srcMesh->mNormals.begin() +
vertexStart + numVertices, dstMesh->mNormals);
}
// tangents, if given.
if( srcMesh->mTangents.size() == srcMesh->mPositions.size())
{
dstMesh->mTangents = new aiVector3D[numVertices];
std::copy( srcMesh->mTangents.begin() + vertexStart, srcMesh->mTangents.begin() +
vertexStart + numVertices, dstMesh->mTangents);
}
// bitangents, if given.
if( srcMesh->mBitangents.size() == srcMesh->mPositions.size())
{
dstMesh->mBitangents = new aiVector3D[numVertices];
std::copy( srcMesh->mBitangents.begin() + vertexStart, srcMesh->mBitangents.begin() +
vertexStart + numVertices, dstMesh->mBitangents);
} }
// same for texturecoords, as many as we have
@ -222,8 +505,9 @@ void ColladaLoader::BuildMeshesForNode( const ColladaParser& pParser, const Coll
{
dstMesh->mTextureCoords[a] = new aiVector3D[numVertices];
for( size_t b = 0; b < numVertices; ++b)
-dstMesh->mTextureCoords[a][b].Set( srcMesh->mTexCoords[a][vertexStart+b].x, srcMesh->mTexCoords[a][vertexStart+b].y, 0.0f);
-dstMesh->mNumUVComponents[a] = 2;
+dstMesh->mTextureCoords[a][b] = srcMesh->mTexCoords[a][vertexStart+b];
+dstMesh->mNumUVComponents[a] = srcMesh->mNumUVComponents[a];
}
}
@ -258,11 +542,7 @@ void ColladaLoader::BuildMeshesForNode( const ColladaParser& pParser, const Coll
vertexStart += numVertices; faceStart += submesh.mNumFaces;
// assign the material index
-std::map<std::string, size_t>::const_iterator matIt = mMaterialIndexByName.find( meshMaterial);
-if( matIt != mMaterialIndexByName.end())
-dstMesh->mMaterialIndex = matIt->second;
-else
-dstMesh->mMaterialIndex = 0;
+dstMesh->mMaterialIndex = matIdx;
}
}
}
@ -288,11 +568,185 @@ void ColladaLoader::StoreSceneMeshes( aiScene* pScene)
}
}
// ------------------------------------------------------------------------------------------------
// Stores all cameras in the given scene
void ColladaLoader::StoreSceneCameras( aiScene* pScene)
{
pScene->mNumCameras = mCameras.size();
if( mCameras.size() > 0)
{
pScene->mCameras = new aiCamera*[mCameras.size()];
std::copy( mCameras.begin(), mCameras.end(), pScene->mCameras);
}
}
// ------------------------------------------------------------------------------------------------
// Stores all lights in the given scene
void ColladaLoader::StoreSceneLights( aiScene* pScene)
{
pScene->mNumLights = mLights.size();
if( mLights.size() > 0)
{
pScene->mLights = new aiLight*[mLights.size()];
std::copy( mLights.begin(), mLights.end(), pScene->mLights);
}
}
// ------------------------------------------------------------------------------------------------
// Stores all materials in the given scene
void ColladaLoader::StoreSceneMaterials( aiScene* pScene)
{
pScene->mNumMaterials = newMats.size();
pScene->mMaterials = new aiMaterial*[newMats.size()];
for (unsigned int i = 0; i < newMats.size();++i)
pScene->mMaterials[i] = newMats[i].second;
}
// ------------------------------------------------------------------------------------------------
// Add a texture to a material structure
void ColladaLoader::AddTexture ( Assimp::MaterialHelper& mat, const ColladaParser& pParser,
const Collada::Effect& effect,
const Collada::Sampler& sampler,
aiTextureType type, unsigned int idx)
{
// first of all, basic file name
mat.AddProperty( &FindFilenameForEffectTexture( pParser, effect, sampler.mName),
_AI_MATKEY_TEXTURE_BASE,type,idx);
// mapping mode
int map = aiTextureMapMode_Clamp;
if (sampler.mWrapU)
map = aiTextureMapMode_Wrap;
if (sampler.mWrapU && sampler.mMirrorU)
map = aiTextureMapMode_Mirror;
mat.AddProperty( &map, 1, _AI_MATKEY_MAPPINGMODE_U_BASE, type, idx);
map = aiTextureMapMode_Clamp;
if (sampler.mWrapV)
map = aiTextureMapMode_Wrap;
if (sampler.mWrapV && sampler.mMirrorV)
map = aiTextureMapMode_Mirror;
mat.AddProperty( &map, 1, _AI_MATKEY_MAPPINGMODE_V_BASE, type, idx);
// UV transformation
mat.AddProperty(&sampler.mTransform, 1,
_AI_MATKEY_UVTRANSFORM_BASE, type, idx);
// Blend mode
mat.AddProperty((int*)&sampler.mOp , 1,
_AI_MATKEY_TEXOP_BASE, type, idx);
// Blend factor
mat.AddProperty((float*)&sampler.mWeighting , 1,
_AI_MATKEY_TEXBLEND_BASE, type, idx);
// UV source index ... if we didn't resolve the mapping it is actually just
// a guess but it works in most cases. We search for the first occurrence of a
// number in the channel name. We assume it is the zero-based index into the
// UV channel array of all corresponding meshes.
if (sampler.mUVId != 0xffffffff)
map = sampler.mUVId;
else {
map = 0xffffffff;
for (std::string::const_iterator it = sampler.mUVChannel.begin();
it != sampler.mUVChannel.end(); ++it)
{
if (IsNumeric(*it)) {
map = strtol10(&(*it));
break;
}
}
if (0xffffffff == map) {
DefaultLogger::get()->warn("Collada: unable to determine UV channel for texture");
map = 0;
}
}
mat.AddProperty(&map,1,_AI_MATKEY_UVWSRC_BASE,type,idx);
}
// ------------------------------------------------------------------------------------------------
// Fills materials from the collada material definitions
void ColladaLoader::FillMaterials( const ColladaParser& pParser, aiScene* pScene)
{
for (std::vector<std::pair<Collada::Effect*, aiMaterial*> >::iterator it = newMats.begin(),
end = newMats.end(); it != end; ++it)
{
MaterialHelper& mat = (MaterialHelper&)*it->second;
Collada::Effect& effect = *it->first;
// resolve shading mode
int shadeMode;
if (effect.mFaceted) /* fixme */
shadeMode = aiShadingMode_Flat;
else {
switch( effect.mShadeType)
{
case Collada::Shade_Constant:
shadeMode = aiShadingMode_NoShading;
break;
case Collada::Shade_Lambert:
shadeMode = aiShadingMode_Gouraud;
break;
case Collada::Shade_Blinn:
shadeMode = aiShadingMode_Blinn;
break;
case Collada::Shade_Phong:
shadeMode = aiShadingMode_Phong;
break;
default:
DefaultLogger::get()->warn("Collada: Unrecognized shading mode, using gouraud shading");
shadeMode = aiShadingMode_Gouraud;
break;
}
}
mat.AddProperty<int>( &shadeMode, 1, AI_MATKEY_SHADING_MODEL);
// double-sided?
shadeMode = effect.mDoubleSided;
mat.AddProperty<int>( &shadeMode, 1, AI_MATKEY_TWOSIDED);
// wireframe?
shadeMode = effect.mWireframe;
mat.AddProperty<int>( &shadeMode, 1, AI_MATKEY_ENABLE_WIREFRAME);
// add material colors
mat.AddProperty( &effect.mAmbient, 1, AI_MATKEY_COLOR_AMBIENT);
mat.AddProperty( &effect.mDiffuse, 1, AI_MATKEY_COLOR_DIFFUSE);
mat.AddProperty( &effect.mSpecular, 1, AI_MATKEY_COLOR_SPECULAR);
mat.AddProperty( &effect.mEmissive, 1, AI_MATKEY_COLOR_EMISSIVE);
mat.AddProperty( &effect.mShininess, 1, AI_MATKEY_SHININESS);
mat.AddProperty( &effect.mRefractIndex, 1, AI_MATKEY_REFRACTI);
// add textures, if given
if( !effect.mTexAmbient.mName.empty())
AddTexture( mat, pParser, effect, effect.mTexAmbient,aiTextureType_AMBIENT);
if( !effect.mTexEmissive.mName.empty())
AddTexture( mat, pParser, effect, effect.mTexEmissive,aiTextureType_EMISSIVE);
if( !effect.mTexSpecular.mName.empty())
AddTexture( mat, pParser, effect, effect.mTexSpecular,aiTextureType_SPECULAR);
if( !effect.mTexDiffuse.mName.empty())
AddTexture( mat, pParser, effect, effect.mTexDiffuse,aiTextureType_DIFFUSE);
if( !effect.mTexBump.mName.empty())
AddTexture( mat, pParser, effect, effect.mTexBump,aiTextureType_HEIGHT);
if( !effect.mTexTransparent.mName.empty())
AddTexture( mat, pParser, effect, effect.mTexTransparent,aiTextureType_OPACITY);
}
}
// ------------------------------------------------------------------------------------------------
// Constructs materials from the collada material definitions
void ColladaLoader::BuildMaterials( const ColladaParser& pParser, aiScene* pScene)
{
-std::vector<aiMaterial*> newMats;
+newMats.reserve(pParser.mMaterialLibrary.size());
for( ColladaParser::MaterialLibrary::const_iterator matIt = pParser.mMaterialLibrary.begin(); matIt != pParser.mMaterialLibrary.end(); ++matIt)
{
@ -308,59 +762,37 @@ void ColladaLoader::BuildMaterials( const ColladaParser& pParser, aiScene* pScen
aiString name( matIt->first);
mat->AddProperty(&name,AI_MATKEY_NAME);
-int shadeMode;
-switch( effect.mShadeType)
-{
-case Collada::Shade_Constant: shadeMode = aiShadingMode_NoShading; break;
-case Collada::Shade_Lambert: shadeMode = aiShadingMode_Gouraud; break;
-case Collada::Shade_Blinn: shadeMode = aiShadingMode_Blinn; break;
-default: shadeMode = aiShadingMode_Phong; break;
-}
-mat->AddProperty<int>( &shadeMode, 1, AI_MATKEY_SHADING_MODEL);
-mat->AddProperty( &effect.mAmbient, 1, AI_MATKEY_COLOR_AMBIENT);
-mat->AddProperty( &effect.mDiffuse, 1, AI_MATKEY_COLOR_DIFFUSE);
-mat->AddProperty( &effect.mSpecular, 1, AI_MATKEY_COLOR_SPECULAR);
-mat->AddProperty( &effect.mEmissive, 1, AI_MATKEY_COLOR_EMISSIVE);
-mat->AddProperty( &effect.mShininess, 1, AI_MATKEY_SHININESS);
-mat->AddProperty( &effect.mRefractIndex, 1, AI_MATKEY_REFRACTI);
-// add textures, if given
-if( !effect.mTexAmbient.empty())
-mat->AddProperty( &FindFilenameForEffectTexture( pParser, effect, effect.mTexAmbient), AI_MATKEY_TEXTURE_AMBIENT( 0));
-if( !effect.mTexDiffuse.empty())
-mat->AddProperty( &FindFilenameForEffectTexture( pParser, effect, effect.mTexDiffuse), AI_MATKEY_TEXTURE_DIFFUSE( 0));
-if( !effect.mTexEmissive.empty())
-mat->AddProperty( &FindFilenameForEffectTexture( pParser, effect, effect.mTexEmissive), AI_MATKEY_TEXTURE_EMISSIVE( 0));
-if( !effect.mTexSpecular.empty())
-mat->AddProperty( &FindFilenameForEffectTexture( pParser, effect, effect.mTexSpecular), AI_MATKEY_TEXTURE_SPECULAR( 0));
+// MEGA SUPER MONSTER HACK by Alex ... It's all my fault, yes.
+// We store the reference to the effect in the material and
+// return ... we'll add the actual material properties later
+// after we processed all meshes. During mesh processing,
+// we evaluate vertex input mappings. Afterwards we should be
+// able to correctly setup source UV channels for textures.
+// ... moved to ColladaLoader::FillMaterials()
+// *duck*
// store the material
mMaterialIndexByName[matIt->first] = newMats.size();
-newMats.push_back( mat);
+newMats.push_back( std::pair<Collada::Effect*, aiMaterial*>(const_cast<Collada::Effect*>(&effect),mat) );
}
// store a dummy material if none were given
if( newMats.size() == 0)
{
Assimp::MaterialHelper* mat = new Assimp::MaterialHelper;
-aiString name( std::string( "dummy"));
+aiString name( AI_DEFAULT_MATERIAL_NAME );
mat->AddProperty( &name, AI_MATKEY_NAME);
-int shadeMode = aiShadingMode_Phong;
+const int shadeMode = aiShadingMode_Phong;
mat->AddProperty<int>( &shadeMode, 1, AI_MATKEY_SHADING_MODEL);
aiColor4D colAmbient( 0.2f, 0.2f, 0.2f, 1.0f), colDiffuse( 0.8f, 0.8f, 0.8f, 1.0f), colSpecular( 0.5f, 0.5f, 0.5f, 0.5f);
mat->AddProperty( &colAmbient, 1, AI_MATKEY_COLOR_AMBIENT);
mat->AddProperty( &colDiffuse, 1, AI_MATKEY_COLOR_DIFFUSE);
mat->AddProperty( &colSpecular, 1, AI_MATKEY_COLOR_SPECULAR);
-float specExp = 5.0f;
+const float specExp = 5.0f;
mat->AddProperty( &specExp, 1, AI_MATKEY_SHININESS);
}
-// store the materials in the scene
-pScene->mNumMaterials = newMats.size();
-pScene->mMaterials = new aiMaterial*[pScene->mNumMaterials];
-std::copy( newMats.begin(), newMats.end(), pScene->mMaterials);
}
// ------------------------------------------------------------------------------------------------
@ -384,7 +816,7 @@ const aiString& ColladaLoader::FindFilenameForEffectTexture( const ColladaParser
// find the image referred by this name in the image library of the scene
ColladaParser::ImageLibrary::const_iterator imIt = pParser.mImageLibrary.find( name);
if( imIt == pParser.mImageLibrary.end())
-throw new ImportErrorException( boost::str( boost::format( "Unable to resolve effect texture entry \"%s\", ended up at ID \"%s\".") % pName % name));
+throw new ImportErrorException( boost::str( boost::format( "Collada: Unable to resolve effect texture entry \"%s\", ended up at ID \"%s\".") % pName % name));
static aiString result;
result.Set( imIt->second.mFileName );


@ -109,17 +109,53 @@ protected:
/** Recursively constructs a scene node for the given parser node and returns it. */
aiNode* BuildHierarchy( const ColladaParser& pParser, const Collada::Node* pNode);
/** Resolve node instances */
void ResolveNodeInstances( const ColladaParser& pParser, const Collada::Node* pNode,
std::vector<Collada::Node*>& resolved);
/** Builds meshes for the given node and references them */
void BuildMeshesForNode( const ColladaParser& pParser, const Collada::Node* pNode,
aiNode* pTarget);
/** Builds cameras for the given node and references them */
void BuildCamerasForNode( const ColladaParser& pParser, const Collada::Node* pNode,
aiNode* pTarget);
/** Builds lights for the given node and references them */
void BuildLightsForNode( const ColladaParser& pParser, const Collada::Node* pNode,
aiNode* pTarget);
/** Stores all meshes in the given scene */
void StoreSceneMeshes( aiScene* pScene);
/** Stores all materials in the given scene */
void StoreSceneMaterials( aiScene* pScene);
/** Stores all lights in the given scene */
void StoreSceneLights( aiScene* pScene);
/** Stores all cameras in the given scene */
void StoreSceneCameras( aiScene* pScene);
/** Constructs materials from the collada material definitions */
void BuildMaterials( const ColladaParser& pParser, aiScene* pScene);
/** Fill materials from the collada material definitions */
void FillMaterials( const ColladaParser& pParser, aiScene* pScene);
/** Resolve UV channel mappings*/
void ApplyVertexToEffectSemanticMapping(Collada::Sampler& sampler,
const Collada::SemanticMappingTable& table);
/** Add a texture and all of its sampling properties to a material*/
void AddTexture ( Assimp::MaterialHelper& mat, const ColladaParser& pParser,
const Collada::Effect& effect,
const Collada::Sampler& sampler,
aiTextureType type, unsigned int idx = 0);
/** Resolves the texture name for the given effect texture entry */
const aiString& FindFilenameForEffectTexture( const ColladaParser& pParser,
const Collada::Effect& pEffect, const std::string& pName);
/** Converts a path read from a collada file to the usual representation */
void ConvertPath (aiString& ss);
@ -136,6 +172,15 @@ protected:
/** Accumulated meshes for the target scene */
std::vector<aiMesh*> mMeshes;
/** Temporary material list */
std::vector<std::pair<Collada::Effect*, aiMaterial*> > newMats;
/** Temporary camera list */
std::vector<aiCamera*> mCameras;
/** Temporary light list */
std::vector<aiLight*> mLights;
};
} // end of namespace Assimp

File diff suppressed because it is too large


@ -84,14 +84,30 @@ protected:
/** Reads a material entry into the given material */
void ReadMaterial( Collada::Material& pMaterial);
/** Reads the camera library */
void ReadCameraLibrary();
/** Reads a camera entry into the given camera */
void ReadCamera( Collada::Camera& pCamera);
/** Reads the light library */
void ReadLightLibrary();
/** Reads a light entry into the given light */
void ReadLight( Collada::Light& pLight);
/** Reads the effect library */
void ReadEffectLibrary();
/** Reads an effect entry into the given effect*/
void ReadEffect( Collada::Effect& pEffect);
/** Read sampler properties */
void ReadSamplerProperties( Collada::Sampler& pSampler);
/** Reads an effect entry containing a color or a texture defining that color */
-void ReadEffectColor( aiColor4D& pColor, std::string& pSampler);
+void ReadEffectColor( aiColor4D& pColor, Collada::Sampler& pSampler);
/** Reads an effect entry containing a float */
void ReadEffectFloat( float& pFloat);
@ -146,6 +162,9 @@ protected:
/** Reads the collada scene */
void ReadScene();
// Processes bind_vertex_input and bind elements
void ReadMaterialVertexInputBinding( Collada::SemanticMappingTable& tbl);
protected:
/** Aborts the file reading with an exception */
void ThrowException( const std::string& pError) const;
@ -157,7 +176,10 @@ protected:
void SkipElement( const char* pElement);
/** Compares the current xml element name to the given string and returns true if equal */
-bool IsElement( const char* pName) const { assert( mReader->getNodeType() == irr::io::EXN_ELEMENT); return strcmp( mReader->getNodeName(), pName) == 0; }
+bool IsElement( const char* pName) const {
+ai_assert( mReader->getNodeType() == irr::io::EXN_ELEMENT);
+return ::strcmp( mReader->getNodeName(), pName) == 0;
+}
/** Tests for the opening tag of the given element, throws an exception if not found */
void TestOpening( const char* pName);
@ -165,15 +187,24 @@ protected:
/** Tests for the closing tag of the given element, throws an exception if not found */
void TestClosing( const char* pName);
/** Checks the present element for the presence of the attribute, returns its index
or throws an exception if not found */
int GetAttribute( const char* pAttr) const;
/** Returns the index of the named attribute or -1 if not found. Does not throw,
therefore useful for optional attributes */
int TestAttribute( const char* pAttr) const;
/** Reads the text contents of an element, throws an exception if not given.
Skips leading whitespace. */
const char* GetTextContent();
/** Reads a single bool from current text content */
bool ReadBoolFromTextContent();
/** Reads a single float from current text content */
float ReadFloatFromTextContent();
/** Calculates the resulting transformation from all the given transform steps */
aiMatrix4x4 CalculateResultTransform( const std::vector<Collada::Transform>& pTransforms) const;
@ -181,7 +212,8 @@ protected:
Collada::InputType GetTypeForSemantic( const std::string& pSemantic);
/** Finds the item in the given library by its reference, throws if not found */
template <typename Type> const Type& ResolveLibraryReference(
const std::map<std::string, Type>& pLibrary, const std::string& pURL) const;
protected:
/** Filename, for a verbose error message */
@ -218,6 +250,14 @@ protected:
typedef std::map<std::string, Collada::Material> MaterialLibrary;
MaterialLibrary mMaterialLibrary;
/** Light library: light sources by ID */
typedef std::map<std::string, Collada::Light> LightLibrary;
LightLibrary mLightLibrary;
/** Camera library: cameras by ID */
typedef std::map<std::string, Collada::Camera> CameraLibrary;
CameraLibrary mCameraLibrary;
/** Pointer to the root node. Don't delete, it just points to one of the nodes in the node library. */
Collada::Node* mRootNode;


@ -177,6 +177,19 @@ inline aiReturn MaterialHelper::AddProperty<float> (const float* pInput,
pKey,type,index,aiPTI_Float);
}
// ----------------------------------------------------------------------------------------
template<>
inline aiReturn MaterialHelper::AddProperty<aiUVTransform> (const aiUVTransform* pInput,
const unsigned int pNumValues,
const char* pKey,
unsigned int type,
unsigned int index)
{
return AddBinaryProperty((const void*)pInput,
pNumValues * sizeof(aiUVTransform),
pKey,type,index,aiPTI_Float);
}
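This specialization is what lets the Collada loader push a sampler's aiUVTransform straight into the material. A minimal usage sketch; 'mat' is a placeholder MaterialHelper and the scaling value is illustrative:

// Tile the diffuse texture twice in U and V, then store the transform on the material.
aiUVTransform trafo;
trafo.mScaling = aiVector2D( 2.f, 2.f);
mat.AddProperty( &trafo, 1, AI_MATKEY_UVTRANSFORM_DIFFUSE(0));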
// ----------------------------------------------------------------------------------------
template<>
inline aiReturn MaterialHelper::AddProperty<aiColor4D> (const aiColor4D* pInput,


@ -237,30 +237,34 @@ enum aiAxis
*/
enum aiTextureType
{
+// Set as texture semantic for material properties not related to textures
+aiTextureType_NONE = 0x0,
/** The texture is combined with the result of the diffuse
* lighting equation.
*/
-aiTextureType_DIFFUSE = 0x0,
+aiTextureType_DIFFUSE = 0x1,
/** The texture is combined with the result of the specular
* lighting equation.
*/
-aiTextureType_SPECULAR = 0x1,
+aiTextureType_SPECULAR = 0x2,
/** The texture is combined with the result of the ambient
* lighting equation.
*/
-aiTextureType_AMBIENT = 0x2,
+aiTextureType_AMBIENT = 0x3,
/** The texture is added to the result of the lighting
-* calculation. It isn't influenced by any lighting.
+* calculation. It isn't influenced by lights.
*/
-aiTextureType_EMISSIVE = 0x3,
+aiTextureType_EMISSIVE = 0x4,
/** The texture is a height map and serves as input for
* a normal map generator.
*/
-aiTextureType_HEIGHT = 0x4,
+aiTextureType_HEIGHT = 0x5,
/** The texture is a (tangent space) normal-map.
*
@ -268,7 +272,7 @@ enum aiTextureType
* for use with techniques such as Parallax Occlusion Mapping
* it is registered once as a normalmap.
*/
-aiTextureType_NORMALS = 0x5,
+aiTextureType_NORMALS = 0x6,
/** The texture defines the glossiness of the material.
*
@ -277,14 +281,14 @@ enum aiTextureType
* function define to map the linear color values in the
* texture to a suitable exponent. Have fun.
*/
-aiTextureType_SHININESS = 0x6,
+aiTextureType_SHININESS = 0x7,
/** The texture defines a per-pixel opacity.
*
* Normally 'white' means opaque and 'black' means
* 'transparency'. Or quite the opposite. Have fun.
*/
-aiTextureType_OPACITY = 0x7,
+aiTextureType_OPACITY = 0x8,
/** This value is not used. It is just here to force the
@ -369,6 +373,25 @@ enum aiShadingMode
_aiShadingMode_Force32Bit = 0x9fffffff
};
// ---------------------------------------------------------------------------
/** @brief Defines flags for a texture.
*
* See
*/
enum aiTextureFlags
{
/** The texture's color values have to be inverted (componentwise 1-n)
*/
aiTextureFlags_Invert = 0x1,
/** This value is not used. It is just there to force the
* compiler to map this enum to a 32 Bit integer.
*/
_aiTextureFlags_Force32Bit = 0x9fffffff
};
#include "./Compiler/pushpack1.h" #include "./Compiler/pushpack1.h"
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -707,7 +730,8 @@ extern "C" {
* <b>Default value to be assumed if this key isn't there:</b> n/a<br> * <b>Default value to be assumed if this key isn't there:</b> n/a<br>
*/ */
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
#define AI_MATKEY_TEXTURE(type, N) "$tex.file",type,N #define _AI_MATKEY_TEXTURE_BASE "$tex.file"
#define AI_MATKEY_TEXTURE(type, N) _AI_MATKEY_TEXTURE_BASE,type,N
// for backward compatibility // for backward compatibility
#define AI_MATKEY_TEXTURE_DIFFUSE(N) \ #define AI_MATKEY_TEXTURE_DIFFUSE(N) \
@ -747,7 +771,8 @@ extern "C" {
* and AI_MATKEY_MAPPING(type,N) == UV<br>
*/
// ---------------------------------------------------------------------------
-#define AI_MATKEY_UVWSRC(type, N) "$tex.uvwsrc",type,N
+#define _AI_MATKEY_UVWSRC_BASE "$tex.uvwsrc"
+#define AI_MATKEY_UVWSRC(type, N) _AI_MATKEY_UVWSRC_BASE,type,N
// for backward compatibility
#define AI_MATKEY_UVWSRC_DIFFUSE(N) \
@ -786,7 +811,8 @@ extern "C" {
* <b>Requires:</b> AI_MATKEY_TEXTURE(type,N)<br>
*/
// ---------------------------------------------------------------------------
-#define AI_MATKEY_TEXOP(type, N) "$tex.op",type,N
+#define _AI_MATKEY_TEXOP_BASE "$tex.op"
+#define AI_MATKEY_TEXOP(type, N) _AI_MATKEY_TEXOP_BASE,type,N
// for backward compatibility
#define AI_MATKEY_TEXOP_DIFFUSE(N) \
@ -824,7 +850,8 @@ extern "C" {
* <b>Requires:</b> AI_MATKEY_TEXTURE(type,N)<br>
*/
// ---------------------------------------------------------------------------
-#define AI_MATKEY_MAPPING(type, N) "$tex.mapping",type,N
+#define _AI_MATKEY_MAPPING_BASE "$tex.mapping"
+#define AI_MATKEY_MAPPING(type, N) _AI_MATKEY_MAPPING_BASE,type,N
// for backward compatibility
#define AI_MATKEY_MAPPING_DIFFUSE(N) \
@ -855,14 +882,16 @@ extern "C" {
/** @def AI_MATKEY_TEXBLEND (
* Parameters: type, N<br>
* Specifies the strength of the <N>th texture of type <type>. This is just
-* a multiplier for the texture's color values.
+* a multiplier for the texture's color values. It may have any value, even
+* outside [0..1]
* <br>
* <b>Type:</b> float<br>
* <b>Default value to be assumed if this key isn't there:</b> 1.f<br>
* <b>Requires:</b> AI_MATKEY_TEXTURE(type,N)<br>
*/
// ---------------------------------------------------------------------------
-#define AI_MATKEY_TEXBLEND(type, N) "$tex.blend",type,N
+#define _AI_MATKEY_TEXBLEND_BASE "$tex.blend"
+#define AI_MATKEY_TEXBLEND(type, N) _AI_MATKEY_TEXBLEND_BASE,type,N
// for backward compatibility
#define AI_MATKEY_TEXBLEND_DIFFUSE(N) \
@ -900,7 +929,8 @@ extern "C" {
* <b>Requires:</b> AI_MATKEY_TEXTURE(type,N)<br>
*/
// ---------------------------------------------------------------------------
-#define AI_MATKEY_MAPPINGMODE_U(type, N) "$tex.mapmodeu",type,N
+#define _AI_MATKEY_MAPPINGMODE_U_BASE "$tex.mapmodeu"
+#define AI_MATKEY_MAPPINGMODE_U(type, N) _AI_MATKEY_MAPPINGMODE_U_BASE,type,N
// for backward compatibility
#define AI_MATKEY_MAPPINGMODE_U_DIFFUSE(N) \
@ -938,7 +968,8 @@ extern "C" {
* <b>Requires:</b> AI_MATKEY_TEXTURE(type,N)<br>
*/
// ---------------------------------------------------------------------------
-#define AI_MATKEY_MAPPINGMODE_V(type, N) "$tex.mapmodev",type,N
+#define _AI_MATKEY_MAPPINGMODE_V_BASE "$tex.mapmodev"
+#define AI_MATKEY_MAPPINGMODE_V(type, N) _AI_MATKEY_MAPPINGMODE_V_BASE,type,N
// for backward compatibility
#define AI_MATKEY_MAPPINGMODE_V_DIFFUSE(N) \
@ -976,7 +1007,8 @@ extern "C" {
* <b>Requires:</b> AI_MATKEY_TEXTURE(type,N)<br>
*/
// ---------------------------------------------------------------------------
-#define AI_MATKEY_MAPPINGMODE_W(type, N) "$tex.mapmodew",type,N
+#define _AI_MATKEY_MAPPINGMODE_W_BASE "$tex.mapmodew"
+#define AI_MATKEY_MAPPINGMODE_W(type, N) _AI_MATKEY_MAPPINGMODE_W_BASE,type,N
// for backward compatibility
#define AI_MATKEY_MAPPINGMODE_W_DIFFUSE(N) \
@ -1017,7 +1049,8 @@ extern "C" {
* AI_MATKEY_MAPPING(type,N) != UV<br>
*/
// ---------------------------------------------------------------------------
-#define AI_MATKEY_TEXMAP_AXIS(type, N) "$tex.mapaxis",type,N
+#define _AI_MATKEY_TEXMAP_AXIS_BASE "$tex.mapaxis"
+#define AI_MATKEY_TEXMAP_AXIS(type, N) _AI_MATKEY_TEXMAP_AXIS_BASE,type,N
// ---------------------------------------------------------------------------
/** @def AI_MATKEY_UVTRANSFORM
@ -1033,7 +1066,8 @@ extern "C" {
* <b>Note:</b>Transformed 3D texture coordinates are not supported
*/
// ---------------------------------------------------------------------------
-#define AI_MATKEY_UVTRANSFORM(type, N) "$tex.uvtrafo",type,N
+#define _AI_MATKEY_UVTRANSFORM_BASE "$tex.uvtrafo"
+#define AI_MATKEY_UVTRANSFORM(type, N) _AI_MATKEY_UVTRANSFORM_BASE,type,N
// for backward compatibility
#define AI_MATKEY_UVTRANSFORM_DIFFUSE(N) \