Merge pull request #266 from jonnenauha/ogre-enhancements
Ogre importer cleanup, improvements and bug fixes (pull/267/head)
commit 7b38eed767
@@ -320,8 +320,8 @@ SET( Obj_SRCS
SOURCE_GROUP( Obj FILES ${Obj_SRCS})

SET( Ogre_SRCS
OgreImporter.hpp
OgreXmlHelper.hpp
OgreImporter.h
OgreParsingUtils.h
OgreImporter.cpp
OgreMaterial.cpp
OgreMesh.cpp
@@ -140,7 +140,7 @@ corresponding preprocessor flag to selectively disable formats.
# include "LWSLoader.h"
#endif
#ifndef ASSIMP_BUILD_NO_OGRE_IMPORTER
# include "OgreImporter.hpp"
# include "OgreImporter.h"
#endif
#ifndef ASSIMP_BUILD_NO_MS3D_IMPORTER
# include "MS3DLoader.h"
@@ -38,17 +38,14 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
----------------------------------------------------------------------
*/

/** @file OgreImporter.cpp
 * @brief Implementation of the Ogre XML (.mesh.xml) loader.
 */
#include "AssimpPCH.h"

#ifndef ASSIMP_BUILD_NO_OGRE_IMPORTER

#include <vector>
#include <sstream>
using namespace std;

#include "OgreImporter.hpp"
#include "OgreImporter.h"
#include "TinyFormatter.h"
#include "irrXMLWrapper.h"
@@ -65,199 +62,198 @@ static const aiImporterDesc desc = {
"mesh.xml"
};

using namespace std;

namespace Assimp
{
namespace Ogre
{

bool OgreImporter::CanRead(const std::string &pFile, Assimp::IOSystem *pIOHandler, bool checkSig) const
{
if(!checkSig)//Check File Extension
{
std::string extension("mesh.xml");
int l=extension.length();
return pFile.substr(pFile.length()-l, l)==extension;
}
else//Check file Header
{
const char* tokens[] = {"<mesh>"};
return BaseImporter::SearchFileHeaderForToken(pIOHandler, pFile, tokens, 1);
}
}

void OgreImporter::InternReadFile(const std::string &pFile, aiScene *pScene, Assimp::IOSystem *pIOHandler)
{
m_CurrentFilename=pFile;
m_CurrentIOHandler=pIOHandler;
m_CurrentScene=pScene;

//Open the File:
boost::scoped_ptr<IOStream> file(pIOHandler->Open(pFile));
if( file.get() == NULL)
throw DeadlyImportError("Failed to open file "+pFile+".");

//Read the Mesh File:
boost::scoped_ptr<CIrrXML_IOStreamReader> mIOWrapper( new CIrrXML_IOStreamReader( file.get()));
boost::scoped_ptr<XmlReader> MeshFile(irr::io::createIrrXMLReader(mIOWrapper.get()));
if(!MeshFile)//parse the xml file
throw DeadlyImportError("Failed to create XML Reader for "+pFile);

DefaultLogger::get()->debug("Mesh File opened");

//Read root Node:
if(!(XmlRead(MeshFile.get()) && string(MeshFile->getNodeName())=="mesh"))
{
throw DeadlyImportError("Root Node is not <mesh>! "+pFile+" "+MeshFile->getNodeName());
}

//eventually load shared geometry
XmlRead(MeshFile.get());//shared geometry is optional, so we need a reed for the next two if's
if(MeshFile->getNodeName()==string("sharedgeometry"))
{
unsigned int NumVertices=GetAttribute<int>(MeshFile.get(), "vertexcount");;

XmlRead(MeshFile.get());
while(MeshFile->getNodeName()==string("vertexbuffer"))
{
ReadVertexBuffer(m_SharedGeometry, MeshFile.get(), NumVertices);
}
}

//Go to the submeshs:
if(MeshFile->getNodeName()!=string("submeshes"))
{
throw DeadlyImportError("No <submeshes> node in <mesh> node! "+pFile);
}

//-------------------Read the submeshs and materials:-----------------------
std::list<boost::shared_ptr<SubMesh> > SubMeshes;
vector<aiMaterial*> Materials;
XmlRead(MeshFile.get());
while(MeshFile->getNodeName()==string("submesh"))
{
SubMesh* theSubMesh=new SubMesh();
theSubMesh->MaterialName=GetAttribute<string>(MeshFile.get(), "material");
DefaultLogger::get()->debug("Loading Submehs with Material: "+theSubMesh->MaterialName);
ReadSubMesh(*theSubMesh, MeshFile.get());

//just a index in a array, we add a mesh in each loop cycle, so we get indicies like 0, 1, 2 ... n;
//so it is important to do this before pushing the mesh in the vector!
theSubMesh->MaterialIndex=SubMeshes.size();

SubMeshes.push_back(boost::shared_ptr<SubMesh>(theSubMesh));

//Load the Material:
aiMaterial* MeshMat=LoadMaterial(theSubMesh->MaterialName);

//Set the Material:
Materials.push_back(MeshMat);
}

if(SubMeshes.empty())
throw DeadlyImportError("no submesh loaded!");
if(SubMeshes.size()!=Materials.size())
throw DeadlyImportError("materialcount doesn't match mesh count!");

//____________________________________________________________

//skip submeshnames (stupid irrxml)
if(MeshFile->getNodeName()==string("submeshnames"))
{
XmlRead(MeshFile.get());
while(MeshFile->getNodeName()==string("submesh"))
XmlRead(MeshFile.get());
}

//----------------Load the skeleton: -------------------------------
vector<Bone> Bones;
vector<Animation> Animations;
if(MeshFile->getNodeName()==string("skeletonlink"))
{
string SkeletonFile=GetAttribute<string>(MeshFile.get(), "name");
LoadSkeleton(SkeletonFile, Bones, Animations);
XmlRead(MeshFile.get());
}
else
{
DefaultLogger::get()->debug("No skeleton file will be loaded");
DefaultLogger::get()->debug(MeshFile->getNodeName());
}
//__________________________________________________________________

//now there might be boneassignments for the shared geometry:
if(MeshFile->getNodeName()==string("boneassignments"))
{
ReadBoneWeights(m_SharedGeometry, MeshFile.get());
}

//----------------- Process Meshs -----------------------
BOOST_FOREACH(boost::shared_ptr<SubMesh> theSubMesh, SubMeshes)
{
ProcessSubMesh(*theSubMesh, m_SharedGeometry);
}
//_______________________________________________________

//----------------- Now fill the Assimp scene ---------------------------

//put the aiMaterials in the scene:
m_CurrentScene->mMaterials=new aiMaterial*[Materials.size()];
m_CurrentScene->mNumMaterials=Materials.size();
for(unsigned int i=0; i<Materials.size(); ++i)
m_CurrentScene->mMaterials[i]=Materials[i];

//create the aiMehs...
vector<aiMesh*> aiMeshes;
BOOST_FOREACH(boost::shared_ptr<SubMesh> theSubMesh, SubMeshes)
{
aiMeshes.push_back(CreateAssimpSubMesh(*theSubMesh, Bones));
}
//... and put them in the scene:
m_CurrentScene->mNumMeshes=aiMeshes.size();
m_CurrentScene->mMeshes=new aiMesh*[aiMeshes.size()];
memcpy(m_CurrentScene->mMeshes, &(aiMeshes[0]), sizeof(aiMeshes[0])*aiMeshes.size());

//Create the root node
m_CurrentScene->mRootNode=new aiNode("root");

//link the meshs with the root node:
m_CurrentScene->mRootNode->mMeshes=new unsigned int[SubMeshes.size()];
m_CurrentScene->mRootNode->mNumMeshes=SubMeshes.size();
for(unsigned int i=0; i<SubMeshes.size(); ++i)
m_CurrentScene->mRootNode->mMeshes[i]=i;

CreateAssimpSkeleton(Bones, Animations);
PutAnimationsInScene(Bones, Animations);
//___________________________________________________________
}

const aiImporterDesc* OgreImporter::GetInfo () const
const aiImporterDesc* OgreImporter::GetInfo() const
{
return &desc;
}

void OgreImporter::SetupProperties(const Importer* pImp)
{
m_MaterialLibFilename=pImp->GetPropertyString(AI_CONFIG_IMPORT_OGRE_MATERIAL_FILE, "Scene.material");
m_TextureTypeFromFilename=pImp->GetPropertyBool(AI_CONFIG_IMPORT_OGRE_TEXTURETYPE_FROM_FILENAME, false);
m_userDefinedMaterialLibFile = pImp->GetPropertyString(AI_CONFIG_IMPORT_OGRE_MATERIAL_FILE, "Scene.material");
m_detectTextureTypeFromFilename = pImp->GetPropertyBool(AI_CONFIG_IMPORT_OGRE_TEXTURETYPE_FROM_FILENAME, false);
}

bool OgreImporter::CanRead(const std::string &pFile, Assimp::IOSystem *pIOHandler, bool checkSig) const
{
if (!checkSig) {
return EndsWith(pFile, ".mesh.xml", false);
}

}//namespace Ogre
}//namespace Assimp
const char* tokens[] = { "<mesh>" };
return SearchFileHeaderForToken(pIOHandler, pFile, tokens, 1);
}

#endif // !! ASSIMP_BUILD_NO_OGRE_IMPORTER
void OgreImporter::InternReadFile(const std::string &pFile, aiScene *pScene, Assimp::IOSystem *pIOHandler)
{
// -------------------- Initial file and XML operations --------------------

// Open
boost::scoped_ptr<IOStream> file(pIOHandler->Open(pFile));
if (!file.get()) {
throw DeadlyImportError("Failed to open file " + pFile);
}

// Read
boost::scoped_ptr<CIrrXML_IOStreamReader> xmlStream(new CIrrXML_IOStreamReader(file.get()));
boost::scoped_ptr<XmlReader> reader(irr::io::createIrrXMLReader(xmlStream.get()));
if (!reader) {
throw DeadlyImportError("Failed to create XML Reader for " + pFile);
}

DefaultLogger::get()->debug("Opened a XML reader for " + pFile);

// Read root node
NextNode(reader.get());
if (!CurrentNodeNameEquals(reader, "mesh")) {
throw DeadlyImportError("Root node is not <mesh> but <" + string(reader->getNodeName()) + "> in " + pFile);
}

// Node names
const string nnSharedGeometry = "sharedgeometry";
const string nnVertexBuffer = "vertexbuffer";
const string nnSubMeshes = "submeshes";
const string nnSubMesh = "submesh";
const string nnSubMeshNames = "submeshnames";
const string nnSkeletonLink = "skeletonlink";

// -------------------- Shared Geometry --------------------
// This can be used to share geometry between submeshes

NextNode(reader.get());
if (CurrentNodeNameEquals(reader, nnSharedGeometry))
{
DefaultLogger::get()->debug("Reading shared geometry");
unsigned int NumVertices = GetAttribute<unsigned int>(reader.get(), "vertexcount");

NextNode(reader.get());
while(CurrentNodeNameEquals(reader, nnVertexBuffer)) {
ReadVertexBuffer(m_SharedGeometry, reader.get(), NumVertices);
}
}

// -------------------- Sub Meshes --------------------

if (!CurrentNodeNameEquals(reader, nnSubMeshes)) {
throw DeadlyImportError("Could not find <submeshes> node inside root <mesh> node");
}

vector<boost::shared_ptr<SubMesh> > subMeshes;
vector<aiMaterial*> materials;

NextNode(reader.get());
while(CurrentNodeNameEquals(reader, nnSubMesh))
{
SubMesh* submesh = new SubMesh();
ReadSubMesh(subMeshes.size(), *submesh, reader.get());

// Just a index in a array, we add a mesh in each loop cycle, so we get indicies like 0, 1, 2 ... n;
// so it is important to do this before pushing the mesh in the vector!
/// @todo Not sure if this really is needed, refactor out if possible.
submesh->MaterialIndex = subMeshes.size();

subMeshes.push_back(boost::shared_ptr<SubMesh>(submesh));

/** @todo What is the correct way of handling empty ref here.
Does Assimp require there to be a valid material index for each mesh,
even if its a dummy material. */
aiMaterial* material = ReadMaterial(pFile, pIOHandler, submesh->MaterialName);
materials.push_back(material);
}

if (subMeshes.empty()) {
throw DeadlyImportError("Could not find <submeshes> node inside root <mesh> node");
}

// This is really a internal error if we failed to create dummy materials.
if (subMeshes.size() != materials.size()) {
throw DeadlyImportError("Internal Error: Material count does not match the submesh count");
}

// Skip submesh names.
/// @todo Should these be read to scene etc. metadata?
if (CurrentNodeNameEquals(reader, nnSubMeshNames))
{
NextNode(reader.get());
while(CurrentNodeNameEquals(reader, nnSubMesh)) {
NextNode(reader.get());
}
}

// -------------------- Skeleton --------------------

vector<Bone> Bones;
vector<Animation> Animations;

if (CurrentNodeNameEquals(reader, nnSkeletonLink))
{
string skeletonFile = GetAttribute<string>(reader.get(), "name");
if (!skeletonFile.empty())
{
ReadSkeleton(pFile, pIOHandler, pScene, skeletonFile, Bones, Animations);
}
else
{
DefaultLogger::get()->debug("Found a unusual <" + nnSkeletonLink + "> with a empty file reference");
}
NextNode(reader.get());
}
else
{
DefaultLogger::get()->debug("Mesh has no assigned skeleton with <" + nnSkeletonLink + ">");
}

// Now there might be <boneassignments> for the shared geometry
if (CurrentNodeNameEquals(reader, "boneassignments")) {
ReadBoneWeights(m_SharedGeometry, reader.get());
}

// -------------------- Process Results --------------------
BOOST_FOREACH(boost::shared_ptr<SubMesh> submesh, subMeshes)
{
ProcessSubMesh(*submesh.get(), m_SharedGeometry);
}

// -------------------- Apply to aiScene --------------------

// Materials
pScene->mMaterials = new aiMaterial*[materials.size()];
pScene->mNumMaterials = materials.size();

for(size_t i=0, len=materials.size(); i<len; ++i) {
pScene->mMaterials[i] = materials[i];
}

// Meshes
pScene->mMeshes = new aiMesh*[subMeshes.size()];
pScene->mNumMeshes = subMeshes.size();

for(size_t i=0, len=subMeshes.size(); i<len; ++i)
{
boost::shared_ptr<SubMesh> submesh = subMeshes[i];
pScene->mMeshes[i] = CreateAssimpSubMesh(pScene, *(submesh.get()), Bones);
}

// Create the root node
pScene->mRootNode = new aiNode();
pScene->mRootNode->mMeshes = new unsigned int[subMeshes.size()];
pScene->mRootNode->mNumMeshes = subMeshes.size();

for(size_t i=0, len=subMeshes.size(); i<len; ++i) {
pScene->mRootNode->mMeshes[i] = static_cast<unsigned int>(i);
}

// Skeleton and animations
CreateAssimpSkeleton(pScene, Bones, Animations);
}

} // Ogre
} // Assimp

#endif // ASSIMP_BUILD_NO_OGRE_IMPORTER
@@ -0,0 +1,230 @@
#ifndef AI_OGREIMPORTER_H_INC
#define AI_OGREIMPORTER_H_INC

#ifndef ASSIMP_BUILD_NO_OGRE_IMPORTER

#include "BaseImporter.h"
#include "OgreParsingUtils.h"

namespace Assimp
{
namespace Ogre
{

struct Face;
struct BoneWeight;
struct Bone;
struct Animation;

/// Ogre SubMesh
struct SubMesh
{
bool UseSharedGeometry;
bool Use32bitIndexes;

std::string Name;
std::string MaterialName;

bool HasGeometry;
bool HasPositions;
bool HasNormals;
bool HasTangents;

std::vector<Face> Faces;
std::vector<aiVector3D> Positions;
std::vector<aiVector3D> Normals;
std::vector<aiVector3D> Tangents;

/// Arbitrary number of texcoords, they are nearly always 2d, but Assimp has always 3d texcoords, n vectors(outer) with texcoords for each vertex(inner).
std::vector<std::vector<aiVector3D> > Uvs;

/// A list(inner) of bones for each vertex(outer).
std::vector<std::vector<BoneWeight> > Weights;

/// The Index in the Assimp material array from the material witch is attached to this submesh.
int MaterialIndex;

// The highest index of a bone from a bone weight, this is needed to create the Assimp bone struct. Converting from vertex-bones to bone-vertices.
unsigned int BonesUsed;

SubMesh() :
UseSharedGeometry(false),
Use32bitIndexes(false),
HasGeometry(false),
HasPositions(false),
HasNormals(false),
HasTangents(false),
MaterialIndex(-1),
BonesUsed(0)
{
}
};

/** Importer for Ogre mesh, skeleton and material formats.
@todo Support vertex colors
@todo Support multiple TexCoords (this is already done??) */
class OgreImporter : public BaseImporter
{
public:
/// BaseImporter override.
virtual bool CanRead(const std::string &pFile, IOSystem *pIOHandler, bool checkSig) const;

/// BaseImporter override.
virtual void InternReadFile(const std::string &pFile, aiScene *pScene, IOSystem *pIOHandler);

/// BaseImporter override.
virtual const aiImporterDesc *GetInfo() const;

/// BaseImporter override.
virtual void SetupProperties(const Importer *pImp);

private:
//-------------------------------- OgreMesh.cpp -------------------------------

/// Helper Functions to read parts of the XML File.
void ReadSubMesh(const unsigned int submeshIndex, SubMesh &submesh, XmlReader *reader);

/// Reads a single Vertexbuffer and writes its data in the Submesh.
static void ReadVertexBuffer(SubMesh &submesh, XmlReader *reader, const unsigned int numVertices);

/// Reads bone weights are stores them into the given submesh.
static void ReadBoneWeights(SubMesh &submesh, XmlReader *reader);

/// After Loading a SubMehs some work needs to be done (make all Vertexes unique, normalize weights).
static void ProcessSubMesh(SubMesh &submesh, SubMesh &sharedGeometry);

/// Uses the bone data to convert a SubMesh into a aiMesh which will be created and returned.
aiMesh *CreateAssimpSubMesh(aiScene *pScene, const SubMesh &submesh, const std::vector<Bone> &bones) const;

//-------------------------------- OgreSkeleton.cpp -------------------------------

/// Writes the results in Bones and Animations, Filename is not const, because its call-by-value and the function will change it!
void ReadSkeleton(const std::string &pFile, Assimp::IOSystem *pIOHandler, const aiScene *pScene,
const std::string &skeletonFile, std::vector<Bone> &Bones, std::vector<Animation> &Animations) const;

/// Converts the animations in aiAnimations and puts them into the scene.
void PutAnimationsInScene(aiScene *pScene, const std::vector<Bone> &Bones, const std::vector<Animation> &Animations);

/// Creates the aiSkeleton in current scene.
void CreateAssimpSkeleton(aiScene *pScene, const std::vector<Bone> &bones, const std::vector<Animation> &animations);

/// Recursively creates a filled aiNode from a given root bone.
static aiNode* CreateNodeFromBone(int boneId, const std::vector<Bone> &bones, aiNode *parent);

//-------------------------------- OgreMaterial.cpp -------------------------------

/// Reads material
aiMaterial* ReadMaterial(const std::string &pFile, Assimp::IOSystem *pIOHandler, const std::string MaterialName);

// These functions parse blocks from a material file from @c ss. Starting parsing from "{" and ending it to "}".
bool ReadTechnique(const std::string &techniqueName, std::stringstream &ss, aiMaterial *material);
bool ReadPass(const std::string &passName, std::stringstream &ss, aiMaterial *material);
bool ReadTextureUnit(const std::string &textureUnitName, std::stringstream &ss, aiMaterial *material);

std::string m_userDefinedMaterialLibFile;
bool m_detectTextureTypeFromFilename;

/// VertexBuffer for the sub meshes that use shader geometry.
SubMesh m_SharedGeometry;

std::map<aiTextureType, unsigned int> m_textures;
};

/// Simplified face.
/** @todo Support other polygon types than just just triangles. Move to using aiFace. */
struct Face
{
unsigned int VertexIndices[3];
};

/// Ogre Bone assignment
struct BoneAssignment
{
/// Bone ID from Ogre.
unsigned int BoneId;
// Bone name for Assimp.
std::string BoneName;
};

/// Ogre Bone weight
struct BoneWeight
{
/// Bone Id
unsigned int Id;
/// BoneWeight
float Value;
};

/// Ogre Bone
struct Bone
{
std::string Name;

int Id;
int ParentId;

aiVector3D Position;
aiVector3D RotationAxis;
float RotationAngle;

aiMatrix4x4 BoneToWorldSpace;

std::vector<int> Children;

Bone() :
Id(-1),
ParentId(-1),
RotationAngle(0.0f)
{
}

/// Returns if this bone is parented.
bool IsParented() const { return (ParentId != -1); }

/// This operator is needed to sort the bones by Id in a vector<Bone>.
bool operator<(const Bone &other) const { return (Id < other.Id); }

/// This operator is needed to find a bone by its name in a vector<Bone>
bool operator==(const std::string& other) const { return Name == other; }
bool operator==(const aiString& other) const { return Name == std::string(other.data); }

/// @note Implemented in OgreSkeleton.cpp
void CalculateBoneToWorldSpaceMatrix(std::vector<Bone>& Bones);
};

/// Ogre animation key frame
/** Transformations for a frame. */
struct KeyFrame
{
float Time;
aiVector3D Position;
aiQuaternion Rotation;
aiVector3D Scaling;
};

/// Ogre animation track
/** Keyframes for one bone. */
struct Track
{
std::string BoneName;
std::vector<KeyFrame> Keyframes;
};

/// Ogre animation
struct Animation
{
/// Name
std::string Name;
/// Length
float Length;
/// Tracks
std::vector<Track> Tracks;
};

} // Ogre
} // Assimp

#endif // ASSIMP_BUILD_NO_OGRE_IMPORTER
#endif // AI_OGREIMPORTER_H_INC
@@ -1,185 +0,0 @@
#include "BaseImporter.h"

#include <vector>

#include "OgreXmlHelper.hpp"
#include "irrXMLWrapper.h"

/// Ogre Importer TODO
/* - Read Vertex Colors
- Read multiple TexCoords
*/

namespace Assimp
{
namespace Ogre
{

//Forward declarations:
struct Face;
struct Weight;
struct Bone;
struct Animation;
struct Track;
struct Keyframe;

///A submesh from Ogre
struct SubMesh
{
bool SharedData;

std::string Name;
std::string MaterialName;
std::vector<Face> FaceList;

std::vector<aiVector3D> Positions; bool HasPositions;
std::vector<aiVector3D> Normals; bool HasNormals;
std::vector<aiVector3D> Tangents; bool HasTangents;
std::vector<std::vector<aiVector3D> > Uvs;//arbitrary number of texcoords, they are nearly always 2d, but assimp has always 3d texcoords, n vectors(outer) with texcoords for each vertex(inner)

std::vector< std::vector<Weight> > Weights;//a list(inner) of bones for each vertex(outer)
int MaterialIndex;///< The Index in the Assimp Materialarray from the material witch is attached to this submesh
unsigned int BonesUsed;//the highest index of a bone from a bone weight, this is needed to create the assimp bone structur (converting from Vertex-Bones to Bone-Vertices)

SubMesh(): SharedData(false), HasPositions(false), HasNormals(false), HasTangents(false),
MaterialIndex(-1), BonesUsed(0) {}//initialize everything
};

///The Main Ogre Importer Class
class OgreImporter : public BaseImporter
{
public:
virtual bool CanRead( const std::string& pFile, IOSystem* pIOHandler, bool checkSig) const;
virtual void InternReadFile( const std::string& pFile, aiScene* pScene, IOSystem* pIOHandler);
virtual const aiImporterDesc* GetInfo () const;
virtual void SetupProperties(const Importer* pImp);
private:

//-------------------------------- OgreMesh.cpp -------------------------------
/// Helper Functions to read parts of the XML File
void ReadSubMesh(SubMesh& theSubMesh, XmlReader* Reader);//the submesh reference is the result value

/// Reads a single Vertexbuffer and writes its data in the Submesh
static void ReadVertexBuffer(SubMesh &theSubMesh, XmlReader *Reader, unsigned int NumVertices);

/// Reads bone weights are stores them into the given submesh
static void ReadBoneWeights(SubMesh &theSubMesh, XmlReader *Reader);

/// After Loading a SubMehs some work needs to be done (make all Vertexes unique, normalize weights)
static void ProcessSubMesh(SubMesh &theSubMesh, SubMesh &theSharedGeometry);

/// Uses the bone data to convert a SubMesh into a aiMesh which will be created and returned
aiMesh* CreateAssimpSubMesh(const SubMesh &theSubMesh, const std::vector<Bone>& Bones) const;

//-------------------------------- OgreSkeleton.cpp -------------------------------
/// Writes the results in Bones and Animations, Filename is not const, because its call-by-value and the function will change it!
void LoadSkeleton(std::string FileName, std::vector<Bone> &Bones, std::vector<Animation> &Animations) const;

/// Converts the animations in aiAnimations and puts them into the scene
void PutAnimationsInScene(const std::vector<Bone> &Bones, const std::vector<Animation> &Animations);

/// Creates the aiskeleton in current scene
void CreateAssimpSkeleton(const std::vector<Bone> &Bones, const std::vector<Animation> &Animations);

/// Recursivly creates a filled aiNode from a given root bone
static aiNode* CreateAiNodeFromBone(int BoneId, const std::vector<Bone> &Bones, aiNode* ParentNode);

//-------------------------------- OgreMaterial.cpp -------------------------------
aiMaterial* LoadMaterial(const std::string MaterialName) const;
void ReadTechnique(std::stringstream &ss, aiMaterial* NewMaterial) const;

//Now we don't have to give theses parameters to all functions
std::string m_CurrentFilename;
std::string m_MaterialLibFilename;
bool m_TextureTypeFromFilename;
IOSystem* m_CurrentIOHandler;
aiScene *m_CurrentScene;
SubMesh m_SharedGeometry;///< we will just use the vertexbuffers of the submesh
};

///For the moment just triangles, no other polygon types!
struct Face
{
unsigned int VertexIndices[3];
};

struct BoneAssignment
{
unsigned int BoneId;//this is, what we get from ogre
std::string BoneName;//this is, what we need for assimp
};

///for a vertex->bone structur
struct Weight
{
unsigned int BoneId;
float Value;
};

/// Helper Class to describe an ogre-bone for the skeleton:
/** All Id's are signed ints, because than we have -1 as a simple INVALID_ID Value (we start from 0 so 0 is a valid bone ID!*/
struct Bone
{
int Id;
int ParentId;
std::string Name;
aiVector3D Position;
float RotationAngle;
aiVector3D RotationAxis;
std::vector<int> Children;
aiMatrix4x4 BoneToWorldSpace;

///ctor
Bone(): Id(-1), ParentId(-1), RotationAngle(0.0f) {}
///this operator is needed to sort the bones after Id's
bool operator<(const Bone& rval) const
{return Id<rval.Id; }
///this operator is needed to find a bone by its name in a vector<Bone>
bool operator==(const std::string& rval) const
{return Name==rval; }
bool operator==(const aiString& rval) const
{return Name==std::string(rval.data); }

// implemented in OgreSkeleton.cpp
void CalculateBoneToWorldSpaceMatrix(std::vector<Bone>& Bones);
};

///Describes an Ogre Animation
struct Animation
{
std::string Name;
float Length;
std::vector<Track> Tracks;
};

///a track (keyframes for one bone) from an animation
struct Track
{
std::string BoneName;
std::vector<Keyframe> Keyframes;
};

/// keyframe (bone transformation) from a track from a animation
struct Keyframe
{
float Time;
aiVector3D Position;
aiQuaternion Rotation;
aiVector3D Scaling;
};

}//namespace Ogre
}//namespace Assimp
@@ -38,56 +38,36 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
----------------------------------------------------------------------
*/

/**
This file contains material related code. This is
spilitted up from the main file OgreImporter.cpp
to make it shorter easier to maintain.
*/
#include "AssimpPCH.h"

#ifndef ASSIMP_BUILD_NO_OGRE_IMPORTER

#include <vector>
#include <sstream>
using namespace std;

#include "OgreImporter.hpp"
#include "irrXMLWrapper.h"
#include "OgreImporter.h"
#include "TinyFormatter.h"

using namespace std;

namespace Assimp
{
namespace Ogre
{

static const string partComment = "//";
static const string partBlockStart = "{";
static const string partBlockEnd = "}";

aiMaterial* OgreImporter::LoadMaterial(const std::string MaterialName) const
aiMaterial* OgreImporter::ReadMaterial(const std::string &pFile, Assimp::IOSystem *pIOHandler, const std::string materialName)
{
/*For better understanding of the material parser, here is a material example file:

material Sarg
{
receive_shadows on
technique
{
pass
{
ambient 0.500000 0.500000 0.500000 1.000000
diffuse 0.640000 0.640000 0.640000 1.000000
specular 0.500000 0.500000 0.500000 1.000000 12.500000
emissive 0.000000 0.000000 0.000000 1.000000
texture_unit
{
texture SargTextur.tga
tex_address_mode wrap
filtering linear linear none
}
}
}
/// @todo Should we return null ptr here or a empty material?
if (materialName.empty()) {
return new aiMaterial();
}

*/
// Full reference and examples of Ogre Material Script
// can be found from http://www.ogre3d.org/docs/manual/manual_14.html

/*and here is another one:
@ -112,342 +92,450 @@ aiMaterial* OgreImporter::LoadMaterial(const std::string MaterialName) const
|
|||
}
|
||||
*/
|
||||
|
||||
//Read the file into memory and put it in a stringstream
|
||||
stringstream ss;
|
||||
{// after this block, the temporarly loaded data will be released
|
||||
|
||||
/*
|
||||
We have 3 guesses for the Material filename:
|
||||
- the Material Name
|
||||
- the Name of the mesh file
|
||||
- the DefaultMaterialLib (which you can set before importing)
|
||||
*/
|
||||
// Scope for scopre_ptr auto release
|
||||
{
|
||||
/* There are three .material options in priority order:
|
||||
1) File with the material name (materialName)
|
||||
2) File with the mesh files base name (pFile)
|
||||
3) Optional user defined material library file (m_userDefinedMaterialLibFile) */
|
||||
std::vector<string> potentialFiles;
|
||||
potentialFiles.push_back(materialName + ".material");
|
||||
potentialFiles.push_back(pFile.substr(0, pFile.rfind(".mesh")) + ".material");
|
||||
if (!m_userDefinedMaterialLibFile.empty())
|
||||
potentialFiles.push_back(m_userDefinedMaterialLibFile);
|
||||
|
||||
IOStream* MatFilePtr=m_CurrentIOHandler->Open(MaterialName+".material");
|
||||
if(NULL==MatFilePtr)
|
||||
IOStream *materialFile = 0;
|
||||
for(size_t i=0; i<potentialFiles.size(); ++i)
|
||||
{
|
||||
//the filename typically ends with .mesh or .mesh.xml
|
||||
const string MaterialFileName=m_CurrentFilename.substr(0, m_CurrentFilename.rfind(".mesh"))+".material";
|
||||
|
||||
MatFilePtr=m_CurrentIOHandler->Open(MaterialFileName);
|
||||
if(NULL==MatFilePtr)
|
||||
{
|
||||
//try the default mat Library
|
||||
if(NULL==MatFilePtr)
|
||||
{
|
||||
|
||||
MatFilePtr=m_CurrentIOHandler->Open(m_MaterialLibFilename);
|
||||
if(NULL==MatFilePtr)
|
||||
{
|
||||
DefaultLogger::get()->error(m_MaterialLibFilename+" and "+MaterialFileName + " could not be opened, Material will not be loaded!");
|
||||
return new aiMaterial();
|
||||
}
|
||||
}
|
||||
materialFile = pIOHandler->Open(potentialFiles[i]);
|
||||
if (materialFile) {
|
||||
break;
|
||||
}
|
||||
DefaultLogger::get()->debug(Formatter::format() << "Source file for material '" << materialName << "' " << potentialFiles[i] << " does not exist");
|
||||
}
|
||||
//Fill the stream
|
||||
boost::scoped_ptr<IOStream> MaterialFile(MatFilePtr);
|
||||
if(MaterialFile->FileSize()>0)
|
||||
if (!materialFile)
|
||||
{
|
||||
vector<char> FileData(MaterialFile->FileSize());
|
||||
MaterialFile->Read(&FileData[0], MaterialFile->FileSize(), 1);
|
||||
BaseImporter::ConvertToUTF8(FileData);
|
||||
/// @todo Should we return null ptr here or a empty material?
|
||||
DefaultLogger::get()->error(Formatter::format() << "Failed to find source file for material '" << materialName << "'");
|
||||
return new aiMaterial();
|
||||
}
|
||||
|
||||
FileData.push_back('\0');//terminate the string with zero, so that the ss can parse it correctly
|
||||
ss << &FileData[0];
|
||||
}
|
||||
else
|
||||
boost::scoped_ptr<IOStream> stream(materialFile);
|
||||
if (stream->FileSize() == 0)
|
||||
{
|
||||
DefaultLogger::get()->warn("Material " + MaterialName + " seams to be empty");
|
||||
return NULL;
|
||||
/// @todo Should we return null ptr here or a empty material?
|
||||
DefaultLogger::get()->warn(Formatter::format() << "Source file for material '" << materialName << "' is empty (size is 0 bytes)");
|
||||
return new aiMaterial();
|
||||
}
|
||||
|
||||
// Read bytes
|
||||
vector<char> data(stream->FileSize());
|
||||
stream->Read(&data[0], stream->FileSize(), 1);
|
||||
|
||||
// Convert to UTF-8 and terminate the string for ss
|
||||
BaseImporter::ConvertToUTF8(data);
|
||||
data.push_back('\0');
|
||||
|
||||
ss << &data[0];
|
||||
}
|
||||
|
||||
DefaultLogger::get()->debug("Reading material '" + materialName + "'");
|
||||
|
||||
//create the material
|
||||
aiMaterial *NewMaterial=new aiMaterial();
|
||||
aiMaterial *material = new aiMaterial();
|
||||
m_textures.clear();
|
||||
|
||||
aiString ts(materialName);
|
||||
material->AddProperty(&ts, AI_MATKEY_NAME);
|
||||
|
||||
aiString ts(MaterialName.c_str());
|
||||
NewMaterial->AddProperty(&ts, AI_MATKEY_NAME);
|
||||
// The stringstream will push words from a line until newline.
|
||||
// It will also trim whitespace from line start and between words.
|
||||
string linePart;
|
||||
ss >> linePart;
|
||||
|
||||
const string partMaterial = "material";
|
||||
const string partTechnique = "technique";
|
||||
|
||||
string Line;
|
||||
ss >> Line;
|
||||
// unsigned int Level=0;//Hierarchielevels in the material file, like { } blocks into another
|
||||
while(!ss.eof())
|
||||
{
|
||||
if(Line=="material")
|
||||
// Skip commented lines
|
||||
if (linePart == partComment)
|
||||
{
|
||||
ss >> Line;
|
||||
if(Line==MaterialName)//Load the next material
|
||||
{
|
||||
string RestOfLine;
|
||||
getline(ss, RestOfLine);//ignore the rest of the line
|
||||
ss >> Line;
|
||||
|
||||
if(Line!="{")
|
||||
{
|
||||
DefaultLogger::get()->warn("empyt material!");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
while(Line!="}")//read until the end of the material
|
||||
{
|
||||
//Proceed to the first technique
|
||||
ss >> Line;
|
||||
if(Line=="technique")
|
||||
{
|
||||
ReadTechnique(ss, NewMaterial);
|
||||
}
|
||||
|
||||
DefaultLogger::get()->info(Line);
|
||||
//read informations from a custom material:
|
||||
if(Line=="set")
|
||||
{
|
||||
ss >> Line;
|
||||
if(Line=="$specular")//todo load this values:
|
||||
{
|
||||
}
|
||||
if(Line=="$diffuse")
|
||||
{
|
||||
}
|
||||
if(Line=="$ambient")
|
||||
{
|
||||
}
|
||||
if(Line=="$colormap")
|
||||
{
|
||||
ss >> Line;
|
||||
aiString ts(Line.c_str());
|
||||
NewMaterial->AddProperty(&ts, AI_MATKEY_TEXTURE(aiTextureType_DIFFUSE, 0));
|
||||
}
|
||||
if(Line=="$normalmap")
|
||||
{
|
||||
ss >> Line;
|
||||
aiString ts(Line.c_str());
|
||||
NewMaterial->AddProperty(&ts, AI_MATKEY_TEXTURE(aiTextureType_NORMALS, 0));
|
||||
}
|
||||
|
||||
if(Line=="$shininess_strength")
|
||||
{
|
||||
ss >> Line;
|
||||
float Shininess=fast_atof(Line.c_str());
|
||||
NewMaterial->AddProperty(&Shininess, 1, AI_MATKEY_SHININESS_STRENGTH);
|
||||
}
|
||||
|
||||
if(Line=="$shininess_exponent")
|
||||
{
|
||||
ss >> Line;
|
||||
float Shininess=fast_atof(Line.c_str());
|
||||
NewMaterial->AddProperty(&Shininess, 1, AI_MATKEY_SHININESS);
|
||||
}
|
||||
|
||||
//Properties from Venetica:
|
||||
if(Line=="$diffuse_map")
|
||||
{
|
||||
ss >> Line;
|
||||
if(Line[0]=='"')// "file" -> file
|
||||
Line=Line.substr(1, Line.size()-2);
|
||||
aiString ts(Line.c_str());
|
||||
NewMaterial->AddProperty(&ts, AI_MATKEY_TEXTURE(aiTextureType_DIFFUSE, 0));
|
||||
}
|
||||
if(Line=="$specular_map")
|
||||
{
|
||||
ss >> Line;
|
||||
if(Line[0]=='"')// "file" -> file
|
||||
Line=Line.substr(1, Line.size()-2);
|
||||
aiString ts(Line.c_str());
|
||||
NewMaterial->AddProperty(&ts, AI_MATKEY_TEXTURE(aiTextureType_SHININESS, 0));
|
||||
}
|
||||
if(Line=="$normal_map")
|
||||
{
|
||||
ss >> Line;
|
||||
if(Line[0]=='"')// "file" -> file
|
||||
Line=Line.substr(1, Line.size()-2);
|
||||
aiString ts(Line.c_str());
|
||||
NewMaterial->AddProperty(&ts, AI_MATKEY_TEXTURE(aiTextureType_NORMALS, 0));
|
||||
}
|
||||
if(Line=="$light_map")
|
||||
{
|
||||
ss >> Line;
|
||||
if(Line[0]=='"')// "file" -> file
|
||||
Line=Line.substr(1, Line.size()-2);
|
||||
aiString ts(Line.c_str());
|
||||
NewMaterial->AddProperty(&ts, AI_MATKEY_TEXTURE(aiTextureType_LIGHTMAP, 0));
|
||||
}
|
||||
}
|
||||
}//end of material
|
||||
}
|
||||
else {} //this is the wrong material, proceed the file until we reach the next material
|
||||
string postComment = NextAfterNewLine(ss, linePart);
|
||||
DefaultLogger::get()->debug("//" + postComment + " (comment line ignored)");
|
||||
continue;
|
||||
}
|
||||
ss >> Line;
|
||||
}
|
||||
|
||||
return NewMaterial;
|
||||
}
|
||||
|
||||
void OgreImporter::ReadTechnique(stringstream &ss, aiMaterial* NewMaterial) const
|
||||
{
|
||||
unsigned int CurrentDiffuseTextureId=0;
|
||||
unsigned int CurrentSpecularTextureId=0;
|
||||
unsigned int CurrentNormalTextureId=0;
|
||||
unsigned int CurrentLightTextureId=0;
|
||||
|
||||
|
||||
string RestOfLine;
|
||||
getline(ss, RestOfLine);//ignore the rest of the line
|
||||
|
||||
string Line;
|
||||
ss >> Line;
|
||||
if(Line!="{")
|
||||
{
|
||||
DefaultLogger::get()->warn("empty technique!");
|
||||
return;
|
||||
}
|
||||
while(Line!="}")//read until the end of the technique
|
||||
{
|
||||
ss >> Line;
|
||||
if(Line=="pass")
|
||||
if (linePart != partMaterial)
|
||||
{
|
||||
getline(ss, RestOfLine);//ignore the rest of the line
|
||||
ss >> linePart;
|
||||
continue;
|
||||
}
|
||||
|
||||
ss >> Line;
|
||||
if(Line!="{")
|
||||
{
|
||||
DefaultLogger::get()->warn("empty pass!");
|
||||
return;
|
||||
}
|
||||
while(Line!="}")//read until the end of the pass
|
||||
{
|
||||
ss >> Line;
|
||||
if(Line=="ambient")
|
||||
{
|
||||
float r,g,b;
|
||||
ss >> r >> g >> b;
|
||||
const aiColor3D Color(r,g,b);
|
||||
NewMaterial->AddProperty(&Color, 1, AI_MATKEY_COLOR_AMBIENT);
|
||||
}
|
||||
else if(Line=="diffuse")
|
||||
{
|
||||
float r,g,b;
|
||||
ss >> r >> g >> b;
|
||||
const aiColor3D Color(r,g,b);
|
||||
NewMaterial->AddProperty(&Color, 1, AI_MATKEY_COLOR_DIFFUSE);
|
||||
}
|
||||
else if(Line=="specular")
|
||||
{
|
||||
float r,g,b;
|
||||
ss >> r >> g >> b;
|
||||
const aiColor3D Color(r,g,b);
|
||||
NewMaterial->AddProperty(&Color, 1, AI_MATKEY_COLOR_SPECULAR);
|
||||
}
|
||||
else if(Line=="emmisive")
|
||||
{
|
||||
float r,g,b;
|
||||
ss >> r >> g >> b;
|
||||
const aiColor3D Color(r,g,b);
|
||||
NewMaterial->AddProperty(&Color, 1, AI_MATKEY_COLOR_EMISSIVE);
|
||||
}
|
||||
else if(Line=="texture_unit")
|
||||
{
|
||||
getline(ss, RestOfLine);//ignore the rest of the line
|
||||
ss >> linePart;
|
||||
if (linePart != materialName)
|
||||
{
|
||||
//DefaultLogger::get()->debug(Formatter::format() << "Found material '" << linePart << "' that does not match at index " << ss.tellg());
|
||||
ss >> linePart;
|
||||
continue;
|
||||
}
|
||||
|
||||
std::string TextureName;
|
||||
int TextureType=-1;
|
||||
int UvSet=0;
|
||||
NextAfterNewLine(ss, linePart);
|
||||
if (linePart != partBlockStart)
|
||||
{
|
||||
DefaultLogger::get()->error(Formatter::format() << "Invalid material: block start missing near index " << ss.tellg());
|
||||
return material;
|
||||
}
|
||||
|
||||
DefaultLogger::get()->debug("material '" + materialName + "'");
|
||||
|
||||
ss >> Line;
|
||||
if(Line!="{")
|
||||
throw DeadlyImportError("empty texture unit!");
|
||||
while(Line!="}")//read until the end of the texture_unit
|
||||
{
|
||||
ss >> Line;
|
||||
if(Line=="texture")
|
||||
{
|
||||
ss >> Line;
|
||||
TextureName=Line;
|
||||
|
||||
if(m_TextureTypeFromFilename)
|
||||
{
|
||||
if(Line.find("_n.")!=string::npos)// Normalmap
|
||||
{
|
||||
TextureType=aiTextureType_NORMALS;
|
||||
}
|
||||
else if(Line.find("_s.")!=string::npos)// Specularmap
|
||||
{
|
||||
TextureType=aiTextureType_SPECULAR;
|
||||
}
|
||||
else if(Line.find("_l.")!=string::npos)// Lightmap
|
||||
{
|
||||
TextureType=aiTextureType_LIGHTMAP;
|
||||
}
|
||||
else// colormap
|
||||
{
|
||||
TextureType=aiTextureType_DIFFUSE;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
TextureType=aiTextureType_DIFFUSE;
|
||||
}
|
||||
}
|
||||
else if(Line=="tex_coord_set")
|
||||
{
|
||||
ss >> UvSet;
|
||||
}
|
||||
else if(Line=="colour_op")//TODO implement this
|
||||
{
|
||||
/*
|
||||
ss >> Line;
|
||||
if("replace"==Line)//I don't think, assimp has something for this...
|
||||
{
|
||||
}
|
||||
else if("modulate"==Line)
|
||||
{
|
||||
//TODO: set value
|
||||
//NewMaterial->AddProperty(aiTextureOp_Multiply)
|
||||
}
|
||||
*/
|
||||
}
|
||||
|
||||
}//end of texture unit
|
||||
Line="";//clear the } that would end the outer loop
|
||||
|
||||
//give the texture to assimp:
|
||||
while(linePart != partBlockEnd)
|
||||
{
|
||||
// Proceed to the first technique
|
||||
ss >> linePart;
|
||||
|
||||
aiString ts(TextureName.c_str());
|
||||
switch(TextureType)
|
||||
{
|
||||
case aiTextureType_DIFFUSE:
|
||||
NewMaterial->AddProperty(&ts, AI_MATKEY_TEXTURE(aiTextureType_DIFFUSE, CurrentDiffuseTextureId));
|
||||
NewMaterial->AddProperty(&UvSet, 1, AI_MATKEY_UVWSRC(0, CurrentDiffuseTextureId));
|
||||
CurrentDiffuseTextureId++;
|
||||
break;
|
||||
case aiTextureType_NORMALS:
|
||||
NewMaterial->AddProperty(&ts, AI_MATKEY_TEXTURE(aiTextureType_NORMALS, CurrentNormalTextureId));
|
||||
NewMaterial->AddProperty(&UvSet, 1, AI_MATKEY_UVWSRC(0, CurrentNormalTextureId));
|
||||
CurrentNormalTextureId++;
|
||||
break;
|
||||
case aiTextureType_SPECULAR:
|
||||
NewMaterial->AddProperty(&ts, AI_MATKEY_TEXTURE(aiTextureType_SPECULAR, CurrentSpecularTextureId));
|
||||
NewMaterial->AddProperty(&UvSet, 1, AI_MATKEY_UVWSRC(0, CurrentSpecularTextureId));
|
||||
CurrentSpecularTextureId++;
|
||||
break;
|
||||
case aiTextureType_LIGHTMAP:
|
||||
NewMaterial->AddProperty(&ts, AI_MATKEY_TEXTURE(aiTextureType_LIGHTMAP, CurrentLightTextureId));
|
||||
NewMaterial->AddProperty(&UvSet, 1, AI_MATKEY_UVWSRC(0, CurrentLightTextureId));
|
||||
CurrentLightTextureId++;
|
||||
break;
|
||||
default:
|
||||
DefaultLogger::get()->warn("Invalid Texture Type!");
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (linePart == partTechnique)
|
||||
{
|
||||
string techniqueName = SkipLine(ss);
|
||||
ReadTechnique(Trim(techniqueName), ss, material);
|
||||
}
|
||||
Line="";//clear the } that would end the outer loop
|
||||
|
||||
// Read informations from a custom material
|
||||
/** @todo This "set $x y" does not seem to be a official Ogre material system feature.
|
||||
Materials can inherit other materials and override texture units by using the (unique)
|
||||
parent texture unit name in your cloned material.
|
||||
This is not yet supported and below code is probably some hack from the original
|
||||
author of this Ogre importer. Should be removed? */
|
||||
if (linePart=="set")
|
||||
{
|
||||
ss >> linePart;
|
||||
if (linePart=="$specular")//todo load this values:
|
||||
{
|
||||
}
|
||||
else if (linePart=="$diffuse")
|
||||
{
|
||||
}
|
||||
else if (linePart=="$ambient")
|
||||
{
|
||||
}
|
||||
else if (linePart=="$colormap")
|
||||
{
|
||||
ss >> linePart;
|
||||
aiString ts(linePart);
|
||||
material->AddProperty(&ts, AI_MATKEY_TEXTURE(aiTextureType_DIFFUSE, 0));
|
||||
}
|
||||
else if (linePart=="$normalmap")
|
||||
{
|
||||
ss >> linePart;
|
||||
aiString ts(linePart);
|
||||
material->AddProperty(&ts, AI_MATKEY_TEXTURE(aiTextureType_NORMALS, 0));
|
||||
}
|
||||
else if (linePart=="$shininess_strength")
|
||||
{
|
||||
ss >> linePart;
|
||||
float Shininess = fast_atof(linePart.c_str());
|
||||
material->AddProperty(&Shininess, 1, AI_MATKEY_SHININESS_STRENGTH);
|
||||
}
|
||||
else if (linePart=="$shininess_exponent")
|
||||
{
|
||||
ss >> linePart;
|
||||
float Shininess = fast_atof(linePart.c_str());
|
||||
material->AddProperty(&Shininess, 1, AI_MATKEY_SHININESS);
|
||||
}
|
||||
//Properties from Venetica:
|
||||
else if (linePart=="$diffuse_map")
|
||||
{
|
||||
ss >> linePart;
|
||||
if (linePart[0] == '"')// "file" -> file
|
||||
linePart = linePart.substr(1, linePart.size()-2);
|
||||
aiString ts(linePart);
|
||||
material->AddProperty(&ts, AI_MATKEY_TEXTURE(aiTextureType_DIFFUSE, 0));
|
||||
}
|
||||
else if (linePart=="$specular_map")
|
||||
{
|
||||
ss >> linePart;
|
||||
if (linePart[0] == '"')// "file" -> file
|
||||
linePart = linePart.substr(1, linePart.size()-2);
|
||||
aiString ts(linePart);
|
||||
material->AddProperty(&ts, AI_MATKEY_TEXTURE(aiTextureType_SHININESS, 0));
|
||||
}
|
||||
else if (linePart=="$normal_map")
|
||||
{
|
||||
ss >> linePart;
|
||||
if (linePart[0]=='"')// "file" -> file
|
||||
linePart = linePart.substr(1, linePart.size()-2);
|
||||
aiString ts(linePart);
|
||||
material->AddProperty(&ts, AI_MATKEY_TEXTURE(aiTextureType_NORMALS, 0));
|
||||
}
|
||||
else if (linePart=="$light_map")
|
||||
{
|
||||
ss >> linePart;
|
||||
if (linePart[0]=='"') {
|
||||
linePart = linePart.substr(1, linePart.size() - 2);
|
||||
}
|
||||
aiString ts(linePart);
|
||||
material->AddProperty(&ts, AI_MATKEY_TEXTURE(aiTextureType_LIGHTMAP, 0));
|
||||
}
|
||||
}
|
||||
}
|
||||
}//end of technique
|
||||
ss >> linePart;
|
||||
}
|
||||
|
||||
return material;
|
||||
}
|
||||
|
||||
bool OgreImporter::ReadTechnique(const std::string &techniqueName, stringstream &ss, aiMaterial *material)
|
||||
{
|
||||
string linePart;
|
||||
ss >> linePart;
|
||||
|
||||
}//namespace Ogre
|
||||
}//namespace Assimp
|
||||
if (linePart != partBlockStart)
|
||||
{
|
||||
DefaultLogger::get()->error(Formatter::format() << "Invalid material: Technique block start missing near index " << ss.tellg());
|
||||
return false;
|
||||
}
|
||||
|
||||
#endif // !! ASSIMP_BUILD_NO_OGRE_IMPORTER
|
||||
DefaultLogger::get()->debug(" technique '" + techniqueName + "'");
|
||||
|
||||
const string partPass = "pass";
|
||||
|
||||
while(linePart != partBlockEnd)
|
||||
{
|
||||
ss >> linePart;
|
||||
|
||||
// Skip commented lines
|
||||
if (linePart == partComment)
|
||||
{
|
||||
string postComment = SkipLine(ss);
|
||||
DefaultLogger::get()->debug(" //" + postComment + " (comment line ignored)");
|
||||
continue;
|
||||
}
|
||||
|
||||
/// @todo Techniques have other attributes than just passes.
|
||||
if (linePart == partPass)
|
||||
{
|
||||
string passName = SkipLine(ss);
|
||||
ReadPass(Trim(passName), ss, material);
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
bool OgreImporter::ReadPass(const std::string &passName, stringstream &ss, aiMaterial *material)
|
||||
{
|
||||
string linePart;
|
||||
ss >> linePart;
|
||||
|
||||
if (linePart != partBlockStart)
|
||||
{
|
||||
DefaultLogger::get()->error(Formatter::format() << "Invalid material: Pass block start missing near index " << ss.tellg());
|
||||
return false;
|
||||
}
|
||||
|
||||
DefaultLogger::get()->debug(" pass '" + passName + "'");
|
||||
|
||||
const string partAmbient = "ambient";
|
||||
const string partDiffuse = "diffuse";
|
||||
const string partSpecular = "specular";
|
||||
const string partEmissive = "emissive";
|
||||
const string partTextureUnit = "texture_unit";
|
||||
|
||||
while(linePart != partBlockEnd)
|
||||
{
|
||||
ss >> linePart;
|
||||
|
||||
// Skip commented lines
|
||||
if (linePart == partComment)
|
||||
{
|
||||
string postComment = SkipLine(ss);
|
||||
DefaultLogger::get()->debug(" //" + postComment + " (comment line ignored)");
|
||||
continue;
|
||||
}
|
||||
|
||||
// Colors
|
||||
/// @todo Support alpha via aiColor4D.
|
||||
if (linePart == partAmbient || linePart == partDiffuse || linePart == partSpecular || linePart == partEmissive)
|
||||
{
|
||||
float r, g, b;
|
||||
ss >> r >> g >> b;
|
||||
const aiColor3D color(r, g, b);
|
||||
|
||||
DefaultLogger::get()->debug(Formatter::format() << " " << linePart << " " << r << " " << g << " " << b);
|
||||
|
||||
if (linePart == partAmbient)
|
||||
{
|
||||
material->AddProperty(&color, 1, AI_MATKEY_COLOR_AMBIENT);
|
||||
}
|
||||
else if (linePart == partDiffuse)
|
||||
{
|
||||
material->AddProperty(&color, 1, AI_MATKEY_COLOR_DIFFUSE);
|
||||
}
|
||||
else if (linePart == partSpecular)
|
||||
{
|
||||
material->AddProperty(&color, 1, AI_MATKEY_COLOR_SPECULAR);
|
||||
}
|
||||
else if (linePart == partEmissive)
|
||||
{
|
||||
material->AddProperty(&color, 1, AI_MATKEY_COLOR_EMISSIVE);
|
||||
}
|
||||
}
|
||||
else if (linePart == partTextureUnit)
|
||||
{
|
||||
string textureUnitName = SkipLine(ss);
|
||||
ReadTextureUnit(Trim(textureUnitName), ss, material);
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
bool OgreImporter::ReadTextureUnit(const std::string &textureUnitName, stringstream &ss, aiMaterial *material)
|
||||
{
|
||||
string linePart;
|
||||
ss >> linePart;
|
||||
|
||||
if (linePart != partBlockStart)
|
||||
{
|
||||
DefaultLogger::get()->error(Formatter::format() << "Invalid material: Texture unit block start missing near index " << ss.tellg());
|
||||
return false;
|
||||
}
|
||||
|
||||
DefaultLogger::get()->debug(" texture_unit '" + textureUnitName + "'");
|
||||
|
||||
const string partTexture = "texture";
|
||||
const string partTextCoordSet = "tex_coord_set";
|
||||
const string partColorOp = "colour_op";
|
||||
|
||||
aiTextureType textureType = aiTextureType_NONE;
|
||||
std::string textureRef;
|
||||
int uvCoord = 0;
|
||||
|
||||
while(linePart != partBlockEnd)
|
||||
{
|
||||
ss >> linePart;
|
||||
|
||||
// Skip commented lines
|
||||
if (linePart == partComment)
|
||||
{
|
||||
string postComment = SkipLine(ss);
|
||||
DefaultLogger::get()->debug(" //" + postComment + " (comment line ignored)");
|
||||
continue;
|
||||
}
|
||||
|
||||
if (linePart == partTexture)
|
||||
{
|
||||
ss >> linePart;
|
||||
textureRef = linePart;
|
||||
|
||||
// User defined Assimp config property to detect texture type from filename.
|
||||
if (m_detectTextureTypeFromFilename)
|
||||
{
|
||||
size_t posSuffix = textureRef.find_last_of(".");
|
||||
size_t posUnderscore = textureRef.find_last_of("_");
|
||||
|
||||
if (posSuffix != string::npos && posUnderscore != string::npos && posSuffix > posUnderscore)
|
||||
{
|
||||
string identifier = Ogre::ToLower(textureRef.substr(posUnderscore, posSuffix - posUnderscore));
|
||||
DefaultLogger::get()->debug(Formatter::format() << "Detecting texture type from filename postfix '" << identifier << "'");
|
||||
|
||||
if (identifier == "_n" || identifier == "_nrm" || identifier == "_nrml" || identifier == "_normal" || identifier == "_normals" || identifier == "_normalmap")
|
||||
{
|
||||
textureType = aiTextureType_NORMALS;
|
||||
}
|
||||
else if (identifier == "_s" || identifier == "_spec" || identifier == "_specular" || identifier == "_specularmap")
|
||||
{
|
||||
textureType = aiTextureType_SPECULAR;
|
||||
}
|
||||
else if (identifier == "_l" || identifier == "_light" || identifier == "_lightmap" || identifier == "_occ" || identifier == "_occlusion")
|
||||
{
|
||||
textureType = aiTextureType_LIGHTMAP;
|
||||
}
|
||||
else if (identifier == "_disp" || identifier == "_displacement")
|
||||
{
|
||||
textureType = aiTextureType_DISPLACEMENT;
|
||||
}
|
||||
else
|
||||
{
|
||||
textureType = aiTextureType_DIFFUSE;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
textureType = aiTextureType_DIFFUSE;
|
||||
}
|
||||
}
|
||||
// Detect from texture unit name. This cannot be too broad as
|
||||
// authors might give names like "LightSaber" or "NormalNinja".
|
||||
else
|
||||
{
|
||||
string unitNameLower = Ogre::ToLower(textureUnitName);
|
||||
if (unitNameLower.find("normalmap") != string::npos)
|
||||
{
|
||||
textureType = aiTextureType_NORMALS;
|
||||
}
|
||||
else if (unitNameLower.find("specularmap") != string::npos)
|
||||
{
|
||||
textureType = aiTextureType_SPECULAR;
|
||||
}
|
||||
else if (unitNameLower.find("lightmap") != string::npos)
|
||||
{
|
||||
textureType = aiTextureType_LIGHTMAP;
|
||||
}
|
||||
else if (unitNameLower.find("displacementmap") != string::npos)
|
||||
{
|
||||
textureType = aiTextureType_DISPLACEMENT;
|
||||
}
|
||||
else
|
||||
{
|
||||
textureType = aiTextureType_DIFFUSE;
|
||||
}
|
||||
}
|
||||
}
|
||||
else if (linePart == partTextCoordSet)
|
||||
{
|
||||
ss >> uvCoord;
|
||||
}
|
||||
/// @todo Implement
|
||||
else if(linePart == partColorOp)
|
||||
{
|
||||
/*
|
||||
ss >> linePart;
|
||||
if("replace"==linePart)//I don't think, assimp has something for this...
|
||||
{
|
||||
}
|
||||
else if("modulate"==linePart)
|
||||
{
|
||||
//TODO: set value
|
||||
//material->AddProperty(aiTextureOp_Multiply)
|
||||
}
|
||||
*/
|
||||
}
|
||||
}
|
||||
|
||||
if (textureRef.empty())
|
||||
{
|
||||
DefaultLogger::get()->warn("Texture reference is empty, ignoring texture_unit.");
|
||||
return false;
|
||||
}
|
||||
if (textureType == aiTextureType_NONE)
|
||||
{
|
||||
DefaultLogger::get()->warn("Failed to detect texture type for '" + textureRef + "', ignoring texture_unit.");
|
||||
return false;
|
||||
}
|
||||
|
||||
unsigned int textureTypeIndex = m_textures[textureType];
|
||||
m_textures[textureType]++;
|
||||
|
||||
DefaultLogger::get()->debug(Formatter::format() << " texture '" << textureRef << "' type " << textureType
|
||||
<< " index " << textureTypeIndex << " UV " << uvCoord);
|
||||
|
||||
aiString assimpTextureRef(textureRef);
|
||||
material->AddProperty(&assimpTextureRef, AI_MATKEY_TEXTURE(textureType, textureTypeIndex));
|
||||
material->AddProperty(&uvCoord, 1, AI_MATKEY_UVWSRC(textureType, textureTypeIndex));
|
||||
|
||||
return true;
|
||||
}
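For reference, a minimal consumer-side sketch (not part of this patch) of how the texture
properties registered above with AI_MATKEY_TEXTURE and AI_MATKEY_UVWSRC can be read back
through the public aiMaterial API. The header path and the PrintNormalMaps helper name are
assumptions for illustration only.

#include <assimp/material.h>
#include <cstdio>

static void PrintNormalMaps(const aiMaterial *mat)
{
    const unsigned int count = mat->GetTextureCount(aiTextureType_NORMALS);
    for(unsigned int i = 0; i < count; ++i)
    {
        aiString path;
        unsigned int uvIndex = 0;
        if (mat->GetTexture(aiTextureType_NORMALS, i, &path, NULL, &uvIndex) == AI_SUCCESS)
        {
            printf("Normal map %u: %s (UV set %u)\n", i, path.C_Str(), uvIndex);
        }
    }
}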
|
||||
|
||||
} // Ogre
|
||||
} // Assimp
|
||||
|
||||
#endif // ASSIMP_BUILD_NO_OGRE_IMPORTER
|
||||
|
|
|
@ -42,7 +42,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|||
|
||||
#ifndef ASSIMP_BUILD_NO_OGRE_IMPORTER
|
||||
|
||||
#include "OgreImporter.hpp"
|
||||
#include "OgreImporter.h"
|
||||
#include "TinyFormatter.h"
|
||||
|
||||
using namespace std;
|
||||
|
@ -52,465 +52,518 @@ namespace Assimp
|
|||
namespace Ogre
|
||||
{
|
||||
|
||||
|
||||
void OgreImporter::ReadSubMesh(SubMesh &theSubMesh, XmlReader *Reader)
|
||||
void OgreImporter::ReadSubMesh(const unsigned int submeshIndex, SubMesh &submesh, XmlReader *reader)
|
||||
{
|
||||
if(Reader->getAttributeValue("usesharedvertices"))
|
||||
theSubMesh.SharedData=GetAttribute<bool>(Reader, "usesharedvertices");
|
||||
if (reader->getAttributeValue("material")) {
|
||||
submesh.MaterialName = GetAttribute<string>(reader, "material");
|
||||
}
|
||||
if (reader->getAttributeValue("use32bitindexes")) {
|
||||
submesh.Use32bitIndexes = GetAttribute<bool>(reader, "use32bitindexes");
|
||||
}
|
||||
if (reader->getAttributeValue("usesharedvertices")) {
|
||||
submesh.UseSharedGeometry = GetAttribute<bool>(reader, "usesharedvertices");
|
||||
}
|
||||
|
||||
DefaultLogger::get()->debug(Formatter::format() << "Reading submesh " << submeshIndex);
|
||||
DefaultLogger::get()->debug(Formatter::format() << " - Material '" << submesh.MaterialName << "'");
|
||||
DefaultLogger::get()->debug(Formatter::format() << " - Shader geometry = " << (submesh.UseSharedGeometry ? "true" : "false") <<
|
||||
", 32bit indexes = " << (submesh.Use32bitIndexes ? "true" : "false"));
|
||||
|
||||
XmlRead(Reader);
|
||||
//TODO: maybe we have alsways just 1 faces and 1 geometry and always in this order. this loop will only work correct, when the order
|
||||
//of faces and geometry changed, and not if we have more than one of one
|
||||
while( Reader->getNodeName()==string("faces")
|
||||
|| Reader->getNodeName()==string("geometry")
|
||||
|| Reader->getNodeName()==string("boneassignments"))
|
||||
{
|
||||
if(string(Reader->getNodeName())=="faces")//Read the face list
|
||||
{
|
||||
//some info logging:
|
||||
unsigned int NumFaces=GetAttribute<int>(Reader, "count");
|
||||
ostringstream ss; ss <<"Submesh has " << NumFaces << " Faces.";
|
||||
DefaultLogger::get()->debug(ss.str());
|
||||
/// @todo Fix above comment with better read logic below
|
||||
|
||||
while(XmlRead(Reader) && Reader->getNodeName()==string("face"))
|
||||
NextNode(reader);
|
||||
string currentNodeName = reader->getNodeName();
|
||||
|
||||
const string nnFaces = "faces";
|
||||
const string nnFace = "face";
|
||||
const string nnGeometry = "geometry";
|
||||
const string nnBoneAssignments = "boneassignments";
|
||||
const string nnVertexBuffer = "vertexbuffer";
|
||||
|
||||
bool quadWarned = false;
|
||||
|
||||
while(currentNodeName == nnFaces ||
|
||||
currentNodeName == nnGeometry ||
|
||||
currentNodeName == nnBoneAssignments)
|
||||
{
|
||||
if (currentNodeName == nnFaces)
|
||||
{
|
||||
unsigned int numFaces = GetAttribute<unsigned int>(reader, "count");
|
||||
|
||||
NextNode(reader);
|
||||
currentNodeName = reader->getNodeName();
|
||||
|
||||
while(currentNodeName == nnFace)
|
||||
{
|
||||
Face NewFace;
|
||||
NewFace.VertexIndices[0]=GetAttribute<int>(Reader, "v1");
|
||||
NewFace.VertexIndices[1]=GetAttribute<int>(Reader, "v2");
|
||||
NewFace.VertexIndices[2]=GetAttribute<int>(Reader, "v3");
|
||||
if(Reader->getAttributeValue("v4"))//this should be supported in the future
|
||||
{
|
||||
DefaultLogger::get()->warn("Submesh has quads, only traingles are supported!");
|
||||
//throw DeadlyImportError("Submesh has quads, only traingles are supported!");
|
||||
NewFace.VertexIndices[0] = GetAttribute<int>(reader, "v1");
|
||||
NewFace.VertexIndices[1] = GetAttribute<int>(reader, "v2");
|
||||
NewFace.VertexIndices[2] = GetAttribute<int>(reader, "v3");
|
||||
|
||||
/// @todo Support quads
|
||||
if (!quadWarned && reader->getAttributeValue("v4")) {
|
||||
DefaultLogger::get()->warn("Submesh has quads, only triangles are supported at the moment!");
|
||||
}
|
||||
theSubMesh.FaceList.push_back(NewFace);
|
||||
|
||||
submesh.Faces.push_back(NewFace);
|
||||
|
||||
// Advance
|
||||
NextNode(reader);
|
||||
currentNodeName = reader->getNodeName();
|
||||
}
|
||||
|
||||
}//end of faces
|
||||
else if(string(Reader->getNodeName())=="geometry")//Read the vertexdata
|
||||
{
|
||||
//some info logging:
|
||||
unsigned int NumVertices=GetAttribute<int>(Reader, "vertexcount");
|
||||
ostringstream ss; ss<<"VertexCount: " << NumVertices;
|
||||
DefaultLogger::get()->debug(ss.str());
|
||||
|
||||
//General Informations about vertices
|
||||
XmlRead(Reader);
|
||||
while(Reader->getNodeName()==string("vertexbuffer"))
|
||||
if (submesh.Faces.size() == numFaces)
|
||||
{
|
||||
ReadVertexBuffer(theSubMesh, Reader, NumVertices);
|
||||
DefaultLogger::get()->debug(Formatter::format() << " - Faces " << numFaces);
|
||||
}
|
||||
|
||||
//some error checking on the loaded data
|
||||
if(!theSubMesh.HasPositions)
|
||||
throw DeadlyImportError("No positions could be loaded!");
|
||||
|
||||
if(theSubMesh.HasNormals && theSubMesh.Normals.size() != NumVertices)
|
||||
throw DeadlyImportError("Wrong Number of Normals loaded!");
|
||||
|
||||
if(theSubMesh.HasTangents && theSubMesh.Tangents.size() != NumVertices)
|
||||
throw DeadlyImportError("Wrong Number of Tangents loaded!");
|
||||
|
||||
for(unsigned int i=0; i<theSubMesh.Uvs.size(); ++i)
|
||||
else
|
||||
{
|
||||
if(theSubMesh.Uvs[i].size() != NumVertices)
|
||||
throw DeadlyImportError("Wrong Number of Uvs loaded!");
|
||||
throw DeadlyImportError(Formatter::format() << "Read only " << submesh.Faces.size() << " faces when should have read " << numFaces);
|
||||
}
|
||||
|
||||
}//end of "geometry
|
||||
|
||||
|
||||
else if(Reader->getNodeName()==string("boneassignments"))
|
||||
{
|
||||
ReadBoneWeights(theSubMesh, Reader);
|
||||
}
|
||||
else if (currentNodeName == nnGeometry)
|
||||
{
|
||||
unsigned int numVertices = GetAttribute<int>(reader, "vertexcount");
|
||||
|
||||
NextNode(reader);
|
||||
while(string(reader->getNodeName()) == nnVertexBuffer) {
|
||||
ReadVertexBuffer(submesh, reader, numVertices);
|
||||
}
|
||||
}
|
||||
else if (reader->getNodeName() == nnBoneAssignments)
|
||||
{
|
||||
ReadBoneWeights(submesh, reader);
|
||||
}
|
||||
|
||||
currentNodeName = reader->getNodeName();
|
||||
}
|
||||
DefaultLogger::get()->debug((Formatter::format(),
|
||||
"Positionen: ",theSubMesh.Positions.size(),
|
||||
" Normale: ",theSubMesh.Normals.size(),
|
||||
" TexCoords: ",theSubMesh.Uvs.size(),
|
||||
" Tantents: ",theSubMesh.Tangents.size()
|
||||
));
|
||||
}
|
||||
|
||||
|
||||
void OgreImporter::ReadVertexBuffer(SubMesh &theSubMesh, XmlReader *Reader, unsigned int NumVertices)
|
||||
void OgreImporter::ReadVertexBuffer(SubMesh &submesh, XmlReader *reader, const unsigned int numVertices)
|
||||
{
|
||||
DefaultLogger::get()->debug("new Vertex Buffer");
|
||||
|
||||
bool ReadPositions=false;
|
||||
bool ReadNormals=false;
|
||||
bool ReadTangents=false;
|
||||
unsigned int NumUvs=0;
|
||||
|
||||
//-------------------- check, what we need to read: --------------------------------
|
||||
if(Reader->getAttributeValue("positions") && GetAttribute<bool>(Reader, "positions"))
|
||||
{
|
||||
ReadPositions=theSubMesh.HasPositions=true;
|
||||
theSubMesh.Positions.reserve(NumVertices);
|
||||
DefaultLogger::get()->debug("reading positions");
|
||||
}
|
||||
if(Reader->getAttributeValue("normals") && GetAttribute<bool>(Reader, "normals"))
|
||||
{
|
||||
ReadNormals=theSubMesh.HasNormals=true;
|
||||
theSubMesh.Normals.reserve(NumVertices);
|
||||
DefaultLogger::get()->debug("reading normals");
|
||||
}
|
||||
if(Reader->getAttributeValue("tangents") && GetAttribute<bool>(Reader, "tangents"))
|
||||
{
|
||||
ReadTangents=theSubMesh.HasTangents=true;
|
||||
theSubMesh.Tangents.reserve(NumVertices);
|
||||
DefaultLogger::get()->debug("reading tangents");
|
||||
}
|
||||
|
||||
if(Reader->getAttributeValue("texture_coords"))
|
||||
{
|
||||
NumUvs=GetAttribute<unsigned int>(Reader, "texture_coords");
|
||||
theSubMesh.Uvs.resize(NumUvs);
|
||||
for(unsigned int i=0; i<theSubMesh.Uvs.size(); ++i) theSubMesh.Uvs[i].reserve(NumVertices);
|
||||
DefaultLogger::get()->debug("reading texture coords");
|
||||
}
|
||||
//___________________________________________________________________
|
||||
|
||||
|
||||
//check if we will load anything
|
||||
if(!( ReadPositions || ReadNormals || ReadTangents || (NumUvs>0) ))
|
||||
DefaultLogger::get()->warn("vertexbuffer seams to be empty!");
|
||||
DefaultLogger::get()->debug(Formatter::format() << "Reading vertex buffer with " << numVertices << " vertices");
|
||||
|
||||
submesh.HasGeometry = true;
|
||||
|
||||
//read all the vertices:
|
||||
XmlRead(Reader);
|
||||
|
||||
/*it might happen, that we have more than one attribute per vertex (they are not splitted to different buffers)
|
||||
so the break condition is a bit tricky */
|
||||
while(Reader->getNodeName()==string("vertex")
|
||||
||Reader->getNodeName()==string("position")
|
||||
||Reader->getNodeName()==string("normal")
|
||||
||Reader->getNodeName()==string("tangent")
|
||||
||Reader->getNodeName()==string("texcoord")
|
||||
||Reader->getNodeName()==string("colour_diffuse"))
|
||||
if (reader->getAttributeValue("positions") && GetAttribute<bool>(reader, "positions"))
|
||||
{
|
||||
if(Reader->getNodeName()==string("vertex"))
|
||||
XmlRead(Reader);//Read an attribute tag
|
||||
submesh.HasPositions = true;
|
||||
submesh.Positions.reserve(numVertices);
|
||||
DefaultLogger::get()->debug(" - Has positions");
|
||||
}
|
||||
if (reader->getAttributeValue("normals") && GetAttribute<bool>(reader, "normals"))
|
||||
{
|
||||
submesh.HasNormals = true;
|
||||
submesh.Normals.reserve(numVertices);
|
||||
DefaultLogger::get()->debug(" - Has normals");
|
||||
}
|
||||
if (reader->getAttributeValue("tangents") && GetAttribute<bool>(reader, "tangents"))
|
||||
{
|
||||
submesh.HasTangents = true;
|
||||
submesh.Tangents.reserve(numVertices);
|
||||
DefaultLogger::get()->debug(" - Has tangents");
|
||||
}
|
||||
if (reader->getAttributeValue("texture_coords"))
|
||||
{
|
||||
submesh.Uvs.resize(GetAttribute<unsigned int>(reader, "texture_coords"));
|
||||
for(size_t i=0, len=submesh.Uvs.size(); i<len; ++i) {
|
||||
submesh.Uvs[i].reserve(numVertices);
|
||||
}
|
||||
DefaultLogger::get()->debug(Formatter::format() << " - Has " << submesh.Uvs.size() << " texture coords");
|
||||
}
|
||||
|
||||
//Position
|
||||
if(ReadPositions && Reader->getNodeName()==string("position"))
|
||||
if (!submesh.HasPositions) {
|
||||
throw DeadlyImportError("Vertex buffer does not contain positions!");
|
||||
}
|
||||
|
||||
const string nnVertex = "vertex";
|
||||
const string nnPosition = "position";
|
||||
const string nnNormal = "normal";
|
||||
const string nnTangent = "tangent";
|
||||
const string nnBinormal = "binormal";
|
||||
const string nnTexCoord = "texcoord";
|
||||
const string nnColorDiffuse = "colour_diffuse";
|
||||
const string nnColorSpecular = "colour_specular";
|
||||
|
||||
bool warnBinormal = true;
|
||||
bool warnColorDiffuse = true;
|
||||
bool warnColorSpecular = true;
|
||||
|
||||
NextNode(reader);
|
||||
string currentNodeName = reader->getNodeName();
|
||||
|
||||
/// @todo Make this loop nicer.
|
||||
while(currentNodeName == nnVertex ||
|
||||
currentNodeName == nnPosition ||
|
||||
currentNodeName == nnNormal ||
|
||||
currentNodeName == nnTangent ||
|
||||
currentNodeName == nnBinormal ||
|
||||
currentNodeName == nnTexCoord ||
|
||||
currentNodeName == nnColorDiffuse ||
|
||||
currentNodeName == nnColorSpecular)
|
||||
{
|
||||
if (currentNodeName == nnVertex)
|
||||
{
|
||||
NextNode(reader);
|
||||
currentNodeName = reader->getNodeName();
|
||||
}
|
||||
|
||||
/// @todo Implement nnBinormal, nnColorDiffuse and nnColorSpecular
|
||||
|
||||
if (submesh.HasPositions && currentNodeName == nnPosition)
|
||||
{
|
||||
aiVector3D NewPos;
|
||||
NewPos.x=GetAttribute<float>(Reader, "x");
|
||||
NewPos.y=GetAttribute<float>(Reader, "y");
|
||||
NewPos.z=GetAttribute<float>(Reader, "z");
|
||||
theSubMesh.Positions.push_back(NewPos);
|
||||
NewPos.x = GetAttribute<float>(reader, "x");
|
||||
NewPos.y = GetAttribute<float>(reader, "y");
|
||||
NewPos.z = GetAttribute<float>(reader, "z");
|
||||
submesh.Positions.push_back(NewPos);
|
||||
}
|
||||
|
||||
//Normal
|
||||
else if(ReadNormals && Reader->getNodeName()==string("normal"))
|
||||
else if (submesh.HasNormals && currentNodeName == nnNormal)
|
||||
{
|
||||
aiVector3D NewNormal;
|
||||
NewNormal.x=GetAttribute<float>(Reader, "x");
|
||||
NewNormal.y=GetAttribute<float>(Reader, "y");
|
||||
NewNormal.z=GetAttribute<float>(Reader, "z");
|
||||
theSubMesh.Normals.push_back(NewNormal);
|
||||
NewNormal.x = GetAttribute<float>(reader, "x");
|
||||
NewNormal.y = GetAttribute<float>(reader, "y");
|
||||
NewNormal.z = GetAttribute<float>(reader, "z");
|
||||
submesh.Normals.push_back(NewNormal);
|
||||
}
|
||||
|
||||
//Tangent
|
||||
else if(ReadTangents && Reader->getNodeName()==string("tangent"))
|
||||
else if (submesh.HasTangents && currentNodeName == nnTangent)
|
||||
{
|
||||
aiVector3D NewTangent;
|
||||
NewTangent.x=GetAttribute<float>(Reader, "x");
|
||||
NewTangent.y=GetAttribute<float>(Reader, "y");
|
||||
NewTangent.z=GetAttribute<float>(Reader, "z");
|
||||
theSubMesh.Tangents.push_back(NewTangent);
|
||||
NewTangent.x = GetAttribute<float>(reader, "x");
|
||||
NewTangent.y = GetAttribute<float>(reader, "y");
|
||||
NewTangent.z = GetAttribute<float>(reader, "z");
|
||||
submesh.Tangents.push_back(NewTangent);
|
||||
}
|
||||
|
||||
//Uv:
|
||||
else if(NumUvs>0 && Reader->getNodeName()==string("texcoord"))
|
||||
else if (submesh.Uvs.size() > 0 && currentNodeName == nnTexCoord)
|
||||
{
|
||||
for(unsigned int i=0; i<NumUvs; ++i)
|
||||
for(size_t i=0, len=submesh.Uvs.size(); i<len; ++i)
|
||||
{
|
||||
if(Reader->getNodeName()!=string("texcoord"))
|
||||
{
|
||||
DefaultLogger::get()->warn(string("Not enough UVs in Vertex: ")+Reader->getNodeName());
|
||||
if (currentNodeName != nnTexCoord) {
|
||||
throw DeadlyImportError("Vertex buffer declared more UVs than can be found in a vertex");
|
||||
}
|
||||
|
||||
aiVector3D NewUv;
|
||||
NewUv.x=GetAttribute<float>(Reader, "u");
|
||||
NewUv.y=GetAttribute<float>(Reader, "v")*(-1)+1;//flip the uv vertikal, blender exports them so!
|
||||
theSubMesh.Uvs[i].push_back(NewUv);
|
||||
XmlRead(Reader);
|
||||
NewUv.x = GetAttribute<float>(reader, "u");
|
||||
NewUv.y = GetAttribute<float>(reader, "v") * (-1)+1; //flip the uv vertikal, blender exports them so! (ahem... @todo ????)
|
||||
submesh.Uvs[i].push_back(NewUv);
|
||||
|
||||
NextNode(reader);
|
||||
currentNodeName = reader->getNodeName();
|
||||
}
|
||||
continue;//because we already read the next node...
|
||||
// Continue main loop as above already read next node
|
||||
continue;
|
||||
}
|
||||
|
||||
//Color:
|
||||
//TODO: actually save this data!
|
||||
else if(Reader->getNodeName()==string("colour_diffuse"))
|
||||
{
|
||||
//do nothing, because we not yet support them
|
||||
}
|
||||
|
||||
//Attribute could not be read
|
||||
else
|
||||
{
|
||||
DefaultLogger::get()->warn(string("Attribute was not read: ")+Reader->getNodeName());
|
||||
}
|
||||
|
||||
XmlRead(Reader);//Read the Vertex tag
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void OgreImporter::ReadBoneWeights(SubMesh &theSubMesh, XmlReader *Reader)
|
||||
{
|
||||
theSubMesh.Weights.resize(theSubMesh.Positions.size());
|
||||
while(XmlRead(Reader) && Reader->getNodeName()==string("vertexboneassignment"))
|
||||
{
|
||||
Weight NewWeight;
|
||||
unsigned int VertexId=GetAttribute<int>(Reader, "vertexindex");
|
||||
NewWeight.BoneId=GetAttribute<int>(Reader, "boneindex");
|
||||
NewWeight.Value=GetAttribute<float>(Reader, "weight");
|
||||
//calculate the number of bones used (this is the highest id +1 becuase bone ids start at 0)
|
||||
theSubMesh.BonesUsed=max(theSubMesh.BonesUsed, NewWeight.BoneId+1);
|
||||
|
||||
theSubMesh.Weights[VertexId].push_back(NewWeight);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
void OgreImporter::ProcessSubMesh(SubMesh &theSubMesh, SubMesh &theSharedGeometry)
|
||||
{
|
||||
//---------------Make all Vertexes unique: (this is required by assimp)-----------------------
|
||||
vector<Face> UniqueFaceList(theSubMesh.FaceList.size());
|
||||
unsigned int UniqueVertexCount=theSubMesh.FaceList.size()*3;//*3 because each face consists of 3 vertexes, because we only support triangles^^
|
||||
|
||||
vector<aiVector3D> UniquePositions(UniqueVertexCount);
|
||||
|
||||
vector<aiVector3D> UniqueNormals(UniqueVertexCount);
|
||||
|
||||
vector<aiVector3D> UniqueTangents(UniqueVertexCount);
|
||||
|
||||
vector< vector<Weight> > UniqueWeights(UniqueVertexCount);
|
||||
|
||||
vector< vector<aiVector3D> > UniqueUvs(theSubMesh.Uvs.size());
|
||||
for(unsigned int i=0; i<UniqueUvs.size(); ++i) UniqueUvs[i].resize(UniqueVertexCount);
|
||||
|
||||
|
||||
|
||||
//Support for shared data:
|
||||
/*We can use this loop to copy vertex informations from the shared data pool. In order to do so
|
||||
we just use a reference to a submodel instead of our submodel itself*/
|
||||
|
||||
SubMesh& VertexSource= theSubMesh.SharedData ? theSharedGeometry : theSubMesh;
|
||||
if(theSubMesh.SharedData)//copy vertexinformations to our mesh:
|
||||
{
|
||||
theSubMesh.HasPositions=theSharedGeometry.HasPositions;
|
||||
theSubMesh.HasNormals=theSharedGeometry.HasNormals;
|
||||
theSubMesh.HasTangents=theSharedGeometry.HasTangents;
|
||||
|
||||
theSubMesh.BonesUsed=theSharedGeometry.BonesUsed;
|
||||
|
||||
UniqueUvs.resize(theSharedGeometry.Uvs.size());
|
||||
for(unsigned int i=0; i<UniqueUvs.size(); ++i) UniqueUvs[i].resize(UniqueVertexCount);
|
||||
}
|
||||
|
||||
for(unsigned int i=0; i<theSubMesh.FaceList.size(); ++i)
|
||||
{
|
||||
//We precalculate the index vlaues her, because we need them in all vertex attributes
|
||||
unsigned int Vertex1=theSubMesh.FaceList[i].VertexIndices[0];
|
||||
unsigned int Vertex2=theSubMesh.FaceList[i].VertexIndices[1];
|
||||
unsigned int Vertex3=theSubMesh.FaceList[i].VertexIndices[2];
|
||||
|
||||
UniquePositions[3*i+0]=VertexSource.Positions[Vertex1];
|
||||
UniquePositions[3*i+1]=VertexSource.Positions[Vertex2];
|
||||
UniquePositions[3*i+2]=VertexSource.Positions[Vertex3];
|
||||
|
||||
if(VertexSource.HasNormals)
|
||||
{
|
||||
UniqueNormals[3*i+0]=VertexSource.Normals[Vertex1];
|
||||
UniqueNormals[3*i+1]=VertexSource.Normals[Vertex2];
|
||||
UniqueNormals[3*i+2]=VertexSource.Normals[Vertex3];
|
||||
}
|
||||
|
||||
if(VertexSource.HasTangents)
|
||||
{
|
||||
UniqueTangents[3*i+0]=VertexSource.Tangents[Vertex1];
|
||||
UniqueTangents[3*i+1]=VertexSource.Tangents[Vertex2];
|
||||
UniqueTangents[3*i+2]=VertexSource.Tangents[Vertex3];
|
||||
}
|
||||
|
||||
if(UniqueUvs.size()>0)
|
||||
{
|
||||
for(unsigned int j=0; j<UniqueUvs.size(); ++j)
|
||||
/// @todo Remove this stuff once implemented. We only want to log warnings once per element.
|
||||
bool warn = true;
|
||||
if (currentNodeName == nnBinormal)
|
||||
{
|
||||
UniqueUvs[j][3*i+0]=VertexSource.Uvs[j][Vertex1];
|
||||
UniqueUvs[j][3*i+1]=VertexSource.Uvs[j][Vertex2];
|
||||
UniqueUvs[j][3*i+2]=VertexSource.Uvs[j][Vertex3];
|
||||
if (warnBinormal)
|
||||
{
|
||||
warnBinormal = false;
|
||||
}
|
||||
else
|
||||
{
|
||||
warn = false;
|
||||
}
|
||||
}
|
||||
else if (currentNodeName == nnColorDiffuse)
|
||||
{
|
||||
if (warnColorDiffuse)
|
||||
{
|
||||
warnColorDiffuse = false;
|
||||
}
|
||||
else
|
||||
{
|
||||
warn = false;
|
||||
}
|
||||
}
|
||||
else if (currentNodeName == nnColorSpecular)
|
||||
{
|
||||
if (warnColorSpecular)
|
||||
{
|
||||
warnColorSpecular = false;
|
||||
}
|
||||
else
|
||||
{
|
||||
warn = false;
|
||||
}
|
||||
}
|
||||
if (warn) {
|
||||
DefaultLogger::get()->warn(string("Vertex buffer attribute read not implemented for element: ") + currentNodeName);
|
||||
}
|
||||
}
|
||||
|
||||
if(VertexSource.Weights.size() > 0)
|
||||
// Advance
|
||||
NextNode(reader);
|
||||
currentNodeName = reader->getNodeName();
|
||||
}
|
||||
|
||||
DefaultLogger::get()->debug(Formatter::format() <<
|
||||
" - Positions " << submesh.Positions.size() <<
|
||||
" Normals " << submesh.Normals.size() <<
|
||||
" TexCoords " << submesh.Uvs.size() <<
|
||||
" Tangents " << submesh.Tangents.size());
|
||||
|
||||
// Sanity checks
|
||||
if (submesh.HasNormals && submesh.Normals.size() != numVertices) {
|
||||
throw DeadlyImportError(Formatter::format() << "Read only " << submesh.Normals.size() << " normals when should have read " << numVertices);
|
||||
}
|
||||
if (submesh.HasTangents && submesh.Tangents.size() != numVertices) {
|
||||
throw DeadlyImportError(Formatter::format() << "Read only " << submesh.Tangents.size() << " tangents when should have read " << numVertices);
|
||||
}
|
||||
for(unsigned int i=0; i<submesh.Uvs.size(); ++i)
|
||||
{
|
||||
if (submesh.Uvs[i].size() != numVertices) {
|
||||
throw DeadlyImportError(Formatter::format() << "Read only " << submesh.Uvs[i].size()
|
||||
<< " uvs for uv index " << i << " when should have read " << numVertices);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void OgreImporter::ReadBoneWeights(SubMesh &submesh, XmlReader *reader)
|
||||
{
|
||||
submesh.Weights.resize(submesh.Positions.size());
|
||||
|
||||
unsigned int numRead = 0;
|
||||
const string nnVertexBoneAssignment = "vertexboneassignment";
|
||||
|
||||
NextNode(reader);
|
||||
while(CurrentNodeNameEquals(reader, nnVertexBoneAssignment))
|
||||
{
|
||||
numRead++;
|
||||
|
||||
BoneWeight weight;
|
||||
weight.Id = GetAttribute<int>(reader, "boneindex");
|
||||
weight.Value = GetAttribute<float>(reader, "weight");
|
||||
|
||||
// Calculate the number of bones used (this is the highest id + 1, because bone ids start at 0).
|
||||
/// @todo This can probably be refactored to something else.
|
||||
submesh.BonesUsed = max(submesh.BonesUsed, weight.Id+1);
|
||||
|
||||
const unsigned int vertexId = GetAttribute<int>(reader, "vertexindex");
|
||||
submesh.Weights[vertexId].push_back(weight);
|
||||
|
||||
NextNode(reader);
|
||||
}
|
||||
DefaultLogger::get()->debug(Formatter::format() << " - Bone weights " << numRead);
|
||||
}
|
||||
|
||||
void OgreImporter::ProcessSubMesh(SubMesh &submesh, SubMesh &sharedGeometry)
|
||||
{
|
||||
// Make all vertexes unique. Required by Assimp.
|
||||
vector<Face> uniqueFaceList(submesh.Faces.size());
|
||||
unsigned int uniqueVertexCount = submesh.Faces.size() * 3;
|
||||
|
||||
vector<aiVector3D> uniquePositions(uniqueVertexCount);
|
||||
vector<aiVector3D> uniqueNormals(uniqueVertexCount);
|
||||
vector<aiVector3D> uniqueTangents(uniqueVertexCount);
|
||||
|
||||
vector<vector<BoneWeight> > uniqueWeights(uniqueVertexCount);
|
||||
vector<vector<aiVector3D> > uniqueUvs(submesh.UseSharedGeometry ? sharedGeometry.Uvs.size() : submesh.Uvs.size());
|
||||
|
||||
for(size_t uvi=0; uvi<uniqueUvs.size(); ++uvi) {
|
||||
uniqueUvs[uvi].resize(uniqueVertexCount);
|
||||
}
|
||||
|
||||
/* Support for shared geometry.
|
||||
We can use this loop to copy vertex informations from the shared data pool. In order to do so
|
||||
we just use a reference to a submodel instead of our submodel itself */
|
||||
SubMesh &vertexSource = (submesh.UseSharedGeometry ? sharedGeometry : submesh);
|
||||
if (submesh.UseSharedGeometry)
|
||||
{
|
||||
submesh.HasPositions = sharedGeometry.HasPositions;
|
||||
submesh.HasNormals = sharedGeometry.HasNormals;
|
||||
submesh.HasTangents = sharedGeometry.HasTangents;
|
||||
submesh.BonesUsed = sharedGeometry.BonesUsed;
|
||||
}
|
||||
|
||||
for (size_t i=0, flen=submesh.Faces.size(); i<flen; ++i)
|
||||
{
|
||||
const Face &face = submesh.Faces[i];
|
||||
|
||||
// We pre-calculate the index values here,
|
||||
// because we need them in all vertex attributes.
|
||||
unsigned int v1 = face.VertexIndices[0];
|
||||
unsigned int v2 = face.VertexIndices[1];
|
||||
unsigned int v3 = face.VertexIndices[2];
|
||||
|
||||
size_t pos = i*3;
|
||||
|
||||
uniqueFaceList[i].VertexIndices[0] = pos;
|
||||
uniqueFaceList[i].VertexIndices[1] = pos + 1;
|
||||
uniqueFaceList[i].VertexIndices[2] = pos + 2;
|
||||
|
||||
uniquePositions[pos] = vertexSource.Positions[v1];
|
||||
uniquePositions[pos+1] = vertexSource.Positions[v2];
|
||||
uniquePositions[pos+2] = vertexSource.Positions[v3];
|
||||
|
||||
if (vertexSource.HasNormals)
|
||||
{
|
||||
UniqueWeights[3*i+0]=VertexSource.Weights[Vertex1];
|
||||
UniqueWeights[3*i+1]=VertexSource.Weights[Vertex2];
|
||||
UniqueWeights[3*i+2]=VertexSource.Weights[Vertex3];
|
||||
uniqueNormals[pos ] = vertexSource.Normals[v1];
|
||||
uniqueNormals[pos+1] = vertexSource.Normals[v2];
|
||||
uniqueNormals[pos+2] = vertexSource.Normals[v3];
|
||||
}
|
||||
|
||||
//The indexvalues a just continuous numbers (0, 1, 2, 3, 4, 5, 6...)
|
||||
UniqueFaceList[i].VertexIndices[0]=3*i+0;
|
||||
UniqueFaceList[i].VertexIndices[1]=3*i+1;
|
||||
UniqueFaceList[i].VertexIndices[2]=3*i+2;
|
||||
}
|
||||
//_________________________________________________________________________________________
|
||||
|
||||
//now we have the unique datas, but want them in the SubMesh, so we swap all the containers:
|
||||
//if we don't have one of them, we just swap empty containers, so everything is ok
|
||||
theSubMesh.FaceList.swap(UniqueFaceList);
|
||||
theSubMesh.Positions.swap(UniquePositions);
|
||||
theSubMesh.Normals.swap(UniqueNormals);
|
||||
theSubMesh.Tangents.swap(UniqueTangents);
|
||||
theSubMesh.Uvs.swap(UniqueUvs);
|
||||
theSubMesh.Weights.swap(UniqueWeights);
|
||||
|
||||
|
||||
|
||||
//------------- normalize weights -----------------------------
|
||||
//The Blender exporter doesn't care about whether the sum of all boneweights for a single vertex equals 1 or not,
|
||||
//so we have to make this sure:
|
||||
for(unsigned int VertexId=0; VertexId<theSubMesh.Weights.size(); ++VertexId)//iterate over all vertices
|
||||
{
|
||||
float WeightSum=0.0f;
|
||||
for(unsigned int BoneId=0; BoneId<theSubMesh.Weights[VertexId].size(); ++BoneId)//iterate over all bones
|
||||
if (vertexSource.HasTangents)
|
||||
{
|
||||
WeightSum+=theSubMesh.Weights[VertexId][BoneId].Value;
|
||||
uniqueTangents[pos] = vertexSource.Tangents[v1];
|
||||
uniqueTangents[pos+1] = vertexSource.Tangents[v2];
|
||||
uniqueTangents[pos+2] = vertexSource.Tangents[v3];
|
||||
}
|
||||
|
||||
for(size_t uvi=0; uvi<uniqueUvs.size(); ++uvi)
|
||||
{
|
||||
const std::vector<aiVector3D> &uv = vertexSource.Uvs[uvi];
|
||||
uniqueUvs[uvi][pos] = uv[v1];
|
||||
uniqueUvs[uvi][pos+1] = uv[v2];
|
||||
uniqueUvs[uvi][pos+2] = uv[v3];
|
||||
}
|
||||
|
||||
if (!vertexSource.Weights.empty())
|
||||
{
|
||||
uniqueWeights[pos] = vertexSource.Weights[v1];
|
||||
uniqueWeights[pos+1] = vertexSource.Weights[v2];
|
||||
uniqueWeights[pos+2] = vertexSource.Weights[v3];
|
||||
}
|
||||
}
|
||||
|
||||
// Now we have the unique data, but want them in the SubMesh, so we swap all the containers.
|
||||
// If we don't have one of them, we just swap empty containers, so everything is ok.
|
||||
submesh.Faces.swap(uniqueFaceList);
|
||||
submesh.Positions.swap(uniquePositions);
|
||||
submesh.Normals.swap(uniqueNormals);
|
||||
submesh.Tangents.swap(uniqueTangents);
|
||||
submesh.Uvs.swap(uniqueUvs);
|
||||
submesh.Weights.swap(uniqueWeights);
|
||||
|
||||
// Normalize bone weights
|
||||
// For example the Blender exporter doesn't care about whether the sum of all bone
|
||||
// weights for a single vertex equals 1 or not, so validate here.
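// For example, weights of {0.6, 0.6} sum to 1.2, which is outside the 0.95..1.05 tolerance
// checked below, so both values get divided by 1.2 and the vertex ends up with {0.5, 0.5}.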
|
||||
for(size_t vertexId=0, wlen=submesh.Weights.size(); vertexId<wlen; ++vertexId)
|
||||
{
|
||||
std::vector<BoneWeight> &weights = submesh.Weights[vertexId];
|
||||
|
||||
float sum = 0.0f;
|
||||
for(size_t boneId=0, blen=weights.size(); boneId<blen; ++boneId) {
|
||||
sum += weights[boneId].Value;
|
||||
}
|
||||
|
||||
//check if the sum is too far away from 1
|
||||
if(WeightSum<1.0f-0.05f || WeightSum>1.0f+0.05f)
|
||||
if ((sum < (1.0f - 0.05f)) || (sum > (1.0f + 0.05f)))
|
||||
{
|
||||
//normalize all weights:
|
||||
for(unsigned int BoneId=0; BoneId<theSubMesh.Weights[VertexId].size(); ++BoneId)//iterate over all bones
|
||||
{
|
||||
theSubMesh.Weights[VertexId][BoneId].Value/=WeightSum;
|
||||
for(size_t boneId=0, blen=weights.size(); boneId<blen; ++boneId) {
|
||||
weights[boneId].Value /= sum;
|
||||
}
|
||||
}
|
||||
}
|
||||
//_________________________________________________________
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
aiMesh* OgreImporter::CreateAssimpSubMesh(const SubMesh& theSubMesh, const vector<Bone>& Bones) const
|
||||
aiMesh *OgreImporter::CreateAssimpSubMesh(aiScene *pScene, const SubMesh& submesh, const vector<Bone>& bones) const
|
||||
{
|
||||
const aiScene* const m_CurrentScene=this->m_CurrentScene;//make sure, that we can access but not change the scene
|
||||
(void)m_CurrentScene;
|
||||
const size_t sizeVector3D = sizeof(aiVector3D);
|
||||
|
||||
aiMesh* NewAiMesh=new aiMesh();
|
||||
|
||||
//Positions
|
||||
NewAiMesh->mVertices=new aiVector3D[theSubMesh.Positions.size()];
|
||||
memcpy(NewAiMesh->mVertices, &theSubMesh.Positions[0], theSubMesh.Positions.size()*sizeof(aiVector3D));
|
||||
NewAiMesh->mNumVertices=theSubMesh.Positions.size();
|
||||
aiMesh *dest = new aiMesh();
|
||||
|
||||
//Normals
|
||||
if(theSubMesh.HasNormals)
|
||||
// Material
|
||||
dest->mMaterialIndex = submesh.MaterialIndex;
|
||||
|
||||
// Positions
|
||||
dest->mVertices = new aiVector3D[submesh.Positions.size()];
|
||||
dest->mNumVertices = submesh.Positions.size();
|
||||
memcpy(dest->mVertices, &submesh.Positions[0], submesh.Positions.size() * sizeVector3D);
|
||||
|
||||
// Normals
|
||||
if (submesh.HasNormals)
|
||||
{
|
||||
NewAiMesh->mNormals=new aiVector3D[theSubMesh.Normals.size()];
|
||||
memcpy(NewAiMesh->mNormals, &theSubMesh.Normals[0], theSubMesh.Normals.size()*sizeof(aiVector3D));
|
||||
dest->mNormals = new aiVector3D[submesh.Normals.size()];
|
||||
memcpy(dest->mNormals, &submesh.Normals[0], submesh.Normals.size() * sizeVector3D);
|
||||
}
|
||||
|
||||
// Tangents
|
||||
// Until we have support for bitangents, no tangents will be written
|
||||
/// @todo Investigate why this is the case.
|
||||
if (submesh.HasTangents)
|
||||
{
|
||||
DefaultLogger::get()->warn("Tangents found from Ogre mesh but writing to Assimp mesh not yet supported!");
|
||||
//dest->mTangents = new aiVector3D[submesh.Tangents.size()];
|
||||
//memcpy(dest->mTangents, &submesh.Tangents[0], submesh.Tangents.size() * sizeVector3D);
|
||||
}
|
||||
|
||||
|
||||
//until we have support for bitangents, no tangents will be written
|
||||
/*
|
||||
//Tangents
|
||||
if(theSubMesh.HasTangents)
|
||||
// UVs
|
||||
for (size_t i=0, len=submesh.Uvs.size(); i<len; ++i)
|
||||
{
|
||||
NewAiMesh->mTangents=new aiVector3D[theSubMesh.Tangents.size()];
|
||||
memcpy(NewAiMesh->mTangents, &theSubMesh.Tangents[0], theSubMesh.Tangents.size()*sizeof(aiVector3D));
|
||||
dest->mNumUVComponents[i] = 2;
|
||||
dest->mTextureCoords[i] = new aiVector3D[submesh.Uvs[i].size()];
|
||||
memcpy(dest->mTextureCoords[i], &(submesh.Uvs[i][0]), submesh.Uvs[i].size() * sizeVector3D);
|
||||
}
|
||||
*/
|
||||
|
||||
//Uvs
|
||||
if(theSubMesh.Uvs.size()>0)
|
||||
// Bone weights. Convert internal vertex-to-bone mapping to bone-to-vertex.
|
||||
vector<vector<aiVertexWeight> > assimpWeights(submesh.BonesUsed);
|
||||
for(size_t vertexId=0, len=submesh.Weights.size(); vertexId<len; ++vertexId)
|
||||
{
|
||||
for(unsigned int i=0; i<theSubMesh.Uvs.size(); ++i)
|
||||
const vector<BoneWeight> &vertexWeights = submesh.Weights[vertexId];
|
||||
for (size_t boneId=0, len=vertexWeights.size(); boneId<len; ++boneId)
|
||||
{
|
||||
NewAiMesh->mNumUVComponents[i]=2;
|
||||
NewAiMesh->mTextureCoords[i]=new aiVector3D[theSubMesh.Uvs[i].size()];
|
||||
memcpy(NewAiMesh->mTextureCoords[i], &(theSubMesh.Uvs[i][0]), theSubMesh.Uvs[i].size()*sizeof(aiVector3D));
|
||||
const BoneWeight &ogreWeight = vertexWeights[boneId];
|
||||
assimpWeights[ogreWeight.Id].push_back(aiVertexWeight(vertexId, ogreWeight.Value));
|
||||
}
|
||||
}
|
||||
|
||||
// Bones.
|
||||
vector<aiBone*> assimpBones;
|
||||
assimpBones.reserve(submesh.BonesUsed);
|
||||
|
||||
//---------------------------------------- Bones --------------------------------------------
|
||||
|
||||
//Copy the weights in in Bone-Vertices Struktur
|
||||
//(we have them in a Vertex-Bones Structur, this is much easier for making them unique, which is required by assimp
|
||||
vector< vector<aiVertexWeight> > aiWeights(theSubMesh.BonesUsed);//now the outer list are the bones, and the inner vector the vertices
|
||||
for(unsigned int VertexId=0; VertexId<theSubMesh.Weights.size(); ++VertexId)//iterate over all vertices
|
||||
for(size_t boneId=0, len=submesh.BonesUsed; boneId<len; ++boneId)
|
||||
{
|
||||
for(unsigned int BoneId=0; BoneId<theSubMesh.Weights[VertexId].size(); ++BoneId)//iterate over all bones
|
||||
{
|
||||
aiVertexWeight NewWeight;
|
||||
NewWeight.mVertexId=VertexId;//the current Vertex, we can't use the Id form the submehs weights, because they are bone id's
|
||||
NewWeight.mWeight=theSubMesh.Weights[VertexId][BoneId].Value;
|
||||
aiWeights[theSubMesh.Weights[VertexId][BoneId].BoneId].push_back(NewWeight);
|
||||
const vector<aiVertexWeight> &boneWeights = assimpWeights[boneId];
|
||||
if (boneWeights.size() == 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// @note The bone list is sorted by ids; this was done in ReadSkeleton.
|
||||
aiBone *assimpBone = new aiBone();
|
||||
assimpBone->mName = bones[boneId].Name;
|
||||
assimpBone->mOffsetMatrix = bones[boneId].BoneToWorldSpace;
|
||||
assimpBone->mNumWeights = boneWeights.size();
|
||||
assimpBone->mWeights = new aiVertexWeight[boneWeights.size()];
|
||||
memcpy(assimpBone->mWeights, &boneWeights[0], boneWeights.size() * sizeof(aiVertexWeight));
|
||||
|
||||
assimpBones.push_back(assimpBone);
|
||||
}
|
||||
|
||||
if (!assimpBones.empty())
|
||||
{
|
||||
dest->mBones = new aiBone*[assimpBones.size()];
|
||||
dest->mNumBones = assimpBones.size();
|
||||
|
||||
for(size_t i=0, len=assimpBones.size(); i<len; ++i) {
|
||||
dest->mBones[i] = assimpBones[i];
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Faces
|
||||
dest->mFaces = new aiFace[submesh.Faces.size()];
|
||||
dest->mNumFaces = submesh.Faces.size();
|
||||
|
||||
vector<aiBone*> aiBones;
|
||||
aiBones.reserve(theSubMesh.BonesUsed);//the vector might be smaller, because there might be empty bones (bones that are not attached to any vertex)
|
||||
|
||||
//create all the bones and fill them with informations
|
||||
for(unsigned int i=0; i<theSubMesh.BonesUsed; ++i)
|
||||
for(size_t i=0, len=submesh.Faces.size(); i<len; ++i)
|
||||
{
|
||||
if(aiWeights[i].size()>0)
|
||||
{
|
||||
aiBone* NewBone=new aiBone();
|
||||
NewBone->mNumWeights=aiWeights[i].size();
|
||||
NewBone->mWeights=new aiVertexWeight[aiWeights[i].size()];
|
||||
memcpy(NewBone->mWeights, &(aiWeights[i][0]), sizeof(aiVertexWeight)*aiWeights[i].size());
|
||||
NewBone->mName=Bones[i].Name;//The bone list should be sorted after its id's, this was done in LoadSkeleton
|
||||
NewBone->mOffsetMatrix=Bones[i].BoneToWorldSpace;
|
||||
|
||||
aiBones.push_back(NewBone);
|
||||
}
|
||||
}
|
||||
NewAiMesh->mNumBones=aiBones.size();
|
||||
|
||||
// mBones must be NULL if mNumBones is non 0 or the validation fails.
|
||||
if (aiBones.size()) {
|
||||
NewAiMesh->mBones=new aiBone* [aiBones.size()];
|
||||
memcpy(NewAiMesh->mBones, &(aiBones[0]), aiBones.size()*sizeof(aiBone*));
|
||||
dest->mFaces[i].mNumIndices = 3;
|
||||
dest->mFaces[i].mIndices = new unsigned int[3];
|
||||
|
||||
const Face &f = submesh.Faces[i];
|
||||
dest->mFaces[i].mIndices[0] = f.VertexIndices[0];
|
||||
dest->mFaces[i].mIndices[1] = f.VertexIndices[1];
|
||||
dest->mFaces[i].mIndices[2] = f.VertexIndices[2];
|
||||
}
|
||||
|
||||
//______________________________________________________________________________________________________
|
||||
|
||||
|
||||
|
||||
//Faces
|
||||
NewAiMesh->mFaces=new aiFace[theSubMesh.FaceList.size()];
|
||||
for(unsigned int i=0; i<theSubMesh.FaceList.size(); ++i)
|
||||
{
|
||||
NewAiMesh->mFaces[i].mNumIndices=3;
|
||||
NewAiMesh->mFaces[i].mIndices=new unsigned int[3];
|
||||
|
||||
NewAiMesh->mFaces[i].mIndices[0]=theSubMesh.FaceList[i].VertexIndices[0];
|
||||
NewAiMesh->mFaces[i].mIndices[1]=theSubMesh.FaceList[i].VertexIndices[1];
|
||||
NewAiMesh->mFaces[i].mIndices[2]=theSubMesh.FaceList[i].VertexIndices[2];
|
||||
}
|
||||
NewAiMesh->mNumFaces=theSubMesh.FaceList.size();
|
||||
|
||||
//Link the material:
|
||||
NewAiMesh->mMaterialIndex=theSubMesh.MaterialIndex;//the index is set by the function who called ReadSubMesh
|
||||
|
||||
return NewAiMesh;
|
||||
return dest;
|
||||
}
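A hedged sketch (illustration only, not the code path of this patch) of how meshes returned
by CreateAssimpSubMesh are typically handed over to the output scene: collect them in a
temporary vector, then copy the pointers into the scene's mMeshes array. CommitMeshes is a
hypothetical helper name.

static void CommitMeshes(aiScene *scene, std::vector<aiMesh*> &meshes)
{
    scene->mNumMeshes = static_cast<unsigned int>(meshes.size());
    scene->mMeshes = new aiMesh*[scene->mNumMeshes];
    for(unsigned int i = 0; i < scene->mNumMeshes; ++i)
    {
        scene->mMeshes[i] = meshes[i];
    }
}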
|
||||
|
||||
} // Ogre
|
||||
} // Assimp
|
||||
|
||||
}//namespace Ogre
|
||||
}//namespace Assimp
|
||||
|
||||
#endif // !! ASSIMP_BUILD_NO_OGRE_IMPORTER
|
||||
#endif // ASSIMP_BUILD_NO_OGRE_IMPORTER
|
||||
|
|
|
@ -0,0 +1,214 @@
|
|||
|
||||
#ifndef AI_OGREPARSINGUTILS_H_INC
|
||||
#define AI_OGREPARSINGUTILS_H_INC
|
||||
|
||||
#ifndef ASSIMP_BUILD_NO_OGRE_IMPORTER
|
||||
|
||||
#include "ParsingUtils.h"
|
||||
#include "irrXMLWrapper.h"
|
||||
#include "fast_atof.h"
|
||||
|
||||
namespace Assimp
|
||||
{
|
||||
namespace Ogre
|
||||
{
|
||||
|
||||
typedef irr::io::IrrXMLReader XmlReader;
|
||||
|
||||
static void ThrowAttibuteError(const XmlReader* reader, const std::string &name, const std::string &error = "")
|
||||
{
|
||||
if (!error.empty())
|
||||
{
|
||||
throw DeadlyImportError(error + " in node '" + std::string(reader->getNodeName()) + "' and attribute '" + name + "'");
|
||||
}
|
||||
else
|
||||
{
|
||||
throw DeadlyImportError("Attribute '" + name + "' does not exist in node '" + std::string(reader->getNodeName()) + "'");
|
||||
}
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
inline T GetAttribute(const XmlReader* reader, const std::string &name);
|
||||
|
||||
template<>
|
||||
inline int GetAttribute<int>(const XmlReader* reader, const std::string &name)
|
||||
{
|
||||
const char* value = reader->getAttributeValue(name.c_str());
|
||||
if (value)
|
||||
{
|
||||
return atoi(value);
|
||||
}
|
||||
else
|
||||
{
|
||||
ThrowAttibuteError(reader, name);
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
template<>
|
||||
inline unsigned int GetAttribute<unsigned int>(const XmlReader* reader, const std::string &name)
|
||||
{
|
||||
const char* value = reader->getAttributeValue(name.c_str());
|
||||
if (value)
|
||||
{
|
||||
return static_cast<unsigned int>(atoi(value)); ///< @todo Find a better way...
|
||||
}
|
||||
else
|
||||
{
|
||||
ThrowAttibuteError(reader, name);
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
template<>
|
||||
inline float GetAttribute<float>(const XmlReader* reader, const std::string &name)
|
||||
{
|
||||
const char* value = reader->getAttributeValue(name.c_str());
|
||||
if (value)
|
||||
{
|
||||
return fast_atof(value);
|
||||
}
|
||||
else
|
||||
{
|
||||
ThrowAttibuteError(reader, name);
|
||||
return 0.f;
|
||||
}
|
||||
}
|
||||
|
||||
template<>
|
||||
inline std::string GetAttribute<std::string>(const XmlReader* reader, const std::string &name)
|
||||
{
|
||||
const char* value = reader->getAttributeValue(name.c_str());
|
||||
if (value)
|
||||
{
|
||||
return std::string(value);
|
||||
}
|
||||
else
|
||||
{
|
||||
ThrowAttibuteError(reader, name);
|
||||
return "";
|
||||
}
|
||||
}
|
||||
|
||||
template<>
|
||||
inline bool GetAttribute<bool>(const XmlReader* reader, const std::string &name)
|
||||
{
|
||||
std::string value = GetAttribute<std::string>(reader, name);
|
||||
if (ASSIMP_stricmp(value, "true") == 0)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
else if (ASSIMP_stricmp(value, "false") == 0)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
else
|
||||
{
|
||||
ThrowAttibuteError(reader, name, "Boolean value is expected to be 'true' or 'false', encountered '" + value + "'");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
inline bool NextNode(XmlReader* reader)
|
||||
{
|
||||
do
|
||||
{
|
||||
if (!reader->read()) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
while(reader->getNodeType() != irr::io::EXN_ELEMENT);
|
||||
return true;
|
||||
}
|
||||
|
||||
inline bool CurrentNodeNameEquals(const XmlReader* reader, const std::string &name)
|
||||
{
|
||||
return (ASSIMP_stricmp(std::string(reader->getNodeName()), name) == 0);
|
||||
}
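A hedged sketch (illustration only) of the iteration idiom these two helpers enable, the same
pattern the mesh and skeleton readers in this patch use: advance element by element and branch
on the current node name. CountBones is a hypothetical helper name.

inline unsigned int CountBones(XmlReader *reader)
{
    unsigned int count = 0;
    NextNode(reader);
    while(CurrentNodeNameEquals(reader, "bone"))
    {
        ++count;
        NextNode(reader);
    }
    return count;
}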
|
||||
|
||||
/// Skips a line from the current @c ss position until a newline. Returns the skipped part.
|
||||
static inline std::string SkipLine(std::stringstream &ss)
|
||||
{
|
||||
std::string skipped;
|
||||
getline(ss, skipped);
|
||||
return skipped;
|
||||
}
|
||||
|
||||
/// Skips a line and reads next element from @c ss to @c nextElement.
|
||||
/** @return Skipped line content until newline. */
|
||||
static inline std::string NextAfterNewLine(std::stringstream &ss, std::string &nextElement)
|
||||
{
|
||||
std::string skipped = SkipLine(ss);
|
||||
ss >> nextElement;
|
||||
return skipped;
|
||||
}
|
||||
|
||||
/// Returns a lower-cased copy of @c s.
|
||||
static inline std::string ToLower(std::string s)
|
||||
{
|
||||
std::transform(s.begin(), s.end(), s.begin(), ::tolower);
|
||||
return s;
|
||||
}
|
||||
|
||||
/// Returns whether @c s ends with @c suffix. If @c caseSensitive is false, both strings are lower-cased before matching.
|
||||
static inline bool EndsWith(const std::string &s, const std::string &suffix, bool caseSensitive = true)
|
||||
{
|
||||
if (s.empty() || suffix.empty())
|
||||
{
|
||||
return false;
|
||||
}
|
||||
else if (s.length() < suffix.length())
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!caseSensitive) {
|
||||
return EndsWith(ToLower(s), ToLower(suffix), true);
|
||||
}
|
||||
|
||||
size_t len = suffix.length();
|
||||
std::string sSuffix = s.substr(s.length()-len, len);
|
||||
return (ASSIMP_stricmp(sSuffix, suffix) == 0);
|
||||
}
|
||||
|
||||
// Below trim functions adapted from http://stackoverflow.com/questions/216823/whats-the-best-way-to-trim-stdstring
|
||||
|
||||
/// Trim from start
|
||||
static inline std::string &TrimLeft(std::string &s, bool newlines = true)
|
||||
{
|
||||
if (!newlines)
|
||||
{
|
||||
s.erase(s.begin(), std::find_if(s.begin(), s.end(), std::not1(std::ptr_fun(Assimp::IsSpace<char>))));
|
||||
}
|
||||
else
|
||||
{
|
||||
s.erase(s.begin(), std::find_if(s.begin(), s.end(), std::not1(std::ptr_fun(Assimp::IsSpaceOrNewLine<char>))));
|
||||
}
|
||||
return s;
|
||||
}
|
||||
|
||||
/// Trim from end
|
||||
static inline std::string &TrimRight(std::string &s, bool newlines = true)
|
||||
{
|
||||
if (!newlines)
|
||||
{
|
||||
s.erase(std::find_if(s.rbegin(), s.rend(), std::not1(std::ptr_fun(Assimp::IsSpace<char>))).base(),s.end());
|
||||
}
|
||||
else
|
||||
{
|
||||
s.erase(std::find_if(s.rbegin(), s.rend(), std::not1(std::ptr_fun(Assimp::IsSpaceOrNewLine<char>))).base(), s.end());
|
||||
}
|
||||
return s;
|
||||
}
|
||||
|
||||
/// Trim from both ends
|
||||
static inline std::string &Trim(std::string &s, bool newlines = true)
|
||||
{
|
||||
return TrimLeft(TrimRight(s, newlines), newlines);
|
||||
}
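A hedged sketch (illustration only) of how the string helpers above compose, mirroring what
ReadSkeleton in this patch does with a skeleton reference: trim it and, when it points at a
binary .skeleton file, fall back to the .skeleton.xml variant the XML parser can handle.
ResolveSkeletonRef is a hypothetical helper name.

static inline std::string ResolveSkeletonRef(std::string ref)
{
    Trim(ref);
    if (EndsWith(ref, ".skeleton", false))
    {
        ref += ".xml";
    }
    return ref;
}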
|
||||
|
||||
} // Ogre
|
||||
} // Assimp
|
||||
|
||||
#endif // ASSIMP_BUILD_NO_OGRE_IMPORTER
|
||||
#endif // AI_OGREPARSINGUTILS_H_INC
|
|
@ -5,11 +5,11 @@ Open Asset Import Library (assimp)
|
|||
Copyright (c) 2006-2012, assimp team
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use of this software in source and binary forms,
|
||||
Redistribution and use of this software in aSource and binary forms,
|
||||
with or without modification, are permitted provided that the
|
||||
following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above
|
||||
* Redistributions of aSource code must retain the above
|
||||
copyright notice, this list of conditions and the
|
||||
following disclaimer.
|
||||
|
||||
|
@ -42,7 +42,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|||
|
||||
#ifndef ASSIMP_BUILD_NO_OGRE_IMPORTER
|
||||
|
||||
#include "OgreImporter.hpp"
|
||||
#include "OgreImporter.h"
|
||||
#include "TinyFormatter.h"
|
||||
|
||||
using namespace std;
|
||||
|
@ -52,400 +52,395 @@ namespace Assimp
|
|||
namespace Ogre
|
||||
{
|
||||
|
||||
|
||||
|
||||
void OgreImporter::LoadSkeleton(std::string FileName, vector<Bone> &Bones, vector<Animation> &Animations) const
|
||||
void OgreImporter::ReadSkeleton(const std::string &pFile, Assimp::IOSystem *pIOHandler, const aiScene *pScene,
|
||||
const std::string &skeletonFile, vector<Bone> &Bones, vector<Animation> &Animations) const
|
||||
{
|
||||
const aiScene* const m_CurrentScene=this->m_CurrentScene;//make sure, that we can access but not change the scene
|
||||
(void)m_CurrentScene;
|
||||
|
||||
|
||||
//most likely the skeleton file will only end with .skeleton
|
||||
//But this is a xml reader, so we need: .skeleton.xml
|
||||
FileName+=".xml";
|
||||
|
||||
DefaultLogger::get()->debug(string("Loading Skeleton: ")+FileName);
|
||||
|
||||
//Open the File:
|
||||
boost::scoped_ptr<IOStream> File(m_CurrentIOHandler->Open(FileName));
|
||||
if(NULL==File.get())
|
||||
throw DeadlyImportError("Failed to open skeleton file "+FileName+".");
|
||||
|
||||
//Read the Mesh File:
|
||||
boost::scoped_ptr<CIrrXML_IOStreamReader> mIOWrapper(new CIrrXML_IOStreamReader(File.get()));
|
||||
XmlReader* SkeletonFile = irr::io::createIrrXMLReader(mIOWrapper.get());
|
||||
if(!SkeletonFile)
|
||||
throw DeadlyImportError(string("Failed to create XML Reader for ")+FileName);
|
||||
|
||||
XmlRead(SkeletonFile);
|
||||
if(string("skeleton")!=SkeletonFile->getNodeName())
|
||||
throw DeadlyImportError("No <skeleton> node in SkeletonFile: "+FileName);
|
||||
|
||||
|
||||
|
||||
//------------------------------------load bones-----------------------------------------
|
||||
XmlRead(SkeletonFile);
|
||||
if(string("bones")!=SkeletonFile->getNodeName())
|
||||
throw DeadlyImportError("No bones node in skeleton "+FileName);
|
||||
|
||||
XmlRead(SkeletonFile);
|
||||
|
||||
while(string("bone")==SkeletonFile->getNodeName())
|
||||
string filename = skeletonFile;
|
||||
if (EndsWith(filename, ".skeleton"))
|
||||
{
|
||||
//TODO: Maybe we can have bone ids for the errrors, but normaly, they should never appear, so what....
|
||||
|
||||
//read a new bone:
|
||||
Bone NewBone;
|
||||
NewBone.Id=GetAttribute<int>(SkeletonFile, "id");
|
||||
NewBone.Name=GetAttribute<string>(SkeletonFile, "name");
|
||||
|
||||
//load the position:
|
||||
XmlRead(SkeletonFile);
|
||||
if(string("position")!=SkeletonFile->getNodeName())
|
||||
throw DeadlyImportError("Position is not first node in Bone!");
|
||||
NewBone.Position.x=GetAttribute<float>(SkeletonFile, "x");
|
||||
NewBone.Position.y=GetAttribute<float>(SkeletonFile, "y");
|
||||
NewBone.Position.z=GetAttribute<float>(SkeletonFile, "z");
|
||||
|
||||
//Rotation:
|
||||
XmlRead(SkeletonFile);
|
||||
if(string("rotation")!=SkeletonFile->getNodeName())
|
||||
throw DeadlyImportError("Rotation is not the second node in Bone!");
|
||||
NewBone.RotationAngle=GetAttribute<float>(SkeletonFile, "angle");
|
||||
XmlRead(SkeletonFile);
|
||||
if(string("axis")!=SkeletonFile->getNodeName())
|
||||
throw DeadlyImportError("No axis specified for bone rotation!");
|
||||
NewBone.RotationAxis.x=GetAttribute<float>(SkeletonFile, "x");
|
||||
NewBone.RotationAxis.y=GetAttribute<float>(SkeletonFile, "y");
|
||||
NewBone.RotationAxis.z=GetAttribute<float>(SkeletonFile, "z");
|
||||
|
||||
//append the newly loaded bone to the bone list
|
||||
Bones.push_back(NewBone);
|
||||
|
||||
//Proceed to the next bone:
|
||||
XmlRead(SkeletonFile);
|
||||
DefaultLogger::get()->warn("Mesh is referencing a Ogre binary skeleton. Parsing binary Ogre assets is not supported at the moment. Trying to find .skeleton.xml file instead.");
|
||||
filename += ".xml";
|
||||
}
|
||||
//The bones in the file a not neccesarly ordered by there id's so we do it now:
|
||||
|
||||
if (!pIOHandler->Exists(filename))
|
||||
{
|
||||
DefaultLogger::get()->error("Failed to find skeleton file '" + filename + "', skeleton will be missing.");
|
||||
return;
|
||||
}
|
||||
|
||||
boost::scoped_ptr<IOStream> file(pIOHandler->Open(filename));
|
||||
if (!file.get()) {
|
||||
throw DeadlyImportError("Failed to open skeleton file " + filename);
|
||||
}
|
||||
|
||||
boost::scoped_ptr<CIrrXML_IOStreamReader> stream(new CIrrXML_IOStreamReader(file.get()));
|
||||
XmlReader* reader = irr::io::createIrrXMLReader(stream.get());
|
||||
if (!reader) {
|
||||
throw DeadlyImportError("Failed to create XML reader for skeleton file " + filename);
|
||||
}
|
||||
|
||||
DefaultLogger::get()->debug("Reading skeleton '" + filename + "'");
|
||||
|
||||
// Root
|
||||
NextNode(reader);
|
||||
if (!CurrentNodeNameEquals(reader, "skeleton")) {
|
||||
throw DeadlyImportError("Root node is not <skeleton> but <" + string(reader->getNodeName()) + "> in " + filename);
|
||||
}
|
||||
|
||||
// Bones
|
||||
NextNode(reader);
|
||||
if (!CurrentNodeNameEquals(reader, "bones")) {
|
||||
throw DeadlyImportError("No <bones> node in skeleton " + skeletonFile);
|
||||
}
|
||||
|
||||
NextNode(reader);
|
||||
while(CurrentNodeNameEquals(reader, "bone"))
|
||||
{
|
||||
/** @todo Fix this mandatory ordering. Some exporters might just write rotation first etc.
|
||||
There is no technical reason this has to be so strict. */
|
||||
|
||||
Bone bone;
|
||||
bone.Id = GetAttribute<int>(reader, "id");
|
||||
bone.Name = GetAttribute<string>(reader, "name");
|
||||
|
||||
NextNode(reader);
|
||||
if (!CurrentNodeNameEquals(reader, "position")) {
|
||||
throw DeadlyImportError("Position is not first node in Bone!");
|
||||
}
|
||||
|
||||
bone.Position.x = GetAttribute<float>(reader, "x");
|
||||
bone.Position.y = GetAttribute<float>(reader, "y");
|
||||
bone.Position.z = GetAttribute<float>(reader, "z");
|
||||
|
||||
NextNode(reader);
|
||||
if (!CurrentNodeNameEquals(reader, "rotation")) {
|
||||
throw DeadlyImportError("Rotation is not the second node in Bone!");
|
||||
}
|
||||
|
||||
bone.RotationAngle = GetAttribute<float>(reader, "angle");
|
||||
|
||||
NextNode(reader);
|
||||
if (!CurrentNodeNameEquals(reader, "axis")) {
|
||||
throw DeadlyImportError("No axis specified for bone rotation!");
|
||||
}
|
||||
|
||||
bone.RotationAxis.x = GetAttribute<float>(reader, "x");
|
||||
bone.RotationAxis.y = GetAttribute<float>(reader, "y");
|
||||
bone.RotationAxis.z = GetAttribute<float>(reader, "z");
|
||||
|
||||
Bones.push_back(bone);
|
||||
|
||||
NextNode(reader);
|
||||
}
|
||||
|
||||
// Order bones by Id
|
||||
std::sort(Bones.begin(), Bones.end());
|
||||
|
||||
//now the id of each bone should be equal to its position in the vector:
|
||||
//so we do a simple check:
|
||||
// Validate that bone indexes are not skipped.
|
||||
/** @note Kept from the original author's code, but not sure if this is strictly necessary
|
||||
as per the Ogre skeleton spec. It may only be there so that other (later) code in this importer does not break. */
|
||||
for (size_t i=0, len=Bones.size(); i<len; ++i)
|
||||
{
|
||||
bool IdsOk=true;
|
||||
for(int i=0; i<static_cast<signed int>(Bones.size()); ++i)//i is signed, because all Id's are also signed!
|
||||
{
|
||||
if(Bones[i].Id!=i)
|
||||
IdsOk=false;
|
||||
if (static_cast<int>(Bones[i].Id) != static_cast<int>(i)) {
|
||||
throw DeadlyImportError("Bone Ids are not in sequence in " + skeletonFile);
|
||||
}
|
||||
if(!IdsOk)
|
||||
throw DeadlyImportError("Bone Ids are not valid!"+FileName);
|
||||
}
|
||||
DefaultLogger::get()->debug((Formatter::format(),"Number of bones: ",Bones.size()));
|
||||
//________________________________________________________________________________
|
||||
|
||||
DefaultLogger::get()->debug(Formatter::format() << " - Bones " << Bones.size());
|
||||
|
||||
// Bone hierarchy
|
||||
if (!CurrentNodeNameEquals(reader, "bonehierarchy")) {
|
||||
throw DeadlyImportError("No <bonehierarchy> node found after <bones> in " + skeletonFile);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
//----------------------------load bonehierarchy--------------------------------
|
||||
if(string("bonehierarchy")!=SkeletonFile->getNodeName())
|
||||
throw DeadlyImportError("no bonehierarchy node in "+FileName);
|
||||
|
||||
DefaultLogger::get()->debug("loading bonehierarchy...");
|
||||
XmlRead(SkeletonFile);
|
||||
while(string("boneparent")==SkeletonFile->getNodeName())
|
||||
NextNode(reader);
|
||||
while(CurrentNodeNameEquals(reader, "boneparent"))
|
||||
{
|
||||
string Child, Parent;
|
||||
Child=GetAttribute<string>(SkeletonFile, "bone");
|
||||
Parent=GetAttribute<string>(SkeletonFile, "parent");
|
||||
string childName = GetAttribute<string>(reader, "bone");
|
||||
string parentName = GetAttribute<string>(reader, "parent");

unsigned int ChildId, ParentId;
ChildId=find(Bones.begin(), Bones.end(), Child)->Id;
ParentId=find(Bones.begin(), Bones.end(), Parent)->Id;
vector<Bone>::iterator iterChild = find(Bones.begin(), Bones.end(), childName);
vector<Bone>::iterator iterParent = find(Bones.begin(), Bones.end(), parentName);

if (iterChild != Bones.end() && iterParent != Bones.end())
{
iterChild->ParentId = iterParent->Id;
iterParent->Children.push_back(iterChild->Id);
}
else
{
DefaultLogger::get()->warn("Failed to find bones for parenting: Child " + childName + " Parent " + parentName);
}

Bones[ChildId].ParentId=ParentId;
Bones[ParentId].Children.push_back(ChildId);

XmlRead(SkeletonFile);
NextNode(reader);
}
//_____________________________________________________________________________


//--------- Calculate the WorldToBoneSpace Matrix recursively for all bones: ------------------
// Calculate bone matrices for root bones. Recursively does their children.
BOOST_FOREACH(Bone &theBone, Bones)
{
if(-1==theBone.ParentId) //the bone is a root bone
{
if (!theBone.IsParented()) {
theBone.CalculateBoneToWorldSpaceMatrix(Bones);
}
}
//_______________________________________________________________________

aiVector3D zeroVec(0.f, 0.f, 0.f);

//---------------------------load animations-----------------------------
if(string("animations")==SkeletonFile->getNodeName())//animations are optional values
// Animations
if (CurrentNodeNameEquals(reader, "animations"))
{
DefaultLogger::get()->debug("Loading Animations");
XmlRead(SkeletonFile);
while(string("animation")==SkeletonFile->getNodeName())
DefaultLogger::get()->debug(" - Animations");

NextNode(reader);
while(CurrentNodeNameEquals(reader, "animation"))
{
Animation NewAnimation;
NewAnimation.Name=GetAttribute<string>(SkeletonFile, "name");
NewAnimation.Length=GetAttribute<float>(SkeletonFile, "length");
Animation animation;
animation.Name = GetAttribute<string>(reader, "name");
animation.Length = GetAttribute<float>(reader, "length");

//Load all Tracks
XmlRead(SkeletonFile);
if(string("tracks")!=SkeletonFile->getNodeName())
throw DeadlyImportError("no tracks node in animation");
XmlRead(SkeletonFile);
while(string("track")==SkeletonFile->getNodeName())
// Tracks
NextNode(reader);
if (!CurrentNodeNameEquals(reader, "tracks")) {
throw DeadlyImportError("No <tracks> node found in animation '" + animation.Name + "' in " + skeletonFile);
}

NextNode(reader);
while(CurrentNodeNameEquals(reader, "track"))
{
Track NewTrack;
NewTrack.BoneName=GetAttribute<string>(SkeletonFile, "bone");
Track track;
track.BoneName = GetAttribute<string>(reader, "bone");

//Load all keyframes;
XmlRead(SkeletonFile);
if(string("keyframes")!=SkeletonFile->getNodeName())
throw DeadlyImportError("no keyframes node!");
XmlRead(SkeletonFile);
while(string("keyframe")==SkeletonFile->getNodeName())
// Keyframes
NextNode(reader);
if (!CurrentNodeNameEquals(reader, "keyframes")) {
throw DeadlyImportError("No <keyframes> node found in a track in animation '" + animation.Name + "' in " + skeletonFile);
}

NextNode(reader);
while(CurrentNodeNameEquals(reader, "keyframe"))
{
Keyframe NewKeyframe;
NewKeyframe.Time=GetAttribute<float>(SkeletonFile, "time");

//loop over the attributes:

while(true) //will quit, if a Node is not a animationkey
KeyFrame keyFrame;
keyFrame.Time = GetAttribute<float>(reader, "time");

NextNode(reader);
while(CurrentNodeNameEquals(reader, "translate") || CurrentNodeNameEquals(reader, "rotate") || CurrentNodeNameEquals(reader, "scale"))
{
XmlRead(SkeletonFile);

//If any property doesn't show up, it will keep its initialization value

//Position:
if(string("translate")==SkeletonFile->getNodeName())
if (CurrentNodeNameEquals(reader, "translate"))
{
NewKeyframe.Position.x=GetAttribute<float>(SkeletonFile, "x");
NewKeyframe.Position.y=GetAttribute<float>(SkeletonFile, "y");
NewKeyframe.Position.z=GetAttribute<float>(SkeletonFile, "z");
keyFrame.Position.x = GetAttribute<float>(reader, "x");
keyFrame.Position.y = GetAttribute<float>(reader, "y");
keyFrame.Position.z = GetAttribute<float>(reader, "z");
}

//Rotation:
else if(string("rotate")==SkeletonFile->getNodeName())
else if (CurrentNodeNameEquals(reader, "rotate"))
{
float RotationAngle=GetAttribute<float>(SkeletonFile, "angle");
aiVector3D RotationAxis;
XmlRead(SkeletonFile);
if(string("axis")!=SkeletonFile->getNodeName())
throw DeadlyImportError("No axis for keyframe rotation!");
RotationAxis.x=GetAttribute<float>(SkeletonFile, "x");
RotationAxis.y=GetAttribute<float>(SkeletonFile, "y");
RotationAxis.z=GetAttribute<float>(SkeletonFile, "z");
float angle = GetAttribute<float>(reader, "angle");

if(0==RotationAxis.x && 0==RotationAxis.y && 0==RotationAxis.z)//we have an invalid rotation axis
NextNode(reader);
if (!CurrentNodeNameEquals(reader, "axis")) {
throw DeadlyImportError("No axis for keyframe rotation in animation '" + animation.Name + "'");
}

aiVector3D axis;
axis.x = GetAttribute<float>(reader, "x");
axis.y = GetAttribute<float>(reader, "y");
axis.z = GetAttribute<float>(reader, "z");

if (axis.Equal(zeroVec))
{
RotationAxis.x=1.0f;
if(0!=RotationAngle)//if we don't rotate at all, the axis does not matter
{
DefaultLogger::get()->warn("Invalid Rotation Axis in Keyframe!");
axis.x = 1.0f;
if (angle != 0) {
DefaultLogger::get()->warn("Found invalid a key frame with a zero rotation axis in animation '" + animation.Name + "'");
}
}
NewKeyframe.Rotation=aiQuaternion(RotationAxis, RotationAngle);
keyFrame.Rotation = aiQuaternion(axis, angle);
}

//Scaling:
else if(string("scale")==SkeletonFile->getNodeName())
else if (CurrentNodeNameEquals(reader, "scale"))
{
NewKeyframe.Scaling.x=GetAttribute<float>(SkeletonFile, "x");
NewKeyframe.Scaling.y=GetAttribute<float>(SkeletonFile, "y");
NewKeyframe.Scaling.z=GetAttribute<float>(SkeletonFile, "z");
}

//we suppose, that we read all attributes and this is a new keyframe or the end of the animation
else
break;
keyFrame.Scaling.x = GetAttribute<float>(reader, "x");
keyFrame.Scaling.y = GetAttribute<float>(reader, "y");
keyFrame.Scaling.z = GetAttribute<float>(reader, "z");
}
NextNode(reader);
}

NewTrack.Keyframes.push_back(NewKeyframe);
track.Keyframes.push_back(keyFrame);
}

NewAnimation.Tracks.push_back(NewTrack);
animation.Tracks.push_back(track);
}

Animations.push_back(NewAnimation);
Animations.push_back(animation);

DefaultLogger::get()->debug(Formatter::format() << " " << animation.Name << " (" << animation.Length << " sec, " << animation.Tracks.size() << " tracks)");
}
}
//_____________________________________________________________________________

}
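The reworked parser above drives everything through two small helpers from the newly added OgreParsingUtils.h, which this hunk does not show. A rough sketch of their assumed behavior, inferred only from the call sites above (the real definitions may differ):

typedef irr::io::IrrXMLReader XmlReader;

// Advance the reader to the next XML element; false once the document is exhausted.
inline bool NextNode(XmlReader *reader)
{
    do {
        if (!reader->read())
            return false;
    } while (reader->getNodeType() != irr::io::EXN_ELEMENT);
    return true;
}

// True when the element the reader currently points at carries the given name.
inline bool CurrentNodeNameEquals(XmlReader *reader, const std::string &name)
{
    return name == reader->getNodeName();
}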
void OgreImporter::CreateAssimpSkeleton(const std::vector<Bone> &Bones, const std::vector<Animation> &/*Animations*/)
void OgreImporter::CreateAssimpSkeleton(aiScene *pScene, const std::vector<Bone> &bones, const std::vector<Animation> &animations)
{
if(!m_CurrentScene->mRootNode)
throw DeadlyImportError("No root node exists!!");
if(0!=m_CurrentScene->mRootNode->mNumChildren)
throw DeadlyImportError("Root Node already has childnodes!");
if (bones.empty()) {
return;
}

if (!pScene->mRootNode) {
throw DeadlyImportError("Creating Assimp skeleton: No root node created!");
}
if (pScene->mRootNode->mNumChildren > 0) {
throw DeadlyImportError("Creating Assimp skeleton: Root node already has children!");
}

//Createt the assimp bone hierarchy
vector<aiNode*> RootBoneNodes;
BOOST_FOREACH(const Bone &theBone, Bones)
// Bones
vector<aiNode*> rootBones;
BOOST_FOREACH(const Bone &bone, bones)
{
if(-1==theBone.ParentId) //the bone is a root bone
{
//which will recursily add all other nodes
RootBoneNodes.push_back(CreateAiNodeFromBone(theBone.Id, Bones, m_CurrentScene->mRootNode));
if (!bone.IsParented()) {
rootBones.push_back(CreateNodeFromBone(bone.Id, bones, pScene->mRootNode));
}
}

if(RootBoneNodes.size() > 0)
if (!rootBones.empty())
{
m_CurrentScene->mRootNode->mNumChildren=RootBoneNodes.size();
m_CurrentScene->mRootNode->mChildren=new aiNode*[RootBoneNodes.size()];
memcpy(m_CurrentScene->mRootNode->mChildren, &RootBoneNodes[0], sizeof(aiNode*)*RootBoneNodes.size());
}
}
pScene->mRootNode->mChildren = new aiNode*[rootBones.size()];
pScene->mRootNode->mNumChildren = rootBones.size();


void OgreImporter::PutAnimationsInScene(const std::vector<Bone> &Bones, const std::vector<Animation> &Animations)
{
//-----------------Create the Assimp Animations --------------------
if(Animations.size()>0)//Maybe the model had only a skeleton and no animations. (If it also has no skeleton, this function would'nt have been called
{
m_CurrentScene->mNumAnimations=Animations.size();
m_CurrentScene->mAnimations=new aiAnimation*[Animations.size()];
for(unsigned int i=0; i<Animations.size(); ++i)//create all animations
{
aiAnimation* NewAnimation=new aiAnimation();
NewAnimation->mName=Animations[i].Name;
NewAnimation->mDuration=Animations[i].Length;
NewAnimation->mTicksPerSecond=1.0f;

//Create all tracks in this animation
NewAnimation->mNumChannels=Animations[i].Tracks.size();
NewAnimation->mChannels=new aiNodeAnim*[Animations[i].Tracks.size()];
for(unsigned int j=0; j<Animations[i].Tracks.size(); ++j)
{
aiNodeAnim* NewNodeAnim=new aiNodeAnim();
NewNodeAnim->mNodeName=Animations[i].Tracks[j].BoneName;

//we need this, to acces the bones default pose, which we need to make keys absolute to the default bone pose
vector<Bone>::const_iterator CurBone=find(Bones.begin(), Bones.end(), NewNodeAnim->mNodeName);
aiMatrix4x4 t0, t1;
aiMatrix4x4 DefBonePose=aiMatrix4x4::Translation(CurBone->Position, t1)
* aiMatrix4x4::Rotation(CurBone->RotationAngle, CurBone->RotationAxis, t0);

//Create the keyframe arrays...
unsigned int KeyframeCount=Animations[i].Tracks[j].Keyframes.size();
NewNodeAnim->mNumPositionKeys=KeyframeCount;
NewNodeAnim->mNumRotationKeys=KeyframeCount;
NewNodeAnim->mNumScalingKeys =KeyframeCount;
NewNodeAnim->mPositionKeys=new aiVectorKey[KeyframeCount];
NewNodeAnim->mRotationKeys=new aiQuatKey[KeyframeCount];
NewNodeAnim->mScalingKeys =new aiVectorKey[KeyframeCount];

//...and fill them
for(unsigned int k=0; k<KeyframeCount; ++k)
{
aiMatrix4x4 t2, t3;

//Create a matrix to transfrom a vector from the bones default pose to the bone bones in this animation key
aiMatrix4x4 PoseToKey=
aiMatrix4x4::Translation(Animations[i].Tracks[j].Keyframes[k].Position, t3) //pos
* aiMatrix4x4(Animations[i].Tracks[j].Keyframes[k].Rotation.GetMatrix()) //rot
* aiMatrix4x4::Scaling(Animations[i].Tracks[j].Keyframes[k].Scaling, t2); //scale

//calculate the complete transformation from world space to bone space
aiMatrix4x4 CompleteTransform=DefBonePose * PoseToKey;

aiVector3D Pos;
aiQuaternion Rot;
aiVector3D Scale;

CompleteTransform.Decompose(Scale, Rot, Pos);

double Time=Animations[i].Tracks[j].Keyframes[k].Time;

NewNodeAnim->mPositionKeys[k].mTime=Time;
NewNodeAnim->mPositionKeys[k].mValue=Pos;

NewNodeAnim->mRotationKeys[k].mTime=Time;
NewNodeAnim->mRotationKeys[k].mValue=Rot;

NewNodeAnim->mScalingKeys[k].mTime=Time;
NewNodeAnim->mScalingKeys[k].mValue=Scale;
}

NewAnimation->mChannels[j]=NewNodeAnim;
}

m_CurrentScene->mAnimations[i]=NewAnimation;
for(size_t i=0, len=rootBones.size(); i<len; ++i) {
pScene->mRootNode->mChildren[i] = rootBones[i];
}
}
//TODO: Auf nicht vorhandene Animationskeys achten!
//#pragma warning (s.o.)
//__________________________________________________________________
}


aiNode* OgreImporter::CreateAiNodeFromBone(int BoneId, const std::vector<Bone> &Bones, aiNode* ParentNode)
{
//----Create the node for this bone and set its values-----
aiNode* NewNode=new aiNode(Bones[BoneId].Name);
NewNode->mParent=ParentNode;

aiMatrix4x4 t0,t1;
NewNode->mTransformation=
aiMatrix4x4::Translation(Bones[BoneId].Position, t0)
*aiMatrix4x4::Rotation(Bones[BoneId].RotationAngle, Bones[BoneId].RotationAxis, t1)
;
//__________________________________________________________

//---------- recursivly create all children Nodes: ----------
NewNode->mNumChildren=Bones[BoneId].Children.size();
NewNode->mChildren=new aiNode*[Bones[BoneId].Children.size()];
for(unsigned int i=0; i<Bones[BoneId].Children.size(); ++i)
// TODO: Auf nicht vorhandene Animationskeys achten!
// @todo Pay attention to non-existing animation Keys (google translated from above german comment)

// Animations
if (!animations.empty())
{
NewNode->mChildren[i]=CreateAiNodeFromBone(Bones[BoneId].Children[i], Bones, NewNode);
pScene->mAnimations = new aiAnimation*[animations.size()];
pScene->mNumAnimations = animations.size();

for(size_t ai=0, alen=animations.size(); ai<alen; ++ai)
{
const Animation &aSource = animations[ai];

aiAnimation *animation = new aiAnimation();
animation->mName = aSource.Name;
animation->mDuration = aSource.Length;
animation->mTicksPerSecond = 1.0f;

// Tracks
animation->mChannels = new aiNodeAnim*[aSource.Tracks.size()];
animation->mNumChannels = aSource.Tracks.size();

for(size_t ti=0, tlen=aSource.Tracks.size(); ti<tlen; ++ti)
{
const Track &tSource = aSource.Tracks[ti];

aiNodeAnim *animationNode = new aiNodeAnim();
animationNode->mNodeName = tSource.BoneName;

// We need this, to access the bones default pose.
// Which we need to make keys absolute to the default bone pose.
vector<Bone>::const_iterator boneIter = find(bones.begin(), bones.end(), tSource.BoneName);
if (boneIter == bones.end())
{
for(size_t createdAnimationIndex=0; createdAnimationIndex<ai; createdAnimationIndex++) {
delete pScene->mAnimations[createdAnimationIndex];
}
delete [] pScene->mAnimations;
pScene->mAnimations = NULL;
pScene->mNumAnimations = 0;

DefaultLogger::get()->error("Failed to find bone for name " + tSource.BoneName + " when creating animation " + aSource.Name +
". This is a serious error, animations wont be imported.");
return;
}

aiMatrix4x4 t0, t1;
aiMatrix4x4 defaultBonePose = aiMatrix4x4::Translation(boneIter->Position, t1) * aiMatrix4x4::Rotation(boneIter->RotationAngle, boneIter->RotationAxis, t0);

// Keyframes
unsigned int numKeyframes = tSource.Keyframes.size();

animationNode->mPositionKeys = new aiVectorKey[numKeyframes];
animationNode->mRotationKeys = new aiQuatKey[numKeyframes];
animationNode->mScalingKeys = new aiVectorKey[numKeyframes];
animationNode->mNumPositionKeys = numKeyframes;
animationNode->mNumRotationKeys = numKeyframes;
animationNode->mNumScalingKeys = numKeyframes;

//...and fill them
for(size_t kfi=0; kfi<numKeyframes; ++kfi)
{
const KeyFrame &kfSource = tSource.Keyframes[kfi];

// Create a matrix to transform a vector from the bones
// default pose to the bone bones in this animation key
aiMatrix4x4 t2, t3;
aiMatrix4x4 keyBonePose =
aiMatrix4x4::Translation(kfSource.Position, t3) *
aiMatrix4x4(kfSource.Rotation.GetMatrix()) *
aiMatrix4x4::Scaling(kfSource.Scaling, t2);

// Calculate the complete transformation from world space to bone space
aiMatrix4x4 CompleteTransform = defaultBonePose * keyBonePose;

aiVector3D kfPos; aiQuaternion kfRot; aiVector3D kfScale;
CompleteTransform.Decompose(kfScale, kfRot, kfPos);

animationNode->mPositionKeys[kfi].mTime = static_cast<double>(kfSource.Time);
animationNode->mRotationKeys[kfi].mTime = static_cast<double>(kfSource.Time);
animationNode->mScalingKeys[kfi].mTime = static_cast<double>(kfSource.Time);

animationNode->mPositionKeys[kfi].mValue = kfPos;
animationNode->mRotationKeys[kfi].mValue = kfRot;
animationNode->mScalingKeys[kfi].mValue = kfScale;
}
animation->mChannels[ti] = animationNode;
}
pScene->mAnimations[ai] = animation;
}
}
//____________________________________________________

return NewNode;
}
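Both the old PutAnimationsInScene and the new per-track code above make each keyframe absolute by prepending the bone's default (bind) pose before decomposing the result into separate position, rotation and scaling keys. Roughly, with T, R and S as translation, rotation and scaling matrices (notation only, not text from the patch):

M_{key} = \underbrace{T(p_{bone})\,R(\theta_{bone}, a_{bone})}_{\text{default bone pose}} \cdot \underbrace{T(p_{kf})\,R(q_{kf})\,S(s_{kf})}_{\text{keyframe transform}}

The decomposition of M_{key} then yields the values stored in mPositionKeys, mRotationKeys and mScalingKeys.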
aiNode* OgreImporter::CreateNodeFromBone(int boneId, const std::vector<Bone> &bones, aiNode* parent)
{
aiMatrix4x4 t0,t1;
const Bone &source = bones[boneId];

aiNode* boneNode = new aiNode(source.Name);
boneNode->mParent = parent;
boneNode->mTransformation = aiMatrix4x4::Translation(source.Position, t0) * aiMatrix4x4::Rotation(source.RotationAngle, source.RotationAxis, t1);

if (!source.Children.empty())
{
boneNode->mChildren = new aiNode*[source.Children.size()];
boneNode->mNumChildren = source.Children.size();

for(size_t i=0, len=source.Children.size(); i<len; ++i) {
boneNode->mChildren[i] = CreateNodeFromBone(source.Children[i], bones, boneNode);
}
}

return boneNode;
}

void Bone::CalculateBoneToWorldSpaceMatrix(vector<Bone> &Bones)
{
//Calculate the matrix for this bone:
aiMatrix4x4 t0, t1;
aiMatrix4x4 transform = aiMatrix4x4::Rotation(-RotationAngle, RotationAxis, t1) * aiMatrix4x4::Translation(-Position, t0);

aiMatrix4x4 t0,t1;
aiMatrix4x4 Transf= aiMatrix4x4::Rotation(-RotationAngle, RotationAxis, t1)
* aiMatrix4x4::Translation(-Position, t0);

if(-1==ParentId)
if (!IsParented())
{
BoneToWorldSpace=Transf;
BoneToWorldSpace = transform;
}
else
{
BoneToWorldSpace=Transf*Bones[ParentId].BoneToWorldSpace;
BoneToWorldSpace = transform * Bones[ParentId].BoneToWorldSpace;
}

//and recursivly for all children:
BOOST_FOREACH(int theChildren, Children)
// Recursively for all children now that the parent matrix has been calculated.
BOOST_FOREACH(int childId, Children)
{
Bones[theChildren].CalculateBoneToWorldSpaceMatrix(Bones);
Bones[childId].CalculateBoneToWorldSpaceMatrix(Bones);
}
}
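The old and new variants of CalculateBoneToWorldSpaceMatrix compute the same quantity; despite the member name, the result reads as the inverse of the bone's global bind pose, i.e. a world-space-to-bone-space transform. Per bone b with parent matrix B_{parent} (an interpretation of the code above, not wording from the patch):

B_b = R(-\theta_b, a_b)\,T(-p_b)\cdot B_{parent} = \bigl(T(p_b)\,R(\theta_b, a_b)\bigr)^{-1}\cdot B_{parent}, \qquad B_{root} = \bigl(T(p_{root})\,R(\theta_{root}, a_{root})\bigr)^{-1}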
} // Ogre
} // Assimp
}//namespace Ogre
}//namespace Assimp

#endif // !! ASSIMP_BUILD_NO_OGRE_IMPORTER
#endif // ASSIMP_BUILD_NO_OGRE_IMPORTER
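For readers following the importer changes end to end, a minimal consumer-side sketch of what the reworked loader produces: each OGRE <animation> surfaces as an aiAnimation and each <track> as one aiNodeAnim channel. The file name below is only a placeholder; any .mesh.xml with a linked skeleton would do.

#include <assimp/Importer.hpp>
#include <assimp/postprocess.h>
#include <assimp/scene.h>
#include <cstdio>

int main()
{
    Assimp::Importer importer;
    // "robot.mesh.xml" is an example path, not a file shipped with this change.
    const aiScene *scene = importer.ReadFile("robot.mesh.xml", aiProcess_ValidateDataStructure);
    if (!scene) {
        std::printf("Import failed: %s\n", importer.GetErrorString());
        return 1;
    }
    for (unsigned int i = 0; i < scene->mNumAnimations; ++i) {
        const aiAnimation *anim = scene->mAnimations[i];
        std::printf("%s: %.2f ticks, %u channels\n",
                    anim->mName.C_Str(), anim->mDuration, anim->mNumChannels);
    }
    return 0;
}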
@ -1,88 +0,0 @@

#include "irrXMLWrapper.h"
#include "fast_atof.h"

namespace Assimp
{
namespace Ogre
{

typedef irr::io::IrrXMLReader XmlReader;


//------------Helper Funktion to Get a Attribute Save---------------
template<typename t> inline t GetAttribute(XmlReader* Reader, std::string Name);

/*
{
BOOST_STATIC_ASSERT(false);
return t();
}
*/

template<> inline int GetAttribute<int>(XmlReader* Reader, std::string Name)
{
const char* Value=Reader->getAttributeValue(Name.c_str());
if(Value)
return atoi(Value);
else
throw DeadlyImportError(std::string("Attribute "+Name+" does not exist in "+Reader->getNodeName()).c_str());
}

template<> inline unsigned int GetAttribute<unsigned int>(XmlReader* Reader, std::string Name)
{
const char* Value=Reader->getAttributeValue(Name.c_str());
if(Value)
return static_cast<unsigned int>(atoi(Value));//yes, ugly, but pfff
else
throw DeadlyImportError(std::string("Attribute "+Name+" does not exist in "+Reader->getNodeName()).c_str());
}

template<> inline float GetAttribute<float>(XmlReader* Reader, std::string Name)
{
const char* Value=Reader->getAttributeValue(Name.c_str());
if(Value)
return fast_atof(Value);
else
throw DeadlyImportError(std::string("Attribute "+Name+" does not exist in "+Reader->getNodeName()).c_str());
}

template<> inline std::string GetAttribute<std::string>(XmlReader* Reader, std::string Name)
{
const char* Value=Reader->getAttributeValue(Name.c_str());
if(Value)
return std::string(Value);
else
throw DeadlyImportError(std::string("Attribute "+Name+" does not exist in "+Reader->getNodeName()).c_str());
}

template<> inline bool GetAttribute<bool>(XmlReader* Reader, std::string Name)
{
const char* Value=Reader->getAttributeValue(Name.c_str());
if(Value)
{
if(Value==std::string("true"))
return true;
else if(Value==std::string("false"))
return false;
else
throw DeadlyImportError(std::string("Bool value has invalid value: "+Name+" / "+Value+" / "+Reader->getNodeName()));
}
else
throw DeadlyImportError(std::string("Attribute "+Name+" does not exist in "+Reader->getNodeName()).c_str());
}
//__________________________________________________________________

inline bool XmlRead(XmlReader* Reader)
{
do
{
if(!Reader->read())
return false;
}
while(Reader->getNodeType()!=irr::io::EXN_ELEMENT);
return true;
}

}//namespace Ogre
}//namespace Assimp
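The removed helper header above was typically used like this in the old importer (illustrative fragment only; it assumes the helpers above are in scope and that reader is an XmlReader positioned inside a .mesh.xml document, and the element and attribute names are just examples):

// Walk forward element by element and pull typed attributes; both helpers
// throw DeadlyImportError when an expected attribute is missing.
while (XmlRead(reader))
{
    if (std::string("vertexbuffer") == reader->getNodeName())
    {
        bool hasPositions = GetAttribute<bool>(reader, "positions");
        unsigned int texCoords = GetAttribute<unsigned int>(reader, "texture_coords");
        (void)hasPositions; (void)texCoords;
    }
}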
@ -801,29 +801,37 @@ enum aiComponent
#define AI_CONFIG_IMPORT_IRR_ANIM_FPS \
"IMPORT_IRR_ANIM_FPS"


// ---------------------------------------------------------------------------
/** @brief Ogre Importer will try to load this Materialfile.
/** @brief Ogre Importer will try to find referenced materials from this file.
 *
 * Ogre Meshes contain only the MaterialName, not the MaterialFile. If there
 * is no material file with the same name as the material, Ogre Importer will
 * try to load this file and search the material in it.
 * Ogre meshes reference with material names, this does not tell Assimp the file
 * where it is located in. Assimp will try to find the source file in the following
 * order: <material-name>.material, <mesh-filename-base>.material and
 * lastly the material name defined by this config property.
 * <br>
 * Property type: String. Default value: guessed.
 * Property type: String. Default value: Scene.material.
 */
#define AI_CONFIG_IMPORT_OGRE_MATERIAL_FILE "IMPORT_OGRE_MATERIAL_FILE"
#define AI_CONFIG_IMPORT_OGRE_MATERIAL_FILE \
"IMPORT_OGRE_MATERIAL_FILE"
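A short usage sketch for the property documented above, setting the fallback material file before import. The paths are examples only; SetPropertyString is the standard way to set string config properties on Assimp::Importer.

#include <assimp/Importer.hpp>
#include <assimp/config.h>
#include <assimp/scene.h>

void ImportWithMaterialFile()
{
    Assimp::Importer importer;
    // Tell the Ogre importer which .material file to fall back to when neither
    // <material-name>.material nor <mesh-filename-base>.material is found next to the mesh.
    importer.SetPropertyString(AI_CONFIG_IMPORT_OGRE_MATERIAL_FILE, "media/MyScene.material");

    const aiScene *scene = importer.ReadFile("media/MyModel.mesh.xml", 0);
    // Use the scene while the importer is alive; it owns the returned data.
    (void)scene;
}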
// ---------------------------------------------------------------------------
/** @brief Ogre Importer detect the texture usage from its filename
/** @brief Ogre Importer detect the texture usage from its filename.
 *
 * Normally, a texture is loaded as a colormap, if no target is specified in the
 * materialfile. Is this switch is enabled, texture names ending with _n, _l, _s
 * are used as normalmaps, lightmaps or specularmaps.
 * Ogre material texture units do not define texture type, the textures usage
 * depends on the used shader or Ogres fixed pipeline. If this config property
 * is true Assimp will try to detect the type from the textures filename postfix:
 * _n, _nrm, _nrml, _normal, _normals and _normalmap for normal map, _s, _spec,
 * _specular and _specularmap for specular map, _l, _light, _lightmap, _occ
 * and _occlusion for light map, _disp and _displacement for displacement map.
 * The matching is case insensitive. Post fix is taken between last "_" and last ".".
 * Default behavior is to detect type from lower cased texture unit name by
 * matching against: normalmap, specularmap, lightmap and displacementmap.
 * For both cases if no match is found aiTextureType_DIFFUSE is used.
 * <br>
 * Property type: Bool. Default value: false.
 */
#define AI_CONFIG_IMPORT_OGRE_TEXTURETYPE_FROM_FILENAME "IMPORT_OGRE_TEXTURETYPE_FROM_FILENAME"
#define AI_CONFIG_IMPORT_OGRE_TEXTURETYPE_FROM_FILENAME \
"IMPORT_OGRE_TEXTURETYPE_FROM_FILENAME"
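And a matching sketch for the boolean property above, again with example paths; SetPropertyBool is the standard way to set boolean config properties on Assimp::Importer.

#include <assimp/Importer.hpp>
#include <assimp/config.h>
#include <assimp/scene.h>

void ImportWithTextureTypeDetection()
{
    Assimp::Importer importer;
    // Opt in to deriving the texture type from filename postfixes such as
    // _n/_normal (normal map), _s/_spec (specular), _l/_light (light map), _disp.
    importer.SetPropertyBool(AI_CONFIG_IMPORT_OGRE_TEXTURETYPE_FROM_FILENAME, true);

    const aiScene *scene = importer.ReadFile("media/MyModel.mesh.xml", 0);
    (void)scene;
}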
/** @brief Specifies whether the IFC loader skips over IfcSpace elements.
 *