- drop a few swear words from documentation. Some of these are years old and I think the authors (me included) don't want them associated with their commits.
parent  ae031733c2
commit  0b9d6bec0d
@@ -707,7 +707,7 @@ void Discreet3DSImporter::GenerateNodeGraph(aiScene* pcOut)
 if (0 == mRootNode->mChildren.size())
 {
 //////////////////////////////////////////////////////////////////////////////
-// It seems the file is so fucked up that it has not even a hierarchy.
+// It seems the file is so messed up that it has not even a hierarchy.
 // generate a flat hiearachy which looks like this:
 //
 // ROOT_NODE
@@ -255,7 +255,7 @@ namespace boost {
 return t;
 }

-// Constructs a tuple with 2 elements (fucking idiot, use std::pair instead!)
+// Constructs a tuple with 2 elements
 template <typename T0,typename T1>
 inline tuple <T0,T1> make_tuple (const T0& t0,
 const T1& t1) {
@@ -265,7 +265,7 @@ namespace boost {
 return t;
 }

-// Constructs a tuple with 1 elements (no comment ...)
+// Constructs a tuple with 1 elements (well ...)
 template <typename T0>
 inline tuple <T0> make_tuple (const T0& t0) {
 tuple <T0> t;
@@ -273,7 +273,7 @@ namespace boost {
 return t;
 }

-// Constructs a tuple with 0 elements (ehm? Try http://www.promillerechner.net)
+// Constructs a tuple with 0 elements (well ...)
 inline tuple <> make_tuple () {
 tuple <> t;
 return t;
@@ -1045,9 +1045,9 @@ void COBImporter::ReadPolH_Binary(COB::Scene& out, StreamReaderLE& reader, const
 v.y = reader.GetF4();
 }

-const size_t numfuck = reader.GetI4();
-msh.faces.reserve(numfuck);
-for(size_t i = 0; i < numfuck; ++i) {
+const size_t numf = reader.GetI4();
+msh.faces.reserve(numf);
+for(size_t i = 0; i < numf; ++i) {
 // XXX backface culling flag is 0x10 in flags

 // hole?
@@ -99,7 +99,7 @@ void ColladaExporter::WriteFile()

 WriteSceneLibrary();

-// useless Collada bullshit at the end, just in case we haven't had enough indirections, yet.
+// useless Collada fu at the end, just in case we haven't had enough indirections, yet.
 mOutput << startstr << "<scene>" << endstr;
 PushTag();
 mOutput << startstr << "<instance_visual_scene url=\"#myScene\" />" << endstr;
@@ -495,7 +495,7 @@ void ColladaExporter::WriteFloatArray( const std::string& pIdString, FloatDataTy
 mOutput << "</float_array>" << endstr;
 PopTag();

-// the usual Collada bullshit. Let's bloat it even more!
+// the usual Collada fun. Let's bloat it even more!
 mOutput << startstr << "<technique_common>" << endstr;
 PushTag();
 mOutput << startstr << "<accessor count=\"" << pElementCount << "\" offset=\"0\" source=\"#" << arrayId << "\" stride=\"" << floatsPerElement << "\">" << endstr;
@@ -325,10 +325,10 @@ void ColladaLoader::BuildLightsForNode( const ColladaParser& pParser, const Coll

 out->mAngleInnerCone = AI_DEG_TO_RAD( srcLight->mFalloffAngle );

-// ... some extension magic. FUCKING COLLADA.
+// ... some extension magic. O
 if (srcLight->mOuterAngle == 10e10f)
 {
-// ... some deprecation magic. FUCKING FCOLLADA.
+// ... some deprecation magic.
 if (srcLight->mPenumbraAngle == 10e10f)
 {
 // Need to rely on falloff_exponent. I don't know how to interpret it, so I need to guess ....
@@ -382,7 +382,7 @@ void ColladaLoader::BuildCamerasForNode( const ColladaParser& pParser, const Col
 out->mClipPlaneNear = srcCamera->mZNear;

 // ... but for the rest some values are optional
-// and we need to compute the others in any combination. FUCKING COLLADA.
+// and we need to compute the others in any combination.
 if (srcCamera->mAspect != 10e10f)
 out->mAspect = srcCamera->mAspect;

@@ -545,7 +545,7 @@ aiMesh* ColladaLoader::CreateMesh( const ColladaParser& pParser, const Collada::
 std::copy( pSrcMesh->mPositions.begin() + pStartVertex, pSrcMesh->mPositions.begin() +
 pStartVertex + numVertices, dstMesh->mVertices);

-// normals, if given. HACK: (thom) Due to the fucking Collada spec we never
+// normals, if given. HACK: (thom) Due to the glorious Collada spec we never
 // know if we have the same number of normals as there are positions. So we
 // also ignore any vertex attribute if it has a different count
 if( pSrcMesh->mNormals.size() >= pStartVertex + numVertices)
@@ -636,7 +636,7 @@ aiMesh* ColladaLoader::CreateMesh( const ColladaParser& pParser, const Collada::
 throw DeadlyImportError( "Data type mismatch while resolving mesh joints");
 // sanity check: we rely on the vertex weights always coming as pairs of BoneIndex-WeightIndex
 if( pSrcController->mWeightInputJoints.mOffset != 0 || pSrcController->mWeightInputWeights.mOffset != 1)
-throw DeadlyImportError( "Unsupported vertex_weight adresssing scheme. Fucking collada spec.");
+throw DeadlyImportError( "Unsupported vertex_weight adressing scheme. ");

 // create containers to collect the weights for each bone
 size_t numBones = jointNames.mStrings.size();
@@ -487,7 +487,7 @@ void ColladaParser::ReadController( Collada::Controller& pController)
 else if( IsElement( "skin"))
 {
 // read the mesh it refers to. According to the spec this could also be another
-// controller, but I refuse to implement every bullshit idea they've come up with
+// controller, but I refuse to implement every single idea they've come up with
 int sourceIndex = GetAttribute( "source");
 pController.mMeshId = mReader->getAttributeValue( sourceIndex) + 1;
 }
@@ -1097,9 +1097,6 @@ void ColladaParser::ReadEffectLibrary()
 if( IsElement( "effect"))
 {
 // read ID. Do I have to repeat my ranting about "optional" attributes?
-// Alex: .... no, not necessary. Please shut up and leave more space for
-// me to complain about the fucking Collada spec with its fucking
-// 'optional' attributes ...
 int attrID = GetAttribute( "id");
 std::string id = mReader->getAttributeValue( attrID);

@@ -1613,7 +1610,7 @@ void ColladaParser::ReadSource()
 }
 else if( IsElement( "technique_common"))
 {
-// I don't fucking care for your profiles bullshit
+// I don't care for your profiles
 }
 else if( IsElement( "accessor"))
 {
@@ -448,8 +448,8 @@ void AnimResolver::GetKeys(std::vector<aiVectorKey>& out,

 if ((*cur_x).time == (*cur_y).time && (*cur_x).time == (*cur_z).time ) {

-// we have a keyframe for all of them defined .. great,
-// we don't need to fucking interpolate here ...
+// we have a keyframe for all of them defined .. this means
+// we don't need to interpolate here.
 fill.mTime = (*cur_x).time;

 fill.mValue.x = (*cur_x).value;
@@ -787,7 +787,6 @@ void MDLImporter::SkipSkinLump_3DGS_MDL7(
 }

 // ------------------------------------------------------------------------------------------------
-// What the fuck does this function do? Can't remember
 void MDLImporter::ParseSkinLump_3DGS_MDL7(
 const unsigned char* szCurrent,
 const unsigned char** szCurrentOut,
@@ -182,7 +182,7 @@ void OgreImporter::ReadVertexBuffer(SubMesh &theSubMesh, XmlReader *Reader, unsi
 XmlRead(Reader);

 /*it might happen, that we have more than one attribute per vertex (they are not splitted to different buffers)
-so the break condition is a bit tricky (well, IrrXML just sucks :( )*/
+so the break condition is a bit tricky */
 while(Reader->getNodeName()==string("vertex")
 ||Reader->getNodeName()==string("position")
 ||Reader->getNodeName()==string("normal")
@@ -77,8 +77,6 @@ void OgreImporter::LoadSkeleton(std::string FileName, vector<Bone> &Bones, vecto
 if(!SkeletonFile)
 throw DeadlyImportError(string("Failed to create XML Reader for ")+FileName);

-//Quick note: Whoever read this should know this one thing: irrXml fucking sucks!!!
-
 XmlRead(SkeletonFile);
 if(string("skeleton")!=SkeletonFile->getNodeName())
 throw DeadlyImportError("No <skeleton> node in SkeletonFile: "+FileName);
@@ -169,12 +167,12 @@ void OgreImporter::LoadSkeleton(std::string FileName, vector<Bone> &Bones, vecto
 Bones[ChildId].ParentId=ParentId;
 Bones[ParentId].Children.push_back(ChildId);

-XmlRead(SkeletonFile);//I once forget this line, which led to an endless loop, did i mentioned, that irrxml sucks??
+XmlRead(SkeletonFile);
 }
 //_____________________________________________________________________________


-//--------- Calculate the WorldToBoneSpace Matrix recursivly for all bones: ------------------
+//--------- Calculate the WorldToBoneSpace Matrix recursively for all bones: ------------------
 BOOST_FOREACH(Bone &theBone, Bones)
 {
 if(-1==theBone.ParentId) //the bone is a root bone
|
@ -379,8 +379,7 @@ void Q3DImporter::InternReadFile( const std::string& pFile,
|
||||||
light->mColorSpecular = light->mColorDiffuse;
|
light->mColorSpecular = light->mColorDiffuse;
|
||||||
|
|
||||||
|
|
||||||
// We don't need the rest, but we need to know where
|
// We don't need the rest, but we need to know where this chunk ends.
|
||||||
// this fucking chunk ends.
|
|
||||||
unsigned int temp = (unsigned int)(stream.GetI4() * stream.GetI4());
|
unsigned int temp = (unsigned int)(stream.GetI4() * stream.GetI4());
|
||||||
|
|
||||||
// skip the background file name
|
// skip the background file name
|
||||||
|
|
|
@@ -384,7 +384,7 @@ bool STLImporter::LoadBinaryFile()
 }
 aiColor4D* clr = &pMesh->mColors[0][i*3];
 clr->a = 1.0f;
-if (bIsMaterialise) // fuck, this is reversed
+if (bIsMaterialise) // this is reversed
 {
 clr->r = (color & 0x31u) / 31.0f;
 clr->g = ((color & (0x31u<<5))>>5u) / 31.0f;
@@ -985,7 +985,7 @@ void OpenAsset()
 void SetupPPUIState()
 {

-// fucking hell, that's ugly. anyone willing to rewrite me from scratch?
+// that's ugly. anyone willing to rewrite me from scratch?
 HMENU hMenu = GetMenu(g_hDlg);
 CheckMenuItem(hMenu,ID_VIEWER_PP_JIV,ppsteps & aiProcess_JoinIdenticalVertices ? MF_CHECKED : MF_UNCHECKED);
 CheckMenuItem(hMenu,ID_VIEWER_PP_CTS,ppsteps & aiProcess_CalcTangentSpace ? MF_CHECKED : MF_UNCHECKED);
@@ -1916,7 +1916,7 @@ INT_PTR CALLBACK MessageProc(HWND hwndDlg,UINT uMsg,
 }
 }

-// fucking hell, this is ugly. anyone willing to rewrite it from scratch using wxwidgets or similar?
+// this is ugly. anyone willing to rewrite it from scratch using wxwidgets or similar?
 else if (ID_VIEWER_PP_JIV == LOWORD(wParam)) {
 ppsteps ^= aiProcess_JoinIdenticalVertices;
 CheckMenuItem(hMenu,ID_VIEWER_PP_JIV,ppsteps & aiProcess_JoinIdenticalVertices ? MF_CHECKED : MF_UNCHECKED);