[FBX] Add support for multiple animations (using the LocalStart/LocalStop times of each take) and change the key time type from uint64_t to int64_t (fixes bugs where a negative int64_t time turned into a junk uint64_t value).
parent 7a01f77a08
commit f71d86aa62
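Background for the change, with a simplified sketch (not the patch itself): every FBX AnimationStack ("take") declares a LocalStart/LocalStop window in KTime ticks, and those ticks are signed 64-bit values, so a take can start at a negative time. The converter now reads that window per stack, keeps only the curve keys that fall inside it (plus a small tolerance), and rebases the keys so each resulting clip starts at zero. The helper names and the tick constant below are assumptions made for this illustration; in the converter the tick-to-seconds conversion is done by CONVERT_FBX_TIME.

    #include <cstdint>
    #include <cstddef>
    #include <vector>

    // Simplified illustration of the per-take clipping idea used by this commit.
    // FBX KTime ticks are signed 64-bit; one second is 46,186,158,000 ticks
    // (constant assumed here for the sketch).
    static const int64_t FBX_TICKS_PER_SECOND = 46186158000LL;

    struct Key { int64_t tick; float value; };

    // Keep only the keys inside [local_start, local_stop] (with the same
    // breathing room the patch uses) and rebase them to clip-relative time.
    std::vector<Key> ClipToTake(const std::vector<Key>& keys,
                                int64_t local_start, int64_t local_stop)
    {
        const int64_t tolerance = 10000;
        std::vector<Key> out;
        for (std::size_t i = 0; i < keys.size(); ++i) {
            const int64_t t = keys[i].tick;
            if (t >= local_start - tolerance && t <= local_stop + tolerance) {
                Key k = keys[i];
                k.tick = t - local_start; // every clip starts at zero
                out.push_back(k);
            }
        }
        return out;
    }

    // Clip duration comes from the take window, not from the last key,
    // mirroring anim->mDuration = (stop_timeF - start_timeF) * anim_fps.
    double TakeDurationSeconds(int64_t local_start, int64_t local_stop)
    {
        return static_cast<double>(local_stop - local_start) / FBX_TICKS_PER_SECOND;
    }

With one such window per AnimationStack, the importer can emit one animation per take instead of merging every curve into a single clip.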
@@ -1968,9 +1968,12 @@ private:
     // strip AnimationStack:: prefix
     std::string name = st.Name();
-    if(name.substr(0,16) == "AnimationStack::") {
+    if (name.substr(0, 16) == "AnimationStack::") {
         name = name.substr(16);
     }
+    else if (name.substr(0, 11) == "AnimStack::") {
+        name = name.substr(11);
+    }

     anim->mName.Set(name);

@@ -2014,12 +2017,18 @@ private:
     double min_time = 1e10;
     double max_time = -1e10;

+    int64_t start_time = st.LocalStart();
+    int64_t stop_time = st.LocalStop();
+    double start_timeF = CONVERT_FBX_TIME(start_time);
+    double stop_timeF = CONVERT_FBX_TIME(stop_time);
+
     try {
         BOOST_FOREACH(const NodeMap::value_type& kv, node_map) {
             GenerateNodeAnimations(node_anims,
                 kv.first,
                 kv.second,
                 layer_map,
+                start_time, stop_time,
                 max_time,
                 min_time);
         }

@@ -2043,9 +2052,27 @@ private:
         return;
     }

+    //adjust relative timing for animation
+    {
+        double start_fps = start_timeF * anim_fps;
+
+        for (unsigned int c = 0; c < anim->mNumChannels; c++)
+        {
+            aiNodeAnim* channel = anim->mChannels[c];
+            for (uint32_t i = 0; i < channel->mNumPositionKeys; i++)
+                channel->mPositionKeys[i].mTime -= start_fps;
+            for (uint32_t i = 0; i < channel->mNumRotationKeys; i++)
+                channel->mRotationKeys[i].mTime -= start_fps;
+            for (uint32_t i = 0; i < channel->mNumScalingKeys; i++)
+                channel->mScalingKeys[i].mTime -= start_fps;
+        }
+
+        max_time -= min_time;
+    }
+
     // for some mysterious reason, mDuration is simply the maximum key -- the
     // validator always assumes animations to start at zero.
-    anim->mDuration = max_time /*- min_time */;
+    anim->mDuration = (stop_timeF - start_timeF) * anim_fps;
     anim->mTicksPerSecond = anim_fps;
 }

@@ -2055,6 +2082,7 @@ private:
         const std::string& fixed_name,
         const std::vector<const AnimationCurveNode*>& curves,
         const LayerMap& layer_map,
+        int64_t start, int64_t stop,
         double& max_time,
         double& min_time)
     {

@@ -2147,6 +2175,7 @@ private:
     aiNodeAnim* const nd = GenerateSimpleNodeAnim(fixed_name, target, chain,
         node_property_map.end(),
         layer_map,
+        start, stop,
         max_time,
         min_time,
         true // input is TRS order, assimp is SRT

@@ -2185,6 +2214,7 @@ private:
         target,
         (*chain[i]).second,
         layer_map,
+        start, stop,
         max_time,
         min_time);

@@ -2200,6 +2230,7 @@ private:
         target,
         (*chain[i]).second,
         layer_map,
+        start, stop,
         max_time,
         min_time);

@@ -2212,6 +2243,7 @@ private:
         target,
         (*chain[i]).second,
         layer_map,
+        start, stop,
         max_time,
         min_time,
         true);

@@ -2230,6 +2262,7 @@ private:
         target,
         (*chain[i]).second,
         layer_map,
+        start, stop,
         max_time,
         min_time,
         true);

@@ -2249,6 +2282,7 @@ private:
         target,
         (*chain[i]).second,
         layer_map,
+        start, stop,
         max_time,
         min_time);

@@ -2320,13 +2354,14 @@ private:
         const Model& target,
         const std::vector<const AnimationCurveNode*>& curves,
         const LayerMap& layer_map,
+        int64_t start, int64_t stop,
         double& max_time,
         double& min_time)
     {
         ScopeGuard<aiNodeAnim> na(new aiNodeAnim());
         na->mNodeName.Set(name);

-        ConvertRotationKeys(na, curves, layer_map, max_time,min_time, target.RotationOrder());
+        ConvertRotationKeys(na, curves, layer_map, start, stop, max_time, min_time, target.RotationOrder());

         // dummy scaling key
         na->mScalingKeys = new aiVectorKey[1];

@@ -2351,13 +2386,14 @@ private:
         const Model& /*target*/,
         const std::vector<const AnimationCurveNode*>& curves,
         const LayerMap& layer_map,
+        int64_t start, int64_t stop,
         double& max_time,
         double& min_time)
     {
         ScopeGuard<aiNodeAnim> na(new aiNodeAnim());
         na->mNodeName.Set(name);

-        ConvertScaleKeys(na, curves, layer_map, max_time,min_time);
+        ConvertScaleKeys(na, curves, layer_map, start, stop, max_time, min_time);

         // dummy rotation key
         na->mRotationKeys = new aiQuatKey[1];

@@ -2382,6 +2418,7 @@ private:
         const Model& /*target*/,
         const std::vector<const AnimationCurveNode*>& curves,
         const LayerMap& layer_map,
+        int64_t start, int64_t stop,
         double& max_time,
         double& min_time,
         bool inverse = false)

@@ -2389,7 +2426,7 @@ private:
         ScopeGuard<aiNodeAnim> na(new aiNodeAnim());
         na->mNodeName.Set(name);

-        ConvertTranslationKeys(na, curves, layer_map, max_time,min_time);
+        ConvertTranslationKeys(na, curves, layer_map, start, stop, max_time, min_time);

         if (inverse) {
             for (unsigned int i = 0; i < na->mNumPositionKeys; ++i) {

@@ -2422,6 +2459,7 @@ private:
         NodeMap::const_iterator chain[TransformationComp_MAXIMUM],
         NodeMap::const_iterator iter_end,
         const LayerMap& layer_map,
+        int64_t start, int64_t stop,
         double& max_time,
         double& min_time,
         bool reverse_order = false)

@@ -2443,21 +2481,21 @@ private:
     KeyFrameListList rotation;

     if(chain[TransformationComp_Scaling] != iter_end) {
-        scaling = GetKeyframeList((*chain[TransformationComp_Scaling]).second);
+        scaling = GetKeyframeList((*chain[TransformationComp_Scaling]).second, start, stop);
     }
     else {
         def_scale = PropertyGet(props,"Lcl Scaling",aiVector3D(1.f,1.f,1.f));
     }

     if(chain[TransformationComp_Translation] != iter_end) {
-        translation = GetKeyframeList((*chain[TransformationComp_Translation]).second);
+        translation = GetKeyframeList((*chain[TransformationComp_Translation]).second, start, stop);
     }
     else {
         def_translate = PropertyGet(props,"Lcl Translation",aiVector3D(0.f,0.f,0.f));
     }

     if(chain[TransformationComp_Rotation] != iter_end) {
-        rotation = GetKeyframeList((*chain[TransformationComp_Rotation]).second);
+        rotation = GetKeyframeList((*chain[TransformationComp_Rotation]).second, start, stop);
     }
     else {
         def_rot = EulerToQuaternion(PropertyGet(props,"Lcl Rotation",aiVector3D(0.f,0.f,0.f)),

@@ -2507,6 +2545,7 @@ private:
     if(chain[TransformationComp_Scaling] != iter_end) {
         ConvertScaleKeys(na, (*chain[TransformationComp_Scaling]).second,
             layer_map,
+            start, stop,
             max_time,
             min_time);
     }

@@ -2522,6 +2561,7 @@ private:
     if(chain[TransformationComp_Rotation] != iter_end) {
         ConvertRotationKeys(na, (*chain[TransformationComp_Rotation]).second,
             layer_map,
+            start, stop,
             max_time,
             min_time,
             target.RotationOrder());

@@ -2539,6 +2579,7 @@ private:
     if(chain[TransformationComp_Translation] != iter_end) {
         ConvertTranslationKeys(na, (*chain[TransformationComp_Translation]).second,
             layer_map,
+            start, stop,
             max_time,
             min_time);
     }

@@ -2558,17 +2599,21 @@ private:

     // key (time), value, mapto (component index)
-    typedef boost::tuple< const KeyTimeList*, const KeyValueList*, unsigned int > KeyFrameList;
+    typedef boost::tuple<boost::shared_ptr<KeyTimeList>, boost::shared_ptr<KeyValueList>, unsigned int > KeyFrameList;
     typedef std::vector<KeyFrameList> KeyFrameListList;

     // ------------------------------------------------------------------------------------------------
-    KeyFrameListList GetKeyframeList(const std::vector<const AnimationCurveNode*>& nodes)
+    KeyFrameListList GetKeyframeList(const std::vector<const AnimationCurveNode*>& nodes, int64_t start, int64_t stop)
     {
         KeyFrameListList inputs;
         inputs.reserve(nodes.size()*3);

+        //give some breathing room for rounding errors
+        int64_t adj_start = start - 10000;
+        int64_t adj_stop = stop + 10000;
+
         BOOST_FOREACH(const AnimationCurveNode* node, nodes) {
             ai_assert(node);

@@ -2593,7 +2638,23 @@ private:
             const AnimationCurve* const curve = kv.second;
             ai_assert(curve->GetKeys().size() == curve->GetValues().size() && curve->GetKeys().size());

-            inputs.push_back(boost::make_tuple(&curve->GetKeys(), &curve->GetValues(), mapto));
+            //get values within the start/stop time window
+            boost::shared_ptr<KeyTimeList> Keys(new KeyTimeList());
+            boost::shared_ptr<KeyValueList> Values(new KeyValueList());
+            const int count = curve->GetKeys().size();
+            Keys->reserve(count);
+            Values->reserve(count);
+            for (int n = 0; n < count; n++)
+            {
+                int64_t k = curve->GetKeys().at(n);
+                if (k >= adj_start && k <= adj_stop)
+                {
+                    Keys->push_back(k);
+                    Values->push_back(curve->GetValues().at(n));
+                }
+            }
+
+            inputs.push_back(boost::make_tuple(Keys, Values, mapto));
         }
     }
     return inputs; // pray for NRVO :-)

@@ -2623,7 +2684,7 @@ private:
     const size_t count = inputs.size();
     while(true) {

-        uint64_t min_tick = std::numeric_limits<uint64_t>::max();
+        int64_t min_tick = std::numeric_limits<int64_t>::max();
         for (size_t i = 0; i < count; ++i) {
             const KeyFrameList& kfl = inputs[i];

@@ -2632,7 +2693,7 @@ private:
         }
     }

-    if (min_tick == std::numeric_limits<uint64_t>::max()) {
+    if (min_tick == std::numeric_limits<int64_t>::max()) {
         break;
     }
     keys.push_back(min_tick);

@@ -2832,6 +2893,7 @@ private:

     // ------------------------------------------------------------------------------------------------
     void ConvertScaleKeys(aiNodeAnim* na, const std::vector<const AnimationCurveNode*>& nodes, const LayerMap& /*layers*/,
+        int64_t start, int64_t stop,
         double& maxTime,
         double& minTime)
     {

@@ -2841,36 +2903,40 @@ private:
         // layers should be multiplied with each other). There is a FBX
         // property in the layer to specify the behaviour, though.

-        const KeyFrameListList& inputs = GetKeyframeList(nodes);
+        const KeyFrameListList& inputs = GetKeyframeList(nodes, start, stop);
         const KeyTimeList& keys = GetKeyTimeList(inputs);

         na->mNumScalingKeys = static_cast<unsigned int>(keys.size());
         na->mScalingKeys = new aiVectorKey[keys.size()];
-        InterpolateKeys(na->mScalingKeys, keys, inputs, true, maxTime, minTime);
+        if (keys.size() > 0)
+            InterpolateKeys(na->mScalingKeys, keys, inputs, true, maxTime, minTime);
     }

     // ------------------------------------------------------------------------------------------------
     void ConvertTranslationKeys(aiNodeAnim* na, const std::vector<const AnimationCurveNode*>& nodes,
         const LayerMap& /*layers*/,
+        int64_t start, int64_t stop,
         double& maxTime,
         double& minTime)
     {
         ai_assert(nodes.size());

         // XXX see notes in ConvertScaleKeys()
-        const KeyFrameListList& inputs = GetKeyframeList(nodes);
+        const KeyFrameListList& inputs = GetKeyframeList(nodes, start, stop);
         const KeyTimeList& keys = GetKeyTimeList(inputs);

         na->mNumPositionKeys = static_cast<unsigned int>(keys.size());
         na->mPositionKeys = new aiVectorKey[keys.size()];
-        InterpolateKeys(na->mPositionKeys, keys, inputs, false, maxTime, minTime);
+        if (keys.size() > 0)
+            InterpolateKeys(na->mPositionKeys, keys, inputs, false, maxTime, minTime);
     }

     // ------------------------------------------------------------------------------------------------
     void ConvertRotationKeys(aiNodeAnim* na, const std::vector<const AnimationCurveNode*>& nodes,
         const LayerMap& /*layers*/,
+        int64_t start, int64_t stop,
         double& maxTime,
         double& minTime,
         Model::RotOrder order)

@@ -2878,12 +2944,13 @@ private:
     ai_assert(nodes.size());

     // XXX see notes in ConvertScaleKeys()
-    const std::vector< KeyFrameList >& inputs = GetKeyframeList(nodes);
+    const std::vector< KeyFrameList >& inputs = GetKeyframeList(nodes, start, stop);
     const KeyTimeList& keys = GetKeyTimeList(inputs);

     na->mNumRotationKeys = static_cast<unsigned int>(keys.size());
     na->mRotationKeys = new aiQuatKey[keys.size()];
-    InterpolateKeys(na->mRotationKeys, keys, inputs, false, maxTime, minTime, order);
+    if (keys.size() > 0)
+        InterpolateKeys(na->mRotationKeys, keys, inputs, false, maxTime, minTime, order);
 }
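Consumer-visible effect of the converter changes above, as a minimal sketch using the public Assimp C++ API (file path and the absence of post-processing flags are placeholders, not part of the patch): each take now shows up as its own aiAnimation, with key times rebased to start at zero and mDuration derived from the take window.

    #include <assimp/Importer.hpp>
    #include <assimp/scene.h>
    #include <cstdio>

    // Print every animation clip the FBX importer produces (one per take).
    void ListClips(const char* path)
    {
        Assimp::Importer importer;
        const aiScene* scene = importer.ReadFile(path, 0); // no post-processing, for brevity
        if (!scene) {
            std::printf("import failed: %s\n", importer.GetErrorString());
            return;
        }
        for (unsigned int i = 0; i < scene->mNumAnimations; ++i) {
            const aiAnimation* anim = scene->mAnimations[i];
            std::printf("%s: %.2f ticks at %.2f ticks/s, %u channels\n",
                        anim->mName.C_Str(), anim->mDuration,
                        anim->mTicksPerSecond, anim->mNumChannels);
        }
    }
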
@@ -871,7 +871,7 @@ private:
     std::vector<unsigned int> mappings;
 };

-typedef std::vector<uint64_t> KeyTimeList;
+typedef std::vector<int64_t> KeyTimeList;
 typedef std::vector<float> KeyValueList;

 /** Represents a FBX animation curve (i.e. a 1-dimensional set of keyframes and values therefor) */

@@ -1015,10 +1015,10 @@ public:

 public:

-    fbx_simple_property(LocalStart, uint64_t, 0L)
-    fbx_simple_property(LocalStop, uint64_t, 0L)
-    fbx_simple_property(ReferenceStart, uint64_t, 0L)
-    fbx_simple_property(ReferenceStop, uint64_t, 0L)
+    fbx_simple_property(LocalStart, int64_t, 0L)
+    fbx_simple_property(LocalStop, int64_t, 0L)
+    fbx_simple_property(ReferenceStart, int64_t, 0L)
+    fbx_simple_property(ReferenceStop, int64_t, 0L)

@@ -431,6 +431,43 @@ int ParseTokenAsInt(const Token& t, const char*& err_out)
 }


+// ------------------------------------------------------------------------------------------------
+int64_t ParseTokenAsInt64(const Token& t, const char*& err_out)
+{
+    err_out = NULL;
+
+    if (t.Type() != TokenType_DATA) {
+        err_out = "expected TOK_DATA token";
+        return 0L;
+    }
+
+    if (t.IsBinary())
+    {
+        const char* data = t.begin();
+        if (data[0] != 'L') {
+            err_out = "failed to parse Int64, unexpected data type";
+            return 0L;
+        }
+
+        BE_NCONST int64_t id = SafeParse<int64_t>(data + 1, t.end());
+        AI_SWAP8(id);
+        return id;
+    }
+
+    // XXX: should use size_t here
+    unsigned int length = static_cast<unsigned int>(t.end() - t.begin());
+    ai_assert(length > 0);
+
+    const char* out;
+    const int64_t id = strtoul10_64(t.begin(), &out, &length);
+    if (out > t.end()) {
+        err_out = "failed to parse Int64 (text)";
+        return 0L;
+    }
+
+    return id;
+}
+
 // ------------------------------------------------------------------------------------------------
 std::string ParseTokenAsString(const Token& t, const char*& err_out)
 {

@@ -1062,6 +1099,63 @@ void ParseVectorDataArray(std::vector<uint64_t>& out, const Element& el)
     }
 }

+// ------------------------------------------------------------------------------------------------
+// read an array of int64_ts
+void ParseVectorDataArray(std::vector<int64_t>& out, const Element& el)
+{
+    out.clear();
+    const TokenList& tok = el.Tokens();
+    if (tok.empty()) {
+        ParseError("unexpected empty element", &el);
+    }
+
+    if (tok[0]->IsBinary()) {
+        const char* data = tok[0]->begin(), *end = tok[0]->end();
+
+        char type;
+        uint32_t count;
+        ReadBinaryDataArrayHead(data, end, type, count, el);
+
+        if (!count) {
+            return;
+        }
+
+        if (type != 'l') {
+            ParseError("expected long array (binary)", &el);
+        }
+
+        std::vector<char> buff;
+        ReadBinaryDataArray(type, count, data, end, buff, el);
+
+        ai_assert(data == end);
+        ai_assert(buff.size() == count * 8);
+
+        out.reserve(count);
+
+        const int64_t* ip = reinterpret_cast<const int64_t*>(&buff[0]);
+        for (unsigned int i = 0; i < count; ++i, ++ip) {
+            BE_NCONST int64_t val = *ip;
+            AI_SWAP8(val);
+            out.push_back(val);
+        }
+
+        return;
+    }
+
+    const size_t dim = ParseTokenAsDim(*tok[0]);
+
+    // see notes in ParseVectorDataArray()
+    out.reserve(dim);
+
+    const Scope& scope = GetRequiredScope(el);
+    const Element& a = GetRequiredElement(scope, "a", &el);
+
+    for (TokenList::const_iterator it = a.Tokens().begin(), end = a.Tokens().end(); it != end;) {
+        const int64_t ival = ParseTokenAsInt64(**it++);
+
+        out.push_back(ival);
+    }
+}
+
 // ------------------------------------------------------------------------------------------------
 aiMatrix4x4 ReadMatrix(const Element& element)

@@ -1205,6 +1299,18 @@ int ParseTokenAsInt(const Token& t)


+// ------------------------------------------------------------------------------------------------
+// wrapper around ParseTokenAsInt64() with ParseError handling
+int64_t ParseTokenAsInt64(const Token& t)
+{
+    const char* err;
+    const int64_t i = ParseTokenAsInt64(t, err);
+    if (err) {
+        ParseError(err, t);
+    }
+    return i;
+}
+
 } // !FBX
 } // !Assimp

@@ -206,6 +206,7 @@ size_t ParseTokenAsDim(const Token& t, const char*& err_out);

 float ParseTokenAsFloat(const Token& t, const char*& err_out);
 int ParseTokenAsInt(const Token& t, const char*& err_out);
+int64_t ParseTokenAsInt64(const Token& t, const char*& err_out);
 std::string ParseTokenAsString(const Token& t, const char*& err_out);

@@ -214,6 +215,7 @@ uint64_t ParseTokenAsID(const Token& t);
 size_t ParseTokenAsDim(const Token& t);
 float ParseTokenAsFloat(const Token& t);
 int ParseTokenAsInt(const Token& t);
+int64_t ParseTokenAsInt64(const Token& t);
 std::string ParseTokenAsString(const Token& t);

 /* read data arrays */

@@ -224,6 +226,7 @@ void ParseVectorDataArray(std::vector<int>& out, const Element& el);
 void ParseVectorDataArray(std::vector<float>& out, const Element& el);
 void ParseVectorDataArray(std::vector<unsigned int>& out, const Element& el);
 void ParseVectorDataArray(std::vector<uint64_t>& out, const Element& e);
+void ParseVectorDataArray(std::vector<int64_t>& out, const Element& el);

@@ -88,9 +88,12 @@ Property* ReadTypedProperty(const Element& element)
     else if (!strcmp(cs, "int") || !strcmp(cs, "Int") || !strcmp(cs, "enum") || !strcmp(cs, "Enum")) {
         return new TypedProperty<int>(ParseTokenAsInt(*tok[4]));
     }
-    else if (!strcmp(cs,"ULongLong")) {
+    else if (!strcmp(cs, "ULongLong")) {
         return new TypedProperty<uint64_t>(ParseTokenAsID(*tok[4]));
     }
+    else if (!strcmp(cs, "KTime")) {
+        return new TypedProperty<int64_t>(ParseTokenAsInt64(*tok[4]));
+    }
     else if (!strcmp(cs,"Vector3D") ||
         !strcmp(cs,"ColorRGB") ||
         !strcmp(cs,"Vector") ||

@@ -105,7 +108,7 @@ Property* ReadTypedProperty(const Element& element)
             ParseTokenAsFloat(*tok[6]))
         );
     }
-    else if (!strcmp(cs,"double") || !strcmp(cs,"Number") || !strcmp(cs,"KTime") || !strcmp(cs,"Float") || !strcmp(cs,"FieldOfView")) {
+    else if (!strcmp(cs,"double") || !strcmp(cs,"Number") || !strcmp(cs,"Float") || !strcmp(cs,"FieldOfView")) {
         return new TypedProperty<float>(ParseTokenAsFloat(*tok[4]));
     }
     return NULL;
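A note on the ULongLong/KTime split above: KTime values are signed tick counts, and takes frequently start at a negative LocalStart, so routing them through ParseTokenAsID/uint64_t produced the junk values mentioned in the commit title. The standalone demo below is not part of the patch and uses an illustrative tick value; it only shows the corruption the signed path avoids.

    #include <cstdint>
    #include <cstdio>

    int main()
    {
        // Roughly -5 seconds expressed in FBX KTime ticks (illustrative value).
        int64_t  local_start = -230930790000LL;
        // Reading the same token as an unsigned 64-bit ID wraps it into a huge bogus timestamp.
        uint64_t as_unsigned = static_cast<uint64_t>(local_start);

        std::printf("as int64_t : %lld\n", static_cast<long long>(local_start));
        std::printf("as uint64_t: %llu\n", static_cast<unsigned long long>(as_unsigned));
        return 0;
    }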