- ff=unix for all Python scripts in ./scripts to avoid shebang trouble under *nix.
git-svn-id: https://assimp.svn.sourceforge.net/svnroot/assimp/trunk@1048 67173fc5-114c-0410-ac8e-9d2fd5bffc1f

parent a84eead533
commit 00be2f920b
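The normalization the commit message describes (ff=unix is the Vim fileformat setting for LF line endings) can be reproduced with a helper along these lines. This is a sketch only, not part of the commit; it assumes it is run from the repository root and that only *.py files under ./scripts need fixing:

import os

# Rewrite CRLF line endings to LF so the "#!/usr/bin/env python3" shebang
# is not followed by a stray carriage return under *nix.
for root, dirs, files in os.walk("scripts"):
    for name in files:
        if not name.endswith(".py"):
            continue
        path = os.path.join(root, name)
        with open(path, "rb") as f:
            data = f.read()
        fixed = data.replace(b"\r\n", b"\n")
        if fixed != data:
            with open(path, "wb") as f:
                f.write(fixed)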
@@ -1,235 +1,235 @@
#!/usr/bin/env python3
# -*- Coding: UTF-8 -*-

# ---------------------------------------------------------------------------
# Open Asset Import Library (ASSIMP)
# ---------------------------------------------------------------------------
#
# Copyright (c) 2006-2010, ASSIMP Development Team
#
# All rights reserved.
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# * Redistributions of source code must retain the above
#   copyright notice, this list of conditions and the
#   following disclaimer.
#
# * Redistributions in binary form must reproduce the above
#   copyright notice, this list of conditions and the
#   following disclaimer in the documentation and/or other
#   materials provided with the distribution.
#
# * Neither the name of the ASSIMP team, nor the names of its
#   contributors may be used to endorse or promote products
#   derived from this software without specific prior
#   written permission of the ASSIMP Development Team.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ---------------------------------------------------------------------------

"""Generate BlenderSceneGen.h and BlenderScene.cpp from the
data structures in BlenderScene.h to map from *any* DNA to
*our* DNA"""

import sys
import os
import re

inputfile      = os.path.join("..","..","code","BlenderScene.h")
outputfile_gen = os.path.join("..","..","code","BlenderSceneGen.h")
outputfile_src = os.path.join("..","..","code","BlenderScene.cpp")

template_gen = "BlenderSceneGen.h.template"
template_src = "BlenderScene.cpp.template"


Structure_Convert_decl = """
template <> void Structure :: Convert<{a}> (
    {a}& dest,
    const FileDatabase& db
    ) const
"""

Structure_Convert_ptrdecl = """
    ReadFieldPtr<{policy}>({destcast}dest.{name_canonical},"{name_dna}",db);"""

Structure_Convert_arraydecl = """
    ReadFieldArray<{policy}>({destcast}dest.{name_canonical},"{name_dna}",db);"""

Structure_Convert_arraydecl2d = """
    ReadFieldArray2<{policy}>({destcast}dest.{name_canonical},"{name_dna}",db);"""

Structure_Convert_normal = """
    ReadField<{policy}>({destcast}dest.{name_canonical},"{name_dna}",db);"""


DNA_RegisterConverters_decl = """
void DNA::RegisterConverters() """

DNA_RegisterConverters_add = """
    converters["{a}"] = DNA::FactoryPair( &Structure::Allocate<{a}>, &Structure::Convert<{a}> );"""


map_policy = {
     ""     : "ErrorPolicy_Igno"
    ,"IGNO" : "ErrorPolicy_Igno"
    ,"WARN" : "ErrorPolicy_Warn"
    ,"FAIL" : "ErrorPolicy_Fail"
}

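# Illustration only (not part of the original script): formatting the
# declaration template above for an assumed struct name "Camera" gives the
# C++ signature that main() emits once per struct parsed out of BlenderScene.h:
#
#   Structure_Convert_decl.format(a="Camera")
#
#   template <> void Structure :: Convert<Camera> (
#       Camera& dest,
#       const FileDatabase& db
#       ) const
#
# The body is then filled with one ReadField/ReadFieldPtr/ReadFieldArray call
# per member, selected by the pointer/array checks in main() below.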
#
def main():

    # -----------------------------------------------------------------------
    # Parse structure definitions from BlenderScene.h
    input = open(inputfile,"rt").read()

    flags = re.ASCII|re.DOTALL|re.MULTILINE
    #stripcoms = re.compile(r"/\*(.*?)*\/",flags)
    getstruct = re.compile(r"struct\s+(\w+?)\s*(:\s*ElemBase)?\s*\{(.*?)^\}\s*;",flags)
    getsmartx = re.compile(r"(std\s*::\s*)?(vector)\s*<\s*(boost\s*::\s*)?shared_(ptr)\s*<\s*(\w+)\s*>\s*>\s*",flags)
    getsmartp = re.compile(r"(boost\s*::\s*)?shared_(ptr)\s*<\s*(\w+)\s*>\s*",flags)
    getsmarta = re.compile(r"(std\s*::\s*)?(vector)\s*<\s*(\w+)\s*>\s*",flags)
    getpolicy = re.compile(r"\s*(WARN|FAIL|IGNO)",flags)
    stripenum = re.compile(r"enum\s+(\w+)\s*{.*?\}\s*;",flags)

    assert getsmartx and getsmartp and getsmarta and getpolicy and stripenum

    enums = set()
    #re.sub(stripcoms," ",input)
    #print(input)

    hits = {}
    while 1:
        match = re.search(getstruct,input)
        if match is None:
            break

        tmp = match.groups()[2]
        while 1:
            match2 = re.search(stripenum,tmp)
            if match2 is None:
                break
            tmp = tmp[match2.end():]
            enums.add(match2.groups()[0])

        hits[match.groups()[0]] = list(
            filter(lambda x:x[:2] != "//" and len(x),
                map(str.strip,
                    re.sub(stripenum," ",match.groups()[2]).split(";")
        )))

        input = input[match.end():]

    [print ("Enum: "+e) for e in enums]
    for k,v in hits.items():
        out = []
        for line in v:

            policy = "IGNO"
            py = re.search(getpolicy,line)
            if not py is None:
                policy = py.groups()[0]
                line = re.sub(getpolicy,"",line)

            ty = re.match(getsmartx,line) or re.match(getsmartp,line) or re.match(getsmarta,line)
            if ty is None:
                ty = line.split(None,1)[0]
            else:
                if ty.groups()[1] == "ptr":
                    ty = ty.groups()[2] + "*"
                elif ty.groups()[1] == "vector":
                    ty = ty.groups()[-1] + ("*" if len(ty.groups()) == 3 else "**")

            #print(line)
            sp = line.split(',')
            out.append((ty,sp[0].split(None)[-1].strip(),policy))
            for m in sp[1:]:
                out.append((ty,m.strip(),policy))

        v[:] = out
        print("Structure {0}".format(k))
        [print("\t"+"\t".join(elem)) for elem in out]
        print("")


    output = open(outputfile_gen,"wt")
    templt = open(template_gen,"rt").read()
    s = ""

    # -----------------------------------------------------------------------
    # Structure::Convert<T> declarations for all supported structures
    for k,v in hits.items():
        s += Structure_Convert_decl.format(a=k)+";\n";
    output.write(templt.replace("<HERE>",s))

    output = open(outputfile_src,"wt")
    templt = open(template_src,"rt").read()
    s = ""

    # -----------------------------------------------------------------------
    # Structure::Convert<T> definitions for all supported structures
    for k,v in hits.items():
        s += "//" + "-"*80 + Structure_Convert_decl.format(a=k)+ "{ \n";

        for type, name, policy in v:
            splits = name.split("[",1)
            name_canonical = splits[0]
            #array_part = "" if len(splits)==1 else "["+splits[1]
            ptr_decl = "*"*type.count("*")
            name_dna = ptr_decl+name_canonical #+array_part

            #required = "false"
            policy = map_policy[policy]
            destcast = "(int&)" if type in enums else ""

            # POINTER
            if ptr_decl:
                s += Structure_Convert_ptrdecl.format(**locals())
            # ARRAY MEMBER
            elif name.count('[')==1:
                s += Structure_Convert_arraydecl.format(**locals())
            elif name.count('[')==2:
                s += Structure_Convert_arraydecl2d.format(**locals())
            # NORMAL MEMBER
            else:
                s += Structure_Convert_normal.format(**locals())

        s += "\n\n\tdb.reader->IncPtr(size);\n}\n\n"


    # -----------------------------------------------------------------------
    # DNA::RegisterConverters - collect all available converter functions
    # in a std::map<name,converter_proc>
    #s += "#if 0\n"
    s += "//" + "-"*80 + DNA_RegisterConverters_decl + "{\n"
    for k,v in hits.items():
        s += DNA_RegisterConverters_add.format(a=k)

    s += "\n}\n"
    #s += "#endif\n"

    output.write(templt.replace("<HERE>",s))


if __name__ == "__main__":
    sys.exit(main())

@@ -1,283 +1,283 @@
#!/usr/bin/env python3
# -*- Coding: UTF-8 -*-

# ---------------------------------------------------------------------------
# Open Asset Import Library (ASSIMP)
# ---------------------------------------------------------------------------
#
# Copyright (c) 2006-2010, ASSIMP Development Team
#
# All rights reserved.
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# * Redistributions of source code must retain the above
#   copyright notice, this list of conditions and the
#   following disclaimer.
#
# * Redistributions in binary form must reproduce the above
#   copyright notice, this list of conditions and the
#   following disclaimer in the documentation and/or other
#   materials provided with the distribution.
#
# * Neither the name of the ASSIMP team, nor the names of its
#   contributors may be used to endorse or promote products
#   derived from this software without specific prior
#   written permission of the ASSIMP Development Team.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ---------------------------------------------------------------------------

"""Generate the C++ glue code needed to map EXPRESS to C++"""

import sys, os, re

input_template_h = 'IFCReaderGen.h.template'
input_template_cpp = 'IFCReaderGen.cpp.template'

output_file_h = os.path.join('..','..','code','IFCReaderGen.h')
output_file_cpp = os.path.join('..','..','code','IFCReaderGen.cpp')

template_entity_predef = '\tstruct {entity};\n'
template_entity_predef_ni = '\ttypedef NotImplemented {entity}; // (not currently used by Assimp)\n'
template_entity = r"""

// C++ wrapper for {entity}
struct {entity} : {parent} ObjectHelper<{entity},{argcnt}> {{ {entity}() : Object("{entity}") {{}}
{fields}
}};"""

template_entity_ni = ''

template_type = r"""
// C++ wrapper type for {type}
typedef {real_type} {type};"""

template_stub_decl = '\tDECL_CONV_STUB({type});\n'
template_schema = '\t\tSchemaEntry("{normalized_name}",&STEP::ObjectHelper<{type},{argcnt}>::Construct )\n'
template_schema_type = '\t\tSchemaEntry("{normalized_name}",NULL )\n'
template_converter = r"""
// -----------------------------------------------------------------------------------------------------------
template <> size_t GenericFill<{type}>(const DB& db, const LIST& params, {type}* in)
{{
{contents}
}}"""

template_converter_prologue_a = '\tsize_t base = GenericFill(db,params,static_cast<{parent}*>(in));\n'
template_converter_prologue_b = '\tsize_t base = 0;\n'
template_converter_check_argcnt = '\tif (params.GetSize() < {max_arg}) {{ throw STEP::TypeError("expected {max_arg} arguments to {name}"); }}'
template_converter_code_per_field = r"""    do {{ // convert the '{fieldname}' argument
        boost::shared_ptr<const DataType> arg = params[base++];{handle_unset}{convert}
    }} while(0);
"""
template_allow_optional = r"""
        if (dynamic_cast<const UNSET*>(&*arg)) break;"""
template_allow_derived = r"""
        if (dynamic_cast<const ISDERIVED*>(&*arg)) {{ in->ObjectHelper<Assimp::IFC::{type},{argcnt}>::aux_is_derived[{argnum}]=true; break; }}"""
template_convert_single = r"""
        try {{ GenericConvert( in->{name}, arg, db ); break; }}
        catch (const TypeError& t) {{ throw TypeError(t.what() + std::string(" - expected argument {argnum} to {classname} to be a `{full_type}`")); }}"""

template_converter_ommitted = '// this data structure is not used yet, so there is no code generated to fill its members\n'
template_converter_epilogue = '\treturn base;'

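# Illustration only (not part of the original script, names are made up): with
# entity "IfcFoo", parent "IfcBase," (work() appends the comma) and one field
# line, the entity wrapper template above expands to:
#
#   template_entity.format(entity="IfcFoo", parent="IfcBase,", argcnt=2,
#                          fields="\t\tIfcLabel::Out Name;")
#
#   // C++ wrapper for IfcFoo
#   struct IfcFoo : IfcBase, ObjectHelper<IfcFoo,2> { IfcFoo() : Object("IfcFoo") {}
#           IfcLabel::Out Name;
#   };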
import ExpressReader


def get_list_bounds(collection_spec):
    start,end = [(int(n) if n!='?' else 0) for n in re.findall(r'(\d+|\?)',collection_spec)]
    return start,end

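# For example (hypothetical collection specs, not from the original script):
#   get_list_bounds('LIST [1:3] OF')  -> (1, 3)
#   get_list_bounds('SET [0:?] OF')   -> (0, 0)   # '?' (unbounded) maps to 0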
def get_cpp_type(field,schema):
    isobjref = field.type in schema.entities
    base = field.type
    if isobjref:
        base = 'Lazy< '+(base if base in schema.whitelist else 'NotImplemented')+' >'
    if field.collection:
        start,end = get_list_bounds(field.collection)
        base = 'ListOf< {0}, {1}, {2} >'.format(base,start,end)
    if not isobjref:
        base += '::Out'
    if field.optional:
        base = 'Maybe< '+base+' >'

    return base

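# Illustration only (hypothetical field, not from the original script): an
# OPTIONAL 'SET [1:?] OF IfcRepresentation' attribute, with IfcRepresentation
# being a whitelisted entity, maps to the C++ member type
#   Maybe< ListOf< Lazy< IfcRepresentation >, 1, 0 > >
# i.e. entity references become Lazy<>, collections become ListOf<> with the
# parsed bounds, and OPTIONAL wraps the result in Maybe<>.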
def generate_fields(entity,schema):
    fields = []
    for e in entity.members:
        fields.append('\t\t{type} {name};'.format(type=get_cpp_type(e,schema),name=e.name))
    return '\n'.join(fields)

def handle_unset_args(field,entity,schema,argnum):
    n = ''
    # if someone derives from this class, check for derived fields.
    if any(entity.name==e.parent for e in schema.entities.values()):
        n += template_allow_derived.format(type=entity.name,argcnt=len(entity.members),argnum=argnum)

    if not field.optional:
        return n+''
    return n+template_allow_optional.format()

def get_single_conversion(field,schema,argnum=0,classname='?'):
    typen = field.type
    name = field.name
    if field.collection:
        typen = 'LIST'
    return template_convert_single.format(type=typen,name=name,argnum=argnum,classname=classname,full_type=field.fullspec)

def count_args_up(entity,schema):
    return len(entity.members) + (count_args_up(schema.entities[entity.parent],schema) if entity.parent else 0)

def resolve_base_type(base,schema):
    if base in ('INTEGER','REAL','STRING','ENUMERATION','BOOLEAN','NUMBER', 'SELECT','LOGICAL'):
        return base
    if base in schema.types:
        return resolve_base_type(schema.types[base].equals,schema)
    print(base)
    return None

def gen_type_struct(typen,schema):
    base = resolve_base_type(typen.equals,schema)
    if not base:
        return ''

    if typen.aggregate:
        start,end = get_list_bounds(typen.aggregate)
        base = 'ListOf< {0}, {1}, {2} >'.format(base,start,end)

    return template_type.format(type=typen.name,real_type=base)

def gen_converter(entity,schema):
    max_arg = count_args_up(entity,schema)
    arg_idx = arg_idx_ofs = max_arg - len(entity.members)

    code = template_converter_prologue_a.format(parent=entity.parent) if entity.parent else template_converter_prologue_b
    if entity.name in schema.blacklist_partial:
        return code+template_converter_ommitted+template_converter_epilogue

    if max_arg > 0:
        code += template_converter_check_argcnt.format(max_arg=max_arg,name=entity.name)

    for field in entity.members:
        code += template_converter_code_per_field.format(fieldname=field.name,
            handle_unset=handle_unset_args(field,entity,schema,arg_idx-arg_idx_ofs),
            convert=get_single_conversion(field,schema,arg_idx,entity.name))

        arg_idx += 1
    return code+template_converter_epilogue

def get_base_classes(e,schema):
    def addit(e,out):
        if e.parent:
            out.append(e.parent)
            addit(schema.entities[e.parent],out)
    res = []
    addit(e,res)
    return list(reversed(res))

def get_derived(e,schema):
    def get_deriv(e,out): # bit slow, but doesn't matter here
        s = [ee for ee in schema.entities.values() if ee.parent == e.name]
        for sel in s:
            out.append(sel.name)
            get_deriv(sel,out)
    res = []
    get_deriv(e,res)
    return res

def get_hierarchy(e,schema):
    return get_derived(e,schema)+[e.name]+get_base_classes(e,schema)

def sort_entity_list(schema):
    deps = []
    entities = schema.entities
    for e in entities.values():
        deps += get_base_classes(e,schema)+[e.name]

    checked = []
    for e in deps:
        if e not in checked:
            checked.append(e)
    return [entities[e] for e in checked]

def work(filename):
    schema = ExpressReader.read(filename,silent=True)
    entities, stub_decls, schema_table, converters, typedefs, predefs = '','',[],'','',''


    whitelist = []
    with open('entitylist.txt', 'rt') as inp:
        whitelist = [n.strip() for n in inp.read().split('\n') if n[:1]!='#' and n.strip()]

    schema.whitelist = set()
    schema.blacklist_partial = set()
    for ename in whitelist:
        try:
            e = schema.entities[ename]
        except KeyError:
            # type, not entity
            continue
        for base in [e.name]+get_base_classes(e,schema):
            schema.whitelist.add(base)
        for base in get_derived(e,schema):
            schema.blacklist_partial.add(base)

    schema.blacklist_partial -= schema.whitelist
    schema.whitelist |= schema.blacklist_partial

    # uncomment this to disable automatic code reduction based on whitelisting all used entities
    # (blacklisted entities are those who are in the whitelist and may be instanced, but will
    # only be accessed through a pointer to a base-class.)
    #schema.whitelist = set(schema.entities.keys())
    #schema.blacklist_partial = set()

    for ntype in schema.types.values():
        typedefs += gen_type_struct(ntype,schema)
        schema_table.append(template_schema_type.format(normalized_name=ntype.name.lower()))

    sorted_entities = sort_entity_list(schema)
    for entity in sorted_entities:
        parent = entity.parent+',' if entity.parent else ''

        if entity.name in schema.whitelist:
            converters += template_converter.format(type=entity.name,contents=gen_converter(entity,schema))
            schema_table.append(template_schema.format(type=entity.name,normalized_name=entity.name.lower(),argcnt=len(entity.members)))
            entities += template_entity.format(entity=entity.name,argcnt=len(entity.members),parent=parent,fields=generate_fields(entity,schema))
            predefs += template_entity_predef.format(entity=entity.name)
            stub_decls += template_stub_decl.format(type=entity.name)
        else:
            entities += template_entity_ni.format(entity=entity.name)
            predefs += template_entity_predef_ni.format(entity=entity.name)
            schema_table.append(template_schema.format(type="NotImplemented",normalized_name=entity.name.lower(),argcnt=0))

    schema_table = ','.join(schema_table)

    with open(input_template_h,'rt') as inp:
        with open(output_file_h,'wt') as outp:
            # can't use format() here since the C++ code templates contain single, unescaped curly brackets
            outp.write(inp.read().replace('{predefs}',predefs).replace('{types}',typedefs).replace('{entities}',entities).replace('{converter-decl}',stub_decls))

    with open(input_template_cpp,'rt') as inp:
        with open(output_file_cpp,'wt') as outp:
            outp.write(inp.read().replace('{schema-static-table}',schema_table).replace('{converter-impl}',converters))

if __name__ == "__main__":
    sys.exit(work(sys.argv[1] if len(sys.argv)>1 else 'schema.exp'))

@@ -1,120 +1,120 @@
#!/usr/bin/env python3
# -*- Coding: UTF-8 -*-

# ---------------------------------------------------------------------------
# Open Asset Import Library (ASSIMP)
# ---------------------------------------------------------------------------
#
# Copyright (c) 2006-2010, ASSIMP Development Team
#
# All rights reserved.
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# * Redistributions of source code must retain the above
#   copyright notice, this list of conditions and the
#   following disclaimer.
#
# * Redistributions in binary form must reproduce the above
#   copyright notice, this list of conditions and the
#   following disclaimer in the documentation and/or other
#   materials provided with the distribution.
#
# * Neither the name of the ASSIMP team, nor the names of its
#   contributors may be used to endorse or promote products
#   derived from this software without specific prior
#   written permission of the ASSIMP Development Team.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ---------------------------------------------------------------------------

"""Parse an EXPRESS file and extract basic information on all
entities and data types contained"""

import sys, os, re

re_match_entity = re.compile(r"""
ENTITY\s+(\w+)\s*                      # 'ENTITY foo'
.*?                                    # skip SUPERTYPE-of
(?:SUBTYPE\s+OF\s+\((\w+)\))?;         # 'SUBTYPE OF (bar);' or simply ';'
(.*?)                                  # 'a : atype;' (0 or more lines like this)
(?:(?:INVERSE|UNIQUE|WHERE)\s*$.*?)?   # skip the INVERSE, UNIQUE, WHERE clauses and everything behind
END_ENTITY;
""",re.VERBOSE|re.DOTALL|re.MULTILINE)

re_match_type = re.compile(r"""
TYPE\s+(\w+?)\s*=\s*((?:LIST|SET)\s*\[\d+:[\d?]+\]\s*OF)?(?:\s*UNIQUE)?\s*(\w+)   # TYPE foo = LIST[1:2] of blub
(?:(?<=ENUMERATION)\s*OF\s*\((.*?)\))?
.*?                                    # skip the WHERE clause
END_TYPE;
""",re.VERBOSE|re.DOTALL)

re_match_field = re.compile(r"""
\s+(\w+?)\s*:\s*(OPTIONAL)?\s*((?:LIST|SET)\s*\[\d+:[\d?]+\]\s*OF)?(?:\s*UNIQUE)?\s*(\w+?);
""",re.VERBOSE|re.DOTALL)

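# Illustration only (hypothetical EXPRESS attribute, not from the original
# script): against a line such as
#
#     representations : SET [1:?] OF IfcRepresentation;
#
# re_match_field yields the groups
#     ('representations', '', 'SET [1:?] OF', 'IfcRepresentation')
# i.e. (name, OPTIONAL flag, collection spec, type); read() below reorders
# these into Field(name, type, optional, collection).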
class Schema:
    def __init__(self):
        self.entities = {}
        self.types = {}

class Entity:
    def __init__(self,name,parent,members):
        self.name = name
        self.parent = parent
        self.members = members

class Field:
    def __init__(self,name,type,optional,collection):
        self.name = name
        self.type = type
        self.optional = optional
        self.collection = collection
        self.fullspec = (self.collection+' ' if self.collection else '') + self.type

class Type:
    def __init__(self,name,aggregate,equals,enums):
        self.name = name
        self.aggregate = aggregate
        self.equals = equals
        self.enums = enums


def read(filename,silent=False):
    schema = Schema()
    with open(filename,'rt') as inp:
        contents = inp.read()
        types = re.findall(re_match_type,contents)
        for name,aggregate,equals,enums in types:
            schema.types[name] = Type(name,aggregate,equals,enums)

        entities = re.findall(re_match_entity,contents)
        for name,parent,fields_raw in entities:
            print('process entity {0}, parent is {1}'.format(name,parent)) if not silent else None
            fields = re.findall(re_match_field,fields_raw)
            members = [Field(name,type,opt,coll) for name, opt, coll, type in fields]
            print(' got {0} fields'.format(len(members))) if not silent else None

            schema.entities[name] = Entity(name,parent,members)
    return schema

if __name__ == "__main__":
    sys.exit(read(sys.argv[1] if len(sys.argv)>1 else 'schema.exp'))