#!/usr/bin/env python

Import('env')

env.editor_sources = []

import os

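
# The helpers below bake editor resources (certificates, class reference docs,
# fonts, translations) into generated C headers so they can be linked straight
# into the editor binary.

# make_certs_header: zlib-compresses the CA certificate bundle (source[0]) and
# writes it to target[0] as a byte array. Illustrative shape of the generated
# header (sizes and byte values are placeholders, not real output):
#
#   /* THIS FILE IS GENERATED DO NOT EDIT */
#   #ifndef _CERTS_RAW_H
#   #define _CERTS_RAW_H
#   static const int _certs_compressed_size=1234;
#   static const int _certs_uncompressed_size=5678;
#   static const unsigned char _certs_compressed[]={
#   120,
#   156,
#   ...
#   };
#   #endif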
def make_certs_header(target, source, env):
    src = source[0].srcnode().abspath
    dst = target[0].srcnode().abspath
    f = open(src, "rb")
    g = open(dst, "wb")
    buf = f.read()
    decomp_size = len(buf)
    import zlib
    buf = zlib.compress(buf)

    g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
    g.write("#ifndef _CERTS_RAW_H\n")
    g.write("#define _CERTS_RAW_H\n")
    g.write("static const int _certs_compressed_size=" + str(len(buf)) + ";\n")
    g.write("static const int _certs_uncompressed_size=" + str(decomp_size) + ";\n")
    g.write("static const unsigned char _certs_compressed[]={\n")
    for i in range(len(buf)):
        g.write(str(ord(buf[i])) + ",\n")
    g.write("};\n")
    g.write("#endif")


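# make_doc_header: concatenates every classes.xml passed in `source` into a
# single XML document (each file's <class> bodies are spliced between the first
# file's header and a closing </doc> tag), zlib-compresses the result and
# writes it to target[0] as _doc_data_compressed[] together with its compressed
# and uncompressed sizes.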
def make_doc_header(target, source, env):
    dst = target[0].srcnode().abspath
    g = open(dst, "wb")
    buf = ""
    docbegin = ""
    docend = ""
    for s in source:
        src = s.srcnode().abspath
        f = open(src, "rb")
        content = f.read()
        buf += content[content.find("<class"): content.rfind("</doc>")]
        if len(docbegin) == 0:
            docbegin = content[0: content.find("<class")]
        if len(docend) == 0:
            # slice relative to this file's content (not the accumulated buf),
            # so the closing </doc> tag is actually captured
            docend = content[content.rfind("</doc>"): len(content)]

    buf = docbegin + buf + docend
    decomp_size = len(buf)
    import zlib
    buf = zlib.compress(buf)

    g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
    g.write("#ifndef _DOC_DATA_RAW_H\n")
    g.write("#define _DOC_DATA_RAW_H\n")
    g.write("static const int _doc_data_compressed_size=" + str(len(buf)) + ";\n")
    g.write("static const int _doc_data_uncompressed_size=" + str(decomp_size) + ";\n")
    g.write("static const unsigned char _doc_data_compressed[]={\n")
    for i in range(len(buf)):
        g.write(str(ord(buf[i])) + ",\n")
    g.write("};\n")
    g.write("#endif")


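# make_fonts_header: embeds each font file passed in `source` as an
# uncompressed byte array (_font_<name>[] with _font_<name>_size), since
# FreeType reads the face directly from the memory pointer.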
def make_fonts_header(target, source, env):
    dst = target[0].srcnode().abspath

    g = open(dst, "wb")

    g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
    g.write("#ifndef _EDITOR_FONTS_H\n")
    g.write("#define _EDITOR_FONTS_H\n")

    # saving uncompressed, since freetype will reference from memory pointer
    import os.path

    for i in range(len(source)):
        print("Appending font: " + source[i].srcnode().abspath)
        f = open(source[i].srcnode().abspath, "rb")
        buf = f.read()

        name = os.path.splitext(os.path.basename(source[i].srcnode().abspath))[0]

        g.write("static const int _font_" + name + "_size=" + str(len(buf)) + ";\n")
        g.write("static const unsigned char _font_" + name + "[]={\n")
        for i in range(len(buf)):
            g.write(str(ord(buf[i])) + ",\n")

        g.write("};\n")

    g.write("#endif")


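# make_translations_header: zlib-compresses every .po file passed in `source`
# (sorted by locale name) into _translation_<locale>_compressed[] arrays and
# closes the header with an _editor_translations[] table mapping each locale to
# its data. Illustrative shape of that table for a hypothetical "es" locale
# (sizes are placeholders):
#
#   static EditorTranslationList _editor_translations[]={
#   { "es", 1234, 5678,_translation_es_compressed},
#   {NULL,0,0,NULL}
#   };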
def make_translations_header(target, source, env):
    dst = target[0].srcnode().abspath

    g = open(dst, "wb")

    g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
    g.write("#ifndef _EDITOR_TRANSLATIONS_H\n")
    g.write("#define _EDITOR_TRANSLATIONS_H\n")

    import zlib
    import os.path

    paths = [node.srcnode().abspath for node in source]
    sorted_paths = sorted(paths, key=lambda path: os.path.splitext(os.path.basename(path))[0])

    xl_names = []
    for i in range(len(sorted_paths)):
        print("Appending translation: " + sorted_paths[i])
        f = open(sorted_paths[i], "rb")
        buf = f.read()
        decomp_size = len(buf)
        buf = zlib.compress(buf)
        name = os.path.splitext(os.path.basename(sorted_paths[i]))[0]

        #g.write("static const int _translation_"+name+"_compressed_size="+str(len(buf))+";\n")
        #g.write("static const int _translation_"+name+"_uncompressed_size="+str(decomp_size)+";\n")
        g.write("static const unsigned char _translation_" + name + "_compressed[]={\n")
        for i in range(len(buf)):
            g.write(str(ord(buf[i])) + ",\n")

        g.write("};\n")

        xl_names.append([name, len(buf), str(decomp_size)])

    g.write("struct EditorTranslationList {\n")
    g.write("\tconst char* lang;\n")
    g.write("\tint comp_size;\n")
    g.write("\tint uncomp_size;\n")
    g.write("\tconst unsigned char* data;\n")
    g.write("};\n\n")
    g.write("static EditorTranslationList _editor_translations[]={\n")
    for x in xl_names:
        g.write("\t{ \"" + x[0] + "\", " + str(x[1]) + ", " + str(x[2]) + ",_translation_" + x[0] + "_compressed},\n")
    g.write("\t{NULL,0,0,NULL}\n")
    g.write("};\n")

    g.write("#endif")


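# Everything below only runs for editor (tools) builds: it generates
# register_exporters.cpp and schedules the generated headers declared above.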
if (env["tools"] == "yes"):
    # Register exporters
    reg_exporters_inc = '#include "register_exporters.h"\n'
    reg_exporters = 'void register_exporters() {\n'
    for e in env.platform_exporters:
        env.editor_sources.append("#platform/" + e + "/export/export.cpp")
        reg_exporters += '\tregister_' + e + '_exporter();\n'
        reg_exporters_inc += '#include "platform/' + e + '/export/export.h"\n'
    reg_exporters += '}\n'
    f = open("register_exporters.cpp", "wb")
    f.write(reg_exporters_inc)
    f.write(reg_exporters)
    f.close()

    # API documentation
    docs = ["#doc/base/classes.xml"]
    moduledir = os.path.join(os.getcwd(), "..", "modules")
    for m in os.listdir(moduledir):
        curmodule = os.path.join(moduledir, m)
        docfile = os.path.join(curmodule, "classes.xml")
        if os.path.isdir(curmodule) and os.path.isfile(docfile):
            docs.append(docfile)
    env.Depends("#editor/doc_data_compressed.h", docs)
    env.Command("#editor/doc_data_compressed.h", docs, make_doc_header)

    # Certificates
    env.Depends("#editor/certs_compressed.h", "#thirdparty/certs/ca-certificates.crt")
    env.Command("#editor/certs_compressed.h", "#thirdparty/certs/ca-certificates.crt", make_certs_header)

    import glob
    path = env.Dir('.').abspath

    # Translations
    tlist = glob.glob(path + "/translations/*.po")
    print("translations: ", tlist)
    env.Depends('#editor/translations.h', tlist)
    env.Command('#editor/translations.h', tlist, make_translations_header)

    # Fonts
    flist = glob.glob(path + "/../thirdparty/fonts/*.ttf")
    # extend, not append: append would nest the .otf list as a single entry
    flist.extend(glob.glob(path + "/../thirdparty/fonts/*.otf"))
    print("fonts: ", flist)
    env.Depends('#editor/builtin_fonts.h', flist)
    env.Command('#editor/builtin_fonts.h', flist, make_fonts_header)

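# Collect the editor sources proper, pull in the per-directory SConscripts and
# link everything into the "editor" library, which is prepended to the build's
# LIBS.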
env.add_source_files(env.editor_sources, "*.cpp")

SConscript('collada/SCsub')
SConscript('doc/SCsub')
SConscript('fileserver/SCsub')
SConscript('icons/SCsub')
SConscript('import/SCsub')
SConscript('io_plugins/SCsub')
SConscript('plugins/SCsub')

lib = env.Library("editor", env.editor_sources)
env.Prepend(LIBS=[lib])

Export('env')