Merge pull request #89361 from Repiteo/scons/with-statement

SCons: Ensure `with` statement where applicable

Commit 53701a0234
21 changed files with 1649 additions and 1699 deletions
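Every hunk below applies the same mechanical change: paired open()/close() calls become a with block, so handles are closed deterministically even when an exception interrupts the writes, and the trailing close() calls disappear. A minimal sketch of the pattern, with hypothetical file names rather than code from any one hunk:

    # Before: f and g stay open if anything between open() and close() raises.
    f = open("input.txt", "r", encoding="utf-8")
    g = open("output.gen.h", "w", encoding="utf-8", newline="\n")
    g.write(f.read())
    g.close()
    f.close()

    # After: both handles are closed on block exit, error or not.
    with open("input.txt", "r", encoding="utf-8") as f, open("output.gen.h", "w", encoding="utf-8", newline="\n") as g:
        g.write(f.read())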
@@ -31,35 +31,31 @@ def escape_string(s):
 def make_certs_header(target, source, env):
     src = source[0]
     dst = target[0]
-    f = open(src, "rb")
-    g = open(dst, "w", encoding="utf-8", newline="\n")
-    buf = f.read()
-    decomp_size = len(buf)
+    with open(src, "rb") as f, open(dst, "w", encoding="utf-8", newline="\n") as g:
+        buf = f.read()
+        decomp_size = len(buf)
 
-    # Use maximum zlib compression level to further reduce file size
-    # (at the cost of initial build times).
-    buf = zlib.compress(buf, zlib.Z_BEST_COMPRESSION)
+        # Use maximum zlib compression level to further reduce file size
+        # (at the cost of initial build times).
+        buf = zlib.compress(buf, zlib.Z_BEST_COMPRESSION)
 
-    g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
-    g.write("#ifndef CERTS_COMPRESSED_GEN_H\n")
-    g.write("#define CERTS_COMPRESSED_GEN_H\n")
+        g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+        g.write("#ifndef CERTS_COMPRESSED_GEN_H\n")
+        g.write("#define CERTS_COMPRESSED_GEN_H\n")
 
-    # System certs path. Editor will use them if defined. (for package maintainers)
-    path = env["system_certs_path"]
-    g.write('#define _SYSTEM_CERTS_PATH "%s"\n' % str(path))
-    if env["builtin_certs"]:
-        # Defined here and not in env so changing it does not trigger a full rebuild.
-        g.write("#define BUILTIN_CERTS_ENABLED\n")
-        g.write("static const int _certs_compressed_size = " + str(len(buf)) + ";\n")
-        g.write("static const int _certs_uncompressed_size = " + str(decomp_size) + ";\n")
-        g.write("static const unsigned char _certs_compressed[] = {\n")
-        for i in range(len(buf)):
-            g.write("\t" + str(buf[i]) + ",\n")
-        g.write("};\n")
-    g.write("#endif // CERTS_COMPRESSED_GEN_H")
-
-    g.close()
-    f.close()
+        # System certs path. Editor will use them if defined. (for package maintainers)
+        path = env["system_certs_path"]
+        g.write('#define _SYSTEM_CERTS_PATH "%s"\n' % str(path))
+        if env["builtin_certs"]:
+            # Defined here and not in env so changing it does not trigger a full rebuild.
+            g.write("#define BUILTIN_CERTS_ENABLED\n")
+            g.write("static const int _certs_compressed_size = " + str(len(buf)) + ";\n")
+            g.write("static const int _certs_uncompressed_size = " + str(decomp_size) + ";\n")
+            g.write("static const unsigned char _certs_compressed[] = {\n")
+            for i in range(len(buf)):
+                g.write("\t" + str(buf[i]) + ",\n")
+            g.write("};\n")
+        g.write("#endif // CERTS_COMPRESSED_GEN_H")
 
 
 def make_authors_header(target, source, env):
@@ -78,42 +74,37 @@ def make_authors_header(target, source, env):
 
     src = source[0]
     dst = target[0]
-    f = open(src, "r", encoding="utf-8")
-    g = open(dst, "w", encoding="utf-8", newline="\n")
-
-    g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
-    g.write("#ifndef AUTHORS_GEN_H\n")
-    g.write("#define AUTHORS_GEN_H\n")
+    with open(src, "r", encoding="utf-8") as f, open(dst, "w", encoding="utf-8", newline="\n") as g:
+        g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+        g.write("#ifndef AUTHORS_GEN_H\n")
+        g.write("#define AUTHORS_GEN_H\n")
 
-    reading = False
+        reading = False
 
-    def close_section():
-        g.write("\t0\n")
-        g.write("};\n")
+        def close_section():
+            g.write("\t0\n")
+            g.write("};\n")
 
-    for line in f:
-        if reading:
-            if line.startswith(" "):
-                g.write('\t"' + escape_string(line.strip()) + '",\n')
-                continue
-        if line.startswith("## "):
-            if reading:
-                close_section()
-                reading = False
-            for section, section_id in zip(sections, sections_id):
-                if line.strip().endswith(section):
-                    current_section = escape_string(section_id)
-                    reading = True
-                    g.write("const char *const " + current_section + "[] = {\n")
-                    break
+        for line in f:
+            if reading:
+                if line.startswith(" "):
+                    g.write('\t"' + escape_string(line.strip()) + '",\n')
+                    continue
+            if line.startswith("## "):
+                if reading:
+                    close_section()
+                    reading = False
+                for section, section_id in zip(sections, sections_id):
+                    if line.strip().endswith(section):
+                        current_section = escape_string(section_id)
+                        reading = True
+                        g.write("const char *const " + current_section + "[] = {\n")
+                        break
 
-    if reading:
-        close_section()
-
-    g.write("#endif // AUTHORS_GEN_H\n")
-
-    g.close()
-    f.close()
+        if reading:
+            close_section()
+
+        g.write("#endif // AUTHORS_GEN_H\n")
 
 
 def make_donors_header(target, source, env):
@@ -140,42 +131,37 @@ def make_donors_header(target, source, env):
 
     src = source[0]
     dst = target[0]
-    f = open(src, "r", encoding="utf-8")
-    g = open(dst, "w", encoding="utf-8", newline="\n")
-
-    g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
-    g.write("#ifndef DONORS_GEN_H\n")
-    g.write("#define DONORS_GEN_H\n")
+    with open(src, "r", encoding="utf-8") as f, open(dst, "w", encoding="utf-8", newline="\n") as g:
+        g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+        g.write("#ifndef DONORS_GEN_H\n")
+        g.write("#define DONORS_GEN_H\n")
 
-    reading = False
+        reading = False
 
-    def close_section():
-        g.write("\t0\n")
-        g.write("};\n")
+        def close_section():
+            g.write("\t0\n")
+            g.write("};\n")
 
-    for line in f:
-        if reading >= 0:
-            if line.startswith(" "):
-                g.write('\t"' + escape_string(line.strip()) + '",\n')
-                continue
-        if line.startswith("## "):
-            if reading:
-                close_section()
-                reading = False
-            for section, section_id in zip(sections, sections_id):
-                if line.strip().endswith(section):
-                    current_section = escape_string(section_id)
-                    reading = True
-                    g.write("const char *const " + current_section + "[] = {\n")
-                    break
+        for line in f:
+            if reading >= 0:
+                if line.startswith(" "):
+                    g.write('\t"' + escape_string(line.strip()) + '",\n')
+                    continue
+            if line.startswith("## "):
+                if reading:
+                    close_section()
+                    reading = False
+                for section, section_id in zip(sections, sections_id):
+                    if line.strip().endswith(section):
+                        current_section = escape_string(section_id)
+                        reading = True
+                        g.write("const char *const " + current_section + "[] = {\n")
+                        break
 
-    if reading:
-        close_section()
-
-    g.write("#endif // DONORS_GEN_H\n")
-
-    g.close()
-    f.close()
+        if reading:
+            close_section()
+
+        g.write("#endif // DONORS_GEN_H\n")
 
 
 def make_license_header(target, source, env):
@@ -4,18 +4,16 @@ import zlib
 def run(target, source, env):
     src = source[0]
     dst = target[0]
-    f = open(src, "rb")
-    g = open(dst, "w", encoding="utf-8", newline="\n")
-    buf = f.read()
-    decomp_size = len(buf)
+    with open(src, "rb") as f, open(dst, "w", encoding="utf-8", newline="\n") as g:
+        buf = f.read()
+        decomp_size = len(buf)
 
-    # Use maximum zlib compression level to further reduce file size
-    # (at the cost of initial build times).
-    buf = zlib.compress(buf, zlib.Z_BEST_COMPRESSION)
+        # Use maximum zlib compression level to further reduce file size
+        # (at the cost of initial build times).
+        buf = zlib.compress(buf, zlib.Z_BEST_COMPRESSION)
 
-    g.write(
-        """/* THIS FILE IS GENERATED DO NOT EDIT */
+        g.write(
+            """/* THIS FILE IS GENERATED DO NOT EDIT */
 #ifndef GDEXTENSION_INTERFACE_DUMP_H
 #define GDEXTENSION_INTERFACE_DUMP_H
@@ -26,17 +24,17 @@ def run(target, source, env):
 #include "core/string/ustring.h"
 
 """
-    )
-
-    g.write("static const int _gdextension_interface_data_compressed_size = " + str(len(buf)) + ";\n")
-    g.write("static const int _gdextension_interface_data_uncompressed_size = " + str(decomp_size) + ";\n")
-    g.write("static const unsigned char _gdextension_interface_data_compressed[] = {\n")
-    for i in range(len(buf)):
-        g.write("\t" + str(buf[i]) + ",\n")
-    g.write("};\n")
+        )
+
+        g.write("static const int _gdextension_interface_data_compressed_size = " + str(len(buf)) + ";\n")
+        g.write("static const int _gdextension_interface_data_uncompressed_size = " + str(decomp_size) + ";\n")
+        g.write("static const unsigned char _gdextension_interface_data_compressed[] = {\n")
+        for i in range(len(buf)):
+            g.write("\t" + str(buf[i]) + ",\n")
+        g.write("};\n")
 
-    g.write(
-        """
+        g.write(
+            """
 class GDExtensionInterfaceDump {
 public:
 	static void generate_gdextension_interface_file(const String &p_path) {
@@ -54,9 +52,7 @@ class GDExtensionInterfaceDump {
 
 #endif // GDEXTENSION_INTERFACE_DUMP_H
 """
-    )
-    g.close()
-    f.close()
+        )
 
 
 if __name__ == "__main__":
@@ -9,60 +9,58 @@ from collections import OrderedDict
 
 def make_default_controller_mappings(target, source, env):
     dst = target[0]
-    g = open(dst, "w", encoding="utf-8", newline="\n")
-
-    g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
-    g.write('#include "core/typedefs.h"\n')
-    g.write('#include "core/input/default_controller_mappings.h"\n')
+    with open(dst, "w", encoding="utf-8", newline="\n") as g:
+        g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+        g.write('#include "core/typedefs.h"\n')
+        g.write('#include "core/input/default_controller_mappings.h"\n')
 
-    # ensure mappings have a consistent order
-    platform_mappings: dict = OrderedDict()
-    for src_path in source:
-        with open(src_path, "r") as f:
-            # read mapping file and skip header
-            mapping_file_lines = f.readlines()[2:]
+        # ensure mappings have a consistent order
+        platform_mappings: dict = OrderedDict()
+        for src_path in source:
+            with open(src_path, "r") as f:
+                # read mapping file and skip header
+                mapping_file_lines = f.readlines()[2:]
 
-        current_platform = None
-        for line in mapping_file_lines:
-            if not line:
-                continue
-            line = line.strip()
-            if len(line) == 0:
-                continue
-            if line[0] == "#":
-                current_platform = line[1:].strip()
-                if current_platform not in platform_mappings:
-                    platform_mappings[current_platform] = {}
-            elif current_platform:
-                line_parts = line.split(",")
-                guid = line_parts[0]
-                if guid in platform_mappings[current_platform]:
-                    g.write(
-                        "// WARNING - DATABASE {} OVERWROTE PRIOR MAPPING: {} {}\n".format(
-                            src_path, current_platform, platform_mappings[current_platform][guid]
-                        )
-                    )
-                platform_mappings[current_platform][guid] = line
+            current_platform = None
+            for line in mapping_file_lines:
+                if not line:
+                    continue
+                line = line.strip()
+                if len(line) == 0:
+                    continue
+                if line[0] == "#":
+                    current_platform = line[1:].strip()
+                    if current_platform not in platform_mappings:
+                        platform_mappings[current_platform] = {}
+                elif current_platform:
+                    line_parts = line.split(",")
+                    guid = line_parts[0]
+                    if guid in platform_mappings[current_platform]:
+                        g.write(
+                            "// WARNING - DATABASE {} OVERWROTE PRIOR MAPPING: {} {}\n".format(
+                                src_path, current_platform, platform_mappings[current_platform][guid]
+                            )
+                        )
+                    platform_mappings[current_platform][guid] = line
 
-    platform_variables = {
-        "Linux": "#ifdef LINUXBSD_ENABLED",
-        "Windows": "#ifdef WINDOWS_ENABLED",
-        "Mac OS X": "#ifdef MACOS_ENABLED",
-        "Android": "#ifdef ANDROID_ENABLED",
-        "iOS": "#ifdef IOS_ENABLED",
-        "Web": "#ifdef WEB_ENABLED",
-    }
+        platform_variables = {
+            "Linux": "#ifdef LINUXBSD_ENABLED",
+            "Windows": "#ifdef WINDOWS_ENABLED",
+            "Mac OS X": "#ifdef MACOS_ENABLED",
+            "Android": "#ifdef ANDROID_ENABLED",
+            "iOS": "#ifdef IOS_ENABLED",
+            "Web": "#ifdef WEB_ENABLED",
+        }
 
-    g.write("const char* DefaultControllerMappings::mappings[] = {\n")
-    for platform, mappings in platform_mappings.items():
-        variable = platform_variables[platform]
-        g.write("{}\n".format(variable))
-        for mapping in mappings.values():
-            g.write('\t"{}",\n'.format(mapping))
-        g.write("#endif\n")
+        g.write("const char* DefaultControllerMappings::mappings[] = {\n")
+        for platform, mappings in platform_mappings.items():
+            variable = platform_variables[platform]
+            g.write("{}\n".format(variable))
+            for mapping in mappings.values():
+                g.write('\t"{}",\n'.format(mapping))
+            g.write("#endif\n")
 
-    g.write("\tnullptr\n};\n")
-    g.close()
+        g.write("\tnullptr\n};\n")
 
 
 if __name__ == "__main__":
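The converted make_default_controller_mappings ends up with nested with blocks: the generated output stays open for the whole function while each mapping database is opened and closed per loop iteration. The shape in miniature, with hypothetical input names:

    # Outer handle lives for the whole block; inner handles live one iteration each.
    with open("default_controller_mappings.gen.cpp", "w", encoding="utf-8", newline="\n") as g:
        for src_path in ["gamecontrollerdb.txt", "godotcontrollerdb.txt"]:  # hypothetical inputs
            with open(src_path, "r") as f:
                g.write(f.read())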
(File diff suppressed because it is too large.)
editor/SCsub (18 changed lines)
@@ -11,17 +11,15 @@ import editor_builders
 
 def _make_doc_data_class_path(to_path):
     # NOTE: It is safe to generate this file here, since this is still executed serially
-    g = open(os.path.join(to_path, "doc_data_class_path.gen.h"), "w", encoding="utf-8", newline="\n")
-    g.write("static const int _doc_data_class_path_count = " + str(len(env.doc_class_path)) + ";\n")
-    g.write("struct _DocDataClassPath { const char* name; const char* path; };\n")
+    with open(os.path.join(to_path, "doc_data_class_path.gen.h"), "w", encoding="utf-8", newline="\n") as g:
+        g.write("static const int _doc_data_class_path_count = " + str(len(env.doc_class_path)) + ";\n")
+        g.write("struct _DocDataClassPath { const char* name; const char* path; };\n")
 
-    g.write("static const _DocDataClassPath _doc_data_class_paths[" + str(len(env.doc_class_path) + 1) + "] = {\n")
-    for c in sorted(env.doc_class_path):
-        g.write('\t{"' + c + '", "' + env.doc_class_path[c] + '"},\n')
-    g.write("\t{nullptr, nullptr}\n")
-    g.write("};\n")
-
-    g.close()
+        g.write("static const _DocDataClassPath _doc_data_class_paths[" + str(len(env.doc_class_path) + 1) + "] = {\n")
+        for c in sorted(env.doc_class_path):
+            g.write('\t{"' + c + '", "' + env.doc_class_path[c] + '"},\n')
+        g.write("\t{nullptr, nullptr}\n")
+        g.write("};\n")
 
 
 if env.editor_build:
@@ -16,116 +16,113 @@ from platform_methods import subprocess_main
 
 def make_doc_header(target, source, env):
     dst = target[0]
-    g = open(dst, "w", encoding="utf-8", newline="\n")
-    buf = ""
-    docbegin = ""
-    docend = ""
-    for src in source:
-        if not src.endswith(".xml"):
-            continue
-        with open(src, "r", encoding="utf-8") as f:
-            content = f.read()
-        buf += content
+    with open(dst, "w", encoding="utf-8", newline="\n") as g:
+        buf = ""
+        docbegin = ""
+        docend = ""
+        for src in source:
+            if not src.endswith(".xml"):
+                continue
+            with open(src, "r", encoding="utf-8") as f:
+                content = f.read()
+            buf += content
 
-    buf = (docbegin + buf + docend).encode("utf-8")
-    decomp_size = len(buf)
+        buf = (docbegin + buf + docend).encode("utf-8")
+        decomp_size = len(buf)
 
-    # Use maximum zlib compression level to further reduce file size
-    # (at the cost of initial build times).
-    buf = zlib.compress(buf, zlib.Z_BEST_COMPRESSION)
+        # Use maximum zlib compression level to further reduce file size
+        # (at the cost of initial build times).
+        buf = zlib.compress(buf, zlib.Z_BEST_COMPRESSION)
 
-    g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
-    g.write("#ifndef _DOC_DATA_RAW_H\n")
-    g.write("#define _DOC_DATA_RAW_H\n")
-    g.write('static const char *_doc_data_hash = "' + str(hash(buf)) + '";\n')
-    g.write("static const int _doc_data_compressed_size = " + str(len(buf)) + ";\n")
-    g.write("static const int _doc_data_uncompressed_size = " + str(decomp_size) + ";\n")
-    g.write("static const unsigned char _doc_data_compressed[] = {\n")
-    for i in range(len(buf)):
-        g.write("\t" + str(buf[i]) + ",\n")
-    g.write("};\n")
+        g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+        g.write("#ifndef _DOC_DATA_RAW_H\n")
+        g.write("#define _DOC_DATA_RAW_H\n")
+        g.write('static const char *_doc_data_hash = "' + str(hash(buf)) + '";\n')
+        g.write("static const int _doc_data_compressed_size = " + str(len(buf)) + ";\n")
+        g.write("static const int _doc_data_uncompressed_size = " + str(decomp_size) + ";\n")
+        g.write("static const unsigned char _doc_data_compressed[] = {\n")
+        for i in range(len(buf)):
+            g.write("\t" + str(buf[i]) + ",\n")
+        g.write("};\n")
 
-    g.write("#endif")
-
-    g.close()
+        g.write("#endif")
 
 
 def make_translations_header(target, source, env, category):
     dst = target[0]
 
-    g = open(dst, "w", encoding="utf-8", newline="\n")
-
-    g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
-    g.write("#ifndef _{}_TRANSLATIONS_H\n".format(category.upper()))
-    g.write("#define _{}_TRANSLATIONS_H\n".format(category.upper()))
+    with open(dst, "w", encoding="utf-8", newline="\n") as g:
+        g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+        g.write("#ifndef _{}_TRANSLATIONS_H\n".format(category.upper()))
+        g.write("#define _{}_TRANSLATIONS_H\n".format(category.upper()))
 
-    sorted_paths = sorted(source, key=lambda path: os.path.splitext(os.path.basename(path))[0])
+        sorted_paths = sorted(source, key=lambda path: os.path.splitext(os.path.basename(path))[0])
 
-    msgfmt_available = shutil.which("msgfmt") is not None
+        msgfmt_available = shutil.which("msgfmt") is not None
 
-    if not msgfmt_available:
-        print("WARNING: msgfmt is not found, using .po files instead of .mo")
+        if not msgfmt_available:
+            print("WARNING: msgfmt is not found, using .po files instead of .mo")
 
-    xl_names = []
-    for i in range(len(sorted_paths)):
-        if msgfmt_available:
-            mo_path = os.path.join(tempfile.gettempdir(), uuid.uuid4().hex + ".mo")
-            cmd = "msgfmt " + sorted_paths[i] + " --no-hash -o " + mo_path
-            try:
-                subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE).communicate()
-                with open(mo_path, "rb") as f:
-                    buf = f.read()
-            except OSError as e:
-                print(
-                    "WARNING: msgfmt execution failed, using .po file instead of .mo: path=%r; [%s] %s"
-                    % (sorted_paths[i], e.__class__.__name__, e)
-                )
-                with open(sorted_paths[i], "rb") as f:
-                    buf = f.read()
-            finally:
-                try:
-                    os.remove(mo_path)
-                except OSError as e:
-                    # Do not fail the entire build if it cannot delete a temporary file
-                    print(
-                        "WARNING: Could not delete temporary .mo file: path=%r; [%s] %s"
-                        % (mo_path, e.__class__.__name__, e)
-                    )
-        else:
-            with open(sorted_paths[i], "rb") as f:
-                buf = f.read()
+        xl_names = []
+        for i in range(len(sorted_paths)):
+            if msgfmt_available:
+                mo_path = os.path.join(tempfile.gettempdir(), uuid.uuid4().hex + ".mo")
+                cmd = "msgfmt " + sorted_paths[i] + " --no-hash -o " + mo_path
+                try:
+                    subprocess.Popen(cmd, shell=True, stderr=subprocess.PIPE).communicate()
+                    with open(mo_path, "rb") as f:
+                        buf = f.read()
+                except OSError as e:
+                    print(
+                        "WARNING: msgfmt execution failed, using .po file instead of .mo: path=%r; [%s] %s"
+                        % (sorted_paths[i], e.__class__.__name__, e)
+                    )
+                    with open(sorted_paths[i], "rb") as f:
+                        buf = f.read()
+                finally:
+                    try:
+                        os.remove(mo_path)
+                    except OSError as e:
+                        # Do not fail the entire build if it cannot delete a temporary file
+                        print(
+                            "WARNING: Could not delete temporary .mo file: path=%r; [%s] %s"
+                            % (mo_path, e.__class__.__name__, e)
+                        )
+            else:
+                with open(sorted_paths[i], "rb") as f:
+                    buf = f.read()
 
-        decomp_size = len(buf)
-        # Use maximum zlib compression level to further reduce file size
-        # (at the cost of initial build times).
-        buf = zlib.compress(buf, zlib.Z_BEST_COMPRESSION)
-        name = os.path.splitext(os.path.basename(sorted_paths[i]))[0]
+            decomp_size = len(buf)
+            # Use maximum zlib compression level to further reduce file size
+            # (at the cost of initial build times).
+            buf = zlib.compress(buf, zlib.Z_BEST_COMPRESSION)
+            name = os.path.splitext(os.path.basename(sorted_paths[i]))[0]
 
-        g.write("static const unsigned char _{}_translation_{}_compressed[] = {{\n".format(category, name))
-        for j in range(len(buf)):
-            g.write("\t" + str(buf[j]) + ",\n")
+            g.write("static const unsigned char _{}_translation_{}_compressed[] = {{\n".format(category, name))
+            for j in range(len(buf)):
+                g.write("\t" + str(buf[j]) + ",\n")
 
-        g.write("};\n")
+            g.write("};\n")
 
-        xl_names.append([name, len(buf), str(decomp_size)])
+            xl_names.append([name, len(buf), str(decomp_size)])
 
-    g.write("struct {}TranslationList {{\n".format(category.capitalize()))
-    g.write("\tconst char* lang;\n")
-    g.write("\tint comp_size;\n")
-    g.write("\tint uncomp_size;\n")
-    g.write("\tconst unsigned char* data;\n")
-    g.write("};\n\n")
-    g.write("static {}TranslationList _{}_translations[] = {{\n".format(category.capitalize(), category))
-    for x in xl_names:
-        g.write(
-            '\t{{ "{}", {}, {}, _{}_translation_{}_compressed }},\n'.format(
-                x[0], str(x[1]), str(x[2]), category, x[0]
-            )
-        )
-    g.write("\t{nullptr, 0, 0, nullptr}\n")
-    g.write("};\n")
+        g.write("struct {}TranslationList {{\n".format(category.capitalize()))
+        g.write("\tconst char* lang;\n")
+        g.write("\tint comp_size;\n")
+        g.write("\tint uncomp_size;\n")
+        g.write("\tconst unsigned char* data;\n")
+        g.write("};\n\n")
+        g.write("static {}TranslationList _{}_translations[] = {{\n".format(category.capitalize(), category))
+        for x in xl_names:
+            g.write(
+                '\t{{ "{}", {}, {}, _{}_translation_{}_compressed }},\n'.format(x[0], str(x[1]), str(x[2]), category, x[0])
+            )
+        g.write("\t{nullptr, 0, 0, nullptr}\n")
+        g.write("};\n")
 
-    g.write("#endif")
-
-    g.close()
+        g.write("#endif")
 
 
 def make_editor_translations_header(target, source, env):
@@ -15,81 +15,76 @@ def make_editor_icons_action(target, source, env):
     dst = target[0]
     svg_icons = source
 
-    icons_string = StringIO()
-
-    for f in svg_icons:
-        fname = str(f)
-
-        icons_string.write('\t"')
-
-        with open(fname, "rb") as svgf:
-            b = svgf.read(1)
-            while len(b) == 1:
-                icons_string.write("\\" + str(hex(ord(b)))[1:])
-                b = svgf.read(1)
-
-        icons_string.write('"')
-        if fname != svg_icons[-1]:
-            icons_string.write(",")
-        icons_string.write("\n")
-
-    s = StringIO()
-    s.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
-    s.write("#ifndef _EDITOR_ICONS_H\n")
-    s.write("#define _EDITOR_ICONS_H\n")
-    s.write("static const int editor_icons_count = {};\n".format(len(svg_icons)))
-    s.write("static const char *editor_icons_sources[] = {\n")
-    s.write(icons_string.getvalue())
-    s.write("};\n\n")
-    s.write("static const char *editor_icons_names[] = {\n")
-
-    # this is used to store the indices of thumbnail icons
-    thumb_medium_indices = []
-    thumb_big_indices = []
-    index = 0
-    for f in svg_icons:
-        fname = str(f)
-
-        # Trim the `.svg` extension from the string.
-        icon_name = os.path.basename(fname)[:-4]
-        # some special cases
-        if icon_name.endswith("MediumThumb"):  # don't know a better way to handle this
-            thumb_medium_indices.append(str(index))
-        if icon_name.endswith("BigThumb"):  # don't know a better way to handle this
-            thumb_big_indices.append(str(index))
-        if icon_name.endswith("GodotFile"):  # don't know a better way to handle this
-            thumb_big_indices.append(str(index))
-
-        s.write('\t"{0}"'.format(icon_name))
-
-        if fname != svg_icons[-1]:
-            s.write(",")
-        s.write("\n")
-
-        index += 1
-
-    s.write("};\n")
-
-    if thumb_medium_indices:
-        s.write("\n\n")
-        s.write("static const int editor_md_thumbs_count = {};\n".format(len(thumb_medium_indices)))
-        s.write("static const int editor_md_thumbs_indices[] = {")
-        s.write(", ".join(thumb_medium_indices))
-        s.write("};\n")
-    if thumb_big_indices:
-        s.write("\n\n")
-        s.write("static const int editor_bg_thumbs_count = {};\n".format(len(thumb_big_indices)))
-        s.write("static const int editor_bg_thumbs_indices[] = {")
-        s.write(", ".join(thumb_big_indices))
-        s.write("};\n")
-
-    s.write("#endif\n")
-
-    with open(dst, "w", encoding="utf-8", newline="\n") as f:
-        f.write(s.getvalue())
-
-    s.close()
-    icons_string.close()
+    with StringIO() as icons_string, StringIO() as s:
+        for f in svg_icons:
+            fname = str(f)
+
+            icons_string.write('\t"')
+
+            with open(fname, "rb") as svgf:
+                b = svgf.read(1)
+                while len(b) == 1:
+                    icons_string.write("\\" + str(hex(ord(b)))[1:])
+                    b = svgf.read(1)
+
+            icons_string.write('"')
+            if fname != svg_icons[-1]:
+                icons_string.write(",")
+            icons_string.write("\n")
+
+        s.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+        s.write("#ifndef _EDITOR_ICONS_H\n")
+        s.write("#define _EDITOR_ICONS_H\n")
+        s.write("static const int editor_icons_count = {};\n".format(len(svg_icons)))
+        s.write("static const char *editor_icons_sources[] = {\n")
+        s.write(icons_string.getvalue())
+        s.write("};\n\n")
+        s.write("static const char *editor_icons_names[] = {\n")
+
+        # this is used to store the indices of thumbnail icons
+        thumb_medium_indices = []
+        thumb_big_indices = []
+        index = 0
+        for f in svg_icons:
+            fname = str(f)
+
+            # Trim the `.svg` extension from the string.
+            icon_name = os.path.basename(fname)[:-4]
+            # some special cases
+            if icon_name.endswith("MediumThumb"):  # don't know a better way to handle this
+                thumb_medium_indices.append(str(index))
+            if icon_name.endswith("BigThumb"):  # don't know a better way to handle this
+                thumb_big_indices.append(str(index))
+            if icon_name.endswith("GodotFile"):  # don't know a better way to handle this
+                thumb_big_indices.append(str(index))
+
+            s.write('\t"{0}"'.format(icon_name))
+
+            if fname != svg_icons[-1]:
+                s.write(",")
+            s.write("\n")
+
+            index += 1
+
+        s.write("};\n")
+
+        if thumb_medium_indices:
+            s.write("\n\n")
+            s.write("static const int editor_md_thumbs_count = {};\n".format(len(thumb_medium_indices)))
+            s.write("static const int editor_md_thumbs_indices[] = {")
+            s.write(", ".join(thumb_medium_indices))
+            s.write("};\n")
+        if thumb_big_indices:
+            s.write("\n\n")
+            s.write("static const int editor_bg_thumbs_count = {};\n".format(len(thumb_big_indices)))
+            s.write("static const int editor_bg_thumbs_indices[] = {")
+            s.write(", ".join(thumb_big_indices))
+            s.write("};\n")
+
+        s.write("#endif\n")
+
+        with open(dst, "w", encoding="utf-8", newline="\n") as f:
+            f.write(s.getvalue())
 
 
 if __name__ == "__main__":
@@ -54,41 +54,41 @@ def parse_template(inherits, source, delimiter):
 
 def make_templates(target, source, env):
     dst = target[0]
-    s = StringIO()
-    s.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n\n")
-    s.write("#ifndef _CODE_TEMPLATES_H\n")
-    s.write("#define _CODE_TEMPLATES_H\n\n")
-    s.write('#include "core/object/object.h"\n')
-    s.write('#include "core/object/script_language.h"\n')
+    with StringIO() as s:
+        s.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n\n")
+        s.write("#ifndef _CODE_TEMPLATES_H\n")
+        s.write("#define _CODE_TEMPLATES_H\n\n")
+        s.write('#include "core/object/object.h"\n')
+        s.write('#include "core/object/script_language.h"\n')
 
-    delimiter = "#"  # GDScript single line comment delimiter by default.
-    if source:
-        ext = os.path.splitext(source[0])[1]
-        if ext == ".cs":
-            delimiter = "//"
+        delimiter = "#"  # GDScript single line comment delimiter by default.
+        if source:
+            ext = os.path.splitext(source[0])[1]
+            if ext == ".cs":
+                delimiter = "//"
 
-    parsed_template_string = ""
-    number_of_templates = 0
+        parsed_template_string = ""
+        number_of_templates = 0
 
-    for filepath in source:
-        node_name = os.path.basename(os.path.dirname(filepath))
-        parsed_template = parse_template(node_name, filepath, delimiter)
-        parsed_template_string += "\t" + parsed_template
-        number_of_templates += 1
+        for filepath in source:
+            node_name = os.path.basename(os.path.dirname(filepath))
+            parsed_template = parse_template(node_name, filepath, delimiter)
+            parsed_template_string += "\t" + parsed_template
+            number_of_templates += 1
 
-    s.write("\nstatic const int TEMPLATES_ARRAY_SIZE = " + str(number_of_templates) + ";\n")
-    s.write("\nstatic const struct ScriptLanguage::ScriptTemplate TEMPLATES[" + str(number_of_templates) + "] = {\n")
+        s.write("\nstatic const int TEMPLATES_ARRAY_SIZE = " + str(number_of_templates) + ";\n")
+        s.write(
+            "\nstatic const struct ScriptLanguage::ScriptTemplate TEMPLATES[" + str(number_of_templates) + "] = {\n"
+        )
 
-    s.write(parsed_template_string)
+        s.write(parsed_template_string)
 
-    s.write("};\n")
+        s.write("};\n")
 
-    s.write("\n#endif\n")
+        s.write("\n#endif\n")
 
-    with open(dst, "w", encoding="utf-8", newline="\n") as f:
-        f.write(s.getvalue())
-
-    s.close()
+        with open(dst, "w", encoding="utf-8", newline="\n") as f:
+            f.write(s.getvalue())
 
 
 if __name__ == "__main__":
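The icon and template builders extend the pattern to in-memory buffers: io.StringIO supports the context-manager protocol as well, and leaving the with block closes (and discards) the buffer, so getvalue() must be called before exit. A small self-contained sketch:

    from io import StringIO

    with StringIO() as s:
        s.write("/* generated */\n")
        text = s.getvalue()  # read the contents out while the buffer is still open

    # s is closed here; calling s.getvalue() now would raise ValueError.
    print(text)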
@@ -12,29 +12,26 @@ from platform_methods import subprocess_main
 
 def make_fonts_header(target, source, env):
     dst = target[0]
 
-    g = open(dst, "w", encoding="utf-8", newline="\n")
-
-    g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
-    g.write("#ifndef _EDITOR_FONTS_H\n")
-    g.write("#define _EDITOR_FONTS_H\n")
+    with open(dst, "w", encoding="utf-8", newline="\n") as g:
+        g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+        g.write("#ifndef _EDITOR_FONTS_H\n")
+        g.write("#define _EDITOR_FONTS_H\n")
 
-    # Saving uncompressed, since FreeType will reference from memory pointer.
-    for i in range(len(source)):
-        with open(source[i], "rb") as f:
-            buf = f.read()
+        # Saving uncompressed, since FreeType will reference from memory pointer.
+        for i in range(len(source)):
+            with open(source[i], "rb") as f:
+                buf = f.read()
 
-        name = os.path.splitext(os.path.basename(source[i]))[0]
+            name = os.path.splitext(os.path.basename(source[i]))[0]
 
-        g.write("static const int _font_" + name + "_size = " + str(len(buf)) + ";\n")
-        g.write("static const unsigned char _font_" + name + "[] = {\n")
-        for j in range(len(buf)):
-            g.write("\t" + str(buf[j]) + ",\n")
+            g.write("static const int _font_" + name + "_size = " + str(len(buf)) + ";\n")
+            g.write("static const unsigned char _font_" + name + "[] = {\n")
+            for j in range(len(buf)):
+                g.write("\t" + str(buf[j]) + ",\n")
 
-        g.write("};\n")
+            g.write("};\n")
 
-    g.write("#endif")
-
-    g.close()
+        g.write("#endif")
 
 
 if __name__ == "__main__":
gles3_builders.py (1025 changed lines)
(File diff suppressed because it is too large.)
glsl_builders.py (152 changed lines)
@@ -44,72 +44,70 @@ class RDHeaderStruct:
 
 
 def include_file_in_rd_header(filename: str, header_data: RDHeaderStruct, depth: int) -> RDHeaderStruct:
-    fs = open(filename, "r")
-    line = fs.readline()
-
-    while line:
-        index = line.find("//")
-        if index != -1:
-            line = line[:index]
-
-        if line.find("#[vertex]") != -1:
-            header_data.reading = "vertex"
-            line = fs.readline()
-            header_data.line_offset += 1
-            header_data.vertex_offset = header_data.line_offset
-            continue
-
-        if line.find("#[fragment]") != -1:
-            header_data.reading = "fragment"
-            line = fs.readline()
-            header_data.line_offset += 1
-            header_data.fragment_offset = header_data.line_offset
-            continue
-
-        if line.find("#[compute]") != -1:
-            header_data.reading = "compute"
-            line = fs.readline()
-            header_data.line_offset += 1
-            header_data.compute_offset = header_data.line_offset
-            continue
-
-        while line.find("#include ") != -1:
-            includeline = line.replace("#include ", "").strip()[1:-1]
-
-            if includeline.startswith("thirdparty/"):
-                included_file = os.path.relpath(includeline)
-
-            else:
-                included_file = os.path.relpath(os.path.dirname(filename) + "/" + includeline)
-
-            if not included_file in header_data.vertex_included_files and header_data.reading == "vertex":
-                header_data.vertex_included_files += [included_file]
-                if include_file_in_rd_header(included_file, header_data, depth + 1) is None:
-                    print("Error in file '" + filename + "': #include " + includeline + "could not be found!")
-            elif not included_file in header_data.fragment_included_files and header_data.reading == "fragment":
-                header_data.fragment_included_files += [included_file]
-                if include_file_in_rd_header(included_file, header_data, depth + 1) is None:
-                    print("Error in file '" + filename + "': #include " + includeline + "could not be found!")
-            elif not included_file in header_data.compute_included_files and header_data.reading == "compute":
-                header_data.compute_included_files += [included_file]
-                if include_file_in_rd_header(included_file, header_data, depth + 1) is None:
-                    print("Error in file '" + filename + "': #include " + includeline + "could not be found!")
-
-            line = fs.readline()
-
-        line = line.replace("\r", "").replace("\n", "")
-
-        if header_data.reading == "vertex":
-            header_data.vertex_lines += [line]
-        if header_data.reading == "fragment":
-            header_data.fragment_lines += [line]
-        if header_data.reading == "compute":
-            header_data.compute_lines += [line]
-
-        line = fs.readline()
-        header_data.line_offset += 1
-
-    fs.close()
+    with open(filename, "r") as fs:
+        line = fs.readline()
+
+        while line:
+            index = line.find("//")
+            if index != -1:
+                line = line[:index]
+
+            if line.find("#[vertex]") != -1:
+                header_data.reading = "vertex"
+                line = fs.readline()
+                header_data.line_offset += 1
+                header_data.vertex_offset = header_data.line_offset
+                continue
+
+            if line.find("#[fragment]") != -1:
+                header_data.reading = "fragment"
+                line = fs.readline()
+                header_data.line_offset += 1
+                header_data.fragment_offset = header_data.line_offset
+                continue
+
+            if line.find("#[compute]") != -1:
+                header_data.reading = "compute"
+                line = fs.readline()
+                header_data.line_offset += 1
+                header_data.compute_offset = header_data.line_offset
+                continue
+
+            while line.find("#include ") != -1:
+                includeline = line.replace("#include ", "").strip()[1:-1]
+
+                if includeline.startswith("thirdparty/"):
+                    included_file = os.path.relpath(includeline)
+
+                else:
+                    included_file = os.path.relpath(os.path.dirname(filename) + "/" + includeline)
+
+                if not included_file in header_data.vertex_included_files and header_data.reading == "vertex":
+                    header_data.vertex_included_files += [included_file]
+                    if include_file_in_rd_header(included_file, header_data, depth + 1) is None:
+                        print("Error in file '" + filename + "': #include " + includeline + "could not be found!")
+                elif not included_file in header_data.fragment_included_files and header_data.reading == "fragment":
+                    header_data.fragment_included_files += [included_file]
+                    if include_file_in_rd_header(included_file, header_data, depth + 1) is None:
+                        print("Error in file '" + filename + "': #include " + includeline + "could not be found!")
+                elif not included_file in header_data.compute_included_files and header_data.reading == "compute":
+                    header_data.compute_included_files += [included_file]
+                    if include_file_in_rd_header(included_file, header_data, depth + 1) is None:
+                        print("Error in file '" + filename + "': #include " + includeline + "could not be found!")
+
+                line = fs.readline()
+
+            line = line.replace("\r", "").replace("\n", "")
+
+            if header_data.reading == "vertex":
+                header_data.vertex_lines += [line]
+            if header_data.reading == "fragment":
+                header_data.fragment_lines += [line]
+            if header_data.reading == "compute":
+                header_data.compute_lines += [line]
+
+            line = fs.readline()
+            header_data.line_offset += 1
 
     return header_data
@@ -180,22 +178,20 @@ class RAWHeaderStruct:
 
 
 def include_file_in_raw_header(filename: str, header_data: RAWHeaderStruct, depth: int) -> None:
-    fs = open(filename, "r")
-    line = fs.readline()
-
-    while line:
-        while line.find("#include ") != -1:
-            includeline = line.replace("#include ", "").strip()[1:-1]
-
-            included_file = os.path.relpath(os.path.dirname(filename) + "/" + includeline)
-            include_file_in_raw_header(included_file, header_data, depth + 1)
-
-            line = fs.readline()
-
-        header_data.code += line
-        line = fs.readline()
-
-    fs.close()
+    with open(filename, "r") as fs:
+        line = fs.readline()
+
+        while line:
+            while line.find("#include ") != -1:
+                includeline = line.replace("#include ", "").strip()[1:-1]
+
+                included_file = os.path.relpath(os.path.dirname(filename) + "/" + includeline)
+                include_file_in_raw_header(included_file, header_data, depth + 1)
+
+                line = fs.readline()
+
+            header_data.code += line
+            line = fs.readline()
 
 
 def build_raw_header(
methods.py (114 changed lines)
@@ -179,12 +179,14 @@ def get_version_info(module_version_string="", silent=False):
     gitfolder = ".git"
 
     if os.path.isfile(".git"):
-        module_folder = open(".git", "r").readline().strip()
+        with open(".git", "r") as file:
+            module_folder = file.readline().strip()
         if module_folder.startswith("gitdir: "):
             gitfolder = module_folder[8:]
 
     if os.path.isfile(os.path.join(gitfolder, "HEAD")):
-        head = open(os.path.join(gitfolder, "HEAD"), "r", encoding="utf8").readline().strip()
+        with open(os.path.join(gitfolder, "HEAD"), "r", encoding="utf8") as file:
+            head = file.readline().strip()
         if head.startswith("ref: "):
             ref = head[5:]
             # If this directory is a Git worktree instead of a root clone.
@@ -194,7 +196,8 @@ def get_version_info(module_version_string="", silent=False):
         head = os.path.join(gitfolder, ref)
         packedrefs = os.path.join(gitfolder, "packed-refs")
         if os.path.isfile(head):
-            githash = open(head, "r").readline().strip()
+            with open(head, "r") as file:
+                githash = file.readline().strip()
         elif os.path.isfile(packedrefs):
             # Git may pack refs into a single file. This code searches .git/packed-refs file for the current ref's hash.
             # https://mirrors.edge.kernel.org/pub/software/scm/git/docs/git-pack-refs.html
@@ -230,9 +233,10 @@ def generate_version_header(module_version_string=""):
 
     # NOTE: It is safe to generate these files here, since this is still executed serially.
 
-    f = open("core/version_generated.gen.h", "w", encoding="utf-8", newline="\n")
-    f.write(
-        """/* THIS FILE IS GENERATED DO NOT EDIT */
+    with open("core/version_generated.gen.h", "w", encoding="utf-8", newline="\n") as f:
+        f.write(
+            """\
+/* THIS FILE IS GENERATED DO NOT EDIT */
 #ifndef VERSION_GENERATED_GEN_H
 #define VERSION_GENERATED_GEN_H
 #define VERSION_SHORT_NAME "{short_name}"
@@ -248,52 +252,49 @@ def generate_version_header(module_version_string=""):
 #define VERSION_DOCS_URL "https://docs.godotengine.org/en/" VERSION_DOCS_BRANCH
 #endif // VERSION_GENERATED_GEN_H
 """.format(
-            **version_info
-        )
-    )
-    f.close()
+                **version_info
+            )
+        )
 
-    fhash = open("core/version_hash.gen.cpp", "w", encoding="utf-8", newline="\n")
-    fhash.write(
-        """/* THIS FILE IS GENERATED DO NOT EDIT */
+    with open("core/version_hash.gen.cpp", "w", encoding="utf-8", newline="\n") as fhash:
+        fhash.write(
+            """\
+/* THIS FILE IS GENERATED DO NOT EDIT */
 #include "core/version.h"
 const char *const VERSION_HASH = "{git_hash}";
 const uint64_t VERSION_TIMESTAMP = {git_timestamp};
 """.format(
-            **version_info
-        )
-    )
-    fhash.close()
+                **version_info
+            )
+        )
 
 
 def parse_cg_file(fname, uniforms, sizes, conditionals):
-    fs = open(fname, "r")
-    line = fs.readline()
-
-    while line:
-        if re.match(r"^\s*uniform", line):
-            res = re.match(r"uniform ([\d\w]*) ([\d\w]*)")
-            type = res.groups(1)
-            name = res.groups(2)
-
-            uniforms.append(name)
-
-            if type.find("texobj") != -1:
-                sizes.append(1)
-            else:
-                t = re.match(r"float(\d)x(\d)", type)
-                if t:
-                    sizes.append(int(t.groups(1)) * int(t.groups(2)))
-                else:
-                    t = re.match(r"float(\d)", type)
-                    sizes.append(int(t.groups(1)))
-
-            if line.find("[branch]") != -1:
-                conditionals.append(name)
-
-        line = fs.readline()
-
-    fs.close()
+    with open(fname, "r") as fs:
+        line = fs.readline()
+
+        while line:
+            if re.match(r"^\s*uniform", line):
+                res = re.match(r"uniform ([\d\w]*) ([\d\w]*)")
+                type = res.groups(1)
+                name = res.groups(2)
+
+                uniforms.append(name)
+
+                if type.find("texobj") != -1:
+                    sizes.append(1)
+                else:
+                    t = re.match(r"float(\d)x(\d)", type)
+                    if t:
+                        sizes.append(int(t.groups(1)) * int(t.groups(2)))
+                    else:
+                        t = re.match(r"float(\d)", type)
+                        sizes.append(int(t.groups(1)))
+
+                if line.find("[branch]") != -1:
+                    conditionals.append(name)
+
+            line = fs.readline()
 
 
 def get_cmdline_bool(option, default):
@@ -384,15 +385,15 @@ def is_module(path):
 
 
 def write_disabled_classes(class_list):
-    f = open("core/disabled_classes.gen.h", "w", encoding="utf-8", newline="\n")
-    f.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
-    f.write("#ifndef DISABLED_CLASSES_GEN_H\n")
-    f.write("#define DISABLED_CLASSES_GEN_H\n\n")
-    for c in class_list:
-        cs = c.strip()
-        if cs != "":
-            f.write("#define ClassDB_Disable_" + cs + " 1\n")
-    f.write("\n#endif\n")
+    with open("core/disabled_classes.gen.h", "w", encoding="utf-8", newline="\n") as f:
+        f.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+        f.write("#ifndef DISABLED_CLASSES_GEN_H\n")
+        f.write("#define DISABLED_CLASSES_GEN_H\n\n")
+        for c in class_list:
+            cs = c.strip()
+            if cs != "":
+                f.write("#define ClassDB_Disable_" + cs + " 1\n")
+        f.write("\n#endif\n")
 
 
 def write_modules(modules):
@@ -1246,7 +1247,8 @@ def generate_vs_project(env, original_args, project_name="godot"):
     ).hexdigest()
 
     if os.path.exists(f"{project_name}.vcxproj.filters"):
-        existing_filters = open(f"{project_name}.vcxproj.filters", "r").read()
+        with open(f"{project_name}.vcxproj.filters", "r") as file:
+            existing_filters = file.read()
         match = re.search(r"(?ms)^<!-- CHECKSUM$.([0-9a-f]{32})", existing_filters)
         if match is not None and md5 == match.group(1):
             skip_filters = True
@@ -1257,7 +1259,8 @@ def generate_vs_project(env, original_args, project_name="godot"):
     if not skip_filters:
         print(f"Regenerating {project_name}.vcxproj.filters")
 
-        filters_template = open("misc/msvs/vcxproj.filters.template", "r").read()
+        with open("misc/msvs/vcxproj.filters.template", "r") as file:
+            filters_template = file.read()
         for i in range(1, 10):
             filters_template = filters_template.replace(f"%%UUID{i}%%", str(uuid.uuid4()))
 
@@ -1410,7 +1413,8 @@ def generate_vs_project(env, original_args, project_name="godot"):
     )
     output = f'bin\\godot{env["PROGSUFFIX"]}'
 
-    props_template = open("misc/msvs/props.template", "r").read()
+    with open("misc/msvs/props.template", "r") as file:
+        props_template = file.read()
 
     props_template = props_template.replace("%%VSCONF%%", vsconf)
     props_template = props_template.replace("%%CONDITION%%", condition)
@@ -1567,7 +1571,8 @@ def generate_vs_project(env, original_args, project_name="godot"):
     section2 = sorted(section2)
 
     if not get_bool(original_args, "vsproj_props_only", False):
-        proj_template = open("misc/msvs/vcxproj.template", "r").read()
+        with open("misc/msvs/vcxproj.template", "r") as file:
+            proj_template = file.read()
         proj_template = proj_template.replace("%%UUID%%", proj_uuid)
         proj_template = proj_template.replace("%%CONFS%%", "\n ".join(configurations))
         proj_template = proj_template.replace("%%IMPORTS%%", "\n ".join(imports))
@@ -1578,7 +1583,8 @@ def generate_vs_project(env, original_args, project_name="godot"):
         f.write(proj_template)
 
     if not get_bool(original_args, "vsproj_props_only", False):
-        sln_template = open("misc/msvs/sln.template", "r").read()
+        with open("misc/msvs/sln.template", "r") as file:
+            sln_template = file.read()
         sln_template = sln_template.replace("%%NAME%%", project_name)
         sln_template = sln_template.replace("%%UUID%%", proj_uuid)
         sln_template = sln_template.replace("%%SLNUUID%%", sln_uuid)
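The methods.py hunks also replace one-expression reads such as open(path).read(), which leave closing the handle to the garbage collector; CPython's reference counting closes it promptly, but that is an implementation detail, and such reads raise ResourceWarning under stricter warning filters. The change in miniature, using a template path that appears in this diff:

    # Before: the temporary file object is closed whenever it is collected.
    props_template = open("misc/msvs/props.template", "r").read()

    # After: the handle is closed deterministically at the end of the block.
    with open("misc/msvs/props.template", "r") as file:
        props_template = file.read()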
@@ -9,8 +9,8 @@ if len(sys.argv) < 2:
 
 fname = sys.argv[1]
 
-fileread = open(fname.strip(), "r")
-file_contents = fileread.read()
+with open(fname.strip(), "r") as fileread:
+    file_contents = fileread.read()
 
 # If find "ERROR: AddressSanitizer:", then happens invalid read or write
 # This is critical bug, so we need to fix this as fast as possible
@@ -65,31 +65,28 @@ text += "\n"
 
 # In a second pass, we skip all consecutive comment lines starting with "/*",
 # then we can append the rest (step 2).
 
-fileread = open(fname.strip(), "r")
-line = fileread.readline()
-header_done = False
-
-while line.strip() == "":  # Skip empty lines at the top
-    line = fileread.readline()
-
-if line.find("/**********") == -1:  # Godot header starts this way
-    # Maybe starting with a non-Godot comment, abort header magic
-    header_done = True
-
-while not header_done:  # Handle header now
-    if line.find("/*") != 0:  # No more starting with a comment
-        header_done = True
-        if line.strip() != "":
-            text += line
-    line = fileread.readline()
-
-while line != "":  # Dump everything until EOF
-    text += line
-    line = fileread.readline()
-
-fileread.close()
+with open(fname.strip(), "r") as fileread:
+    line = fileread.readline()
+    header_done = False
+
+    while line.strip() == "":  # Skip empty lines at the top
+        line = fileread.readline()
+
+    if line.find("/**********") == -1:  # Godot header starts this way
+        # Maybe starting with a non-Godot comment, abort header magic
+        header_done = True
+
+    while not header_done:  # Handle header now
+        if line.find("/*") != 0:  # No more starting with a comment
+            header_done = True
+            if line.strip() != "":
+                text += line
+        line = fileread.readline()
+
+    while line != "":  # Dump everything until EOF
+        text += line
+        line = fileread.readline()
 
 # Write
-filewrite = open(fname.strip(), "w", encoding="utf-8", newline="\n")
-filewrite.write(text)
-filewrite.close()
+with open(fname.strip(), "w", encoding="utf-8", newline="\n") as filewrite:
+    filewrite.write(text)
@@ -314,7 +314,6 @@ def generate_sdk_package_versions():
 
     # We write in ../SdkPackageVersions.props.
     with open(os.path.join(dirname(script_path), "SdkPackageVersions.props"), "w", encoding="utf-8", newline="\n") as f:
         f.write(props)
-        f.close()
 
     # Also write the versioned docs URL to a constant for the Source Generators.
 
@@ -342,7 +341,6 @@ def generate_sdk_package_versions():
 
     with open(os.path.join(generators_dir, "Common.Constants.cs"), "w", encoding="utf-8", newline="\n") as f:
         f.write(constants)
-        f.close()
 
 
 def build_all(msbuild_tool, module_dir, output_dir, godot_platform, dev_debug, push_nupkgs_local, precision):
@@ -9,27 +9,26 @@ env_text_server_adv = env_modules.Clone()
 
 def make_icu_data(target, source, env):
     dst = target[0].srcnode().abspath
 
-    g = open(dst, "w", encoding="utf-8", newline="\n")
-
-    g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
-    g.write("/* (C) 2016 and later: Unicode, Inc. and others. */\n")
-    g.write("/* License & terms of use: https://www.unicode.org/copyright.html */\n")
-    g.write("#ifndef _ICU_DATA_H\n")
-    g.write("#define _ICU_DATA_H\n")
-    g.write('#include "unicode/utypes.h"\n')
-    g.write('#include "unicode/udata.h"\n')
-    g.write('#include "unicode/uversion.h"\n')
+    with open(dst, "w", encoding="utf-8", newline="\n") as g:
+        g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
+        g.write("/* (C) 2016 and later: Unicode, Inc. and others. */\n")
+        g.write("/* License & terms of use: https://www.unicode.org/copyright.html */\n")
+        g.write("#ifndef _ICU_DATA_H\n")
+        g.write("#define _ICU_DATA_H\n")
+        g.write('#include "unicode/utypes.h"\n')
+        g.write('#include "unicode/udata.h"\n')
+        g.write('#include "unicode/uversion.h"\n')
 
-    f = open(source[0].srcnode().abspath, "rb")
-    buf = f.read()
+        with open(source[0].srcnode().abspath, "rb") as f:
+            buf = f.read()
 
-    g.write('extern "C" U_EXPORT const size_t U_ICUDATA_SIZE = ' + str(len(buf)) + ";\n")
-    g.write('extern "C" U_EXPORT const unsigned char U_ICUDATA_ENTRY_POINT[] = {\n')
-    for i in range(len(buf)):
-        g.write("\t" + str(buf[i]) + ",\n")
+        g.write('extern "C" U_EXPORT const size_t U_ICUDATA_SIZE = ' + str(len(buf)) + ";\n")
+        g.write('extern "C" U_EXPORT const unsigned char U_ICUDATA_ENTRY_POINT[] = {\n')
+        for i in range(len(buf)):
+            g.write("\t" + str(buf[i]) + ",\n")
 
-    g.write("};\n")
-    g.write("#endif")
+        g.write("};\n")
+        g.write("#endif")
 
 
 # Thirdparty source files
@ -83,56 +83,56 @@ def disable_warnings(self):
|
|||
|
||||
def make_icu_data(target, source, env):
|
||||
dst = target[0].srcnode().abspath
|
||||
g = open(dst, "w", encoding="utf-8", newline="\n")
|
||||
with open(dst, "w", encoding="utf-8", newline="\n") as g:
|
||||
g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
|
||||
g.write("/* (C) 2016 and later: Unicode, Inc. and others. */\n")
|
||||
g.write("/* License & terms of use: https://www.unicode.org/copyright.html */\n")
|
||||
g.write("#ifndef _ICU_DATA_H\n")
|
||||
g.write("#define _ICU_DATA_H\n")
|
||||
g.write('#include "unicode/utypes.h"\n')
|
||||
g.write('#include "unicode/udata.h"\n')
|
||||
g.write('#include "unicode/uversion.h"\n')
|
||||
|
||||
g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
|
||||
g.write("/* (C) 2016 and later: Unicode, Inc. and others. */\n")
|
||||
g.write("/* License & terms of use: https://www.unicode.org/copyright.html */\n")
|
||||
g.write("#ifndef _ICU_DATA_H\n")
|
||||
g.write("#define _ICU_DATA_H\n")
|
||||
g.write('#include "unicode/utypes.h"\n')
|
||||
g.write('#include "unicode/udata.h"\n')
|
||||
g.write('#include "unicode/uversion.h"\n')
|
||||
with open(source[0].srcnode().abspath, "rb") as f:
|
||||
buf = f.read()
|
||||
|
||||
f = open(source[0].srcnode().abspath, "rb")
|
||||
buf = f.read()
|
||||
g.write('extern "C" U_EXPORT const size_t U_ICUDATA_SIZE = ' + str(len(buf)) + ";\n")
|
||||
g.write('extern "C" U_EXPORT const unsigned char U_ICUDATA_ENTRY_POINT[] = {\n')
|
||||
for i in range(len(buf)):
|
||||
g.write("\t" + str(buf[i]) + ",\n")
|
||||
|
||||
g.write('extern "C" U_EXPORT const size_t U_ICUDATA_SIZE = ' + str(len(buf)) + ";\n")
|
||||
g.write('extern "C" U_EXPORT const unsigned char U_ICUDATA_ENTRY_POINT[] = {\n')
|
||||
for i in range(len(buf)):
|
||||
g.write("\t" + str(buf[i]) + ",\n")
|
||||
|
||||
g.write("};\n")
|
||||
g.write("#endif")
|
||||
g.write("};\n")
|
||||
g.write("#endif")
|
||||
|
||||
|
||||
def write_macos_plist(target, binary_name, identifier, name):
    os.makedirs(f"{target}/Resource/", exist_ok=True)
    f = open(f"{target}/Resource/Info.plist", "w", encoding="utf-8", newline="\n")

    f.write(f'<?xml version="1.0" encoding="UTF-8"?>\n')
    f.write(f'<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">\n')
    f.write(f'<plist version="1.0">\n')
    f.write(f"<dict>\n")
    f.write(f"\t<key>CFBundleExecutable</key>\n")
    f.write(f"\t<string>{binary_name}</string>\n")
    f.write(f"\t<key>CFBundleIdentifier</key>\n")
    f.write(f"\t<string>{identifier}</string>\n")
    f.write(f"\t<key>CFBundleInfoDictionaryVersion</key>\n")
    f.write(f"\t<string>6.0</string>\n")
    f.write(f"\t<key>CFBundleName</key>\n")
    f.write(f"\t<string>{name}</string>\n")
    f.write(f"\t<key>CFBundlePackageType</key>\n")
    f.write(f"\t<string>FMWK</string>\n")
    f.write(f"\t<key>CFBundleShortVersionString</key>\n")
    f.write(f"\t<string>1.0.0</string>\n")
    f.write(f"\t<key>CFBundleSupportedPlatforms</key>\n")
    f.write(f"\t<array>\n")
    f.write(f"\t\t<string>MacOSX</string>\n")
    f.write(f"\t</array>\n")
    f.write(f"\t<key>CFBundleVersion</key>\n")
    f.write(f"\t<string>1.0.0</string>\n")
    f.write(f"\t<key>LSMinimumSystemVersion</key>\n")
    f.write(f"\t<string>10.14</string>\n")
    f.write(f"</dict>\n")
    f.write(f"</plist>\n")

    with open(f"{target}/Resource/Info.plist", "w", encoding="utf-8", newline="\n") as f:
        f.write(f'<?xml version="1.0" encoding="UTF-8"?>\n')
        f.write(
            f'<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">\n'
        )
        f.write(f'<plist version="1.0">\n')
        f.write(f"<dict>\n")
        f.write(f"\t<key>CFBundleExecutable</key>\n")
        f.write(f"\t<string>{binary_name}</string>\n")
        f.write(f"\t<key>CFBundleIdentifier</key>\n")
        f.write(f"\t<string>{identifier}</string>\n")
        f.write(f"\t<key>CFBundleInfoDictionaryVersion</key>\n")
        f.write(f"\t<string>6.0</string>\n")
        f.write(f"\t<key>CFBundleName</key>\n")
        f.write(f"\t<string>{name}</string>\n")
        f.write(f"\t<key>CFBundlePackageType</key>\n")
        f.write(f"\t<string>FMWK</string>\n")
        f.write(f"\t<key>CFBundleShortVersionString</key>\n")
        f.write(f"\t<string>1.0.0</string>\n")
        f.write(f"\t<key>CFBundleSupportedPlatforms</key>\n")
        f.write(f"\t<array>\n")
        f.write(f"\t\t<string>MacOSX</string>\n")
        f.write(f"\t</array>\n")
        f.write(f"\t<key>CFBundleVersion</key>\n")
        f.write(f"\t<string>1.0.0</string>\n")
        f.write(f"\t<key>LSMinimumSystemVersion</key>\n")
        f.write(f"\t<string>10.14</string>\n")
        f.write(f"</dict>\n")
        f.write(f"</plist>\n")

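The plist writer keeps its line-by-line f.write() calls, now under a single with. A hypothetical alternative, not what this commit does, is to render the whole document from one template string so the function collapses to a single write (trimmed to a few keys for brevity; the real function writes the full set):

import os

PLIST_TEMPLATE = """<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
\t<key>CFBundleExecutable</key>
\t<string>{binary_name}</string>
\t<key>CFBundleIdentifier</key>
\t<string>{identifier}</string>
\t<key>CFBundleName</key>
\t<string>{name}</string>
</dict>
</plist>
"""

def write_macos_plist_templated(target, binary_name, identifier, name):
    # Hypothetical helper, not part of the commit.
    os.makedirs(f"{target}/Resource/", exist_ok=True)
    with open(f"{target}/Resource/Info.plist", "w", encoding="utf-8", newline="\n") as f:
        f.write(PLIST_TEMPLATE.format(binary_name=binary_name, identifier=identifier, name=name))
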
@ -83,56 +83,56 @@ def disable_warnings(self):
def make_icu_data(target, source, env):
    dst = target[0].srcnode().abspath
    g = open(dst, "w", encoding="utf-8", newline="\n")

    g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
    g.write("/* (C) 2016 and later: Unicode, Inc. and others. */\n")
    g.write("/* License & terms of use: https://www.unicode.org/copyright.html */\n")
    g.write("#ifndef _ICU_DATA_H\n")
    g.write("#define _ICU_DATA_H\n")
    g.write('#include "unicode/utypes.h"\n')
    g.write('#include "unicode/udata.h"\n')
    g.write('#include "unicode/uversion.h"\n')

    f = open(source[0].srcnode().abspath, "rb")
    buf = f.read()

    g.write('extern "C" U_EXPORT const size_t U_ICUDATA_SIZE = ' + str(len(buf)) + ";\n")
    g.write('extern "C" U_EXPORT const unsigned char U_ICUDATA_ENTRY_POINT[] = {\n')
    for i in range(len(buf)):
        g.write("\t" + str(buf[i]) + ",\n")

    g.write("};\n")
    g.write("#endif")

    with open(dst, "w", encoding="utf-8", newline="\n") as g:
        g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
        g.write("/* (C) 2016 and later: Unicode, Inc. and others. */\n")
        g.write("/* License & terms of use: https://www.unicode.org/copyright.html */\n")
        g.write("#ifndef _ICU_DATA_H\n")
        g.write("#define _ICU_DATA_H\n")
        g.write('#include "unicode/utypes.h"\n')
        g.write('#include "unicode/udata.h"\n')
        g.write('#include "unicode/uversion.h"\n')

        with open(source[0].srcnode().abspath, "rb") as f:
            buf = f.read()

        g.write('extern "C" U_EXPORT const size_t U_ICUDATA_SIZE = ' + str(len(buf)) + ";\n")
        g.write('extern "C" U_EXPORT const unsigned char U_ICUDATA_ENTRY_POINT[] = {\n')
        for i in range(len(buf)):
            g.write("\t" + str(buf[i]) + ",\n")

        g.write("};\n")
        g.write("#endif")


def write_macos_plist(target, binary_name, identifier, name):
    os.makedirs(f"{target}/Resource/", exist_ok=True)
    f = open(f"{target}/Resource/Info.plist", "w", encoding="utf-8", newline="\n")

    f.write(f'<?xml version="1.0" encoding="UTF-8"?>\n')
    f.write(f'<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">\n')
    f.write(f'<plist version="1.0">\n')
    f.write(f"<dict>\n")
    f.write(f"\t<key>CFBundleExecutable</key>\n")
    f.write(f"\t<string>{binary_name}</string>\n")
    f.write(f"\t<key>CFBundleIdentifier</key>\n")
    f.write(f"\t<string>{identifier}</string>\n")
    f.write(f"\t<key>CFBundleInfoDictionaryVersion</key>\n")
    f.write(f"\t<string>6.0</string>\n")
    f.write(f"\t<key>CFBundleName</key>\n")
    f.write(f"\t<string>{name}</string>\n")
    f.write(f"\t<key>CFBundlePackageType</key>\n")
    f.write(f"\t<string>FMWK</string>\n")
    f.write(f"\t<key>CFBundleShortVersionString</key>\n")
    f.write(f"\t<string>1.0.0</string>\n")
    f.write(f"\t<key>CFBundleSupportedPlatforms</key>\n")
    f.write(f"\t<array>\n")
    f.write(f"\t\t<string>MacOSX</string>\n")
    f.write(f"\t</array>\n")
    f.write(f"\t<key>CFBundleVersion</key>\n")
    f.write(f"\t<string>1.0.0</string>\n")
    f.write(f"\t<key>LSMinimumSystemVersion</key>\n")
    f.write(f"\t<string>10.14</string>\n")
    f.write(f"</dict>\n")
    f.write(f"</plist>\n")

    with open(f"{target}/Resource/Info.plist", "w", encoding="utf-8", newline="\n") as f:
        f.write(f'<?xml version="1.0" encoding="UTF-8"?>\n')
        f.write(
            f'<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">\n'
        )
        f.write(f'<plist version="1.0">\n')
        f.write(f"<dict>\n")
        f.write(f"\t<key>CFBundleExecutable</key>\n")
        f.write(f"\t<string>{binary_name}</string>\n")
        f.write(f"\t<key>CFBundleIdentifier</key>\n")
        f.write(f"\t<string>{identifier}</string>\n")
        f.write(f"\t<key>CFBundleInfoDictionaryVersion</key>\n")
        f.write(f"\t<string>6.0</string>\n")
        f.write(f"\t<key>CFBundleName</key>\n")
        f.write(f"\t<string>{name}</string>\n")
        f.write(f"\t<key>CFBundlePackageType</key>\n")
        f.write(f"\t<string>FMWK</string>\n")
        f.write(f"\t<key>CFBundleShortVersionString</key>\n")
        f.write(f"\t<string>1.0.0</string>\n")
        f.write(f"\t<key>CFBundleSupportedPlatforms</key>\n")
        f.write(f"\t<array>\n")
        f.write(f"\t\t<string>MacOSX</string>\n")
        f.write(f"\t</array>\n")
        f.write(f"\t<key>CFBundleVersion</key>\n")
        f.write(f"\t<string>1.0.0</string>\n")
        f.write(f"\t<key>LSMinimumSystemVersion</key>\n")
        f.write(f"\t<string>10.14</string>\n")
        f.write(f"</dict>\n")
        f.write(f"</plist>\n")

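The hunks above each manage a fixed pair of files, so a plain with statement suffices. When the number of managed files varies at runtime, contextlib.ExitStack gives the same close-on-exit guarantee. A sketch under that assumption (names illustrative, not from the commit):

from contextlib import ExitStack

def concat_binaries(dst_path, src_paths):
    # ExitStack closes every file it entered, in reverse order, on exit.
    with ExitStack() as stack:
        g = stack.enter_context(open(dst_path, "wb"))
        for path in src_paths:
            f = stack.enter_context(open(path, "rb"))
            g.write(f.read())
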
@ -124,17 +124,15 @@ def generate_export_icons(platform_path, platform_name):
    svg_names.append("run_icon")

    for name in svg_names:
        svgf = open(export_path + "/" + name + ".svg", "rb")
        b = svgf.read(1)
        svg_str = " /* AUTOGENERATED FILE, DO NOT EDIT */ \n"
        svg_str += " static const char *_" + platform_name + "_" + name + '_svg = "'
        while len(b) == 1:
            svg_str += "\\" + hex(ord(b))[1:]
            b = svgf.read(1)

        svg_str += '";\n'

        svgf.close()

        with open(export_path + "/" + name + ".svg", "rb") as svgf:
            b = svgf.read(1)
            svg_str = " /* AUTOGENERATED FILE, DO NOT EDIT */ \n"
            svg_str += " static const char *_" + platform_name + "_" + name + '_svg = "'
            while len(b) == 1:
                svg_str += "\\" + hex(ord(b))[1:]
                b = svgf.read(1)

        svg_str += '";\n'

        # NOTE: It is safe to generate this file here, since this is still executed serially.
        wf = export_path + "/" + name + "_svg.gen.h"

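The loop above builds the escaped string one byte at a time with svgf.read(1). An equivalent bulk form (illustrative only, not part of the commit) reads the file once and joins the escapes; format(byte, "x") yields the same unpadded hex digits as hex(ord(b))[1:]:

def svg_to_c_string(path):
    # Read the whole file, then emit one \x escape per byte.
    with open(path, "rb") as svgf:
        data = svgf.read()
    return "".join("\\x" + format(byte, "x") for byte in data)
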
@ -13,29 +13,26 @@ from platform_methods import subprocess_main
def make_fonts_header(target, source, env):
    dst = target[0]

    g = open(dst, "w", encoding="utf-8", newline="\n")

    g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
    g.write("#ifndef _DEFAULT_FONTS_H\n")
    g.write("#define _DEFAULT_FONTS_H\n")

    # Saving uncompressed, since FreeType will reference from memory pointer.
    for i in range(len(source)):
        with open(source[i], "rb") as f:
            buf = f.read()

        name = os.path.splitext(os.path.basename(source[i]))[0]

        g.write("static const int _font_" + name + "_size = " + str(len(buf)) + ";\n")
        g.write("static const unsigned char _font_" + name + "[] = {\n")
        for j in range(len(buf)):
            g.write("\t" + str(buf[j]) + ",\n")

        g.write("};\n")

    g.write("#endif")

    g.close()

    with open(dst, "w", encoding="utf-8", newline="\n") as g:
        g.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n")
        g.write("#ifndef _DEFAULT_FONTS_H\n")
        g.write("#define _DEFAULT_FONTS_H\n")

        # Saving uncompressed, since FreeType will reference from memory pointer.
        for i in range(len(source)):
            with open(source[i], "rb") as f:
                buf = f.read()

            name = os.path.splitext(os.path.basename(source[i]))[0]

            g.write("static const int _font_" + name + "_size = " + str(len(buf)) + ";\n")
            g.write("static const unsigned char _font_" + name + "[] = {\n")
            for j in range(len(buf)):
                g.write("\t" + str(buf[j]) + ",\n")

            g.write("};\n")

        g.write("#endif")


if __name__ == "__main__":

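The inner dump writes one line per byte, which on large fonts means many small writes. A compact variant (a sketch; dump_c_array is a hypothetical helper, not part of the commit) batches them with join():

def dump_c_array(g, name, buf):
    # One write for the whole initializer list instead of one per byte.
    g.write("static const int _font_" + name + "_size = " + str(len(buf)) + ";\n")
    g.write("static const unsigned char _font_" + name + "[] = {\n")
    g.write(",\n".join("\t" + str(b) for b in buf))
    g.write("\n};\n")
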
@ -15,61 +15,56 @@ def make_default_theme_icons_action(target, source, env):
    dst = target[0]
    svg_icons = source

    icons_string = StringIO()

    for f in svg_icons:
        fname = str(f)

        icons_string.write('\t"')

        with open(fname, "rb") as svgf:
            b = svgf.read(1)
            while len(b) == 1:
                icons_string.write("\\" + str(hex(ord(b)))[1:])
                b = svgf.read(1)

        icons_string.write('"')
        if fname != svg_icons[-1]:
            icons_string.write(",")
        icons_string.write("\n")

    s = StringIO()
    s.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n\n")
    s.write('#include "modules/modules_enabled.gen.h"\n\n')
    s.write("#ifndef _DEFAULT_THEME_ICONS_H\n")
    s.write("#define _DEFAULT_THEME_ICONS_H\n")
    s.write("static const int default_theme_icons_count = {};\n\n".format(len(svg_icons)))
    s.write("#ifdef MODULE_SVG_ENABLED\n")
    s.write("static const char *default_theme_icons_sources[] = {\n")
    s.write(icons_string.getvalue())
    s.write("};\n")
    s.write("#endif // MODULE_SVG_ENABLED\n\n")
    s.write("static const char *default_theme_icons_names[] = {\n")

    index = 0
    for f in svg_icons:
        fname = str(f)

        # Trim the `.svg` extension from the string.
        icon_name = os.path.basename(fname)[:-4]

        s.write('\t"{0}"'.format(icon_name))

        if fname != svg_icons[-1]:
            s.write(",")
        s.write("\n")

        index += 1

    s.write("};\n")

    s.write("#endif\n")

    with open(dst, "w", encoding="utf-8", newline="\n") as f:
        f.write(s.getvalue())

    s.close()
    icons_string.close()

    with StringIO() as icons_string, StringIO() as s:
        for f in svg_icons:
            fname = str(f)

            icons_string.write('\t"')

            with open(fname, "rb") as svgf:
                b = svgf.read(1)
                while len(b) == 1:
                    icons_string.write("\\" + str(hex(ord(b)))[1:])
                    b = svgf.read(1)

            icons_string.write('"')
            if fname != svg_icons[-1]:
                icons_string.write(",")
            icons_string.write("\n")

        s.write("/* THIS FILE IS GENERATED DO NOT EDIT */\n\n")
        s.write('#include "modules/modules_enabled.gen.h"\n\n')
        s.write("#ifndef _DEFAULT_THEME_ICONS_H\n")
        s.write("#define _DEFAULT_THEME_ICONS_H\n")
        s.write("static const int default_theme_icons_count = {};\n\n".format(len(svg_icons)))
        s.write("#ifdef MODULE_SVG_ENABLED\n")
        s.write("static const char *default_theme_icons_sources[] = {\n")
        s.write(icons_string.getvalue())
        s.write("};\n")
        s.write("#endif // MODULE_SVG_ENABLED\n\n")
        s.write("static const char *default_theme_icons_names[] = {\n")

        index = 0
        for f in svg_icons:
            fname = str(f)

            # Trim the `.svg` extension from the string.
            icon_name = os.path.basename(fname)[:-4]

            s.write('\t"{0}"'.format(icon_name))

            if fname != svg_icons[-1]:
                s.write(",")
            s.write("\n")

            index += 1

        s.write("};\n")

        s.write("#endif\n")

        with open(dst, "w", encoding="utf-8", newline="\n") as f:
            f.write(s.getvalue())


if __name__ == "__main__":

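This rewrite relies on io.StringIO being a context manager: leaving the with block closes the buffer, which is what lets the trailing s.close()/icons_string.close() calls go away. One caveat the hunk handles correctly: getvalue() must be read before the block exits. A minimal sketch:

from io import StringIO

with StringIO() as s:
    s.write("static const int example = 1;\n")
    text = s.getvalue()  # grab the contents before close() invalidates the buffer
# s is closed here; calling s.getvalue() now would raise ValueError.
print(text)
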