2014-02-10 02:10:30 +01:00
|
|
|
import os
|
2018-03-17 23:23:55 +01:00
|
|
|
import re
|
2022-09-05 17:14:21 +02:00
|
|
|
import sys
|
2018-03-17 23:23:55 +01:00
|
|
|
import glob
|
|
|
|
import subprocess
|
2020-05-28 15:48:19 +02:00
|
|
|
from collections import OrderedDict
|
2021-10-24 23:20:28 +02:00
|
|
|
from collections.abc import Mapping
|
2021-10-03 23:31:19 +02:00
|
|
|
from typing import Iterator
|
2021-10-24 23:20:28 +02:00
|
|
|
from compat import iteritems, isbasestring, open_utf8, decode_utf8, qualname
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2020-09-19 16:39:11 +02:00
|
|
|
from SCons import Node
|
2021-02-24 09:56:40 +01:00
|
|
|
from SCons.Script import ARGUMENTS
|
2020-09-19 16:39:11 +02:00
|
|
|
from SCons.Script import Glob
|
2021-02-24 09:56:40 +01:00
|
|
|
from SCons.Variables.BoolVariable import _text2bool
|
2020-09-19 16:39:11 +02:00
|
|
|
|
2016-10-30 19:05:14 +01:00
|
|
|
|
2021-10-15 21:59:11 +02:00
|
|
|
def add_source_files(self, sources, files):
    """Append compiled Objects for `files` to `sources`.

    `files` can be a list of paths, a SCons project-absolute path starting
    with "#" (wildcards not supported there), or a wildcard pattern relative
    to the current SCons directory. 'self' is the environment.
    """
    # Convert string to list of absolute paths (including expanding wildcard)
    if isbasestring(files):
        if files.startswith("#"):
            # Keep SCons project-absolute path as they are (no wildcard support)
            if "*" in files:
                print("ERROR: Wildcards can't be expanded in SCons project-absolute path: '{}'".format(files))
                return
            files = [files]
        else:
            # Exclude .gen.cpp files from globbing, to avoid including obsolete ones.
            # They should instead be added manually.
            exclude_gen_cpp = "*" in files
            current_dir = self.Dir(".").abspath
            files = sorted(glob.glob(current_dir + "/" + files))
            if exclude_gen_cpp:
                files = [f for f in files if not f.endswith(".gen.cpp")]

    # Add each path as compiled Object following environment (self) configuration
    for source_path in files:
        compiled = self.Object(source_path)
        if compiled in sources:
            print('WARNING: Object "{}" already included in environment sources.'.format(compiled))
            continue
        sources.append(compiled)
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
|
2018-09-28 13:29:52 +02:00
|
|
|
def disable_warnings(self):
    # 'self' is the environment
    if not self.msvc:
        self.AppendUnique(CCFLAGS=["-w"])
        return

    # We have to remove existing warning level defines before appending /w,
    # otherwise we get: "warning D9025 : overriding '/W3' with '/w'"
    def _strip_warning_levels(flags):
        return [flag for flag in flags if not (flag.startswith("/W") or flag.startswith("/w"))]

    for flags_var in ("CCFLAGS", "CFLAGS", "CXXFLAGS"):
        self[flags_var] = _strip_warning_levels(self[flags_var])
    self.AppendUnique(CCFLAGS=["/w"])
|
2018-09-28 13:29:52 +02:00
|
|
|
|
|
|
|
|
2021-09-28 11:59:56 +02:00
|
|
|
def force_optimization_on_debug(self):
    # 'self' is the environment
    if self["target"] != "debug":
        return

    if not self.msvc:
        self.AppendUnique(CCFLAGS=["-O3"])
        return

    # We have to remove existing optimization level defines before appending /O2,
    # otherwise we get: "warning D9025 : overriding '/0d' with '/02'"
    for flags_var in ("CCFLAGS", "CFLAGS", "CXXFLAGS"):
        self[flags_var] = [flag for flag in self[flags_var] if not flag.startswith("/O")]
    self.AppendUnique(CCFLAGS=["/O2"])
|
2021-09-28 11:59:56 +02:00
|
|
|
|
|
|
|
|
2020-03-30 08:28:32 +02:00
|
|
|
def add_module_version_string(self, s):
    # Append a "."-separated suffix to the environment's module version string
    # (later embedded into VERSION_MODULE_CONFIG); 'self' is the environment.
    self.module_version_string += "." + s
|
2017-11-02 03:12:28 +01:00
|
|
|
|
2018-03-17 23:23:55 +01:00
|
|
|
|
2022-09-14 11:28:58 +02:00
|
|
|
def get_version_info(module_version_string="", silent=False):
    """Return a dict with the engine version fields from `version.py`.

    Optional overrides come from the BUILD_NAME and GODOT_VERSION_STATUS
    environment variables; the current Git commit hash is included as
    "git_hash" (empty string when not built from a Git checkout).
    """
    build_name = "custom_build"
    if os.getenv("BUILD_NAME") is not None:
        build_name = str(os.getenv("BUILD_NAME"))
        if not silent:
            print("Using custom build name: '{}'.".format(build_name))

    import version

    version_info = {
        "short_name": str(version.short_name),
        "name": str(version.name),
        "major": int(version.major),
        "minor": int(version.minor),
        "patch": int(version.patch),
        "status": str(version.status),
        "build": str(build_name),
        "module_config": str(version.module_config) + module_version_string,
        "year": int(version.year),
        "website": str(version.website),
        "docs_branch": str(version.docs),
    }

    # For dev snapshots (alpha, beta, RC, etc.) we do not commit status change to Git,
    # so this define provides a way to override it without having to modify the source.
    if os.getenv("GODOT_VERSION_STATUS") is not None:
        version_info["status"] = str(os.getenv("GODOT_VERSION_STATUS"))
        if not silent:
            print(
                "Using version status '{}', overriding the original '{}'.".format(
                    version_info["status"], version.status
                )
            )

    # Parse Git hash if we're in a Git repo.
    githash = ""
    gitfolder = ".git"

    if os.path.isfile(".git"):
        # Submodules and worktrees have a `.git` *file* pointing at the real
        # Git dir. Use a context manager so the handle is closed (the original
        # leaked the file object).
        with open(".git", "r") as gitfile:
            module_folder = gitfile.readline().strip()
        if module_folder.startswith("gitdir: "):
            gitfolder = module_folder[8:]

    if os.path.isfile(os.path.join(gitfolder, "HEAD")):
        # NOTE(review): assumes open_utf8 returns a standard file object usable
        # as a context manager — confirm in compat.py.
        with open_utf8(os.path.join(gitfolder, "HEAD"), "r") as headfile:
            head = headfile.readline().strip()
        if head.startswith("ref: "):
            ref = head[5:]
            # If this directory is a Git worktree instead of a root clone.
            parts = gitfolder.split("/")
            if len(parts) > 2 and parts[-2] == "worktrees":
                gitfolder = "/".join(parts[0:-2])
            head = os.path.join(gitfolder, ref)
            packedrefs = os.path.join(gitfolder, "packed-refs")
            if os.path.isfile(head):
                with open(head, "r") as reffile:
                    githash = reffile.readline().strip()
            elif os.path.isfile(packedrefs):
                # Git may pack refs into a single file. This code searches .git/packed-refs file for the current ref's hash.
                # https://mirrors.edge.kernel.org/pub/software/scm/git/docs/git-pack-refs.html
                with open(packedrefs, "r") as packedfile:
                    packed_lines = packedfile.read().splitlines()
                for line in packed_lines:
                    if line.startswith("#"):
                        continue
                    (line_hash, line_ref) = line.split(" ")
                    if ref == line_ref:
                        githash = line_hash
                        break
        else:
            # Detached HEAD: the file contains the hash directly.
            githash = head

    version_info["git_hash"] = githash

    return version_info
|
|
|
|
|
|
|
|
|
|
|
|
def generate_version_header(module_version_string=""):
    """Generate core/version_generated.gen.h and core/version_hash.gen.cpp
    from the info returned by get_version_info().

    Uses context managers instead of manual open()/close() so the files are
    closed even if a write fails.
    """
    version_info = get_version_info(module_version_string)

    # NOTE: It is safe to generate these files here, since this is still executed serially.

    with open("core/version_generated.gen.h", "w") as f:
        f.write(
            """/* THIS FILE IS GENERATED DO NOT EDIT */
#ifndef VERSION_GENERATED_GEN_H
#define VERSION_GENERATED_GEN_H
#define VERSION_SHORT_NAME "{short_name}"
#define VERSION_NAME "{name}"
#define VERSION_MAJOR {major}
#define VERSION_MINOR {minor}
#define VERSION_PATCH {patch}
#define VERSION_STATUS "{status}"
#define VERSION_BUILD "{build}"
#define VERSION_MODULE_CONFIG "{module_config}"
#define VERSION_YEAR {year}
#define VERSION_WEBSITE "{website}"
#define VERSION_DOCS_BRANCH "{docs_branch}"
#define VERSION_DOCS_URL "https://docs.godotengine.org/en/" VERSION_DOCS_BRANCH
#endif // VERSION_GENERATED_GEN_H
""".format(
                **version_info
            )
        )

    with open("core/version_hash.gen.cpp", "w") as fhash:
        fhash.write(
            """/* THIS FILE IS GENERATED DO NOT EDIT */
#include "core/version.h"
const char *const VERSION_HASH = "{git_hash}";
""".format(
                **version_info
            )
        )
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2016-10-30 19:05:14 +01:00
|
|
|
|
2014-02-10 02:10:30 +01:00
|
|
|
def parse_cg_file(fname, uniforms, sizes, conditionals):
    """Parse a Cg shader file, collecting uniform metadata.

    For each `uniform` declaration, appends the uniform's name to `uniforms`,
    its component count to `sizes` (1 for texture objects, rows*cols for
    floatNxM matrices, N for floatN vectors), and its name to `conditionals`
    when the declaration carries a `[branch]` hint.
    """
    with open(fname, "r") as fs:
        line = fs.readline()

        while line:
            if re.match(r"^\s*uniform", line):
                # BUGFIX: the original called re.match() without the string to
                # search, and used res.groups(n) where res.group(n) was meant.
                res = re.match(r"\s*uniform ([\d\w]*) ([\d\w]*)", line)
                # `utype` instead of `type` to avoid shadowing the builtin.
                utype = res.group(1)
                name = res.group(2)

                uniforms.append(name)

                if utype.find("texobj") != -1:
                    # Texture objects always count as a single unit.
                    sizes.append(1)
                else:
                    t = re.match(r"float(\d)x(\d)", utype)
                    if t:
                        sizes.append(int(t.group(1)) * int(t.group(2)))
                    else:
                        t = re.match(r"float(\d)", utype)
                        sizes.append(int(t.group(1)))

                if line.find("[branch]") != -1:
                    conditionals.append(name)

            line = fs.readline()
|
|
|
|
|
|
|
|
|
2021-02-24 09:56:40 +01:00
|
|
|
def get_cmdline_bool(option, default):
    """We use `ARGUMENTS.get()` to check if options were manually overridden on the command line,
    and SCons' _text2bool helper to convert them to booleans, otherwise they're handled as strings.
    """
    raw_value = ARGUMENTS.get(option)
    if raw_value is None:
        # Not overridden on the command line; keep the configured default.
        return default
    return _text2bool(raw_value)
|
|
|
|
|
|
|
|
|
2020-10-23 21:28:21 +02:00
|
|
|
def detect_modules(search_path, recursive=False):
    """Detects and collects a list of C++ modules at specified path

    `search_path` - a directory path containing modules. The path may point to
    a single module, which may have other nested modules. A module must have
    "register_types.h", "SCsub", "config.py" files created to be detected.

    `recursive` - if `True`, then all subdirectories are searched for modules as
    specified by the `search_path`, otherwise collects all modules under the
    `search_path` directory. If the `search_path` is a module, it is collected
    in all cases.

    Returns an `OrderedDict` with module names as keys, and directory paths as
    values. If a path is relative, then it is a built-in module. If a path is
    absolute, then it is a custom module collected outside of the engine source.
    """
    modules = OrderedDict()

    def add_module(path):
        # Register the module under its directory name, normalizing path
        # separators so generated includes are portable.
        module_name = os.path.basename(path)
        module_path = path.replace("\\", "/")  # win32
        modules[module_name] = module_path

    def is_engine(path):
        # Prevent recursively detecting modules in self and other
        # Godot sources when using `custom_modules` build option.
        version_path = os.path.join(path, "version.py")
        if os.path.exists(version_path):
            with open(version_path) as f:
                if 'short_name = "godot"' in f.read():
                    return True
        return False

    def get_files(path):
        # Direct children of `path`.
        files = glob.glob(os.path.join(path, "*"))
        # Sort so that `register_module_types` does not change that often,
        # and plugins are registered in alphabetic order as well.
        files.sort()
        return files

    if not recursive:
        # Flat scan: the search path itself plus its direct children.
        if is_module(search_path):
            add_module(search_path)
        for path in get_files(search_path):
            if is_engine(path):
                continue
            if is_module(path):
                add_module(path)
    else:
        # Breadth-first walk over all subdirectories (pop from the end,
        # insert new children at the front).
        to_search = [search_path]
        while to_search:
            path = to_search.pop()
            if is_module(path):
                add_module(path)
            for child in get_files(path):
                if not os.path.isdir(child):
                    continue
                if is_engine(child):
                    continue
                to_search.insert(0, child)
    return modules
|
2020-04-04 13:46:15 +02:00
|
|
|
|
|
|
|
|
|
|
|
def is_module(path):
    """Return True when `path` is a directory providing the three files every
    Godot module must have: "register_types.h", "SCsub" and "config.py"."""
    if not os.path.isdir(path):
        return False
    required_files = ["register_types.h", "SCsub", "config.py"]
    return all(os.path.exists(os.path.join(path, f)) for f in required_files)
|
2020-04-04 13:46:15 +02:00
|
|
|
|
|
|
|
|
2020-10-23 21:28:21 +02:00
|
|
|
def write_modules(modules):
    # Generate modules/register_module_types.gen.cpp containing the
    # (un)registration calls for every detected module in `modules`
    # (a name -> path mapping as produced by detect_modules()).
    includes_cpp = ""
    register_cpp = ""
    unregister_cpp = ""

    for name, path in modules.items():
        try:
            # Opening register_types.h doubles as an existence check; modules
            # without it are silently skipped via the IOError handler below.
            with open(os.path.join(path, "register_types.h")):
                includes_cpp += '#include "' + path + '/register_types.h"\n'
                register_cpp += "#ifdef MODULE_" + name.upper() + "_ENABLED\n"
                register_cpp += "\tregister_" + name + "_types();\n"
                register_cpp += "#endif\n"
                unregister_cpp += "#ifdef MODULE_" + name.upper() + "_ENABLED\n"
                unregister_cpp += "\tunregister_" + name + "_types();\n"
                unregister_cpp += "#endif\n"
        except IOError:
            pass

    modules_cpp = """// register_module_types.gen.cpp
/* THIS FILE IS GENERATED DO NOT EDIT */
#include "register_module_types.h"

#include "modules/modules_enabled.gen.h"

%s

void register_module_types() {
%s
}

void unregister_module_types() {
%s
}
""" % (
        includes_cpp,
        register_cpp,
        unregister_cpp,
    )

    # NOTE: It is safe to generate this file here, since this is still executed serially
    with open("modules/register_module_types.gen.cpp", "w") as f:
        f.write(modules_cpp)
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2020-04-04 13:46:15 +02:00
|
|
|
|
|
|
|
def convert_custom_modules_path(path):
    """Normalize and validate the `custom_modules` build option.

    Expands user home and environment variables and resolves symlinks.
    Raises ValueError when the result is not an existing directory or points
    at the engine's built-in `modules` directory. Falsy values (empty string)
    are returned unchanged.
    """
    if not path:
        return path
    resolved = os.path.realpath(os.path.expanduser(os.path.expandvars(path)))
    error_template = "Build option 'custom_modules' must %s"
    if not os.path.isdir(resolved):
        raise ValueError(error_template % "point to an existing directory.")
    if resolved == os.path.realpath("modules"):
        raise ValueError(error_template % "be a directory other than built-in `modules` directory.")
    return resolved
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
|
|
|
|
def disable_module(self):
    # 'self' is the environment; record the module currently being processed
    # as disabled so it is skipped at registration time.
    self.disabled_modules.append(self.current_module)
|
2016-01-25 04:21:04 +01:00
|
|
|
|
2020-03-30 08:28:32 +02:00
|
|
|
|
2016-06-14 16:27:16 +02:00
|
|
|
def use_windows_spawn_fix(self, platform=None):
    # Replace SCons's SPAWN on Windows to work around the ~32k command line
    # length limit, chunking long archiver invocations. 'self' is the
    # environment; `platform` is accepted for call compatibility but unused.

    if os.name != "nt":
        return  # not needed, only for windows

    # On Windows, due to the limited command line length, when creating a static library
    # from a very high number of objects SCons will invoke "ar" once per object file;
    # that makes object files with same names to be overwritten so the last wins and
    # the library looses symbols defined by overwritten objects.
    # By enabling quick append instead of the default mode (replacing), libraries will
    # got built correctly regardless the invocation strategy.
    # Furthermore, since SCons will rebuild the library from scratch when an object file
    # changes, no multiple versions of the same object file will be present.
    self.Replace(ARFLAGS="q")

    def mySubProcess(cmdline, env):
        # Run one command without opening a console window, capturing output;
        # stderr is echoed only when the command fails.

        startupinfo = subprocess.STARTUPINFO()
        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        popen_args = {
            "stdin": subprocess.PIPE,
            "stdout": subprocess.PIPE,
            "stderr": subprocess.PIPE,
            "startupinfo": startupinfo,
            "shell": False,
            "env": env,
        }
        if sys.version_info >= (3, 7, 0):
            # Decode pipes as text where supported (Python 3.7+).
            popen_args["text"] = True
        proc = subprocess.Popen(cmdline, **popen_args)
        _, err = proc.communicate()
        rv = proc.wait()
        if rv:
            print("=====")
            print(err)
            print("=====")
        return rv

    def mySpawn(sh, escape, cmd, args, env):
        # SCons SPAWN hook: rebuild the command line, and when an archiver
        # ("ar") call exceeds the limit, append object files one at a time.

        newargs = " ".join(args[1:])
        cmdline = cmd + " " + newargs

        rv = 0
        env = {str(key): str(value) for key, value in iteritems(env)}
        if len(cmdline) > 32000 and cmd.endswith("ar"):
            # args[1] is the archiver mode, args[2] the library; re-invoke
            # per object file (quick-append mode set above makes this safe).
            cmdline = cmd + " " + args[1] + " " + args[2] + " "
            for i in range(3, len(args)):
                rv = mySubProcess(cmdline + args[i], env)
                if rv:
                    break
        else:
            rv = mySubProcess(cmdline, env)

        return rv

    self["SPAWN"] = mySpawn
|
2016-01-25 04:21:04 +01:00
|
|
|
|
|
|
|
|
2020-03-30 08:28:32 +02:00
|
|
|
def split_lib(self, libname, src_list=None, env_lib=None):
    """Split the sources of `libname` into several static libraries.

    Sources are chunked (roughly `max_src` files per chunk, splitting only on
    top-level directory boundaries) to work around command-line length and
    archiver limits. The resulting libraries are prepended to LIBS, and the
    link commands are wrapped in --start-group/--end-group so symbols split
    across chunks still resolve. 'self' is the environment.
    """
    env = self

    num = 0
    cur_base = ""
    max_src = 64
    chunk_sources = []  # renamed from `list` to stop shadowing the builtin
    lib_list = []

    if src_list is None:
        src_list = getattr(env, libname + "_sources")

    if env_lib is None:
        env_lib = env

    for f in src_list:
        if isinstance(f, str):
            fname = env.File(f).path
        else:
            fname = env.File(f)[0].path
        fname = fname.replace("\\", "/")
        # Group by the first two path components so a chunk never straddles
        # a top-level directory boundary.
        base = "/".join(fname.split("/")[:2])
        if base != cur_base and len(chunk_sources) > max_src:
            if num > 0:
                lib = env_lib.add_library(libname + str(num), chunk_sources)
                lib_list.append(lib)
                chunk_sources = []
            num += 1
        cur_base = base
        chunk_sources.append(f)

    lib = env_lib.add_library(libname + str(num), chunk_sources)
    lib_list.append(lib)

    # The base `libname` library collects the local *.cpp files and is linked first.
    lib_base = []
    env_lib.add_source_files(lib_base, "*.cpp")
    lib = env_lib.add_library(libname, lib_base)
    lib_list.insert(0, lib)

    env.Prepend(LIBS=lib_list)

    # When we split modules into arbitrary chunks, we end up with linking issues
    # due to symbol dependencies split over several libs, which may not be linked
    # in the required order. We use --start-group and --end-group to tell the
    # linker that those archives should be searched repeatedly to resolve all
    # undefined references.
    # As SCons doesn't give us much control over how inserting libs in LIBS
    # impacts the linker call, we need to hack our way into the linking commands
    # LINKCOM and SHLINKCOM to set those flags.

    if "-Wl,--start-group" in env["LINKCOM"] and "-Wl,--start-group" in env["SHLINKCOM"]:
        # Already added by a previous call, skip.
        return

    env["LINKCOM"] = str(env["LINKCOM"]).replace("$_LIBFLAGS", "-Wl,--start-group $_LIBFLAGS -Wl,--end-group")
    # BUGFIX: derive SHLINKCOM from SHLINKCOM — the original read LINKCOM here,
    # clobbering the shared-library link command with the program link command.
    env["SHLINKCOM"] = str(env["SHLINKCOM"]).replace("$_LIBFLAGS", "-Wl,--start-group $_LIBFLAGS -Wl,--end-group")
|
2019-12-09 19:22:08 +01:00
|
|
|
|
2016-10-30 17:04:07 +01:00
|
|
|
|
2016-10-30 18:57:40 +01:00
|
|
|
def save_active_platforms(apnames, ap):
    """Generate <platform>/logo.gen.h (and run_icon.gen.h when present),
    embedding each platform's PNG image as a C byte array.

    `apnames` is unused but kept for call compatibility. `ap` is the list of
    active platform directories; each is expected to carry a 9-character
    prefix (e.g. "platform/") that is stripped via x[9:] to form the symbol
    name, as in the original implementation.
    """
    for x in ap:
        names = ["logo"]
        if os.path.isfile(x + "/run_icon.png"):
            names.append("run_icon")

        for name in names:
            # Read the whole image at once instead of the original's
            # byte-at-a-time loop, and close the handle deterministically.
            with open(x + "/" + name + ".png", "rb") as pngf:
                png_bytes = pngf.read()

            # `buf` instead of the original's `str`, which shadowed the builtin.
            buf = " /* AUTOGENERATED FILE, DO NOT EDIT */ \n"
            buf += " static const unsigned char _" + x[9:] + "_" + name + "[]={"
            buf += ",".join(hex(b) for b in png_bytes)
            buf += "};\n"

            # NOTE: It is safe to generate this file here, since this is still executed serially
            wf = x + "/" + name + ".gen.h"
            with open(wf, "w") as pngw:
                pngw.write(buf)
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2015-01-12 05:54:17 +01:00
|
|
|
|
2016-10-30 18:57:40 +01:00
|
|
|
def no_verbose(sys, env):
    """Replace SCons' default full-command output with short, optionally
    colored status lines."""
    # Colors are disabled in non-TTY environments such as pipes. This means
    # that if output is redirected to a file, it will not contain color codes
    if sys.stdout.isatty():
        colors = {
            "cyan": "\033[96m",
            "purple": "\033[95m",
            "blue": "\033[94m",
            "green": "\033[92m",
            "yellow": "\033[93m",
            "red": "\033[91m",
            "end": "\033[0m",
        }
    else:
        colors = {key: "" for key in ("cyan", "purple", "blue", "green", "yellow", "red", "end")}

    def status_line(color, action, subject):
        # Produces "<color>Action <purple>==> <yellow>$SUBJECT<end>".
        return "%s%s %s==> %s%s%s" % (
            colors[color],
            action,
            colors["purple"],
            colors["yellow"],
            subject,
            colors["end"],
        )

    compile_source_message = status_line("blue", "Compiling", "$SOURCE")
    java_compile_source_message = status_line("blue", "Compiling", "$SOURCE")
    compile_shared_source_message = status_line("blue", "Compiling shared", "$SOURCE")
    link_program_message = status_line("red", "Linking Program", "$TARGET")
    link_library_message = status_line("red", "Linking Static Library", "$TARGET")
    ranlib_library_message = status_line("red", "Ranlib Library", "$TARGET")
    link_shared_library_message = status_line("red", "Linking Shared Library", "$TARGET")
    java_library_message = status_line("red", "Creating Java Archive", "$TARGET")

    env.Append(CXXCOMSTR=[compile_source_message])
    env.Append(CCCOMSTR=[compile_source_message])
    env.Append(SHCCCOMSTR=[compile_shared_source_message])
    env.Append(SHCXXCOMSTR=[compile_shared_source_message])
    env.Append(ARCOMSTR=[link_library_message])
    env.Append(RANLIBCOMSTR=[ranlib_library_message])
    env.Append(SHLINKCOMSTR=[link_shared_library_message])
    env.Append(LINKCOMSTR=[link_program_message])
    env.Append(JARCOMSTR=[java_library_message])
    env.Append(JAVACCOMSTR=[java_compile_source_message])
|
2015-01-12 05:54:17 +01:00
|
|
|
|
2016-10-30 19:05:14 +01:00
|
|
|
|
2016-09-04 00:25:43 +02:00
|
|
|
def _scan_msvc_path(path_str, base_dir, candidates, chosen):
    """Scan `path_str` for `base_dir` + each candidate subdir, keeping the
    (index, arch) of the compiler directory appearing earliest in PATH.

    The first candidate of a scan overrides `chosen` unconditionally when
    found, matching the historical behavior of this detection code.
    """
    chosen_index, chosen_arch = chosen
    first = True
    for subdir, arch in candidates:
        found = path_str.find(base_dir + subdir)
        if found > -1 and (first or chosen_index == -1 or chosen_index > found):
            chosen_index, chosen_arch = found, arch
        first = False
    return chosen_index, chosen_arch


def detect_visual_c_compiler_version(tools_env):
    """Detect which Visual C++ compiler architecture will actually be run.

    `tools_env` is SCons's env['ENV'] — the environment used to execute build
    tasks, NOT the SCons setup environment — so make sure you pass the right
    one or detection will fail. There is no compiler flag to pick the
    architecture; many different cl.exe binaries exist and the first one found
    in PATH wins, so we search PATH for each known compiler directory and keep
    the earliest match. Returns one of:

      ""           Compiler not detected
      "amd64"      Native 64 bit compiler
      "amd64_x86"  64 bit Cross Compiler for 32 bit
      "x86"        Native 32 bit compiler
      "x86_amd64"  32 bit Cross Compiler for 64 bit

    Other architectures (arm, amd64_arm, ...) are not supported by Godot and
    are not detected.
    """
    chosen = (-1, "")

    # Start with Pre VS 2017 checks which uses VCINSTALLDIR
    # (case-sensitive PATH search, as historically done).
    if "VCINSTALLDIR" in tools_env:
        chosen = _scan_msvc_path(
            tools_env["PATH"],
            tools_env["VCINSTALLDIR"],
            [
                ("BIN\\amd64;", "amd64"),
                ("BIN\\amd64_x86;", "amd64_x86"),
                ("BIN;", "x86"),
                ("BIN\\x86_amd64;", "x86_amd64"),
            ],
            chosen,
        )

    # And for VS 2017 and newer we check VCTOOLSINSTALLDIR
    # (different layout, searched case-insensitively).
    if "VCTOOLSINSTALLDIR" in tools_env:
        chosen = _scan_msvc_path(
            tools_env["PATH"].upper(),
            tools_env["VCTOOLSINSTALLDIR"].upper(),
            [
                ("BIN\\HOSTX64\\X64;", "amd64"),
                ("BIN\\HOSTX64\\X86;", "amd64_x86"),
                ("BIN\\HOSTX86\\X86;", "x86"),
                ("BIN\\HOSTX86\\X64;", "x86_amd64"),
            ],
            chosen,
        )

    return chosen[1]
|
2016-09-04 00:25:43 +02:00
|
|
|
|
2020-03-30 08:28:32 +02:00
|
|
|
|
2017-08-28 17:17:26 +02:00
|
|
|
def find_visual_c_batch_file(env):
    """Locate the vcvars batch file for the MSVC toolchain SCons selected.

    Returns the path of the batch file that initializes the Visual C++
    build environment for the detected host/target platform pair.
    """
    from SCons.Tool.MSCommon.vc import (
        get_default_version,
        get_host_target,
        find_batch_file,
    )

    # get_host_target() grew a `version` parameter in SCons 4.4.0, so the
    # call signature depends on the SCons release we are running under.
    from SCons import __version__ as scons_raw_version

    scons_ver = env._get_major_minor_revision(scons_raw_version)

    msvc_version = get_default_version(env)

    if scons_ver >= (4, 4, 0):
        (host_platform, target_platform, _) = get_host_target(env, msvc_version)
    else:
        (host_platform, target_platform, _) = get_host_target(env)

    return find_batch_file(env, msvc_version, host_platform, target_platform)[0]
|
|
|
|
|
2020-03-30 08:28:32 +02:00
|
|
|
|
2017-11-18 18:09:18 +01:00
|
|
|
def generate_cpp_hint_file(filename):
    """Write a default cpp.hint file (used by Visual Studio IntelliSense).

    An existing file is left untouched since the user may have customized it.
    """
    if os.path.isfile(filename):
        # Don't overwrite an existing hint file since the user may have customized it.
        return
    try:
        with open(filename, "w") as hint_fd:
            hint_fd.write("#define GDCLASS(m_class, m_inherits)\n")
    except IOError:
        print("Could not write cpp.hint file.")
|
2017-08-28 17:17:26 +02:00
|
|
|
|
2020-03-30 08:28:32 +02:00
|
|
|
|
2020-09-19 16:39:11 +02:00
|
|
|
def glob_recursive(pattern, node="."):
    """Recursively Glob() for `pattern` under `node`, returning SCons file nodes.

    Subdirectory matches come first (in directory-iteration order), followed
    by matches directly inside `node`, mirroring a depth-first traversal.
    """
    matches = []
    for entry in Glob(str(node) + "/*", source=True):
        if type(entry) is Node.FS.Dir:
            matches.extend(glob_recursive(pattern, entry))
    matches.extend(Glob(str(node) + "/" + pattern, source=True))
    return matches
|
|
|
|
|
|
|
|
|
|
|
|
def add_to_vs_project(env, sources):
    """Register `sources` with the Visual Studio project file lists.

    For every source, the sibling header (`.h`/`.hpp`) is appended to
    `env.vs_incs` and the implementation file (`.c`/`.cpp`) to `env.vs_srcs`;
    both lists are later fed to `env.MSVSProject()`.
    """
    for x in sources:
        # Idiomatic type check (was `type(x) == type("")`).
        if isinstance(x, str):
            fname = env.File(x).path
        else:
            fname = env.File(x)[0].path
        # NOTE: splitting on the first "." assumes no dots in directory names.
        basename = fname.split(".")[0]
        basename = basename.replace("\\\\", "/")
        if os.path.isfile(basename + ".h"):
            env.vs_incs += [basename + ".h"]
        elif os.path.isfile(basename + ".hpp"):
            env.vs_incs += [basename + ".hpp"]
        if os.path.isfile(basename + ".c"):
            env.vs_srcs += [basename + ".c"]
        elif os.path.isfile(basename + ".cpp"):
            env.vs_srcs += [basename + ".cpp"]
|
|
|
|
|
|
|
|
|
2017-08-28 17:17:26 +02:00
|
|
|
def generate_vs_project(env, num_jobs):
    """Generate a Visual Studio NMAKE project (godot.vcxproj + solution).

    Collects all registered sources/headers into env.vs_srcs / env.vs_incs,
    builds per-configuration scons command lines, and feeds everything to
    env.MSVSProject(). `num_jobs` is forwarded to scons as -j<N>.
    """
    batch_file = find_visual_c_batch_file(env)
    if batch_file:

        class ModuleConfigs(Mapping):
            # This version information (Win32, x64, Debug, Release, Release_Debug seems to be
            # required for Visual Studio to understand that it needs to generate an NMAKE
            # project. Do not modify without knowing what you are doing.
            PLATFORMS = ["Win32", "x64"]
            PLATFORM_IDS = ["32", "64"]
            CONFIGURATIONS = ["debug", "release", "release_debug"]
            CONFIGURATION_IDS = ["tools", "opt", "opt.tools"]

            @staticmethod
            def for_every_variant(value):
                # One copy of `value` per (configuration, platform) pair.
                return [value for _ in range(len(ModuleConfigs.CONFIGURATIONS) * len(ModuleConfigs.PLATFORMS))]

            def __init__(self):
                shared_targets_array = []
                self.names = []
                self.arg_dict = {
                    "variant": [],
                    # runfile and buildtarget intentionally share one list object.
                    "runfile": shared_targets_array,
                    "buildtarget": shared_targets_array,
                    "cpppaths": [],
                    "cppdefines": [],
                    "cmdargs": [],
                }
                self.add_mode()  # default

            def add_mode(
                self,
                name: str = "",
                includes: str = "",
                cli_args: str = "",
                defines=None,
            ):
                """Add a named build variant (e.g. "mono") across all configs/platforms."""
                if defines is None:
                    defines = []
                self.names.append(name)
                self.arg_dict["variant"] += [
                    f'{config}{f"_[{name}]" if name else ""}|{platform}'
                    for config in ModuleConfigs.CONFIGURATIONS
                    for platform in ModuleConfigs.PLATFORMS
                ]
                self.arg_dict["runfile"] += [
                    f'bin\\godot.windows.{config_id}.{plat_id}{f".{name}" if name else ""}.exe'
                    for config_id in ModuleConfigs.CONFIGURATION_IDS
                    for plat_id in ModuleConfigs.PLATFORM_IDS
                ]
                self.arg_dict["cpppaths"] += ModuleConfigs.for_every_variant(env["CPPPATH"] + [includes])
                self.arg_dict["cppdefines"] += ModuleConfigs.for_every_variant(env["CPPDEFINES"] + defines)
                self.arg_dict["cmdargs"] += ModuleConfigs.for_every_variant(cli_args)

            def build_commandline(self, commands):
                """Build the cmd.exe command line Visual Studio runs for build/rebuild/clean."""
                # MSBuild expression stripping the variant suffix back off
                # $(Configuration) so scons sees plain debug/release/release_debug.
                configuration_getter = (
                    "$(Configuration"
                    + "".join([f'.Replace("{name}", "")' for name in self.names[1:]])
                    + '.Replace("_[]", "")'
                    + ")"
                )

                common_build_prefix = [
                    'cmd /V /C set "plat=$(PlatformTarget)"',
                    '(if "$(PlatformTarget)"=="x64" (set "plat=x86_amd64"))',
                    'set "tools=%s"' % env["tools"],
                    f'(if "{configuration_getter}"=="release" (set "tools=no"))',
                    'call "' + batch_file + '" !plat!',
                ]

                # Windows allows us to have spaces in paths, so we need
                # to double quote off the directory. However, the path ends
                # in a backslash, so we need to remove this, lest it escape the
                # last double quote off, confusing MSBuild
                common_build_postfix = [
                    "--directory=\"$(ProjectDir.TrimEnd('\\'))\"",
                    "platform=windows",
                    f"target={configuration_getter}",
                    "progress=no",
                    "tools=!tools!",
                    "-j%s" % num_jobs,
                ]

                if env["custom_modules"]:
                    common_build_postfix.append("custom_modules=%s" % env["custom_modules"])

                result = " ^& ".join(common_build_prefix + [" ".join([commands] + common_build_postfix)])
                return result

            # Mappings interface definitions

            def __iter__(self) -> Iterator[str]:
                for x in self.arg_dict:
                    yield x

            def __len__(self) -> int:
                # NOTE(review): returns the number of variants, not the number of
                # arg_dict keys; only **-unpacking (keys/values) is relied upon.
                return len(self.names)

            def __getitem__(self, k: str):
                return self.arg_dict[k]

        add_to_vs_project(env, env.core_sources)
        add_to_vs_project(env, env.drivers_sources)
        add_to_vs_project(env, env.main_sources)
        add_to_vs_project(env, env.modules_sources)
        add_to_vs_project(env, env.scene_sources)
        add_to_vs_project(env, env.servers_sources)
        add_to_vs_project(env, env.editor_sources)

        for header in glob_recursive("**/*.h"):
            env.vs_incs.append(str(header))

        module_configs = ModuleConfigs()

        if env.get("module_mono_enabled"):
            # Import only when the mono module is enabled; importing
            # unconditionally breaks builds without the mono module present.
            import modules.mono.build_scripts.mono_reg_utils as mono_reg

            mono_root = env.get("mono_prefix") or mono_reg.find_mono_root_dir(env["bits"])
            if mono_root:
                module_configs.add_mode(
                    "mono",
                    includes=os.path.join(mono_root, "include", "mono-2.0"),
                    cli_args="module_mono_enabled=yes mono_glue=yes",
                    defines=[("MONO_GLUE_ENABLED",)],
                )
            else:
                print("Mono installation directory not found. Generated project will not have build variants for Mono.")

        env["MSVSBUILDCOM"] = module_configs.build_commandline("scons")
        env["MSVSREBUILDCOM"] = module_configs.build_commandline("scons vsproj=yes")
        env["MSVSCLEANCOM"] = module_configs.build_commandline("scons --clean")
        if not env.get("MSVS"):
            env["MSVS"]["PROJECTSUFFIX"] = ".vcxproj"
            env["MSVS"]["SOLUTIONSUFFIX"] = ".sln"
        env.MSVSProject(
            target=["#godot" + env["MSVSPROJECTSUFFIX"]],
            incs=env.vs_incs,
            srcs=env.vs_srcs,
            auto_build_solution=1,
            **module_configs,
        )
    else:
        print(
            "Could not locate Visual Studio batch file for setting up the build environment. Not generating VS project."
        )
|
|
|
|
|
2016-09-15 18:04:26 +02:00
|
|
|
|
2016-09-04 00:25:43 +02:00
|
|
|
def precious_program(env, program, sources, **args):
    """Build `program` and mark it Precious so SCons preserves it across cleans."""
    prog = env.ProgramOriginal(program, sources, **args)
    env.Precious(prog)
    return prog
|
2017-11-28 21:27:57 +01:00
|
|
|
|
2020-03-30 08:28:32 +02:00
|
|
|
|
2017-11-28 21:27:57 +01:00
|
|
|
def add_shared_library(env, name, sources, **args):
    """Build a shared library, keeping the result out of the SCons cache."""
    lib = env.SharedLibrary(name, sources, **args)
    env.NoCache(lib)
    return lib
|
2017-11-28 21:27:57 +01:00
|
|
|
|
2020-03-30 08:28:32 +02:00
|
|
|
|
2017-11-28 21:27:57 +01:00
|
|
|
def add_library(env, name, sources, **args):
    """Build a static library, keeping the result out of the SCons cache."""
    lib = env.Library(name, sources, **args)
    env.NoCache(lib)
    return lib
|
2017-11-28 21:27:57 +01:00
|
|
|
|
2020-03-30 08:28:32 +02:00
|
|
|
|
2017-11-28 21:27:57 +01:00
|
|
|
def add_program(env, name, sources, **args):
    """Build an executable, keeping the result out of the SCons cache."""
    prog = env.Program(name, sources, **args)
    env.NoCache(prog)
    return prog
|
2018-06-21 06:33:25 +02:00
|
|
|
|
2020-03-30 08:28:32 +02:00
|
|
|
|
2018-06-21 06:33:25 +02:00
|
|
|
def CommandNoCache(env, target, sources, command, **args):
    """Run `command` via env.Command(), keeping its output out of the cache."""
    out = env.Command(target, sources, command, **args)
    env.NoCache(out)
    return out
|
2018-08-24 02:03:57 +02:00
|
|
|
|
2020-03-30 08:28:32 +02:00
|
|
|
|
2020-10-06 10:23:20 +02:00
|
|
|
def get_darwin_sdk_version(platform):
    """Return the Apple SDK version for `platform` as a float (0.0 on failure).

    Raises for platforms other than osx / iphone / iphonesimulator.
    """
    sdk_names = {
        "osx": "macosx",
        "iphone": "iphoneos",
        "iphonesimulator": "iphonesimulator",
    }
    sdk_name = sdk_names.get(platform)
    if sdk_name is None:
        raise Exception("Invalid platform argument passed to get_darwin_sdk_version")

    try:
        return float(decode_utf8(subprocess.check_output(["xcrun", "--sdk", sdk_name, "--show-sdk-version"]).strip()))
    except (subprocess.CalledProcessError, OSError):
        print("Failed to find SDK version while running xcrun --sdk {} --show-sdk-version.".format(sdk_name))
        return 0.0
|
2020-10-06 10:23:20 +02:00
|
|
|
|
|
|
|
|
2018-08-24 02:03:57 +02:00
|
|
|
def detect_darwin_sdk_path(platform, env):
    """Fill env["MACOS_SDK_PATH"/"IPHONESDK"] via `xcrun` if not already set.

    Raises for unknown platforms, and re-raises if xcrun fails.
    """
    platform_map = {
        "osx": ("macosx", "MACOS_SDK_PATH"),
        "iphone": ("iphoneos", "IPHONESDK"),
        "iphonesimulator": ("iphonesimulator", "IPHONESDK"),
    }
    if platform not in platform_map:
        raise Exception("Invalid platform argument passed to detect_darwin_sdk_path")
    sdk_name, var_name = platform_map[platform]

    if not env[var_name]:
        try:
            sdk_path = decode_utf8(subprocess.check_output(["xcrun", "--sdk", sdk_name, "--show-sdk-path"]).strip())
            if sdk_path:
                env[var_name] = sdk_path
        except (subprocess.CalledProcessError, OSError):
            print("Failed to find SDK path while running xcrun --sdk {} --show-sdk-path.".format(sdk_name))
            raise
|
|
|
|
|
2020-03-30 08:28:32 +02:00
|
|
|
|
2019-02-12 21:10:08 +01:00
|
|
|
def get_compiler_version(env):
    """
    Returns an array of version numbers as ints: [major, minor, patch].
    The return array should have at least two values (major, minor).
    Returns None when the version cannot be determined (including MSVC).
    """
    if not env.msvc:
        # Not using -dumpversion as some GCC distros only return major, and
        # Clang used to return hardcoded 4.2.1: # https://reviews.llvm.org/D56803
        try:
            version = decode_utf8(subprocess.check_output([env.subst(env["CXX"]), "--version"]).strip())
        except (subprocess.CalledProcessError, OSError):
            print("Couldn't parse CXX environment variable to infer compiler version.")
            return None
    else:  # TODO: Implement for MSVC
        return None
    # Raw string: "\." in a plain literal is an invalid escape sequence.
    match = re.search(r"[0-9]+\.[0-9.]+", version)
    if match is not None:
        return list(map(int, match.group().split(".")))
    else:
        return None
|
2019-02-22 15:41:31 +01:00
|
|
|
|
2020-03-30 08:28:32 +02:00
|
|
|
|
2019-04-05 12:51:15 +02:00
|
|
|
def using_gcc(env):
    """Return True when the configured C compiler binary looks like GCC."""
    cc_binary = os.path.basename(env["CC"])
    return "gcc" in cc_binary
|
|
|
|
|
2019-04-05 12:51:15 +02:00
|
|
|
|
|
|
|
def using_clang(env):
    """Return True when the configured C compiler binary looks like Clang."""
    cc_binary = os.path.basename(env["CC"])
    return "clang" in cc_binary
|
2020-03-30 16:35:13 +02:00
|
|
|
|
|
|
|
|
2021-07-15 17:11:15 +02:00
|
|
|
def using_emcc(env):
    """Return True when the configured C compiler binary is Emscripten's emcc."""
    cc_binary = os.path.basename(env["CC"])
    return "emcc" in cc_binary
|
|
|
|
|
|
|
|
|
2020-03-30 16:35:13 +02:00
|
|
|
def show_progress(env):
    """Install SCons build-progress reporting and cache pruning.

    Prints a percentage progress indicator on TTYs (scaled against the node
    count recorded by the previous build in `.scons_node_count`) and, when the
    SCONS_CACHE environment variable points at a cache directory, prunes that
    cache to SCONS_CACHE_LIMIT megabytes via SCons' Progress callback.
    """
    import sys
    from SCons.Script import Progress, Command, AlwaysBuild

    screen = sys.stdout
    # Progress reporting is not available in non-TTY environments since it
    # messes with the output (for example, when writing to a file)
    show_progress = env["progress"] and sys.stdout.isatty()
    # Shared mutable state: mutated by cache_progress.__call__ and read/written
    # by progress_finish below.
    node_count_data = {
        "count": 0,
        "max": 0,
        "interval": 1,
        "fname": str(env.Dir("#")) + "/.scons_node_count",
    }

    import time, math

    class cache_progress:
        # The default is 1 GB cache and 12 hours half life
        def __init__(self, path=None, limit=1073741824, half_life=43200):
            self.path = path
            self.limit = limit
            self.exponent_scale = math.log(2) / half_life
            if env["verbose"] and path != None:
                screen.write(
                    "Current cache limit is {} (used: {})\n".format(
                        self.convert_size(limit), self.convert_size(self.get_size(path))
                    )
                )
            # Prune the cache immediately on construction.
            self.delete(self.file_list())

        def __call__(self, node, *args, **kw):
            # Invoked by SCons' Progress() hook once per processed node.
            if show_progress:
                # Print the progress percentage
                node_count_data["count"] += node_count_data["interval"]
                node_count = node_count_data["count"]
                node_count_max = node_count_data["max"]
                if node_count_max > 0 and node_count <= node_count_max:
                    screen.write("\r[%3d%%] " % (node_count * 100 / node_count_max))
                    screen.flush()
                elif node_count_max > 0 and node_count > node_count_max:
                    # Past last build's node count: clamp the display at 100%.
                    screen.write("\r[100%] ")
                    screen.flush()
                else:
                    # No recorded node count yet (first build): no percentage known.
                    screen.write("\r[Initial build] ")
                    screen.flush()

        def delete(self, files):
            # Remove the given cache files, reporting when verbose.
            if len(files) == 0:
                return
            if env["verbose"]:
                # Utter something
                screen.write("\rPurging %d %s from cache...\n" % (len(files), len(files) > 1 and "files" or "file"))
            [os.remove(f) for f in files]

        def file_list(self):
            # Return the list of cache files to purge to get under self.limit.
            if self.path is None:
                # Nothing to do
                return []
            # Gather a list of (filename, (size, atime)) within the
            # cache directory (os.stat fields 6:8 are st_size and st_atime)
            file_stat = [(x, os.stat(x)[6:8]) for x in glob.glob(os.path.join(self.path, "*", "*"))]
            if file_stat == []:
                # Nothing to do
                return []
            # Weight the cache files by size (assumed to be roughly
            # proportional to the recompilation time) times an exponential
            # decay since the atime, and return a list with the entries
            # (filename, size, weight).
            current_time = time.time()
            file_stat = [(x[0], x[1][0], (current_time - x[1][1])) for x in file_stat]
            # Sort by the most recently accessed files (most sensible to keep) first
            file_stat.sort(key=lambda x: x[2])
            # Search for the first entry where the storage limit is
            # reached
            sum, mark = 0, None  # NOTE: `sum` shadows the builtin within this method
            for i, x in enumerate(file_stat):
                sum += x[1]
                if sum > self.limit:
                    mark = i
                    break
            if mark is None:
                return []
            else:
                return [x[0] for x in file_stat[mark:]]

        def convert_size(self, size_bytes):
            # Human-readable byte count, e.g. 1536 -> "1.5 KB".
            if size_bytes == 0:
                return "0 bytes"
            size_name = ("bytes", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB")
            i = int(math.floor(math.log(size_bytes, 1024)))
            p = math.pow(1024, i)
            s = round(size_bytes / p, 2)
            return "%s %s" % (int(s) if i == 0 else s, size_name[i])

        def get_size(self, start_path="."):
            # Total size in bytes of all files under start_path.
            total_size = 0
            for dirpath, dirnames, filenames in os.walk(start_path):
                for f in filenames:
                    fp = os.path.join(dirpath, f)
                    total_size += os.path.getsize(fp)
            return total_size

    def progress_finish(target, source, env):
        # Runs at the end of the build: persist the node count for the next
        # build's percentage display and prune the cache once more.
        # Best-effort: any failure here must not fail the build.
        try:
            with open(node_count_data["fname"], "w") as f:
                f.write("%d\n" % node_count_data["count"])
            progressor.delete(progressor.file_list())
        except Exception:
            pass

    # Load last build's node count, if any (best-effort).
    try:
        with open(node_count_data["fname"]) as f:
            node_count_data["max"] = int(f.readline())
    except Exception:
        pass

    cache_directory = os.environ.get("SCONS_CACHE")
    # Simple cache pruning, attached to SCons' progress callback. Trim the
    # cache directory to a size not larger than cache_limit.
    cache_limit = float(os.getenv("SCONS_CACHE_LIMIT", 1024)) * 1024 * 1024
    progressor = cache_progress(cache_directory, cache_limit)
    Progress(progressor, interval=node_count_data["interval"])

    progress_finish_command = Command("progress_finish", [], progress_finish)
    AlwaysBuild(progress_finish_command)
|
2020-03-20 21:49:38 +01:00
|
|
|
|
|
|
|
|
|
|
|
def dump(env):
    """Dump the construction environment to .scons_env.json for debugging/tools."""
    import json

    def non_serializable(obj):
        # Placeholder for values json cannot encode (SCons nodes, functions, ...).
        return "<<non-serializable: %s>>" % (qualname(type(obj)))

    with open(".scons_env.json", "w") as f:
        json.dump(env.Dictionary(), f, indent=4, default=non_serializable)
|