build: refactor configure.py
- Explicitly specify the encoding when opening files.
- Use f-strings to format strings.
- Use `isinstance()` for type checks instead of `type()`.
- Use the `with` keyword for resource-allocating operations.
- Avoid using multiple statements in a single line.
- Remove unnecessary `else` clauses after `return`.
- Iterate with the `items()` method of dictionaries when both the key and value are used.
- Remove unnecessary parentheses.
- Rename unused unpacked variables to `_`, `_1`, etc.
- Rename the `list` variable to avoid conflict with the global `list()` function.

PR-URL: https://github.com/nodejs/node/pull/47667
Reviewed-By: Christian Clauss <cclauss@me.com>
parent b5fe45fb9a
commit a4cf6b204f
configure.py (246 lines changed)
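The diff below applies these rules mechanically across configure.py. As a rough before/after sketch of the idioms involved (a made-up helper for illustration, not code taken from configure.py):

import json

# Before: bare open(), percent formatting, indexing the dict inside the loop.
def load_versions_old(path):
  f = open(path)
  data = json.load(f)
  for key in data:
    print('%s -> %s' % (key, data[key]))
  return data

# After: explicit encoding, a with block, f-strings, and dict.items().
def load_versions_new(path):
  with open(path, encoding='utf-8') as f:
    data = json.load(f)
  for key, value in data.items():
    print(f'{key} -> {value}')
  return data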
@@ -53,7 +53,7 @@ valid_mips_arch = ('loongson', 'r1', 'r2', 'r6', 'rx')
 valid_mips_fpu = ('fp32', 'fp64', 'fpxx')
 valid_mips_float_abi = ('soft', 'hard')
 valid_intl_modes = ('none', 'small-icu', 'full-icu', 'system-icu')
-with open ('tools/icu/icu_versions.json') as f:
+with open('tools/icu/icu_versions.json', encoding='utf-8') as f:
   icu_versions = json.load(f)

 shareable_builtins = {'cjs_module_lexer/lexer': 'deps/cjs-module-lexer/lexer.js',
@@ -108,7 +108,7 @@ parser.add_argument('--dest-cpu',
     action='store',
     dest='dest_cpu',
     choices=valid_arch,
-    help='CPU architecture to build for ({0})'.format(', '.join(valid_arch)))
+    help=f"CPU architecture to build for ({', '.join(valid_arch)})")

 parser.add_argument('--cross-compiling',
     action='store_true',
@@ -125,7 +125,7 @@ parser.add_argument('--dest-os',
     action='store',
     dest='dest_os',
     choices=valid_os,
-    help='operating system to build for ({0})'.format(', '.join(valid_os)))
+    help=f"operating system to build for ({', '.join(valid_os)})")

 parser.add_argument('--error-on-warn',
     action='store_true',
@@ -510,39 +510,34 @@ parser.add_argument('--with-arm-float-abi',
     action='store',
     dest='arm_float_abi',
     choices=valid_arm_float_abi,
-    help='specifies which floating-point ABI to use ({0}).'.format(
-        ', '.join(valid_arm_float_abi)))
+    help=f"specifies which floating-point ABI to use ({', '.join(valid_arm_float_abi)}).")

 parser.add_argument('--with-arm-fpu',
     action='store',
     dest='arm_fpu',
     choices=valid_arm_fpu,
-    help='ARM FPU mode ({0}) [default: %(default)s]'.format(
-        ', '.join(valid_arm_fpu)))
+    help=f"ARM FPU mode ({', '.join(valid_arm_fpu)}) [default: %(default)s]")

 parser.add_argument('--with-mips-arch-variant',
     action='store',
     dest='mips_arch_variant',
     default='r2',
     choices=valid_mips_arch,
-    help='MIPS arch variant ({0}) [default: %(default)s]'.format(
-        ', '.join(valid_mips_arch)))
+    help=f"MIPS arch variant ({', '.join(valid_mips_arch)}) [default: %(default)s]")

 parser.add_argument('--with-mips-fpu-mode',
     action='store',
     dest='mips_fpu_mode',
     default='fp32',
     choices=valid_mips_fpu,
-    help='MIPS FPU mode ({0}) [default: %(default)s]'.format(
-        ', '.join(valid_mips_fpu)))
+    help=f"MIPS FPU mode ({', '.join(valid_mips_fpu)}) [default: %(default)s]")

 parser.add_argument('--with-mips-float-abi',
     action='store',
     dest='mips_float_abi',
     default='hard',
     choices=valid_mips_float_abi,
-    help='MIPS floating-point ABI ({0}) [default: %(default)s]'.format(
-        ', '.join(valid_mips_float_abi)))
+    help=f"MIPS floating-point ABI ({', '.join(valid_mips_float_abi)}) [default: %(default)s]")

 parser.add_argument('--use-largepages',
     action='store_true',
@@ -569,8 +564,7 @@ intl_optgroup.add_argument('--with-intl',
     dest='with_intl',
     default='full-icu',
     choices=valid_intl_modes,
-    help='Intl mode (valid choices: {0}) [default: %(default)s]'.format(
-        ', '.join(valid_intl_modes)))
+    help=f"Intl mode (valid choices: {', '.join(valid_intl_modes)}) [default: %(default)s]")

 intl_optgroup.add_argument('--without-intl',
     action='store_const',
@@ -597,7 +591,7 @@ intl_optgroup.add_argument('--with-icu-source',
     dest='with_icu_source',
     help='Intl mode: optional local path to icu/ dir, or path/URL of '
          'the icu4c source archive. '
-         'v%d.x or later recommended.' % icu_versions['minimum_icu'])
+         f"v{icu_versions['minimum_icu']}.x or later recommended.")

 intl_optgroup.add_argument('--with-icu-default-data-dir',
     action='store',
@@ -853,25 +847,25 @@ auto_downloads = nodedownload.parse(options.download_list)

 def error(msg):
   prefix = '\033[1m\033[31mERROR\033[0m' if os.isatty(1) else 'ERROR'
-  print('%s: %s' % (prefix, msg))
+  print(f'{prefix}: {msg}')
   sys.exit(1)

 def warn(msg):
   warn.warned = True
   prefix = '\033[1m\033[93mWARNING\033[0m' if os.isatty(1) else 'WARNING'
-  print('%s: %s' % (prefix, msg))
+  print(f'{prefix}: {msg}')

 # track if warnings occurred
 warn.warned = False

 def info(msg):
   prefix = '\033[1m\033[32mINFO\033[0m' if os.isatty(1) else 'INFO'
-  print('%s: %s' % (prefix, msg))
+  print(f'{prefix}: {msg}')

 def print_verbose(x):
   if not options.verbose:
     return
-  if type(x) is str:
+  if isinstance(x, str):
     print(x)
   else:
     pprint.pprint(x, indent=2)
@@ -904,9 +898,11 @@ def pkg_config(pkg):
     try:
       proc = subprocess.Popen(shlex.split(pkg_config) + args,
                               stdout=subprocess.PIPE)
-      val = to_utf8(proc.communicate()[0]).strip()
+      with proc:
+        val = to_utf8(proc.communicate()[0]).strip()
     except OSError as e:
-      if e.errno != errno.ENOENT: raise e # Unexpected error.
+      if e.errno != errno.ENOENT:
+        raise e # Unexpected error.
       return (None, None, None, None) # No pkg-config/pkgconf installed.
     retval.append(val)
     args = ['--silence-errors']
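A note on the `with proc:` pattern introduced in this hunk and the ones that follow: `subprocess.Popen` objects are context managers, so the `with` block closes the child's pipes and waits for it to exit even when an exception is raised, instead of leaking file descriptors. A minimal standalone sketch (not from configure.py; assumes a POSIX `echo` binary is available):

import shlex
import subprocess

def run_and_capture(cmd):
  # Popen.__exit__ closes stdin/stdout/stderr and waits for the child,
  # so nothing is leaked if communicate() or later code raises.
  with subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE) as proc:
    out = proc.communicate()[0]
  return out.decode('utf-8').strip()

print(run_and_capture('echo hello'))  # -> hello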
@@ -920,13 +916,14 @@ def try_check_compiler(cc, lang):
   except OSError:
     return (False, False, '', '')

-  proc.stdin.write(b'__clang__ __GNUC__ __GNUC_MINOR__ __GNUC_PATCHLEVEL__ '
-                   b'__clang_major__ __clang_minor__ __clang_patchlevel__')
+  with proc:
+    proc.stdin.write(b'__clang__ __GNUC__ __GNUC_MINOR__ __GNUC_PATCHLEVEL__ '
+                     b'__clang_major__ __clang_minor__ __clang_patchlevel__')

-  if sys.platform == 'zos':
-    values = (to_utf8(proc.communicate()[0]).split('\n')[-2].split() + ['0'] * 7)[0:7]
-  else:
-    values = (to_utf8(proc.communicate()[0]).split() + ['0'] * 7)[0:7]
+    if sys.platform == 'zos':
+      values = (to_utf8(proc.communicate()[0]).split('\n')[-2].split() + ['0'] * 7)[0:7]
+    else:
+      values = (to_utf8(proc.communicate()[0]).split() + ['0'] * 7)[0:7]

   is_clang = values[0] == '1'
   gcc_version = tuple(map(int, values[1:1+3]))
@@ -952,12 +949,10 @@ def get_version_helper(cc, regexp):
       consider adjusting the CC environment variable if you installed
       it in a non-standard prefix.''')

-  match = re.search(regexp, to_utf8(proc.communicate()[1]))
+  with proc:
+    match = re.search(regexp, to_utf8(proc.communicate()[1]))

-  if match:
-    return match.group(2)
-  else:
-    return '0.0'
+  return match.group(2) if match else '0.0'

 def get_nasm_version(asm):
   try:
@@ -970,13 +965,11 @@ def get_nasm_version(asm):
         and refer BUILDING.md.''')
     return '0.0'

-  match = re.match(r"NASM version ([2-9]\.[0-9][0-9]+)",
-                   to_utf8(proc.communicate()[0]))
+  with proc:
+    match = re.match(r"NASM version ([2-9]\.[0-9][0-9]+)",
+                     to_utf8(proc.communicate()[0]))

-  if match:
-    return match.group(1)
-  else:
-    return '0.0'
+  return match.group(1) if match else '0.0'

 def get_llvm_version(cc):
   return get_version_helper(
@@ -1002,14 +995,16 @@ def get_gas_version(cc):
       consider adjusting the CC environment variable if you installed
       it in a non-standard prefix.''')

-  gas_ret = to_utf8(proc.communicate()[1])
+  with proc:
+    gas_ret = to_utf8(proc.communicate()[1])
+
   match = re.match(r"GNU assembler version ([2-9]\.[0-9]+)", gas_ret)

   if match:
     return match.group(1)
-  else:
-    warn('Could not recognize `gas`: ' + gas_ret)
-    return '0.0'
+
+  warn(f'Could not recognize `gas`: {gas_ret}')
+  return '0.0'

 # Note: Apple clang self-reports as clang 4.2.0 and gcc 4.2.1. It passes
 # the version check more by accident than anything else but a more rigorous
@@ -1027,26 +1022,22 @@ def check_compiler(o):

   ok, is_clang, clang_version, gcc_version = try_check_compiler(CXX, 'c++')
   version_str = ".".join(map(str, clang_version if is_clang else gcc_version))
-  print_verbose('Detected %sC++ compiler (CXX=%s) version: %s' %
-                ('clang ' if is_clang else '', CXX, version_str))
+  print_verbose(f"Detected {'clang ' if is_clang else ''}C++ compiler (CXX={CXX}) version: {version_str}")
   if not ok:
-    warn('failed to autodetect C++ compiler version (CXX=%s)' % CXX)
+    warn(f'failed to autodetect C++ compiler version (CXX={CXX})')
   elif clang_version < (8, 0, 0) if is_clang else gcc_version < (10, 1, 0):
-    warn('C++ compiler (CXX=%s, %s) too old, need g++ 10.1.0 or clang++ 8.0.0' %
-         (CXX, version_str))
+    warn(f'C++ compiler (CXX={CXX}, {version_str}) too old, need g++ 10.1.0 or clang++ 8.0.0')

   ok, is_clang, clang_version, gcc_version = try_check_compiler(CC, 'c')
   version_str = ".".join(map(str, clang_version if is_clang else gcc_version))
-  print_verbose('Detected %sC compiler (CC=%s) version: %s' %
-                ('clang ' if is_clang else '', CC, version_str))
+  print_verbose(f"Detected {'clang ' if is_clang else ''}C compiler (CC={CC}) version: {version_str}")
   if not ok:
-    warn('failed to autodetect C compiler version (CC=%s)' % CC)
+    warn(f'failed to autodetect C compiler version (CC={CC})')
   elif not is_clang and gcc_version < (4, 2, 0):
     # clang 3.2 is a little white lie because any clang version will probably
     # do for the C bits. However, we might as well encourage people to upgrade
     # to a version that is not completely ancient.
-    warn('C compiler (CC=%s, %s) too old, need gcc 4.2 or clang 3.2' %
-         (CC, version_str))
+    warn(f'C compiler (CC={CC}, {version_str}) too old, need gcc 4.2 or clang 3.2')

   o['variables']['llvm_version'] = get_llvm_version(CC) if is_clang else '0.0'
@@ -1076,8 +1067,9 @@ def cc_macros(cc=None):
       consider adjusting the CC environment variable if you installed
       it in a non-standard prefix.''')

-  p.stdin.write(b'\n')
-  out = to_utf8(p.communicate()[0]).split('\n')
+  with p:
+    p.stdin.write(b'\n')
+    out = to_utf8(p.communicate()[0]).split('\n')

   k = {}
   for line in out:
@@ -1134,9 +1126,9 @@ def host_arch_cc():

   rtn = 'ia32' # default

-  for i in matchup:
-    if i in k and k[i] != '0':
-      rtn = matchup[i]
+  for key, value in matchup.items():
+    if k.get(key, 0) and k[key] != '0':
+      rtn = value
       break

   if rtn == 'mipsel' and '_LP64' in k:
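The `matchup.items()` change above follows the same pattern as the later `icu_src.items()` and `shareable_builtins.items()` changes: when both the key and the value are needed, unpacking from `.items()` replaces a repeated lookup. A tiny generic sketch (illustrative entries only):

matchup = {'__x86_64__': 'x64', '__i386__': 'ia32'}

# Indexing back into the dict on every iteration:
for key in matchup:
  print(key, matchup[key])

# Unpacking key and value directly:
for key, value in matchup.items():
  print(key, value)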
@@ -1195,7 +1187,7 @@ def configure_arm(o):


 def configure_mips(o, target_arch):
-  can_use_fpu_instructions = (options.mips_float_abi != 'soft')
+  can_use_fpu_instructions = options.mips_float_abi != 'soft'
   o['variables']['v8_can_use_fpu_instructions'] = b(can_use_fpu_instructions)
   o['variables']['v8_use_mips_abi_hardfloat'] = b(can_use_fpu_instructions)
   o['variables']['mips_arch_variant'] = options.mips_arch_variant
@@ -1214,16 +1206,18 @@ def configure_zos(o):

 def clang_version_ge(version_checked):
   for compiler in [(CC, 'c'), (CXX, 'c++')]:
-    ok, is_clang, clang_version, gcc_version = \
+    _, is_clang, clang_version, _1 = (
       try_check_compiler(compiler[0], compiler[1])
+    )
     if is_clang and clang_version >= version_checked:
       return True
   return False

 def gcc_version_ge(version_checked):
   for compiler in [(CC, 'c'), (CXX, 'c++')]:
-    ok, is_clang, clang_version, gcc_version = \
+    _, is_clang, _1, gcc_version = (
       try_check_compiler(compiler[0], compiler[1])
+    )
     if is_clang or gcc_version < version_checked:
       return False
   return True
@@ -1324,7 +1318,7 @@ def configure_node(o):
       version_checked_str = ".".join(map(str, version_checked))
       raise Exception(
         'The options --enable-pgo-generate and --enable-pgo-use '
-        'are supported for gcc and gxx %s or newer only.' % (version_checked_str))
+        f'are supported for gcc and gxx {version_checked_str} or newer only.')

   if options.enable_pgo_generate and options.enable_pgo_use:
     raise Exception(
@@ -1347,8 +1341,8 @@ def configure_node(o):
       gcc_version_checked_str = ".".join(map(str, gcc_version_checked))
       clang_version_checked_str = ".".join(map(str, clang_version_checked))
       raise Exception(
-        'The option --enable-lto is supported for gcc %s+'
-        'or clang %s+ only.' % (gcc_version_checked_str, clang_version_checked_str))
+        f'The option --enable-lto is supported for gcc {gcc_version_checked_str}+'
+        f'or clang {clang_version_checked_str}+ only.')

   o['variables']['enable_lto'] = b(options.enable_lto)
@@ -1458,15 +1452,15 @@ def configure_library(lib, output, pkgname=None):
       if 'msvs_settings' not in output:
         output['msvs_settings'] = { 'VCLinkerTool': { 'AdditionalOptions': [] } }
       output['msvs_settings']['VCLinkerTool']['AdditionalOptions'] += [
-        '/LIBPATH:%s' % options.__dict__[shared_lib + '_libpath']]
+        f"/LIBPATH:{options.__dict__[shared_lib + '_libpath']}"]
     else:
       output['libraries'] += [
-        '-L%s' % options.__dict__[shared_lib + '_libpath']]
+        f"-L{options.__dict__[shared_lib + '_libpath']}"]
   elif pkg_libpath:
     output['libraries'] += [pkg_libpath]

   default_libs = getattr(options, shared_lib + '_libname')
-  default_libs = ['-l{0}'.format(l) for l in default_libs.split(',')]
+  default_libs = [f'-l{l}' for l in default_libs.split(',')]

   if default_libs:
     output['libraries'] += default_libs
@@ -1528,7 +1522,7 @@ def configure_openssl(o):

   if options.without_ssl:
     def without_ssl_error(option):
-      error('--without-ssl is incompatible with %s' % option)
+      error(f'--without-ssl is incompatible with {option}')
     if options.shared_openssl:
       without_ssl_error('--shared-openssl')
     if options.openssl_no_asm:
@@ -1608,35 +1602,35 @@ def configure_static(o):


 def write(filename, data):
-  print_verbose('creating %s' % filename)
-  with open(filename, 'w+') as f:
+  print_verbose(f'creating {filename}')
+  with open(filename, 'w+', encoding='utf-8') as f:
     f.write(data)

 do_not_edit = '# Do not edit. Generated by the configure script.\n'

 def glob_to_var(dir_base, dir_sub, patch_dir):
-  list = []
-  dir_all = '%s/%s' % (dir_base, dir_sub)
+  file_list = []
+  dir_all = f'{dir_base}/{dir_sub}'
   files = os.walk(dir_all)
   for ent in files:
-    (path, dirs, files) = ent
+    (_, _1, files) = ent
     for file in files:
       if file.endswith(('.cpp', '.c', '.h')):
         # srcfile uses "slash" as dir separator as its output is consumed by gyp
-        srcfile = '%s/%s' % (dir_sub, file)
+        srcfile = f'{dir_sub}/{file}'
         if patch_dir:
-          patchfile = '%s/%s/%s' % (dir_base, patch_dir, file)
+          patchfile = f'{dir_base}{patch_dir}{file}'
           if os.path.isfile(patchfile):
-            srcfile = '%s/%s' % (patch_dir, file)
-            info('Using floating patch "%s" from "%s"' % (patchfile, dir_base))
-        list.append(srcfile)
+            srcfile = f'{patch_dir}/{file}'
+            info(f'Using floating patch "{patchfile}" from "{dir_base}"')
+        file_list.append(srcfile)
         break
-  return list
+  return file_list

 def configure_intl(o):
   def icu_download(path):
     depFile = 'tools/icu/current_ver.dep'
-    with open(depFile) as f:
+    with open(depFile, encoding='utf-8') as f:
       icus = json.load(f)
     # download ICU, if needed
     if not os.access(options.download_path, os.W_OK):
@@ -1647,26 +1641,26 @@ def configure_intl(o):
       url = icu['url']
       (expectHash, hashAlgo, allAlgos) = nodedownload.findHash(icu)
       if not expectHash:
-        error('''Could not find a hash to verify ICU download.
-          %s may be incorrect.
-          For the entry %s,
-          Expected one of these keys: %s''' % (depFile, url, ' '.join(allAlgos)))
+        error(f'''Could not find a hash to verify ICU download.
+          {depFile} may be incorrect.
+          For the entry {url},
+          Expected one of these keys: {' '.join(allAlgos)}''')
       local = url.split('/')[-1]
       targetfile = os.path.join(options.download_path, local)
       if not os.path.isfile(targetfile):
         if attemptdownload:
           nodedownload.retrievefile(url, targetfile)
       else:
-        print('Re-using existing %s' % targetfile)
+        print(f'Re-using existing {targetfile}')
       if os.path.isfile(targetfile):
-        print('Checking file integrity with %s:\r' % hashAlgo)
+        print(f'Checking file integrity with {hashAlgo}:\r')
         gotHash = nodedownload.checkHash(targetfile, hashAlgo)
-        print('%s: %s %s' % (hashAlgo, gotHash, targetfile))
-        if (expectHash == gotHash):
+        print(f'{hashAlgo}: {gotHash} {targetfile}')
+        if expectHash == gotHash:
           return targetfile
-        else:
-          warn('Expected: %s *MISMATCH*' % expectHash)
-          warn('\n ** Corrupted ZIP? Delete %s to retry download.\n' % targetfile)
+
+        warn(f'Expected: {expectHash} *MISMATCH*')
+        warn(f'\n ** Corrupted ZIP? Delete {targetfile} to retry download.\n')
     return None
   icu_config = {
     'variables': {}
@@ -1694,12 +1688,14 @@ def configure_intl(o):
     # use the .gyp given
     o['variables']['icu_gyp_path'] = options.with_icu_path
     return

   # --with-intl=<with_intl>
   # set the default
   if with_intl in (None, 'none'):
     o['variables']['v8_enable_i18n_support'] = 0
     return # no Intl
-  elif with_intl == 'small-icu':
+
+  if with_intl == 'small-icu':
     # small ICU (English only)
     o['variables']['v8_enable_i18n_support'] = 1
     o['variables']['icu_small'] = b(True)
@@ -1722,8 +1718,7 @@ def configure_intl(o):
     icu_ver_major = icuversion.split('.')[0]
     o['variables']['icu_ver_major'] = icu_ver_major
     if int(icu_ver_major) < icu_versions['minimum_icu']:
-      error('icu4c v%s is too old, v%d.x or later is required.' %
-            (icuversion, icu_versions['minimum_icu']))
+      error(f"icu4c v{icuversion} is too old, v{icu_versions['minimum_icu']}.x or later is required.")
     # libpath provides linker path which may contain spaces
     if libpath:
       o['libraries'] += [libpath]
@@ -1753,7 +1748,7 @@ def configure_intl(o):
   canned_is_full = os.path.isfile(os.path.join(canned_icu_dir, 'README-FULL-ICU.txt'))
   canned_is_small = os.path.isfile(os.path.join(canned_icu_dir, 'README-SMALL-ICU.txt'))
   if canned_is_small:
-    warn('Ignoring %s - in-repo small icu is no longer supported.' % canned_icu_dir)
+    warn(f'Ignoring {canned_icu_dir} - in-repo small icu is no longer supported.')

   # We can use 'deps/icu-small' - pre-canned ICU *iff*
   # - canned_is_full AND
@@ -1772,17 +1767,17 @@ def configure_intl(o):
   # --with-icu-source processing
   # now, check that they didn't pass --with-icu-source=deps/icu
   elif with_icu_source and os.path.abspath(icu_full_path) == os.path.abspath(with_icu_source):
-    warn('Ignoring redundant --with-icu-source=%s' % with_icu_source)
+    warn(f'Ignoring redundant --with-icu-source={with_icu_source}')
     with_icu_source = None
   # if with_icu_source is still set, try to use it.
   if with_icu_source:
     if os.path.isdir(icu_full_path):
-      print('Deleting old ICU source: %s' % icu_full_path)
+      print(f'Deleting old ICU source: {icu_full_path}')
       shutil.rmtree(icu_full_path)
     # now, what path was given?
     if os.path.isdir(with_icu_source):
       # it's a path. Copy it.
-      print('%s -> %s' % (with_icu_source, icu_full_path))
+      print(f'{with_icu_source} -> {icu_full_path}')
       shutil.copytree(with_icu_source, icu_full_path)
     else:
       # could be file or URL.
@@ -1807,8 +1802,7 @@ def configure_intl(o):
         shutil.rmtree(icu_tmp_path)
       else:
         shutil.rmtree(icu_tmp_path)
-        error('--with-icu-source=%s did not result in an "icu" dir.' % \
-               with_icu_source)
+        error(f'--with-icu-source={with_icu_source} did not result in an "icu" dir.')

   # ICU mode. (icu-generic.gyp)
   o['variables']['icu_gyp_path'] = 'tools/icu/icu-generic.gyp'
@@ -1820,17 +1814,17 @@ def configure_intl(o):
     if localzip:
       nodedownload.unpack(localzip, icu_parent_path)
     else:
-      warn('* ECMA-402 (Intl) support didn\'t find ICU in %s..' % icu_full_path)
+      warn("* ECMA-402 (Intl) support didn't find ICU in {icu_full_path}..")
   if not os.path.isdir(icu_full_path):
-    error('''Cannot build Intl without ICU in %s.
-       Fix, or disable with "--with-intl=none"''' % icu_full_path)
+    error(f'''Cannot build Intl without ICU in {icu_full_path}.
+       Fix, or disable with "--with-intl=none"''')
   else:
-    print_verbose('* Using ICU in %s' % icu_full_path)
+    print_verbose(f'* Using ICU in {icu_full_path}')
   # Now, what version of ICU is it? We just need the "major", such as 54.
   # uvernum.h contains it as a #define.
   uvernum_h = os.path.join(icu_full_path, 'source/common/unicode/uvernum.h')
   if not os.path.isfile(uvernum_h):
-    error('Could not load %s - is ICU installed?' % uvernum_h)
+    error(f'Could not load {uvernum_h} - is ICU installed?')
   icu_ver_major = None
   matchVerExp = r'^\s*#define\s+U_ICU_VERSION_SHORT\s+"([^"]*)".*'
   match_version = re.compile(matchVerExp)
@@ -1840,20 +1834,19 @@ def configure_intl(o):
       if m:
         icu_ver_major = str(m.group(1))
   if not icu_ver_major:
-    error('Could not read U_ICU_VERSION_SHORT version from %s' % uvernum_h)
+    error(f'Could not read U_ICU_VERSION_SHORT version from {uvernum_h}')
   elif int(icu_ver_major) < icu_versions['minimum_icu']:
-    error('icu4c v%s.x is too old, v%d.x or later is required.' %
-          (icu_ver_major, icu_versions['minimum_icu']))
+    error(f"icu4c v{icu_ver_major}.x is too old, v{icu_versions['minimum_icu']}.x or later is required.")
   icu_endianness = sys.byteorder[0]
   o['variables']['icu_ver_major'] = icu_ver_major
   o['variables']['icu_endianness'] = icu_endianness
-  icu_data_file_l = 'icudt%s%s.dat' % (icu_ver_major, 'l') # LE filename
-  icu_data_file = 'icudt%s%s.dat' % (icu_ver_major, icu_endianness)
+  icu_data_file_l = f'icudt{icu_ver_major}l.dat' # LE filename
+  icu_data_file = f'icudt{icu_ver_major}{icu_endianness}.dat'
   # relative to configure
   icu_data_path = os.path.join(icu_full_path,
                                'source/data/in',
                                icu_data_file_l) # LE
-  compressed_data = '%s.bz2' % (icu_data_path)
+  compressed_data = f'{icu_data_path}.bz2'
   if not os.path.isfile(icu_data_path) and os.path.isfile(compressed_data):
     # unpack. deps/icu is a temporary path
     if os.path.isdir(icu_tmp_path):
@@ -1869,7 +1862,7 @@ def configure_intl(o):
   # Now, proceed..

   # relative to dep..
-  icu_data_in = os.path.join('..','..', icu_data_path)
+  icu_data_in = os.path.join('..', '..', icu_data_path)
   if not os.path.isfile(icu_data_path) and icu_endianness != 'l':
     # use host endianness
     icu_data_path = os.path.join(icu_full_path,
@@ -1877,8 +1870,8 @@ def configure_intl(o):
                                  icu_data_file) # will be generated
   if not os.path.isfile(icu_data_path):
     # .. and we're not about to build it from .gyp!
-    error('''ICU prebuilt data file %s does not exist.
-       See the README.md.''' % icu_data_path)
+    error(f'''ICU prebuilt data file {icu_data_path} does not exist.
+       See the README.md.''')

   # this is the input '.dat' file to use .. icudt*.dat
   # may be little-endian if from a icu-project.org tarball
@@ -1896,10 +1889,10 @@ def configure_intl(o):
   }
   # this creates a variable icu_src_XXX for each of the subdirs
   # with a list of the src files to use
-  for i in icu_src:
-    var = 'icu_src_%s' % i
-    path = '../../%s/source/%s' % (icu_full_path, icu_src[i])
-    icu_config['variables'][var] = glob_to_var('tools/icu', path, 'patches/%s/source/%s' % (icu_ver_major, icu_src[i]) )
+  for key, value in icu_src.items():
+    var = f'icu_src_{key}'
+    path = f'../../{icu_full_path}/source/{value}'
+    icu_config['variables'][var] = glob_to_var('tools/icu', path, f'patches/{icu_ver_major}/source/{value}')
   # calculate platform-specific genccode args
   # print("platform %s, flavor %s" % (sys.platform, flavor))
   # if sys.platform == 'darwin':
@@ -1950,8 +1943,9 @@ def configure_section_file(o):
     warn('''No acceptable ld.gold linker found!''')
     return 0

-  match = re.match(r"^GNU gold.*([0-9]+)\.([0-9]+)$",
-                   proc.communicate()[0].decode("utf-8"))
+  with proc:
+    match = re.match(r"^GNU gold.*([0-9]+)\.([0-9]+)$",
+                     proc.communicate()[0].decode("utf-8"))

   if match:
     gold_major_version = match.group(1)
@@ -1983,13 +1977,15 @@ def make_bin_override():
   try:
     os.makedirs(bin_override)
   except OSError as e:
-    if e.errno != errno.EEXIST: raise e
+    if e.errno != errno.EEXIST:
+      raise e

   python_link = os.path.join(bin_override, 'python')
   try:
     os.unlink(python_link)
   except OSError as e:
-    if e.errno != errno.ENOENT: raise e
+    if e.errno != errno.ENOENT:
+      raise e
   os.symlink(sys.executable, python_link)

   # We need to set the environment right now so that when gyp (in run_gyp)
@@ -2013,7 +2009,7 @@ check_compiler(output)
 # determine the "flavor" (operating system) we're building for,
 # leveraging gyp's GetFlavor function
 flavor_params = {}
-if (options.dest_os):
+if options.dest_os:
   flavor_params['flavor'] = options.dest_os
 flavor = GetFlavor(flavor_params)
@@ -2037,12 +2033,12 @@ configure_section_file(output)

 # configure shareable builtins
 output['variables']['node_builtin_shareable_builtins'] = []
-for builtin in shareable_builtins:
+for builtin, value in shareable_builtins.items():
   builtin_id = 'node_shared_builtin_' + builtin.replace('/', '_') + '_path'
   if getattr(options, builtin_id):
     output['defines'] += [builtin_id.upper() + '=' + getattr(options, builtin_id)]
   else:
-    output['variables']['node_builtin_shareable_builtins'] += [shareable_builtins[builtin]]
+    output['variables']['node_builtin_shareable_builtins'] += [value]

 # Forward OSS-Fuzz settings
 output['variables']['ossfuzz'] = b(options.ossfuzz)
|
Loading…
x
Reference in New Issue
Block a user