gyp: pull Python 3 changes from node/node-gyp

PR-URL: https://github.com/nodejs/node/pull/28573
Reviewed-By: Sam Roberts <vieuxtech@gmail.com>
Reviewed-By: Ruben Bridgewater <ruben@bridgewater.de>
This commit is contained in:
cclauss 2019-07-06 14:13:09 +02:00 committed by Sam Roberts
parent f5b40b2ffa
commit 5ebaf703aa
16 changed files with 220 additions and 182 deletions

View File

@@ -76,8 +76,7 @@ PYLINT_DISABLED_WARNINGS = [
def _LicenseHeader(input_api): def _LicenseHeader(input_api):
# Accept any year number from 2009 to the current year. # Accept any year number from 2009 to the current year.
current_year = int(input_api.time.strftime('%Y')) current_year = int(input_api.time.strftime('%Y'))
allowed_years = (str(s) for s in reversed(xrange(2009, current_year + 1))) allowed_years = (str(s) for s in reversed(range(2009, current_year + 1)))
years_re = '(' + '|'.join(allowed_years) + ')' years_re = '(' + '|'.join(allowed_years) + ')'
# The (c) is deprecated, but tolerate it until it's removed from all files. # The (c) is deprecated, but tolerate it until it's removed from all files.
@@ -124,3 +123,16 @@ def CheckChangeOnCommit(input_api, output_api):
finally: finally:
sys.path = old_sys_path sys.path = old_sys_path
return report return report
TRYBOTS = [
'linux_try',
'mac_try',
'win_try',
]
def GetPreferredTryMasters(_, change):
return {
'client.gyp': { t: set(['defaulttests']) for t in TRYBOTS },
}

View File

@@ -4,22 +4,17 @@
"""New implementation of Visual Studio project generation.""" """New implementation of Visual Studio project generation."""
import hashlib
import os import os
import random import random
import gyp.common import gyp.common
# hashlib is supplied as of Python 2.5 as the replacement interface for md5
# and other secure hashes. In 2.6, md5 is deprecated. Import hashlib if
# available, avoiding a deprecation warning under 2.6. Import md5 otherwise,
# preserving 2.4 compatibility.
try: try:
import hashlib cmp
_new_md5 = hashlib.md5 except NameError:
except ImportError: def cmp(x, y):
import md5 return (x > y) - (x < y)
_new_md5 = md5.new
# Initialize random number generator # Initialize random number generator
random.seed() random.seed()
@@ -50,7 +45,7 @@ def MakeGuid(name, seed='msvs_new'):
not change when the project for a target is rebuilt. not change when the project for a target is rebuilt.
""" """
# Calculate a MD5 signature for the seed and name. # Calculate a MD5 signature for the seed and name.
d = _new_md5(str(seed) + str(name)).hexdigest().upper() d = hashlib.md5(str(seed) + str(name)).hexdigest().upper()
# Convert most of the signature to GUID form (discard the rest) # Convert most of the signature to GUID form (discard the rest)
guid = ('{' + d[:8] + '-' + d[8:12] + '-' + d[12:16] + '-' + d[16:20] guid = ('{' + d[:8] + '-' + d[8:12] + '-' + d[12:16] + '-' + d[16:20]
+ '-' + d[20:32] + '}') + '-' + d[20:32] + '}')

View File

@@ -91,7 +91,7 @@ class Writer(object):
if environment and isinstance(environment, dict): if environment and isinstance(environment, dict):
env_list = ['%s="%s"' % (key, val) env_list = ['%s="%s"' % (key, val)
for (key,val) in environment.iteritems()] for (key,val) in environment.items()]
environment = ' '.join(env_list) environment = ' '.join(env_list)
else: else:
environment = '' environment = ''
@@ -135,7 +135,7 @@ class Writer(object):
def WriteIfChanged(self): def WriteIfChanged(self):
"""Writes the user file.""" """Writes the user file."""
configs = ['Configurations'] configs = ['Configurations']
for config, spec in sorted(self.configurations.iteritems()): for config, spec in sorted(self.configurations.items()):
configs.append(spec) configs.append(spec)
content = ['VisualStudioUserFile', content = ['VisualStudioUserFile',

View File

@@ -4,9 +4,11 @@
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.
from __future__ import print_function
import copy import copy
import gyp.input import gyp.input
import optparse import argparse
import os.path import os.path
import re import re
import shlex import shlex
@@ -14,6 +16,13 @@ import sys
import traceback import traceback
from gyp.common import GypError from gyp.common import GypError
try:
# Python 2
string_types = basestring
except NameError:
# Python 3
string_types = str
# Default debug modes for GYP # Default debug modes for GYP
debug = {} debug = {}
@@ -34,8 +43,8 @@ def DebugOutput(mode, message, *args):
pass pass
if args: if args:
message %= args message %= args
print '%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]), print('%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]),
ctx[1], ctx[2], message) ctx[1], ctx[2], message))
def FindBuildFiles(): def FindBuildFiles():
extension = '.gyp' extension = '.gyp'
@@ -207,7 +216,7 @@ def RegenerateFlags(options):
# We always want to ignore the environment when regenerating, to avoid # We always want to ignore the environment when regenerating, to avoid
# duplicate or changed flags in the environment at the time of regeneration. # duplicate or changed flags in the environment at the time of regeneration.
flags = ['--ignore-environment'] flags = ['--ignore-environment']
for name, metadata in options._regeneration_metadata.iteritems(): for name, metadata in options._regeneration_metadata.items():
opt = metadata['opt'] opt = metadata['opt']
value = getattr(options, name) value = getattr(options, name)
value_predicate = metadata['type'] == 'path' and FixPath or Noop value_predicate = metadata['type'] == 'path' and FixPath or Noop
@@ -226,24 +235,24 @@ def RegenerateFlags(options):
(action == 'store_false' and not value)): (action == 'store_false' and not value)):
flags.append(opt) flags.append(opt)
elif options.use_environment and env_name: elif options.use_environment and env_name:
print >>sys.stderr, ('Warning: environment regeneration unimplemented ' print('Warning: environment regeneration unimplemented '
'for %s flag %r env_name %r' % (action, opt, 'for %s flag %r env_name %r' % (action, opt,
env_name)) env_name), file=sys.stderr)
else: else:
print >>sys.stderr, ('Warning: regeneration unimplemented for action %r ' print('Warning: regeneration unimplemented for action %r '
'flag %r' % (action, opt)) 'flag %r' % (action, opt), file=sys.stderr)
return flags return flags
class RegeneratableOptionParser(optparse.OptionParser): class RegeneratableOptionParser(argparse.ArgumentParser):
def __init__(self): def __init__(self, usage):
self.__regeneratable_options = {} self.__regeneratable_options = {}
optparse.OptionParser.__init__(self) argparse.ArgumentParser.__init__(self, usage=usage)
def add_option(self, *args, **kw): def add_argument(self, *args, **kw):
"""Add an option to the parser. """Add an option to the parser.
This accepts the same arguments as OptionParser.add_option, plus the This accepts the same arguments as ArgumentParser.add_argument, plus the
following: following:
regenerate: can be set to False to prevent this option from being included regenerate: can be set to False to prevent this option from being included
in regeneration. in regeneration.
@@ -260,7 +269,7 @@ class RegeneratableOptionParser(optparse.OptionParser):
# it as a string. # it as a string.
type = kw.get('type') type = kw.get('type')
if type == 'path': if type == 'path':
kw['type'] = 'string' kw['type'] = str
self.__regeneratable_options[dest] = { self.__regeneratable_options[dest] = {
'action': kw.get('action'), 'action': kw.get('action'),
@@ -269,50 +278,50 @@ class RegeneratableOptionParser(optparse.OptionParser):
'opt': args[0], 'opt': args[0],
} }
optparse.OptionParser.add_option(self, *args, **kw) argparse.ArgumentParser.add_argument(self, *args, **kw)
def parse_args(self, *args): def parse_args(self, *args):
values, args = optparse.OptionParser.parse_args(self, *args) values, args = argparse.ArgumentParser.parse_known_args(self, *args)
values._regeneration_metadata = self.__regeneratable_options values._regeneration_metadata = self.__regeneratable_options
return values, args return values, args
def gyp_main(args): def gyp_main(args):
my_name = os.path.basename(sys.argv[0]) my_name = os.path.basename(sys.argv[0])
usage = 'usage: %(prog)s [options ...] [build_file ...]'
parser = RegeneratableOptionParser()
usage = 'usage: %s [options ...] [build_file ...]' parser = RegeneratableOptionParser(usage=usage.replace('%s', '%(prog)s'))
parser.set_usage(usage.replace('%s', '%prog')) parser.add_argument('--build', dest='configs', action='append',
parser.add_option('--build', dest='configs', action='append',
help='configuration for build after project generation') help='configuration for build after project generation')
parser.add_option('--check', dest='check', action='store_true', parser.add_argument('--check', dest='check', action='store_true',
help='check format of gyp files') help='check format of gyp files')
parser.add_option('--config-dir', dest='config_dir', action='store', parser.add_argument('--config-dir', dest='config_dir', action='store',
env_name='GYP_CONFIG_DIR', default=None, env_name='GYP_CONFIG_DIR', default=None,
help='The location for configuration files like ' help='The location for configuration files like '
'include.gypi.') 'include.gypi.')
parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE', parser.add_argument('-d', '--debug', dest='debug', metavar='DEBUGMODE',
action='append', default=[], help='turn on a debugging ' action='append', default=[], help='turn on a debugging '
'mode for debugging GYP. Supported modes are "variables", ' 'mode for debugging GYP. Supported modes are "variables", '
'"includes" and "general" or "all" for all of them.') '"includes" and "general" or "all" for all of them.')
parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL', parser.add_argument('-D', dest='defines', action='append', metavar='VAR=VAL',
env_name='GYP_DEFINES', env_name='GYP_DEFINES',
help='sets variable VAR to value VAL') help='sets variable VAR to value VAL')
parser.add_option('--depth', dest='depth', metavar='PATH', type='path', parser.add_argument('--depth', dest='depth', metavar='PATH', type='path',
help='set DEPTH gyp variable to a relative path to PATH') help='set DEPTH gyp variable to a relative path to PATH')
parser.add_option('-f', '--format', dest='formats', action='append', parser.add_argument('-f', '--format', dest='formats', action='append',
env_name='GYP_GENERATORS', regenerate=False, env_name='GYP_GENERATORS', regenerate=False,
help='output formats to generate') help='output formats to generate')
parser.add_option('-G', dest='generator_flags', action='append', default=[], parser.add_argument('-G', dest='generator_flags', action='append', default=[],
metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS', metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS',
help='sets generator flag FLAG to VAL') help='sets generator flag FLAG to VAL')
parser.add_option('--generator-output', dest='generator_output', parser.add_argument('--generator-output', dest='generator_output',
action='store', default=None, metavar='DIR', type='path', action='store', default=None, metavar='DIR', type='path',
env_name='GYP_GENERATOR_OUTPUT', env_name='GYP_GENERATOR_OUTPUT',
help='puts generated build files under DIR') help='puts generated build files under DIR')
parser.add_option('--ignore-environment', dest='use_environment', parser.add_argument('--ignore-environment', dest='use_environment',
action='store_false', default=True, regenerate=False, action='store_false', default=True, regenerate=False,
help='do not read options from environment variables') help='do not read options from environment variables')
parser.add_option('-I', '--include', dest='includes', action='append', parser.add_argument('-I', '--include', dest='includes', action='append',
metavar='INCLUDE', type='path', metavar='INCLUDE', type='path',
help='files to include in all loaded .gyp files') help='files to include in all loaded .gyp files')
# --no-circular-check disables the check for circular relationships between # --no-circular-check disables the check for circular relationships between
@@ -322,7 +331,7 @@ def gyp_main(args):
# option allows the strict behavior to be used on Macs and the lenient # option allows the strict behavior to be used on Macs and the lenient
# behavior to be used elsewhere. # behavior to be used elsewhere.
# TODO(mark): Remove this option when http://crbug.com/35878 is fixed. # TODO(mark): Remove this option when http://crbug.com/35878 is fixed.
parser.add_option('--no-circular-check', dest='circular_check', parser.add_argument('--no-circular-check', dest='circular_check',
action='store_false', default=True, regenerate=False, action='store_false', default=True, regenerate=False,
help="don't check for circular relationships between files") help="don't check for circular relationships between files")
# --no-duplicate-basename-check disables the check for duplicate basenames # --no-duplicate-basename-check disables the check for duplicate basenames
@@ -331,18 +340,18 @@ def gyp_main(args):
# when duplicate basenames are passed into Make generator on Mac. # when duplicate basenames are passed into Make generator on Mac.
# TODO(yukawa): Remove this option when these legacy generators are # TODO(yukawa): Remove this option when these legacy generators are
# deprecated. # deprecated.
parser.add_option('--no-duplicate-basename-check', parser.add_argument('--no-duplicate-basename-check',
dest='duplicate_basename_check', action='store_false', dest='duplicate_basename_check', action='store_false',
default=True, regenerate=False, default=True, regenerate=False,
help="don't check for duplicate basenames") help="don't check for duplicate basenames")
parser.add_option('--no-parallel', action='store_true', default=False, parser.add_argument('--no-parallel', action='store_true', default=False,
help='Disable multiprocessing') help='Disable multiprocessing')
parser.add_option('-S', '--suffix', dest='suffix', default='', parser.add_argument('-S', '--suffix', dest='suffix', default='',
help='suffix to add to generated files') help='suffix to add to generated files')
parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store', parser.add_argument('--toplevel-dir', dest='toplevel_dir', action='store',
default=None, metavar='DIR', type='path', default=None, metavar='DIR', type='path',
help='directory to use as the root of the source tree') help='directory to use as the root of the source tree')
parser.add_option('-R', '--root-target', dest='root_targets', parser.add_argument('-R', '--root-target', dest='root_targets',
action='append', metavar='TARGET', action='append', metavar='TARGET',
help='include only TARGET and its deep dependencies') help='include only TARGET and its deep dependencies')
@@ -410,7 +419,7 @@ def gyp_main(args):
for option, value in sorted(options.__dict__.items()): for option, value in sorted(options.__dict__.items()):
if option[0] == '_': if option[0] == '_':
continue continue
if isinstance(value, basestring): if isinstance(value, string_types):
DebugOutput(DEBUG_GENERAL, " %s: '%s'", option, value) DebugOutput(DEBUG_GENERAL, " %s: '%s'", option, value)
else: else:
DebugOutput(DEBUG_GENERAL, " %s: %s", option, value) DebugOutput(DEBUG_GENERAL, " %s: %s", option, value)
@@ -432,7 +441,7 @@ def gyp_main(args):
build_file_dir = os.path.abspath(os.path.dirname(build_file)) build_file_dir = os.path.abspath(os.path.dirname(build_file))
build_file_dir_components = build_file_dir.split(os.path.sep) build_file_dir_components = build_file_dir.split(os.path.sep)
components_len = len(build_file_dir_components) components_len = len(build_file_dir_components)
for index in xrange(components_len - 1, -1, -1): for index in range(components_len - 1, -1, -1):
if build_file_dir_components[index] == 'src': if build_file_dir_components[index] == 'src':
options.depth = os.path.sep.join(build_file_dir_components) options.depth = os.path.sep.join(build_file_dir_components)
break break
@@ -475,7 +484,7 @@ def gyp_main(args):
if home_dot_gyp != None: if home_dot_gyp != None:
default_include = os.path.join(home_dot_gyp, 'include.gypi') default_include = os.path.join(home_dot_gyp, 'include.gypi')
if os.path.exists(default_include): if os.path.exists(default_include):
print 'Using overrides found in ' + default_include print('Using overrides found in ' + default_include)
includes.append(default_include) includes.append(default_include)
# Command-line --include files come after the default include. # Command-line --include files come after the default include.
@@ -536,7 +545,7 @@ def gyp_main(args):
def main(args): def main(args):
try: try:
return gyp_main(args) return gyp_main(args)
except GypError, e: except GypError as e:
sys.stderr.write("gyp: %s\n" % e) sys.stderr.write("gyp: %s\n" % e)
return 1 return 1

View File

@@ -39,7 +39,7 @@ class FlockTool(object):
# where fcntl.flock(fd, LOCK_EX) always fails # where fcntl.flock(fd, LOCK_EX) always fails
# with EBADF, that's why we use this F_SETLK # with EBADF, that's why we use this F_SETLK
# hack instead. # hack instead.
fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0666) fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
if sys.platform.startswith('aix'): if sys.platform.startswith('aix'):
# Python on AIX is compiled with LARGEFILE support, which changes the # Python on AIX is compiled with LARGEFILE support, which changes the
# struct size. # struct size.

View File

@@ -62,6 +62,8 @@ directly supplied to gyp. OTOH if both "a.gyp" and "b.gyp" are supplied to gyp
then the "all" target includes "b1" and "b2". then the "all" target includes "b1" and "b2".
""" """
from __future__ import print_function
import gyp.common import gyp.common
import gyp.ninja_syntax as ninja_syntax import gyp.ninja_syntax as ninja_syntax
import json import json
@@ -155,7 +157,7 @@ def _AddSources(sources, base_path, base_path_components, result):
continue continue
result.append(base_path + source) result.append(base_path + source)
if debug: if debug:
print 'AddSource', org_source, result[len(result) - 1] print('AddSource', org_source, result[len(result) - 1])
def _ExtractSourcesFromAction(action, base_path, base_path_components, def _ExtractSourcesFromAction(action, base_path, base_path_components,
@@ -185,7 +187,7 @@ def _ExtractSources(target, target_dict, toplevel_dir):
base_path += '/' base_path += '/'
if debug: if debug:
print 'ExtractSources', target, base_path print('ExtractSources', target, base_path)
results = [] results = []
if 'sources' in target_dict: if 'sources' in target_dict:
@@ -278,7 +280,7 @@ def _WasBuildFileModified(build_file, data, files, toplevel_dir):
the root of the source tree.""" the root of the source tree."""
if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files: if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files:
if debug: if debug:
print 'gyp file modified', build_file print('gyp file modified', build_file)
return True return True
# First element of included_files is the file itself. # First element of included_files is the file itself.
@@ -291,8 +293,8 @@ def _WasBuildFileModified(build_file, data, files, toplevel_dir):
_ToGypPath(gyp.common.UnrelativePath(include_file, build_file)) _ToGypPath(gyp.common.UnrelativePath(include_file, build_file))
if _ToLocalPath(toplevel_dir, rel_include_file) in files: if _ToLocalPath(toplevel_dir, rel_include_file) in files:
if debug: if debug:
print 'included gyp file modified, gyp_file=', build_file, \ print('included gyp file modified, gyp_file=', build_file,
'included file=', rel_include_file 'included file=', rel_include_file)
return True return True
return False return False
@@ -373,7 +375,7 @@ def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files,
# If a build file (or any of its included files) is modified we assume all # If a build file (or any of its included files) is modified we assume all
# targets in the file are modified. # targets in the file are modified.
if build_file_in_files[build_file]: if build_file_in_files[build_file]:
print 'matching target from modified build file', target_name print('matching target from modified build file', target_name)
target.match_status = MATCH_STATUS_MATCHES target.match_status = MATCH_STATUS_MATCHES
matching_targets.append(target) matching_targets.append(target)
else: else:
@@ -381,7 +383,7 @@ def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files,
toplevel_dir) toplevel_dir)
for source in sources: for source in sources:
if _ToGypPath(os.path.normpath(source)) in files: if _ToGypPath(os.path.normpath(source)) in files:
print 'target', target_name, 'matches', source print('target', target_name, 'matches', source)
target.match_status = MATCH_STATUS_MATCHES target.match_status = MATCH_STATUS_MATCHES
matching_targets.append(target) matching_targets.append(target)
break break
@@ -433,7 +435,7 @@ def _DoesTargetDependOnMatchingTargets(target):
for dep in target.deps: for dep in target.deps:
if _DoesTargetDependOnMatchingTargets(dep): if _DoesTargetDependOnMatchingTargets(dep):
target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY
print '\t', target.name, 'matches by dep', dep.name print('\t', target.name, 'matches by dep', dep.name)
return True return True
target.match_status = MATCH_STATUS_DOESNT_MATCH target.match_status = MATCH_STATUS_DOESNT_MATCH
return False return False
@@ -445,7 +447,7 @@ def _GetTargetsDependingOnMatchingTargets(possible_targets):
supplied as input to analyzer. supplied as input to analyzer.
possible_targets: targets to search from.""" possible_targets: targets to search from."""
found = [] found = []
print 'Targets that matched by dependency:' print('Targets that matched by dependency:')
for target in possible_targets: for target in possible_targets:
if _DoesTargetDependOnMatchingTargets(target): if _DoesTargetDependOnMatchingTargets(target):
found.append(target) found.append(target)
@@ -484,12 +486,12 @@ def _AddCompileTargets(target, roots, add_if_no_ancestor, result):
(add_if_no_ancestor or target.requires_build)) or (add_if_no_ancestor or target.requires_build)) or
(target.is_static_library and add_if_no_ancestor and (target.is_static_library and add_if_no_ancestor and
not target.is_or_has_linked_ancestor)): not target.is_or_has_linked_ancestor)):
print '\t\tadding to compile targets', target.name, 'executable', \ print('\t\tadding to compile targets', target.name, 'executable',
target.is_executable, 'added_to_compile_targets', \ target.is_executable, 'added_to_compile_targets',
target.added_to_compile_targets, 'add_if_no_ancestor', \ target.added_to_compile_targets, 'add_if_no_ancestor',
add_if_no_ancestor, 'requires_build', target.requires_build, \ add_if_no_ancestor, 'requires_build', target.requires_build,
'is_static_library', target.is_static_library, \ 'is_static_library', target.is_static_library,
'is_or_has_linked_ancestor', target.is_or_has_linked_ancestor 'is_or_has_linked_ancestor', target.is_or_has_linked_ancestor)
result.add(target) result.add(target)
target.added_to_compile_targets = True target.added_to_compile_targets = True
@@ -500,7 +502,7 @@ def _GetCompileTargets(matching_targets, supplied_targets):
supplied_targets: set of targets supplied to analyzer to search from.""" supplied_targets: set of targets supplied to analyzer to search from."""
result = set() result = set()
for target in matching_targets: for target in matching_targets:
print 'finding compile targets for match', target.name print('finding compile targets for match', target.name)
_AddCompileTargets(target, supplied_targets, True, result) _AddCompileTargets(target, supplied_targets, True, result)
return result return result
@@ -508,46 +510,46 @@ def _GetCompileTargets(matching_targets, supplied_targets):
def _WriteOutput(params, **values): def _WriteOutput(params, **values):
"""Writes the output, either to stdout or a file is specified.""" """Writes the output, either to stdout or a file is specified."""
if 'error' in values: if 'error' in values:
print 'Error:', values['error'] print('Error:', values['error'])
if 'status' in values: if 'status' in values:
print values['status'] print(values['status'])
if 'targets' in values: if 'targets' in values:
values['targets'].sort() values['targets'].sort()
print 'Supplied targets that depend on changed files:' print('Supplied targets that depend on changed files:')
for target in values['targets']: for target in values['targets']:
print '\t', target print('\t', target)
if 'invalid_targets' in values: if 'invalid_targets' in values:
values['invalid_targets'].sort() values['invalid_targets'].sort()
print 'The following targets were not found:' print('The following targets were not found:')
for target in values['invalid_targets']: for target in values['invalid_targets']:
print '\t', target print('\t', target)
if 'build_targets' in values: if 'build_targets' in values:
values['build_targets'].sort() values['build_targets'].sort()
print 'Targets that require a build:' print('Targets that require a build:')
for target in values['build_targets']: for target in values['build_targets']:
print '\t', target print('\t', target)
if 'compile_targets' in values: if 'compile_targets' in values:
values['compile_targets'].sort() values['compile_targets'].sort()
print 'Targets that need to be built:' print('Targets that need to be built:')
for target in values['compile_targets']: for target in values['compile_targets']:
print '\t', target print('\t', target)
if 'test_targets' in values: if 'test_targets' in values:
values['test_targets'].sort() values['test_targets'].sort()
print 'Test targets:' print('Test targets:')
for target in values['test_targets']: for target in values['test_targets']:
print '\t', target print('\t', target)
output_path = params.get('generator_flags', {}).get( output_path = params.get('generator_flags', {}).get(
'analyzer_output_path', None) 'analyzer_output_path', None)
if not output_path: if not output_path:
print json.dumps(values) print(json.dumps(values))
return return
try: try:
f = open(output_path, 'w') f = open(output_path, 'w')
f.write(json.dumps(values) + '\n') f.write(json.dumps(values) + '\n')
f.close() f.close()
except IOError as e: except IOError as e:
print 'Error writing to output file', output_path, str(e) print('Error writing to output file', output_path, str(e))
def _WasGypIncludeFileModified(params, files): def _WasGypIncludeFileModified(params, files):
@@ -556,7 +558,7 @@ def _WasGypIncludeFileModified(params, files):
if params['options'].includes: if params['options'].includes:
for include in params['options'].includes: for include in params['options'].includes:
if _ToGypPath(os.path.normpath(include)) in files: if _ToGypPath(os.path.normpath(include)) in files:
print 'Include file modified, assuming all changed', include print('Include file modified, assuming all changed', include)
return True return True
return False return False
@@ -638,13 +640,13 @@ class TargetCalculator(object):
set(self._root_targets))] set(self._root_targets))]
else: else:
test_targets = [x for x in test_targets_no_all] test_targets = [x for x in test_targets_no_all]
print 'supplied test_targets' print('supplied test_targets')
for target_name in self._test_target_names: for target_name in self._test_target_names:
print '\t', target_name print('\t', target_name)
print 'found test_targets' print('found test_targets')
for target in test_targets: for target in test_targets:
print '\t', target.name print('\t', target.name)
print 'searching for matching test targets' print('searching for matching test targets')
matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets) matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets)
matching_test_targets_contains_all = (test_target_names_contains_all and matching_test_targets_contains_all = (test_target_names_contains_all and
set(matching_test_targets) & set(matching_test_targets) &
@@ -654,14 +656,14 @@ class TargetCalculator(object):
# 'all' is subsequentely added to the matching names below. # 'all' is subsequentely added to the matching names below.
matching_test_targets = [x for x in (set(matching_test_targets) & matching_test_targets = [x for x in (set(matching_test_targets) &
set(test_targets_no_all))] set(test_targets_no_all))]
print 'matched test_targets' print('matched test_targets')
for target in matching_test_targets: for target in matching_test_targets:
print '\t', target.name print('\t', target.name)
matching_target_names = [gyp.common.ParseQualifiedTarget(target.name)[1] matching_target_names = [gyp.common.ParseQualifiedTarget(target.name)[1]
for target in matching_test_targets] for target in matching_test_targets]
if matching_test_targets_contains_all: if matching_test_targets_contains_all:
matching_target_names.append('all') matching_target_names.append('all')
print '\tall' print('\tall')
return matching_target_names return matching_target_names
def find_matching_compile_target_names(self): def find_matching_compile_target_names(self):
@@ -677,10 +679,10 @@ class TargetCalculator(object):
if 'all' in self._supplied_target_names(): if 'all' in self._supplied_target_names():
supplied_targets = [x for x in (set(supplied_targets) | supplied_targets = [x for x in (set(supplied_targets) |
set(self._root_targets))] set(self._root_targets))]
print 'Supplied test_targets & compile_targets' print('Supplied test_targets & compile_targets')
for target in supplied_targets: for target in supplied_targets:
print '\t', target.name print('\t', target.name)
print 'Finding compile targets' print('Finding compile targets')
compile_targets = _GetCompileTargets(self._changed_targets, compile_targets = _GetCompileTargets(self._changed_targets,
supplied_targets) supplied_targets)
return [gyp.common.ParseQualifiedTarget(target.name)[1] return [gyp.common.ParseQualifiedTarget(target.name)[1]
@@ -699,7 +701,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
toplevel_dir = _ToGypPath(os.path.abspath(params['options'].toplevel_dir)) toplevel_dir = _ToGypPath(os.path.abspath(params['options'].toplevel_dir))
if debug: if debug:
print 'toplevel_dir', toplevel_dir print('toplevel_dir', toplevel_dir)
if _WasGypIncludeFileModified(params, config.files): if _WasGypIncludeFileModified(params, config.files):
result_dict = { 'status': all_changed_string, result_dict = { 'status': all_changed_string,

View File

@@ -1,3 +1,4 @@
from __future__ import print_function
# Copyright (c) 2012 Google Inc. All rights reserved. # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.
@@ -96,4 +97,4 @@ def GenerateOutput(target_list, target_dicts, data, params):
f = open(filename, 'w') f = open(filename, 'w')
json.dump(edges, f) json.dump(edges, f)
f.close() f.close()
print 'Wrote json to %s.' % filename print('Wrote json to %s.' % filename)

View File

@@ -141,7 +141,7 @@ def GetAllIncludeDirectories(target_list, target_dicts,
compiler_includes_list.append(include_dir) compiler_includes_list.append(include_dir)
# Find standard gyp include dirs. # Find standard gyp include dirs.
if config.has_key('include_dirs'): if 'include_dirs' in config:
include_dirs = config['include_dirs'] include_dirs = config['include_dirs']
for shared_intermediate_dir in shared_intermediate_dirs: for shared_intermediate_dir in shared_intermediate_dirs:
for include_dir in include_dirs: for include_dir in include_dirs:

View File

@ -88,7 +88,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
if not output_file in output_files: if not output_file in output_files:
output_files[output_file] = input_file output_files[output_file] = input_file
for output_file, input_file in output_files.iteritems(): for output_file, input_file in output_files.items():
output = open(output_file, 'w') output = open(output_file, 'w')
pprint.pprint(data[input_file], output) pprint.pprint(data[input_file], output)
output.close() output.close()

View File

@ -22,38 +22,38 @@ class TestFindCycles(unittest.TestCase):
dependency.dependents.append(dependent) dependency.dependents.append(dependent)
def test_no_cycle_empty_graph(self): def test_no_cycle_empty_graph(self):
for label, node in self.nodes.iteritems(): for label, node in self.nodes.items():
self.assertEquals([], node.FindCycles()) self.assertEqual([], node.FindCycles())
def test_no_cycle_line(self): def test_no_cycle_line(self):
self._create_dependency(self.nodes['a'], self.nodes['b']) self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['b'], self.nodes['c']) self._create_dependency(self.nodes['b'], self.nodes['c'])
self._create_dependency(self.nodes['c'], self.nodes['d']) self._create_dependency(self.nodes['c'], self.nodes['d'])
for label, node in self.nodes.iteritems(): for label, node in self.nodes.items():
self.assertEquals([], node.FindCycles()) self.assertEqual([], node.FindCycles())
def test_no_cycle_dag(self): def test_no_cycle_dag(self):
self._create_dependency(self.nodes['a'], self.nodes['b']) self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['a'], self.nodes['c']) self._create_dependency(self.nodes['a'], self.nodes['c'])
self._create_dependency(self.nodes['b'], self.nodes['c']) self._create_dependency(self.nodes['b'], self.nodes['c'])
for label, node in self.nodes.iteritems(): for label, node in self.nodes.items():
self.assertEquals([], node.FindCycles()) self.assertEqual([], node.FindCycles())
def test_cycle_self_reference(self): def test_cycle_self_reference(self):
self._create_dependency(self.nodes['a'], self.nodes['a']) self._create_dependency(self.nodes['a'], self.nodes['a'])
self.assertEquals([[self.nodes['a'], self.nodes['a']]], self.assertEqual([[self.nodes['a'], self.nodes['a']]],
self.nodes['a'].FindCycles()) self.nodes['a'].FindCycles())
def test_cycle_two_nodes(self): def test_cycle_two_nodes(self):
self._create_dependency(self.nodes['a'], self.nodes['b']) self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['b'], self.nodes['a']) self._create_dependency(self.nodes['b'], self.nodes['a'])
self.assertEquals([[self.nodes['a'], self.nodes['b'], self.nodes['a']]], self.assertEqual([[self.nodes['a'], self.nodes['b'], self.nodes['a']]],
self.nodes['a'].FindCycles()) self.nodes['a'].FindCycles())
self.assertEquals([[self.nodes['b'], self.nodes['a'], self.nodes['b']]], self.assertEqual([[self.nodes['b'], self.nodes['a'], self.nodes['b']]],
self.nodes['b'].FindCycles()) self.nodes['b'].FindCycles())
def test_two_cycles(self): def test_two_cycles(self):
@ -68,7 +68,7 @@ class TestFindCycles(unittest.TestCase):
[self.nodes['a'], self.nodes['b'], self.nodes['a']] in cycles) [self.nodes['a'], self.nodes['b'], self.nodes['a']] in cycles)
self.assertTrue( self.assertTrue(
[self.nodes['b'], self.nodes['c'], self.nodes['b']] in cycles) [self.nodes['b'], self.nodes['c'], self.nodes['b']] in cycles)
self.assertEquals(2, len(cycles)) self.assertEqual(2, len(cycles))
def test_big_cycle(self): def test_big_cycle(self):
self._create_dependency(self.nodes['a'], self.nodes['b']) self._create_dependency(self.nodes['a'], self.nodes['b'])
@ -77,7 +77,7 @@ class TestFindCycles(unittest.TestCase):
self._create_dependency(self.nodes['d'], self.nodes['e']) self._create_dependency(self.nodes['d'], self.nodes['e'])
self._create_dependency(self.nodes['e'], self.nodes['a']) self._create_dependency(self.nodes['e'], self.nodes['a'])
self.assertEquals([[self.nodes['a'], self.assertEqual([[self.nodes['a'],
self.nodes['b'], self.nodes['b'],
self.nodes['c'], self.nodes['c'],
self.nodes['d'], self.nodes['d'],

View File

@ -161,8 +161,8 @@ class OrderedDict(dict):
for k in self: for k in self:
yield self[k] yield self[k]
def iteritems(self): def items(self):
'od.iteritems -> an iterator over the (key, value) items in od' 'od.items -> an iterator over the (key, value) items in od'
for k in self: for k in self:
yield (k, self[k]) yield (k, self[k])

View File

@ -28,8 +28,12 @@ _deepcopy_dispatch = d = {}
def _deepcopy_atomic(x): def _deepcopy_atomic(x):
return x return x
for x in (type(None), int, long, float, try:
bool, str, unicode, type): types = bool, float, int, str, type, type(None), long, unicode
except NameError: # Python 3
types = bool, float, int, str, type, type(None)
for x in types:
d[x] = _deepcopy_atomic d[x] = _deepcopy_atomic
def _deepcopy_list(x): def _deepcopy_list(x):
@ -38,7 +42,7 @@ d[list] = _deepcopy_list
def _deepcopy_dict(x): def _deepcopy_dict(x):
y = {} y = {}
for key, value in x.iteritems(): for key, value in x.items():
y[deepcopy(key)] = deepcopy(value) y[deepcopy(key)] = deepcopy(value)
return y return y
d[dict] = _deepcopy_dict d[dict] = _deepcopy_dict

View File

@ -8,6 +8,8 @@
generate input suitable for graphviz to render a dependency graph of generate input suitable for graphviz to render a dependency graph of
targets.""" targets."""
from __future__ import print_function
import collections import collections
import json import json
import sys import sys
@ -50,9 +52,9 @@ def WriteGraph(edges):
build_file, target_name, toolset = ParseTarget(src) build_file, target_name, toolset = ParseTarget(src)
files[build_file].append(src) files[build_file].append(src)
print 'digraph D {' print('digraph D {')
print ' fontsize=8' # Used by subgraphs. print(' fontsize=8') # Used by subgraphs.
print ' node [fontsize=8]' print(' node [fontsize=8]')
# Output nodes by file. We must first write out each node within # Output nodes by file. We must first write out each node within
# its file grouping before writing out any edges that may refer # its file grouping before writing out any edges that may refer
@ -63,31 +65,31 @@ def WriteGraph(edges):
# the display by making it a box without an internal node. # the display by making it a box without an internal node.
target = targets[0] target = targets[0]
build_file, target_name, toolset = ParseTarget(target) build_file, target_name, toolset = ParseTarget(target)
print ' "%s" [shape=box, label="%s\\n%s"]' % (target, filename, print(' "%s" [shape=box, label="%s\\n%s"]' % (target, filename,
target_name) target_name))
else: else:
# Group multiple nodes together in a subgraph. # Group multiple nodes together in a subgraph.
print ' subgraph "cluster_%s" {' % filename print(' subgraph "cluster_%s" {' % filename)
print ' label = "%s"' % filename print(' label = "%s"' % filename)
for target in targets: for target in targets:
build_file, target_name, toolset = ParseTarget(target) build_file, target_name, toolset = ParseTarget(target)
print ' "%s" [label="%s"]' % (target, target_name) print(' "%s" [label="%s"]' % (target, target_name))
print ' }' print(' }')
# Now that we've placed all the nodes within subgraphs, output all # Now that we've placed all the nodes within subgraphs, output all
# the edges between nodes. # the edges between nodes.
for src, dsts in edges.items(): for src, dsts in edges.items():
for dst in dsts: for dst in dsts:
print ' "%s" -> "%s"' % (src, dst) print(' "%s" -> "%s"' % (src, dst))
print '}' print('}')
def main(): def main():
if len(sys.argv) < 2: if len(sys.argv) < 2:
print >>sys.stderr, __doc__ print(__doc__, file=sys.stderr)
print >>sys.stderr print(file=sys.stderr)
print >>sys.stderr, 'usage: %s target1 target2...' % (sys.argv[0]) print('usage: %s target1 target2...' % (sys.argv[0]), file=sys.stderr)
return 1 return 1
edges = LoadEdges('dump.json', sys.argv[1:]) edges = LoadEdges('dump.json', sys.argv[1:])

View File

@ -6,6 +6,8 @@
"""Pretty-prints the contents of a GYP file.""" """Pretty-prints the contents of a GYP file."""
from __future__ import print_function
import sys import sys
import re import re
@ -118,23 +120,24 @@ def prettyprint_input(lines):
basic_offset = 2 basic_offset = 2
last_line = "" last_line = ""
for line in lines: for line in lines:
line = line.strip('\r\n\t ') # Otherwise doesn't strip \r on Unix. if COMMENT_RE.match(line):
if len(line) > 0: print(line)
brace_diff = 0
if not COMMENT_RE.match(line):
(brace_diff, after) = count_braces(line)
if brace_diff != 0:
if after:
print " " * (basic_offset * indent) + line
indent += brace_diff
else:
indent += brace_diff
print " " * (basic_offset * indent) + line
else:
print " " * (basic_offset * indent) + line
else: else:
print "" line = line.strip('\r\n\t ') # Otherwise doesn't strip \r on Unix.
last_line = line if len(line) > 0:
(brace_diff, after) = count_braces(line)
if brace_diff != 0:
if after:
print(" " * (basic_offset * indent) + line)
indent += brace_diff
else:
indent += brace_diff
print(" " * (basic_offset * indent) + line)
else:
print(" " * (basic_offset * indent) + line)
else:
print("")
last_line = line
def main(): def main():

View File

@ -12,13 +12,15 @@
Then it outputs a possible build order. Then it outputs a possible build order.
""" """
__author__ = 'nsylvain (Nicolas Sylvain)' from __future__ import print_function
import os import os
import re import re
import sys import sys
import pretty_vcproj import pretty_vcproj
__author__ = 'nsylvain (Nicolas Sylvain)'
def BuildProject(project, built, projects, deps): def BuildProject(project, built, projects, deps):
# if all dependencies are done, we can build it, otherwise we try to build the # if all dependencies are done, we can build it, otherwise we try to build the
# dependency. # dependency.
@ -26,7 +28,7 @@ def BuildProject(project, built, projects, deps):
for dep in deps[project]: for dep in deps[project]:
if dep not in built: if dep not in built:
BuildProject(dep, built, projects, deps) BuildProject(dep, built, projects, deps)
print project print(project)
built.append(project) built.append(project)
def ParseSolution(solution_file): def ParseSolution(solution_file):
@ -100,44 +102,44 @@ def ParseSolution(solution_file):
return (projects, dependencies) return (projects, dependencies)
def PrintDependencies(projects, deps): def PrintDependencies(projects, deps):
print "---------------------------------------" print("---------------------------------------")
print "Dependencies for all projects" print("Dependencies for all projects")
print "---------------------------------------" print("---------------------------------------")
print "-- --" print("-- --")
for (project, dep_list) in sorted(deps.items()): for (project, dep_list) in sorted(deps.items()):
print "Project : %s" % project print("Project : %s" % project)
print "Path : %s" % projects[project][0] print("Path : %s" % projects[project][0])
if dep_list: if dep_list:
for dep in dep_list: for dep in dep_list:
print " - %s" % dep print(" - %s" % dep)
print "" print("")
print "-- --" print("-- --")
def PrintBuildOrder(projects, deps): def PrintBuildOrder(projects, deps):
print "---------------------------------------" print("---------------------------------------")
print "Build order " print("Build order ")
print "---------------------------------------" print("---------------------------------------")
print "-- --" print("-- --")
built = [] built = []
for (project, _) in sorted(deps.items()): for (project, _) in sorted(deps.items()):
if project not in built: if project not in built:
BuildProject(project, built, projects, deps) BuildProject(project, built, projects, deps)
print "-- --" print("-- --")
def PrintVCProj(projects): def PrintVCProj(projects):
for project in projects: for project in projects:
print "-------------------------------------" print("-------------------------------------")
print "-------------------------------------" print("-------------------------------------")
print project print(project)
print project print(project)
print project print(project)
print "-------------------------------------" print("-------------------------------------")
print "-------------------------------------" print("-------------------------------------")
project_path = os.path.abspath(os.path.join(os.path.dirname(sys.argv[1]), project_path = os.path.abspath(os.path.join(os.path.dirname(sys.argv[1]),
projects[project][2])) projects[project][2]))
@ -153,7 +155,7 @@ def PrintVCProj(projects):
def main(): def main():
# check if we have exactly 1 parameter. # check if we have exactly 1 parameter.
if len(sys.argv) < 2: if len(sys.argv) < 2:
print 'Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0] print('Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0])
return 1 return 1
(projects, deps) = ParseSolution(sys.argv[1]) (projects, deps) = ParseSolution(sys.argv[1])

View File

@ -12,7 +12,7 @@
It outputs the resulting xml to stdout. It outputs the resulting xml to stdout.
""" """
__author__ = 'nsylvain (Nicolas Sylvain)' from __future__ import print_function
import os import os
import sys import sys
@ -20,6 +20,14 @@ import sys
from xml.dom.minidom import parse from xml.dom.minidom import parse
from xml.dom.minidom import Node from xml.dom.minidom import Node
__author__ = 'nsylvain (Nicolas Sylvain)'
try:
cmp
except NameError:
def cmp(x, y):
return (x > y) - (x < y)
REPLACEMENTS = dict() REPLACEMENTS = dict()
ARGUMENTS = None ARGUMENTS = None
@ -61,7 +69,7 @@ class CmpNode(object):
def PrettyPrintNode(node, indent=0): def PrettyPrintNode(node, indent=0):
if node.nodeType == Node.TEXT_NODE: if node.nodeType == Node.TEXT_NODE:
if node.data.strip(): if node.data.strip():
print '%s%s' % (' '*indent, node.data.strip()) print('%s%s' % (' '*indent, node.data.strip()))
return return
if node.childNodes: if node.childNodes:
@ -73,23 +81,23 @@ def PrettyPrintNode(node, indent=0):
# Print the main tag # Print the main tag
if attr_count == 0: if attr_count == 0:
print '%s<%s>' % (' '*indent, node.nodeName) print('%s<%s>' % (' '*indent, node.nodeName))
else: else:
print '%s<%s' % (' '*indent, node.nodeName) print('%s<%s' % (' '*indent, node.nodeName))
all_attributes = [] all_attributes = []
for (name, value) in node.attributes.items(): for (name, value) in node.attributes.items():
all_attributes.append((name, value)) all_attributes.append((name, value))
all_attributes.sort(CmpTuple()) all_attributes.sort(CmpTuple())
for (name, value) in all_attributes: for (name, value) in all_attributes:
print '%s %s="%s"' % (' '*indent, name, value) print('%s %s="%s"' % (' '*indent, name, value))
print '%s>' % (' '*indent) print('%s>' % (' '*indent))
if node.nodeValue: if node.nodeValue:
print '%s %s' % (' '*indent, node.nodeValue) print('%s %s' % (' '*indent, node.nodeValue))
for sub_node in node.childNodes: for sub_node in node.childNodes:
PrettyPrintNode(sub_node, indent=indent+2) PrettyPrintNode(sub_node, indent=indent+2)
print '%s</%s>' % (' '*indent, node.nodeName) print('%s</%s>' % (' '*indent, node.nodeName))
def FlattenFilter(node): def FlattenFilter(node):
@ -283,8 +291,8 @@ def main(argv):
# check if we have exactly 1 parameter. # check if we have exactly 1 parameter.
if len(argv) < 2: if len(argv) < 2:
print ('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] ' print('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] '
'[key2=value2]' % argv[0]) '[key2=value2]' % argv[0])
return 1 return 1
# Parse the keys # Parse the keys