From edda0c3b41f7a36085121ad6cd5349b7db556ff7 Mon Sep 17 00:00:00 2001 From: Craig Rodrigues Date: Sun, 19 Mar 2017 13:26:41 -0700 Subject: [PATCH 1/6] Use print as a function, as specified in PEP 3105. --- gyp/pylib/gyp/MSVSSettings.py | 15 ++++++++------- gyp/pylib/gyp/__init__.py | 9 +++++---- gyp/pylib/gyp/generator/analyzer.py | 15 ++++++++------- gyp/pylib/gyp/generator/android.py | 15 ++++++++------- gyp/pylib/gyp/generator/cmake.py | 9 +++++---- gyp/pylib/gyp/generator/make.py | 12 ++++++------ gyp/pylib/gyp/generator/msvs.py | 13 +++++++------ gyp/pylib/gyp/input.py | 7 ++++--- gyp/pylib/gyp/mac_tool.py | 7 +++---- gyp/pylib/gyp/win_tool.py | 1 + gyp/pylib/gyp/xcode_emulation.py | 1 + gyp/tools/graphviz.py | 1 + gyp/tools/pretty_gyp.py | 1 + gyp/tools/pretty_sln.py | 3 ++- gyp/tools/pretty_vcproj.py | 8 ++++---- 15 files changed, 64 insertions(+), 53 deletions(-) diff --git a/gyp/pylib/gyp/MSVSSettings.py b/gyp/pylib/gyp/MSVSSettings.py index 94525ae1cc..455862c636 100644 --- a/gyp/pylib/gyp/MSVSSettings.py +++ b/gyp/pylib/gyp/MSVSSettings.py @@ -13,6 +13,7 @@ The MSBuild schemas were also considered. They are typically found in the MSBuild install directory, e.g. c:\Program Files (x86)\MSBuild """ + from __future__ import print_function import sys @@ -463,8 +464,8 @@ def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr): try: msvs_tool[msvs_setting](msvs_value, msbuild_settings) except ValueError as e: - print(('Warning: while converting %s/%s to MSBuild, ' - '%s' % (msvs_tool_name, msvs_setting, e)), file=stderr) + print('Warning: while converting %s/%s to MSBuild, ' + '%s' % (msvs_tool_name, msvs_setting, e), file=stderr) else: _ValidateExclusionSetting(msvs_setting, msvs_tool, @@ -473,8 +474,8 @@ def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr): (msvs_tool_name, msvs_setting)), stderr) else: - print(('Warning: unrecognized tool %s while converting to ' - 'MSBuild.' % msvs_tool_name), file=stderr) + print('Warning: unrecognized tool %s while converting to ' + 'MSBuild.' % msvs_tool_name, file=stderr) return msbuild_settings @@ -519,8 +520,8 @@ def _ValidateSettings(validators, settings, stderr): try: tool_validators[setting](value) except ValueError as e: - print(('Warning: for %s/%s, %s' % - (tool_name, setting, e)), file=stderr) + print('Warning: for %s/%s, %s' % + (tool_name, setting, e), file=stderr) else: _ValidateExclusionSetting(setting, tool_validators, @@ -529,7 +530,7 @@ def _ValidateSettings(validators, settings, stderr): stderr) else: - print(('Warning: unrecognized tool %s' % tool_name), file=stderr) + print('Warning: unrecognized tool %s' % (tool_name), file=stderr) # MSVS and MBuild names of the tools. diff --git a/gyp/pylib/gyp/__init__.py b/gyp/pylib/gyp/__init__.py index 30fb7093ee..bd51cde888 100755 --- a/gyp/pylib/gyp/__init__.py +++ b/gyp/pylib/gyp/__init__.py @@ -5,6 +5,7 @@ # found in the LICENSE file. 
from __future__ import print_function + import copy import gyp.input import optparse @@ -227,12 +228,12 @@ def Noop(value): (action == 'store_false' and not value)): flags.append(opt) elif options.use_environment and env_name: - print(('Warning: environment regeneration unimplemented ' + print('Warning: environment regeneration unimplemented ' 'for %s flag %r env_name %r' % (action, opt, - env_name)), file=sys.stderr) + env_name), file=sys.stderr) else: - print(('Warning: regeneration unimplemented for action %r ' - 'flag %r' % (action, opt)), file=sys.stderr) + print('Warning: regeneration unimplemented for action %r ' + 'flag %r' % (action, opt), file=sys.stderr) return flags diff --git a/gyp/pylib/gyp/generator/analyzer.py b/gyp/pylib/gyp/generator/analyzer.py index ce7f83c932..dc17c96524 100644 --- a/gyp/pylib/gyp/generator/analyzer.py +++ b/gyp/pylib/gyp/generator/analyzer.py @@ -61,6 +61,7 @@ directly supplied to gyp. OTOH if both "a.gyp" and "b.gyp" are supplied to gyp then the "all" target includes "b1" and "b2". """ + from __future__ import print_function import gyp.common @@ -292,8 +293,8 @@ def _WasBuildFileModified(build_file, data, files, toplevel_dir): _ToGypPath(gyp.common.UnrelativePath(include_file, build_file)) if _ToLocalPath(toplevel_dir, rel_include_file) in files: if debug: - print('included gyp file modified, gyp_file=', build_file, \ - 'included file=', rel_include_file) + print('included gyp file modified, gyp_file=', build_file, + 'included file=', rel_include_file) return True return False @@ -485,11 +486,11 @@ def _AddCompileTargets(target, roots, add_if_no_ancestor, result): (add_if_no_ancestor or target.requires_build)) or (target.is_static_library and add_if_no_ancestor and not target.is_or_has_linked_ancestor)): - print('\t\tadding to compile targets', target.name, 'executable', \ - target.is_executable, 'added_to_compile_targets', \ - target.added_to_compile_targets, 'add_if_no_ancestor', \ - add_if_no_ancestor, 'requires_build', target.requires_build, \ - 'is_static_library', target.is_static_library, \ + print('\t\tadding to compile targets', target.name, 'executable', + target.is_executable, 'added_to_compile_targets', + target.added_to_compile_targets, 'add_if_no_ancestor', + add_if_no_ancestor, 'requires_build', target.requires_build, + 'is_static_library', target.is_static_library, 'is_or_has_linked_ancestor', target.is_or_has_linked_ancestor) result.add(target) target.added_to_compile_targets = True diff --git a/gyp/pylib/gyp/generator/android.py b/gyp/pylib/gyp/generator/android.py index ed507fbd0d..66139892cc 100644 --- a/gyp/pylib/gyp/generator/android.py +++ b/gyp/pylib/gyp/generator/android.py @@ -1,4 +1,3 @@ -from __future__ import print_function # Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. @@ -15,6 +14,8 @@ # variables set potentially clash with other Android build system variables. # Try to avoid setting global variables where possible. +from __future__ import print_function + import gyp import gyp.common import gyp.generator.make as make # Reuse global functions from make backend. @@ -251,7 +252,7 @@ def WriteActions(self, actions, extra_sources, extra_outputs): dirs = set() for out in outputs: if not out.startswith('$'): - print ('WARNING: Action for target "%s" writes output to local path ' + print('WARNING: Action for target "%s" writes output to local path ' '"%s".' 
% (self.target, out)) dir = os.path.split(out)[0] if dir: @@ -356,7 +357,7 @@ def WriteRules(self, rules, extra_sources, extra_outputs): dirs = set() for out in outputs: if not out.startswith('$'): - print ('WARNING: Rule for target %s writes output to local path %s' + print('WARNING: Rule for target %s writes output to local path %s' % (self.target, out)) dir = os.path.dirname(out) if dir: @@ -430,7 +431,7 @@ def WriteCopies(self, copies, extra_outputs): # $(gyp_shared_intermediate_dir). Note that we can't use an assertion # because some of the gyp tests depend on this. if not copy['destination'].startswith('$'): - print ('WARNING: Copy rule for target %s writes output to ' + print('WARNING: Copy rule for target %s writes output to ' 'local path %s' % (self.target, copy['destination'])) # LocalPathify() calls normpath, stripping trailing slashes. @@ -637,8 +638,8 @@ def ComputeOutputParts(self, spec): elif self.type == 'none': target_ext = '.stamp' elif self.type != 'executable': - print(("ERROR: What output file should be generated?", - "type", self.type, "target", target)) + print("ERROR: What output file should be generated?", + "type", self.type, "target", target) if self.type != 'static_library' and self.type != 'shared_library': target_prefix = spec.get('product_prefix', target_prefix) @@ -1066,7 +1067,7 @@ def CalculateMakefilePath(build_file, base_name): write_alias_target=write_alias_targets, sdk_version=sdk_version) if android_module in android_modules: - print ('ERROR: Android module names must be unique. The following ' + print('ERROR: Android module names must be unique. The following ' 'targets both generate Android module name %s.\n %s\n %s' % (android_module, android_modules[android_module], qualified_target)) diff --git a/gyp/pylib/gyp/generator/cmake.py b/gyp/pylib/gyp/generator/cmake.py index 6f901656ce..7aabddb633 100644 --- a/gyp/pylib/gyp/generator/cmake.py +++ b/gyp/pylib/gyp/generator/cmake.py @@ -27,6 +27,7 @@ not be able to find the header file directories described in the generated CMakeLists.txt file. """ + from __future__ import print_function import multiprocessing @@ -640,8 +641,8 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, cmake_target_type = cmake_target_type_from_gyp_target_type.get(target_type) if cmake_target_type is None: - print ('Target %s has unknown target type %s, skipping.' % - ( target_name, target_type ) ) + print('Target %s has unknown target type %s, skipping.' % + ( target_name, target_type )) return SetVariable(output, 'TARGET', target_name) @@ -864,8 +865,8 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use, default_product_ext = generator_default_variables['SHARED_LIB_SUFFIX'] elif target_type != 'executable': - print(('ERROR: What output file should be generated?', - 'type', target_type, 'target', target_name)) + print('ERROR: What output file should be generated?', + 'type', target_type, 'target', target_name) product_prefix = spec.get('product_prefix', default_product_prefix) product_name = spec.get('product_name', default_product_name) diff --git a/gyp/pylib/gyp/generator/make.py b/gyp/pylib/gyp/generator/make.py index 8f7082ab45..96212be1fc 100644 --- a/gyp/pylib/gyp/generator/make.py +++ b/gyp/pylib/gyp/generator/make.py @@ -1,4 +1,3 @@ -from __future__ import print_function # Copyright (c) 2013 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
@@ -22,6 +21,8 @@ # toplevel Makefile. It may make sense to generate some .mk files on # the side to keep the files readable. +from __future__ import print_function + import os import re import sys @@ -678,9 +679,8 @@ def _ValidateSourcesForOSX(spec, all_sources): error += ' %s: %s\n' % (basename, ' '.join(files)) if error: - print('static library %s has several files with the same basename:\n' % - spec['target_name'] + error + 'libtool on OS X will generate' + - ' warnings for them.') + print(('static library %s has several files with the same basename:\n' % spec['target_name']) + + error + 'libtool on OS X will generate' + ' warnings for them.') raise GypError('Duplicate basenames in sources section, see list above') @@ -1382,8 +1382,8 @@ def ComputeOutputBasename(self, spec): elif self.type == 'none': target = '%s.stamp' % target elif self.type != 'executable': - print(("ERROR: What output file should be generated?", - "type", self.type, "target", target)) + print("ERROR: What output file should be generated?", + "type", self.type, "target", target) target_prefix = spec.get('product_prefix', target_prefix) target = spec.get('product_name', target) diff --git a/gyp/pylib/gyp/generator/msvs.py b/gyp/pylib/gyp/generator/msvs.py index 344cd1d18c..128456b181 100644 --- a/gyp/pylib/gyp/generator/msvs.py +++ b/gyp/pylib/gyp/generator/msvs.py @@ -1,8 +1,9 @@ -from __future__ import print_function # Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. +from __future__ import print_function + import copy import ntpath import os @@ -756,8 +757,8 @@ def _Replace(match): # the VCProj but cause the same problem on the final command-line. Moving # the item to the end of the list does works, but that's only possible if # there's only one such item. Let's just warn the user. - print(('Warning: MSVS may misinterpret the odd number of ' + - 'quotes in ' + s), file=sys.stderr) + print('Warning: MSVS may misinterpret the odd number of ' + + 'quotes in ' + s, file=sys.stderr) return s @@ -975,8 +976,8 @@ def _ValidateSourcesForMSVSProject(spec, version): error += ' %s: %s\n' % (basename, ' '.join(files)) if error: - print('static library %s has several files with the same basename:\n' % - spec['target_name'] + error + 'MSVC08 cannot handle that.') + print('static library %s has several files with the same basename:\n' % spec['target_name'] + + error + 'MSVC08 cannot handle that.') raise GypError('Duplicate basenames in sources section, see list above') @@ -3028,7 +3029,7 @@ def _FinalizeMSBuildSettings(spec, configuration): for ignored_setting in ignored_settings: value = configuration.get(ignored_setting) if value: - print ('Warning: The automatic conversion to MSBuild does not handle ' + print('Warning: The automatic conversion to MSBuild does not handle ' '%s. Ignoring setting of %s' % (ignored_setting, str(value))) defines = [_EscapeCppDefineForMSBuild(d) for d in defines] diff --git a/gyp/pylib/gyp/input.py b/gyp/pylib/gyp/input.py index f55619f3b0..5afbba03fe 100644 --- a/gyp/pylib/gyp/input.py +++ b/gyp/pylib/gyp/input.py @@ -1,8 +1,9 @@ -from __future__ import print_function # Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
+from __future__ import print_function + from compiler.ast import Const from compiler.ast import Dict from compiler.ast import Discard @@ -2530,8 +2531,8 @@ def ValidateSourcesInTarget(target, target_dict, build_file, error += ' %s: %s\n' % (basename, ' '.join(files)) if error: - print('static library %s has several files with the same basename:\n' % - target + error + 'libtool on Mac cannot handle that. Use ' + print('static library %s has several files with the same basename:\n' % target + + error + 'libtool on Mac cannot handle that. Use ' '--no-duplicate-basename-check to disable this validation.') raise GypError('Duplicate basenames in sources section, see list above') diff --git a/gyp/pylib/gyp/mac_tool.py b/gyp/pylib/gyp/mac_tool.py index 36203801c8..0ab3de18f7 100755 --- a/gyp/pylib/gyp/mac_tool.py +++ b/gyp/pylib/gyp/mac_tool.py @@ -7,6 +7,7 @@ These functions are executed via gyp-mac-tool when using the Makefile generator. """ + from __future__ import print_function import fcntl @@ -441,8 +442,7 @@ def _FindProvisioningProfile(self, profile, bundle_identifier): profiles_dir = os.path.join( os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles') if not os.path.isdir(profiles_dir): - print(( - 'cannot find mobile provisioning for %s' % bundle_identifier), file=sys.stderr) + print('cannot find mobile provisioning for %s' % (bundle_identifier), file=sys.stderr) sys.exit(1) provisioning_profiles = None if profile: @@ -463,8 +463,7 @@ def _FindProvisioningProfile(self, profile, bundle_identifier): valid_provisioning_profiles[app_id_pattern] = ( profile_path, profile_data, team_identifier) if not valid_provisioning_profiles: - print(( - 'cannot find mobile provisioning for %s' % bundle_identifier), file=sys.stderr) + print('cannot find mobile provisioning for %s' % (bundle_identifier), file=sys.stderr) sys.exit(1) # If the user has multiple provisioning profiles installed that can be # used for ${bundle_identifier}, pick the most specific one (ie. the diff --git a/gyp/pylib/gyp/win_tool.py b/gyp/pylib/gyp/win_tool.py index 85a7926095..3bade00dbe 100755 --- a/gyp/pylib/gyp/win_tool.py +++ b/gyp/pylib/gyp/win_tool.py @@ -8,6 +8,7 @@ These functions are executed via gyp-win-tool when using the ninja generator. """ + from __future__ import print_function import os diff --git a/gyp/pylib/gyp/xcode_emulation.py b/gyp/pylib/gyp/xcode_emulation.py index cbd589b43b..fd4427b86b 100644 --- a/gyp/pylib/gyp/xcode_emulation.py +++ b/gyp/pylib/gyp/xcode_emulation.py @@ -6,6 +6,7 @@ This module contains classes that help to emulate xcodebuild behavior on top of other build systems, such as make and ninja. """ + from __future__ import print_function import copy diff --git a/gyp/tools/graphviz.py b/gyp/tools/graphviz.py index dcf7c765e5..538b059da4 100755 --- a/gyp/tools/graphviz.py +++ b/gyp/tools/graphviz.py @@ -7,6 +7,7 @@ """Using the JSON dumped by the dump-dependency-json generator, generate input suitable for graphviz to render a dependency graph of targets.""" + from __future__ import print_function import collections diff --git a/gyp/tools/pretty_gyp.py b/gyp/tools/pretty_gyp.py index 8111224595..d01c692edc 100755 --- a/gyp/tools/pretty_gyp.py +++ b/gyp/tools/pretty_gyp.py @@ -5,6 +5,7 @@ # found in the LICENSE file. 
"""Pretty-prints the contents of a GYP file.""" + from __future__ import print_function import sys diff --git a/gyp/tools/pretty_sln.py b/gyp/tools/pretty_sln.py index 64a9ec2bf6..731844caf7 100755 --- a/gyp/tools/pretty_sln.py +++ b/gyp/tools/pretty_sln.py @@ -11,10 +11,11 @@ Then it outputs a possible build order. """ -from __future__ import print_function __author__ = 'nsylvain (Nicolas Sylvain)' +from __future__ import print_function + import os import re import sys diff --git a/gyp/tools/pretty_vcproj.py b/gyp/tools/pretty_vcproj.py index c96ba061ef..51cfc4e081 100755 --- a/gyp/tools/pretty_vcproj.py +++ b/gyp/tools/pretty_vcproj.py @@ -11,10 +11,10 @@ It outputs the resulting xml to stdout. """ -from __future__ import print_function - __author__ = 'nsylvain (Nicolas Sylvain)' +from __future__ import print_function + import os import sys @@ -284,8 +284,8 @@ def main(argv): # check if we have exactly 1 parameter. if len(argv) < 2: - print ('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] ' - '[key2=value2]' % argv[0]) + print(('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] ' + '[key2=value2]' % argv[0])) return 1 # Parse the keys From 1fe5bd97428052f1d6d05a42487ac57b4c1d1137 Mon Sep 17 00:00:00 2001 From: Craig Rodrigues Date: Sun, 19 Mar 2017 13:36:13 -0700 Subject: [PATCH 2/6] Replace deprecated function assertEquals() with assertEqual() --- gyp/pylib/gyp/input_test.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/gyp/pylib/gyp/input_test.py b/gyp/pylib/gyp/input_test.py index 4234fbb830..f64123f6e3 100755 --- a/gyp/pylib/gyp/input_test.py +++ b/gyp/pylib/gyp/input_test.py @@ -23,7 +23,7 @@ def _create_dependency(self, dependent, dependency): def test_no_cycle_empty_graph(self): for label, node in self.nodes.iteritems(): - self.assertEquals([], node.FindCycles()) + self.assertEqual([], node.FindCycles()) def test_no_cycle_line(self): self._create_dependency(self.nodes['a'], self.nodes['b']) @@ -31,7 +31,7 @@ def test_no_cycle_line(self): self._create_dependency(self.nodes['c'], self.nodes['d']) for label, node in self.nodes.iteritems(): - self.assertEquals([], node.FindCycles()) + self.assertEqual([], node.FindCycles()) def test_no_cycle_dag(self): self._create_dependency(self.nodes['a'], self.nodes['b']) @@ -39,21 +39,21 @@ def test_no_cycle_dag(self): self._create_dependency(self.nodes['b'], self.nodes['c']) for label, node in self.nodes.iteritems(): - self.assertEquals([], node.FindCycles()) + self.assertEqual([], node.FindCycles()) def test_cycle_self_reference(self): self._create_dependency(self.nodes['a'], self.nodes['a']) - self.assertEquals([[self.nodes['a'], self.nodes['a']]], + self.assertEqual([[self.nodes['a'], self.nodes['a']]], self.nodes['a'].FindCycles()) def test_cycle_two_nodes(self): self._create_dependency(self.nodes['a'], self.nodes['b']) self._create_dependency(self.nodes['b'], self.nodes['a']) - self.assertEquals([[self.nodes['a'], self.nodes['b'], self.nodes['a']]], + self.assertEqual([[self.nodes['a'], self.nodes['b'], self.nodes['a']]], self.nodes['a'].FindCycles()) - self.assertEquals([[self.nodes['b'], self.nodes['a'], self.nodes['b']]], + self.assertEqual([[self.nodes['b'], self.nodes['a'], self.nodes['b']]], self.nodes['b'].FindCycles()) def test_two_cycles(self): @@ -68,7 +68,7 @@ def test_two_cycles(self): [self.nodes['a'], self.nodes['b'], self.nodes['a']] in cycles) self.assertTrue( [self.nodes['b'], self.nodes['c'], self.nodes['b']] in cycles) - self.assertEquals(2, len(cycles)) + 
self.assertEqual(2, len(cycles)) def test_big_cycle(self): self._create_dependency(self.nodes['a'], self.nodes['b']) @@ -77,7 +77,7 @@ def test_big_cycle(self): self._create_dependency(self.nodes['d'], self.nodes['e']) self._create_dependency(self.nodes['e'], self.nodes['a']) - self.assertEquals([[self.nodes['a'], + self.assertEqual([[self.nodes['a'], self.nodes['b'], self.nodes['c'], self.nodes['d'], From 956225ed428e865339f96b795feb48535311b670 Mon Sep 17 00:00:00 2001 From: Craig Rodrigues Date: Sun, 19 Mar 2017 13:41:23 -0700 Subject: [PATCH 3/6] Replace deprecated iteritems() with items() for Python 3. --- gyp/pylib/gyp/MSVSSettings.py | 8 ++--- gyp/pylib/gyp/MSVSUserFile.py | 4 +-- gyp/pylib/gyp/MSVSUtil.py | 2 +- gyp/pylib/gyp/__init__.py | 2 +- gyp/pylib/gyp/easy_xml.py | 2 +- gyp/pylib/gyp/generator/android.py | 6 ++-- gyp/pylib/gyp/generator/gypd.py | 2 +- gyp/pylib/gyp/generator/make.py | 2 +- gyp/pylib/gyp/generator/msvs.py | 58 +++++++++++++++--------------- gyp/pylib/gyp/generator/ninja.py | 6 ++-- gyp/pylib/gyp/generator/xcode.py | 14 ++++---- gyp/pylib/gyp/input.py | 48 ++++++++++++------------- gyp/pylib/gyp/input_test.py | 6 ++-- gyp/pylib/gyp/mac_tool.py | 6 ++-- gyp/pylib/gyp/msvs_emulation.py | 6 ++-- gyp/pylib/gyp/ordered_dict.py | 4 +-- gyp/pylib/gyp/simple_copy.py | 2 +- gyp/pylib/gyp/win_tool.py | 2 +- gyp/pylib/gyp/xcode_emulation.py | 6 ++-- gyp/pylib/gyp/xcode_ninja.py | 4 +-- gyp/pylib/gyp/xcodeproj_file.py | 18 +++++----- 21 files changed, 104 insertions(+), 104 deletions(-) diff --git a/gyp/pylib/gyp/MSVSSettings.py b/gyp/pylib/gyp/MSVSSettings.py index 455862c636..00721eb237 100644 --- a/gyp/pylib/gyp/MSVSSettings.py +++ b/gyp/pylib/gyp/MSVSSettings.py @@ -435,7 +435,7 @@ def ConvertVCMacrosToMSBuild(s): '$(PlatformName)': '$(Platform)', '$(SafeInputName)': '%(Filename)', } - for old, new in replace_map.iteritems(): + for old, new in replace_map.items(): s = s.replace(old, new) s = FixVCMacroSlashes(s) return s @@ -455,10 +455,10 @@ def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr): dictionaries of settings and their values. """ msbuild_settings = {} - for msvs_tool_name, msvs_tool_settings in msvs_settings.iteritems(): + for msvs_tool_name, msvs_tool_settings in msvs_settings.items(): if msvs_tool_name in _msvs_to_msbuild_converters: msvs_tool = _msvs_to_msbuild_converters[msvs_tool_name] - for msvs_setting, msvs_value in msvs_tool_settings.iteritems(): + for msvs_setting, msvs_value in msvs_tool_settings.items(): if msvs_setting in msvs_tool: # Invoke the translation function. 
try: @@ -515,7 +515,7 @@ def _ValidateSettings(validators, settings, stderr): for tool_name in settings: if tool_name in validators: tool_validators = validators[tool_name] - for setting, value in settings[tool_name].iteritems(): + for setting, value in settings[tool_name].items(): if setting in tool_validators: try: tool_validators[setting](value) diff --git a/gyp/pylib/gyp/MSVSUserFile.py b/gyp/pylib/gyp/MSVSUserFile.py index 6c07e9a893..2264d64015 100644 --- a/gyp/pylib/gyp/MSVSUserFile.py +++ b/gyp/pylib/gyp/MSVSUserFile.py @@ -91,7 +91,7 @@ def AddDebugSettings(self, config_name, command, environment = {}, if environment and isinstance(environment, dict): env_list = ['%s="%s"' % (key, val) - for (key,val) in environment.iteritems()] + for (key,val) in environment.items()] environment = ' '.join(env_list) else: environment = '' @@ -135,7 +135,7 @@ def AddDebugSettings(self, config_name, command, environment = {}, def WriteIfChanged(self): """Writes the user file.""" configs = ['Configurations'] - for config, spec in sorted(self.configurations.iteritems()): + for config, spec in sorted(self.configurations.items()): configs.append(spec) content = ['VisualStudioUserFile', diff --git a/gyp/pylib/gyp/MSVSUtil.py b/gyp/pylib/gyp/MSVSUtil.py index 0b32e91180..c8187eb331 100644 --- a/gyp/pylib/gyp/MSVSUtil.py +++ b/gyp/pylib/gyp/MSVSUtil.py @@ -235,7 +235,7 @@ def InsertLargePdbShims(target_list, target_dicts, vars): # Set up the shim to output its PDB to the same location as the final linker # target. - for config_name, config in shim_dict.get('configurations').iteritems(): + for config_name, config in shim_dict.get('configurations').items(): pdb_path = _GetPdbPath(target_dict, config_name, vars) # A few keys that we don't want to propagate. diff --git a/gyp/pylib/gyp/__init__.py b/gyp/pylib/gyp/__init__.py index bd51cde888..7137cb77d2 100755 --- a/gyp/pylib/gyp/__init__.py +++ b/gyp/pylib/gyp/__init__.py @@ -209,7 +209,7 @@ def Noop(value): # We always want to ignore the environment when regenerating, to avoid # duplicate or changed flags in the environment at the time of regeneration. flags = ['--ignore-environment'] - for name, metadata in options._regeneration_metadata.iteritems(): + for name, metadata in options._regeneration_metadata.items(): opt = metadata['opt'] value = getattr(options, name) value_predicate = metadata['type'] == 'path' and FixPath or Noop diff --git a/gyp/pylib/gyp/easy_xml.py b/gyp/pylib/gyp/easy_xml.py index fd525a712b..7c3f621f1f 100644 --- a/gyp/pylib/gyp/easy_xml.py +++ b/gyp/pylib/gyp/easy_xml.py @@ -81,7 +81,7 @@ def _ConstructContentList(xml_parts, specification, pretty, level=0): # Optionally in second position is a dictionary of the attributes. rest = specification[1:] if rest and isinstance(rest[0], dict): - for at, val in sorted(rest[0].iteritems()): + for at, val in sorted(rest[0].items()): xml_parts.append(' %s="%s"' % (at, _XmlEscape(val, attr=True))) rest = rest[1:] if rest: diff --git a/gyp/pylib/gyp/generator/android.py b/gyp/pylib/gyp/generator/android.py index 66139892cc..b7f9842888 100644 --- a/gyp/pylib/gyp/generator/android.py +++ b/gyp/pylib/gyp/generator/android.py @@ -460,7 +460,7 @@ def WriteSourceFlags(self, spec, configs): Args: spec, configs: input from gyp. 
""" - for configname, config in sorted(configs.iteritems()): + for configname, config in sorted(configs.items()): extracted_includes = [] self.WriteLn('\n# Flags passed to both C and C++ files.') @@ -790,7 +790,7 @@ def WriteTargetFlags(self, spec, configs, link_deps): static_libs, dynamic_libs, ldflags_libs = self.FilterLibraries(libraries) if self.type != 'static_library': - for configname, config in sorted(configs.iteritems()): + for configname, config in sorted(configs.items()): ldflags = list(config.get('ldflags', [])) self.WriteLn('') self.WriteList(ldflags, 'LOCAL_LDFLAGS_%s' % configname) @@ -839,7 +839,7 @@ def WriteTarget(self, spec, configs, deps, link_deps, part_of_all, settings = spec.get('aosp_build_settings', {}) if settings: self.WriteLn('### Set directly by aosp_build_settings.') - for k, v in settings.iteritems(): + for k, v in settings.items(): if isinstance(v, list): self.WriteList(v, k) else: diff --git a/gyp/pylib/gyp/generator/gypd.py b/gyp/pylib/gyp/generator/gypd.py index 3efdb9966a..78eeaa61b2 100644 --- a/gyp/pylib/gyp/generator/gypd.py +++ b/gyp/pylib/gyp/generator/gypd.py @@ -88,7 +88,7 @@ def GenerateOutput(target_list, target_dicts, data, params): if not output_file in output_files: output_files[output_file] = input_file - for output_file, input_file in output_files.iteritems(): + for output_file, input_file in output_files.items(): output = open(output_file, 'w') pprint.pprint(data[input_file], output) output.close() diff --git a/gyp/pylib/gyp/generator/make.py b/gyp/pylib/gyp/generator/make.py index 96212be1fc..0ac0104701 100644 --- a/gyp/pylib/gyp/generator/make.py +++ b/gyp/pylib/gyp/generator/make.py @@ -674,7 +674,7 @@ def _ValidateSourcesForOSX(spec, all_sources): basenames.setdefault(basename, []).append(source) error = '' - for basename, files in basenames.iteritems(): + for basename, files in basenames.items(): if len(files) > 1: error += ' %s: %s\n' % (basename, ' '.join(files)) diff --git a/gyp/pylib/gyp/generator/msvs.py b/gyp/pylib/gyp/generator/msvs.py index 128456b181..fb2549d025 100644 --- a/gyp/pylib/gyp/generator/msvs.py +++ b/gyp/pylib/gyp/generator/msvs.py @@ -451,7 +451,7 @@ def _AddCustomBuildToolForMSVS(p, spec, primary_input, 'CommandLine': cmd, }) # Add to the properties of primary input for each config. - for config_name, c_data in spec['configurations'].iteritems(): + for config_name, c_data in spec['configurations'].items(): p.AddFileConfig(_FixPath(primary_input), _ConfigFullName(config_name, c_data), tools=[tool]) @@ -971,7 +971,7 @@ def _ValidateSourcesForMSVSProject(spec, version): basenames.setdefault(basename, []).append(source) error = '' - for basename, files in basenames.iteritems(): + for basename, files in basenames.items(): if len(files) > 1: error += ' %s: %s\n' % (basename, ' '.join(files)) @@ -1003,7 +1003,7 @@ def _GenerateMSVSProject(project, options, version, generator_flags): relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir) config_type = _GetMSVSConfigurationType(spec, project.build_file) - for config_name, config in spec['configurations'].iteritems(): + for config_name, config in spec['configurations'].items(): _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config) # MSVC08 and prior version cannot handle duplicate basenames in the same @@ -1369,10 +1369,10 @@ def _ConvertToolsToExpectedForm(tools): A list of Tool objects. """ tool_list = [] - for tool, settings in tools.iteritems(): + for tool, settings in tools.items(): # Collapse settings with lists. 
settings_fixed = {} - for setting, value in settings.iteritems(): + for setting, value in settings.items(): if type(value) == list: if ((tool == 'VCLinkerTool' and setting == 'AdditionalDependencies') or @@ -1547,7 +1547,7 @@ def _IdlFilesHandledNonNatively(spec, sources): def _GetPrecompileRelatedFiles(spec): # Gather a list of precompiled header related sources. precompiled_related = [] - for _, config in spec['configurations'].iteritems(): + for _, config in spec['configurations'].items(): for k in precomp_keys: f = config.get(k) if f: @@ -1558,7 +1558,7 @@ def _GetPrecompileRelatedFiles(spec): def _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl, list_excluded): exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl) - for file_name, excluded_configs in exclusions.iteritems(): + for file_name, excluded_configs in exclusions.items(): if (not list_excluded and len(excluded_configs) == len(spec['configurations'])): # If we're not listing excluded files, then they won't appear in the @@ -1575,7 +1575,7 @@ def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl): # Exclude excluded sources from being built. for f in excluded_sources: excluded_configs = [] - for config_name, config in spec['configurations'].iteritems(): + for config_name, config in spec['configurations'].items(): precomped = [_FixPath(config.get(i, '')) for i in precomp_keys] # Don't do this for ones that are precompiled header related. if f not in precomped: @@ -1585,7 +1585,7 @@ def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl): # Exclude them now. for f in excluded_idl: excluded_configs = [] - for config_name, config in spec['configurations'].iteritems(): + for config_name, config in spec['configurations'].items(): excluded_configs.append((config_name, config)) exclusions[f] = excluded_configs return exclusions @@ -1594,7 +1594,7 @@ def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl): def _AddToolFilesToMSVS(p, spec): # Add in tool files (rules). tool_files = OrderedSet() - for _, config in spec['configurations'].iteritems(): + for _, config in spec['configurations'].items(): for f in config.get('msvs_tool_files', []): tool_files.add(f) for f in tool_files: @@ -1607,7 +1607,7 @@ def _HandlePreCompiledHeaders(p, sources, spec): # kind (i.e. C vs. C++) as the precompiled header source stub needs # to have use of precompiled headers disabled. extensions_excluded_from_precompile = [] - for config_name, config in spec['configurations'].iteritems(): + for config_name, config in spec['configurations'].items(): source = config.get('msvs_precompiled_source') if source: source = _FixPath(source) @@ -1628,7 +1628,7 @@ def DisableForSourceTree(source_tree): else: basename, extension = os.path.splitext(source) if extension in extensions_excluded_from_precompile: - for config_name, config in spec['configurations'].iteritems(): + for config_name, config in spec['configurations'].items(): tool = MSVSProject.Tool('VCCLCompilerTool', {'UsePrecompiledHeader': '0', 'ForcedIncludeFiles': '$(NOINHERIT)'}) @@ -1679,7 +1679,7 @@ def _WriteMSVSUserFile(project_path, version, spec): return # Nothing to add # Write out the user file. 
user_file = _CreateMSVSUserFile(project_path, version, spec) - for config_name, c_data in spec['configurations'].iteritems(): + for config_name, c_data in spec['configurations'].items(): user_file.AddDebugSettings(_ConfigFullName(config_name, c_data), action, environment, working_directory) user_file.WriteIfChanged() @@ -1730,7 +1730,7 @@ def _GetPathDict(root, path): def _DictsToFolders(base_path, bucket, flat): # Convert to folders recursively. children = [] - for folder, contents in bucket.iteritems(): + for folder, contents in bucket.items(): if type(contents) == dict: folder_children = _DictsToFolders(os.path.join(base_path, folder), contents, flat) @@ -1802,7 +1802,7 @@ def _GetPlatformOverridesOfProject(spec): # Prepare a dict indicating which project configurations are used for which # solution configurations for this target. config_platform_overrides = {} - for config_name, c in spec['configurations'].iteritems(): + for config_name, c in spec['configurations'].items(): config_fullname = _ConfigFullName(config_name, c) platform = c.get('msvs_target_platform', _ConfigPlatform(c)) fixed_config_fullname = '%s|%s' % ( @@ -1941,7 +1941,7 @@ def PerformBuild(data, configurations, params): msvs_version = params['msvs_version'] devenv = os.path.join(msvs_version.path, 'Common7', 'IDE', 'devenv.com') - for build_file, build_file_dict in data.iteritems(): + for build_file, build_file_dict in data.items(): (build_file_root, build_file_ext) = os.path.splitext(build_file) if build_file_ext != '.gyp': continue @@ -1990,7 +1990,7 @@ def GenerateOutput(target_list, target_dicts, data, params): configs = set() for qualified_target in target_list: spec = target_dicts[qualified_target] - for config_name, config in spec['configurations'].iteritems(): + for config_name, config in spec['configurations'].items(): configs.add(_ConfigFullName(config_name, config)) configs = list(configs) @@ -2630,7 +2630,7 @@ def _GetConfigurationCondition(name, settings): def _GetMSBuildProjectConfigurations(configurations): group = ['ItemGroup', {'Label': 'ProjectConfigurations'}] - for (name, settings) in sorted(configurations.iteritems()): + for (name, settings) in sorted(configurations.items()): configuration, platform = _GetConfigurationAndPlatform(name, settings) designation = '%s|%s' % (configuration, platform) group.append( @@ -2700,7 +2700,7 @@ def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name): def _GetMSBuildConfigurationDetails(spec, build_file): properties = {} - for name, settings in spec['configurations'].iteritems(): + for name, settings in spec['configurations'].items(): msbuild_attributes = _GetMSBuildAttributes(spec, settings, build_file) condition = _GetConfigurationCondition(name, settings) character_set = msbuild_attributes.get('CharacterSet') @@ -2729,7 +2729,7 @@ def _GetMSBuildPropertySheets(configurations): user_props = r'$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props' additional_props = {} props_specified = False - for name, settings in sorted(configurations.iteritems()): + for name, settings in sorted(configurations.items()): configuration = _GetConfigurationCondition(name, settings) if 'msbuild_props' in settings: additional_props[configuration] = _FixPaths(settings['msbuild_props']) @@ -2751,7 +2751,7 @@ def _GetMSBuildPropertySheets(configurations): ] else: sheets = [] - for condition, props in additional_props.iteritems(): + for condition, props in additional_props.items(): import_group = [ 'ImportGroup', {'Label': 'PropertySheets', @@ -2878,7 +2878,7 @@ def 
_GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file): new_paths = '$(ExecutablePath);' + ';'.join(new_paths) properties = {} - for (name, configuration) in sorted(configurations.iteritems()): + for (name, configuration) in sorted(configurations.items()): condition = _GetConfigurationCondition(name, configuration) attributes = _GetMSBuildAttributes(spec, configuration, build_file) msbuild_settings = configuration['finalized_msbuild_settings'] @@ -2903,7 +2903,7 @@ def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file): _AddConditionalProperty(properties, condition, 'ExecutablePath', new_paths) tool_settings = msbuild_settings.get('', {}) - for name, value in sorted(tool_settings.iteritems()): + for name, value in sorted(tool_settings.items()): formatted_value = _GetValueFormattedForMSBuild('', name, value) _AddConditionalProperty(properties, condition, name, formatted_value) return _GetMSBuildPropertyGroup(spec, None, properties) @@ -2972,7 +2972,7 @@ def GetEdges(node): # NOTE: reverse(topsort(DAG)) = topsort(reverse_edges(DAG)) for name in reversed(properties_ordered): values = properties[name] - for value, conditions in sorted(values.iteritems()): + for value, conditions in sorted(values.items()): if len(conditions) == num_configurations: # If the value is the same all configurations, # just add one unconditional entry. @@ -2985,18 +2985,18 @@ def GetEdges(node): def _GetMSBuildToolSettingsSections(spec, configurations): groups = [] - for (name, configuration) in sorted(configurations.iteritems()): + for (name, configuration) in sorted(configurations.items()): msbuild_settings = configuration['finalized_msbuild_settings'] group = ['ItemDefinitionGroup', {'Condition': _GetConfigurationCondition(name, configuration)} ] - for tool_name, tool_settings in sorted(msbuild_settings.iteritems()): + for tool_name, tool_settings in sorted(msbuild_settings.items()): # Skip the tool named '' which is a holder of global settings handled # by _GetMSBuildConfigurationGlobalProperties. 
if tool_name: if tool_settings: tool = [tool_name] - for name, value in sorted(tool_settings.iteritems()): + for name, value in sorted(tool_settings.items()): formatted_value = _GetValueFormattedForMSBuild(tool_name, name, value) tool.append([name, formatted_value]) @@ -3196,7 +3196,7 @@ def _AddSources2(spec, sources, exclusions, grouped_sources, {'Condition': condition}, 'true']) # Add precompile if needed - for config_name, configuration in spec['configurations'].iteritems(): + for config_name, configuration in spec['configurations'].items(): precompiled_source = configuration.get('msvs_precompiled_source', '') if precompiled_source != '': precompiled_source = _FixPath(precompiled_source) @@ -3436,7 +3436,7 @@ def _GenerateActionsForMSBuild(spec, actions_to_add): """ sources_handled_by_action = OrderedSet() actions_spec = [] - for primary_input, actions in actions_to_add.iteritems(): + for primary_input, actions in actions_to_add.items(): inputs = OrderedSet() outputs = OrderedSet() descriptions = [] diff --git a/gyp/pylib/gyp/generator/ninja.py b/gyp/pylib/gyp/generator/ninja.py index 1fc00e1f98..aae5b71310 100644 --- a/gyp/pylib/gyp/generator/ninja.py +++ b/gyp/pylib/gyp/generator/ninja.py @@ -797,7 +797,7 @@ def WriteMacXCassets(self, xcassets, bundle_depends): 'XCASSETS_LAUNCH_IMAGE': 'launch-image', } settings = self.xcode_settings.xcode_settings[self.config_name] - for settings_key, arg_name in settings_to_arg.iteritems(): + for settings_key, arg_name in settings_to_arg.items(): value = settings.get(settings_key) if value: extra_arguments[arg_name] = value @@ -1890,7 +1890,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value) # Support wrappers from environment variables too. - for key, value in os.environ.iteritems(): + for key, value in os.environ.items(): if key.lower().endswith('_wrapper'): key_prefix = key[:-len('_wrapper')] key_prefix = re.sub(r'\.HOST$', '.host', key_prefix) @@ -1906,7 +1906,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, configs, generator_flags) cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles( toplevel_build, generator_flags, shared_system_includes, OpenOutput) - for arch, path in cl_paths.iteritems(): + for arch, path in cl_paths.items(): if clang_cl: # If we have selected clang-cl, use that instead. path = clang_cl diff --git a/gyp/pylib/gyp/generator/xcode.py b/gyp/pylib/gyp/generator/xcode.py index 63f80d7657..a2626b689e 100644 --- a/gyp/pylib/gyp/generator/xcode.py +++ b/gyp/pylib/gyp/generator/xcode.py @@ -183,7 +183,7 @@ def Finalize1(self, xcode_targets, serialize_all_tests): # the tree tree view for UI display. # Any values set globally are applied to all configurations, then any # per-configuration values are applied. 
- for xck, xcv in self.build_file_dict.get('xcode_settings', {}).iteritems(): + for xck, xcv in self.build_file_dict.get('xcode_settings', {}).items(): xccl.SetBuildSetting(xck, xcv) if 'xcode_config_file' in self.build_file_dict: config_ref = self.project.AddOrGetFileInRootGroup( @@ -197,7 +197,7 @@ def Finalize1(self, xcode_targets, serialize_all_tests): if build_file_configuration_named: xcc = xccl.ConfigurationNamed(config_name) for xck, xcv in build_file_configuration_named.get('xcode_settings', - {}).iteritems(): + {}).items(): xcc.SetBuildSetting(xck, xcv) if 'xcode_config_file' in build_file_configuration_named: config_ref = self.project.AddOrGetFileInRootGroup( @@ -273,7 +273,7 @@ def Finalize1(self, xcode_targets, serialize_all_tests): script = script + "\n".join( ['export %s="%s"' % (key, gyp.xcodeproj_file.ConvertVariablesToShellSyntax(val)) - for (key, val) in command.get('environment').iteritems()]) + "\n" + for (key, val) in command.get('environment').items()]) + "\n" # Some test end up using sockets, files on disk, etc. and can get # confused if more then one test runs at a time. The generator @@ -566,7 +566,7 @@ def EscapeXcodeDefine(s): def PerformBuild(data, configurations, params): options = params['options'] - for build_file, build_file_dict in data.iteritems(): + for build_file, build_file_dict in data.items(): (build_file_root, build_file_ext) = os.path.splitext(build_file) if build_file_ext != '.gyp': continue @@ -625,7 +625,7 @@ def GenerateOutput(target_list, target_dicts, data, params): skip_excluded_files = \ not generator_flags.get('xcode_list_excluded_files', True) xcode_projects = {} - for build_file, build_file_dict in data.iteritems(): + for build_file, build_file_dict in data.items(): (build_file_root, build_file_ext) = os.path.splitext(build_file) if build_file_ext != '.gyp': continue @@ -1278,7 +1278,7 @@ def GenerateOutput(target_list, target_dicts, data, params): set_define = EscapeXcodeDefine(define) xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', set_define) if 'xcode_settings' in configuration: - for xck, xcv in configuration['xcode_settings'].iteritems(): + for xck, xcv in configuration['xcode_settings'].items(): xcbc.SetBuildSetting(xck, xcv) if 'xcode_config_file' in configuration: config_ref = pbxp.AddOrGetFileInRootGroup( @@ -1286,7 +1286,7 @@ def GenerateOutput(target_list, target_dicts, data, params): xcbc.SetBaseConfiguration(config_ref) build_files = [] - for build_file, build_file_dict in data.iteritems(): + for build_file, build_file_dict in data.items(): if build_file.endswith('.gyp'): build_files.append(build_file) diff --git a/gyp/pylib/gyp/input.py b/gyp/pylib/gyp/input.py index 5afbba03fe..7f0bd78a52 100644 --- a/gyp/pylib/gyp/input.py +++ b/gyp/pylib/gyp/input.py @@ -311,7 +311,7 @@ def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data, subdict_path, include) # Recurse into subdictionaries. - for k, v in subdict.iteritems(): + for k, v in subdict.items(): if type(v) is dict: LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, None, check) @@ -497,7 +497,7 @@ def CallLoadTargetBuildFile(global_flags, signal.signal(signal.SIGINT, signal.SIG_IGN) # Apply globals so that the worker process behaves the same. 
- for key, value in global_flags.iteritems(): + for key, value in global_flags.items(): globals()[key] = value SetGeneratorGlobals(generator_input_info) @@ -1160,7 +1160,7 @@ def ProcessConditionsInDict(the_dict, phase, variables, build_file): def LoadAutomaticVariablesFromDict(variables, the_dict): # Any keys with plain string values in the_dict become automatic variables. # The variable name is the key name with a "_" character prepended. - for key, value in the_dict.iteritems(): + for key, value in the_dict.items(): if type(value) in (str, int, list): variables['_' + key] = value @@ -1173,7 +1173,7 @@ def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key): # the_dict in the_dict's parent dict. If the_dict's parent is not a dict # (it could be a list or it could be parentless because it is a root dict), # the_dict_key will be None. - for key, value in the_dict.get('variables', {}).iteritems(): + for key, value in the_dict.get('variables', {}).items(): if type(value) not in (str, int, list): continue @@ -1212,7 +1212,7 @@ def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in, # list before we process them so that you can reference one # variable from another. They will be fully expanded by recursion # in ExpandVariables. - for key, value in the_dict['variables'].iteritems(): + for key, value in the_dict['variables'].items(): variables[key] = value # Handle the associated variables dict first, so that any variable @@ -1225,7 +1225,7 @@ def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in, LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) - for key, value in the_dict.iteritems(): + for key, value in the_dict.items(): # Skip "variables", which was already processed if present. if key != 'variables' and type(value) is str: expanded = ExpandVariables(value, phase, variables, build_file) @@ -1283,7 +1283,7 @@ def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in, # Recurse into child dicts, or process child lists which may result in # further recursion into descendant dicts. - for key, value in the_dict.iteritems(): + for key, value in the_dict.items(): # Skip "variables" and string values, which were already processed if # present. if key == 'variables' or type(value) is str: @@ -1380,7 +1380,7 @@ def QualifyDependencies(targets): for dep in dependency_sections for op in ('', '!', '/')] - for target, target_dict in targets.iteritems(): + for target, target_dict in targets.items(): target_build_file = gyp.common.BuildFile(target) toolset = target_dict['toolset'] for dependency_key in all_dependency_sections: @@ -1420,7 +1420,7 @@ def ExpandWildcardDependencies(targets, data): dependency list, must be qualified when this function is called. 
""" - for target, target_dict in targets.iteritems(): + for target, target_dict in targets.items(): toolset = target_dict['toolset'] target_build_file = gyp.common.BuildFile(target) for dependency_key in dependency_sections: @@ -1482,7 +1482,7 @@ def Unify(l): def RemoveDuplicateDependencies(targets): """Makes sure every dependency appears only once in all targets's dependency lists.""" - for target_name, target_dict in targets.iteritems(): + for target_name, target_dict in targets.items(): for dependency_key in dependency_sections: dependencies = target_dict.get(dependency_key, []) if dependencies: @@ -1498,7 +1498,7 @@ def Filter(l, item): def RemoveSelfDependencies(targets): """Remove self dependencies from targets that have the prune_self_dependency variable set.""" - for target_name, target_dict in targets.iteritems(): + for target_name, target_dict in targets.items(): for dependency_key in dependency_sections: dependencies = target_dict.get(dependency_key, []) if dependencies: @@ -1511,7 +1511,7 @@ def RemoveSelfDependencies(targets): def RemoveLinkDependenciesFromNoneTargets(targets): """Remove dependencies having the 'link_dependency' attribute from the 'none' targets.""" - for target_name, target_dict in targets.iteritems(): + for target_name, target_dict in targets.items(): for dependency_key in dependency_sections: dependencies = target_dict.get(dependency_key, []) if dependencies: @@ -1797,14 +1797,14 @@ def BuildDependencyList(targets): # Create a DependencyGraphNode for each target. Put it into a dict for easy # access. dependency_nodes = {} - for target, spec in targets.iteritems(): + for target, spec in targets.items(): if target not in dependency_nodes: dependency_nodes[target] = DependencyGraphNode(target) # Set up the dependency links. Targets that have no dependencies are treated # as dependent on root_node. root_node = DependencyGraphNode(None) - for target, spec in targets.iteritems(): + for target, spec in targets.items(): target_node = dependency_nodes[target] target_build_file = gyp.common.BuildFile(target) dependencies = spec.get('dependencies') @@ -1853,7 +1853,7 @@ def VerifyNoGYPFileCircularDependencies(targets): dependency_nodes[build_file] = DependencyGraphNode(build_file) # Set up the dependency links. - for target, spec in targets.iteritems(): + for target, spec in targets.items(): build_file = gyp.common.BuildFile(target) build_file_node = dependency_nodes[build_file] target_dependencies = spec.get('dependencies', []) @@ -2118,7 +2118,7 @@ def is_in_set_or_list(x, s, l): def MergeDicts(to, fro, to_file, fro_file): # I wanted to name the parameter "from" but it's a Python keyword... - for k, v in fro.iteritems(): + for k, v in fro.items(): # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give # copy semantics. 
Something else may want to merge from the |fro| dict # later, and having the same dict ref pointed to twice in the tree isn't @@ -2253,13 +2253,13 @@ def SetUpConfigurations(target, target_dict): if not 'configurations' in target_dict: target_dict['configurations'] = {'Default': {}} if not 'default_configuration' in target_dict: - concrete = [i for (i, config) in target_dict['configurations'].iteritems() + concrete = [i for (i, config) in target_dict['configurations'].items() if not config.get('abstract')] target_dict['default_configuration'] = sorted(concrete)[0] merged_configurations = {} configs = target_dict['configurations'] - for (configuration, old_configuration_dict) in configs.iteritems(): + for (configuration, old_configuration_dict) in configs.items(): # Skip abstract configurations (saves work only). if old_configuration_dict.get('abstract'): continue @@ -2267,7 +2267,7 @@ def SetUpConfigurations(target, target_dict): # Get the inheritance relationship right by making a copy of the target # dict. new_configuration_dict = {} - for (key, target_val) in target_dict.iteritems(): + for (key, target_val) in target_dict.items(): key_ext = key[-1:] if key_ext in key_suffixes: key_base = key[:-1] @@ -2351,7 +2351,7 @@ def ProcessListFiltersInDict(name, the_dict): lists = [] del_lists = [] - for key, value in the_dict.iteritems(): + for key, value in the_dict.items(): operation = key[-1] if operation != '!' and operation != '/': continue @@ -2469,7 +2469,7 @@ def ProcessListFiltersInDict(name, the_dict): the_dict[excluded_key] = excluded_list # Now recurse into subdicts and lists that may contain dicts. - for key, value in the_dict.iteritems(): + for key, value in the_dict.items(): if type(value) is dict: ProcessListFiltersInDict(key, value) elif type(value) is list: @@ -2526,7 +2526,7 @@ def ValidateSourcesInTarget(target, target_dict, build_file, basenames.setdefault(basename, []).append(source) error = '' - for basename, files in basenames.iteritems(): + for basename, files in basenames.items(): if len(files) > 1: error += ' %s: %s\n' % (basename, ' '.join(files)) @@ -2646,7 +2646,7 @@ def ValidateActionsInTarget(target, target_dict, build_file): def TurnIntIntoStrInDict(the_dict): """Given dict the_dict, recursively converts all integers into strings. """ - # Use items instead of iteritems because there's no need to try to look at + # Use items instead of items because there's no need to try to look at # reinserted keys and their associated values. for k, v in the_dict.items(): if type(v) is int: @@ -2805,7 +2805,7 @@ def Load(build_files, variables, includes, depth, generator_input_info, check, RemoveLinkDependenciesFromNoneTargets(targets) # Apply exclude (!) and regex (/) list filters only for dependency_sections. 
- for target_name, target_dict in targets.iteritems(): + for target_name, target_dict in targets.items(): tmp_dict = {} for key_base in dependency_sections: for op in ('', '!', '/'): diff --git a/gyp/pylib/gyp/input_test.py b/gyp/pylib/gyp/input_test.py index f64123f6e3..1bc5e3d308 100755 --- a/gyp/pylib/gyp/input_test.py +++ b/gyp/pylib/gyp/input_test.py @@ -22,7 +22,7 @@ def _create_dependency(self, dependent, dependency): dependency.dependents.append(dependent) def test_no_cycle_empty_graph(self): - for label, node in self.nodes.iteritems(): + for label, node in self.nodes.items(): self.assertEqual([], node.FindCycles()) def test_no_cycle_line(self): @@ -30,7 +30,7 @@ def test_no_cycle_line(self): self._create_dependency(self.nodes['b'], self.nodes['c']) self._create_dependency(self.nodes['c'], self.nodes['d']) - for label, node in self.nodes.iteritems(): + for label, node in self.nodes.items(): self.assertEqual([], node.FindCycles()) def test_no_cycle_dag(self): @@ -38,7 +38,7 @@ def test_no_cycle_dag(self): self._create_dependency(self.nodes['a'], self.nodes['c']) self._create_dependency(self.nodes['b'], self.nodes['c']) - for label, node in self.nodes.iteritems(): + for label, node in self.nodes.items(): self.assertEqual([], node.FindCycles()) def test_cycle_self_reference(self): diff --git a/gyp/pylib/gyp/mac_tool.py b/gyp/pylib/gyp/mac_tool.py index 0ab3de18f7..b8f38a8fdc 100755 --- a/gyp/pylib/gyp/mac_tool.py +++ b/gyp/pylib/gyp/mac_tool.py @@ -326,7 +326,7 @@ def ExecCompileXcassets(self, keys, *inputs): ]) if keys: keys = json.loads(keys) - for key, value in keys.iteritems(): + for key, value in keys.items(): arg_name = '--' + key if isinstance(value, bool): if value: @@ -487,7 +487,7 @@ def _LoadProvisioningProfile(self, profile_path): def _MergePlist(self, merged_plist, plist): """Merge |plist| into |merged_plist|.""" - for key, value in plist.iteritems(): + for key, value in plist.items(): if isinstance(value, dict): merged_value = merged_plist.get(key, {}) if isinstance(merged_value, dict): @@ -597,7 +597,7 @@ def _ExpandVariables(self, data, substitutions): the key was not found. """ if isinstance(data, str): - for key, value in substitutions.iteritems(): + for key, value in substitutions.items(): data = data.replace('$(%s)' % key, value) return data if isinstance(data, list): diff --git a/gyp/pylib/gyp/msvs_emulation.py b/gyp/pylib/gyp/msvs_emulation.py index ca67b122f0..75da78526c 100644 --- a/gyp/pylib/gyp/msvs_emulation.py +++ b/gyp/pylib/gyp/msvs_emulation.py @@ -205,7 +205,7 @@ def __init__(self, spec, generator_flags): configs = spec['configurations'] for field, default in supported_fields: setattr(self, field, {}) - for configname, config in configs.iteritems(): + for configname, config in configs.items(): getattr(self, field)[configname] = config.get(field, default()) self.msvs_cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.']) @@ -941,7 +941,7 @@ def ExpandMacros(string, expansions): """Expand $(Variable) per expansions dict. 
See MsvsSettings.GetVSMacroEnv for the canonical way to retrieve a suitable dict.""" if '$' in string: - for old, new in expansions.iteritems(): + for old, new in expansions.items(): assert '$(' not in new, new string = string.replace(old, new) return string @@ -985,7 +985,7 @@ def _FormatAsEnvironmentBlock(envvar_dict): CreateProcess documentation for more details.""" block = '' nul = '\0' - for key, value in envvar_dict.iteritems(): + for key, value in envvar_dict.items(): block += key + '=' + value + nul block += nul return block diff --git a/gyp/pylib/gyp/ordered_dict.py b/gyp/pylib/gyp/ordered_dict.py index a1e89f9199..6fe9c1f6c7 100644 --- a/gyp/pylib/gyp/ordered_dict.py +++ b/gyp/pylib/gyp/ordered_dict.py @@ -161,8 +161,8 @@ def itervalues(self): for k in self: yield self[k] - def iteritems(self): - 'od.iteritems -> an iterator over the (key, value) items in od' + def items(self): + 'od.items -> an iterator over the (key, value) items in od' for k in self: yield (k, self[k]) diff --git a/gyp/pylib/gyp/simple_copy.py b/gyp/pylib/gyp/simple_copy.py index 74c98c5a79..463929386e 100644 --- a/gyp/pylib/gyp/simple_copy.py +++ b/gyp/pylib/gyp/simple_copy.py @@ -38,7 +38,7 @@ def _deepcopy_list(x): def _deepcopy_dict(x): y = {} - for key, value in x.iteritems(): + for key, value in x.items(): y[deepcopy(key)] = deepcopy(value) return y d[dict] = _deepcopy_dict diff --git a/gyp/pylib/gyp/win_tool.py b/gyp/pylib/gyp/win_tool.py index 3bade00dbe..bca0b9e346 100755 --- a/gyp/pylib/gyp/win_tool.py +++ b/gyp/pylib/gyp/win_tool.py @@ -294,7 +294,7 @@ def ExecActionWrapper(self, arch, rspfile, *dir): env = self._GetEnv(arch) # TODO(scottmg): This is a temporary hack to get some specific variables # through to actions that are set after gyp-time. http://crbug.com/333738. - for k, v in os.environ.iteritems(): + for k, v in os.environ.items(): if k not in env: env[k] = v args = open(rspfile).read() diff --git a/gyp/pylib/gyp/xcode_emulation.py b/gyp/pylib/gyp/xcode_emulation.py index fd4427b86b..66f325932a 100644 --- a/gyp/pylib/gyp/xcode_emulation.py +++ b/gyp/pylib/gyp/xcode_emulation.py @@ -170,7 +170,7 @@ def __init__(self, spec): # the same for all configs are implicitly per-target settings. 
self.xcode_settings = {} configs = spec['configurations'] - for configname, config in configs.iteritems(): + for configname, config in configs.items(): self.xcode_settings[configname] = config.get('xcode_settings', {}) self._ConvertConditionalKeys(configname) if self.xcode_settings[configname].get('IPHONEOS_DEPLOYMENT_TARGET', @@ -891,7 +891,7 @@ def GetPerTargetSettings(self): result = dict(self.xcode_settings[configname]) first_pass = False else: - for key, value in self.xcode_settings[configname].iteritems(): + for key, value in self.xcode_settings[configname].items(): if key not in result: continue elif result[key] != value: @@ -1639,7 +1639,7 @@ def _AddIOSDeviceConfigurations(targets): for target_dict in targets.itervalues(): toolset = target_dict['toolset'] configs = target_dict['configurations'] - for config_name, config_dict in dict(configs).iteritems(): + for config_name, config_dict in dict(configs).items(): iphoneos_config_dict = copy.deepcopy(config_dict) configs[config_name + '-iphoneos'] = iphoneos_config_dict configs[config_name + '-iphonesimulator'] = config_dict diff --git a/gyp/pylib/gyp/xcode_ninja.py b/gyp/pylib/gyp/xcode_ninja.py index d781471005..5acd82e004 100644 --- a/gyp/pylib/gyp/xcode_ninja.py +++ b/gyp/pylib/gyp/xcode_ninja.py @@ -161,7 +161,7 @@ def CreateWrapper(target_list, target_dicts, data, params): params: Dict of global options for gyp. """ orig_gyp = params['build_files'][0] - for gyp_name, gyp_dict in data.iteritems(): + for gyp_name, gyp_dict in data.items(): if gyp_name == orig_gyp: depth = gyp_dict['_DEPTH'] @@ -228,7 +228,7 @@ def CreateWrapper(target_list, target_dicts, data, params): sources_target['configurations'] = {'Default': { 'include_dirs': [ depth ] } } sources = [] - for target, target_dict in target_dicts.iteritems(): + for target, target_dict in target_dicts.items(): base = os.path.dirname(target) files = target_dict.get('sources', []) + \ target_dict.get('mac_bundle_resources', []) diff --git a/gyp/pylib/gyp/xcodeproj_file.py b/gyp/pylib/gyp/xcodeproj_file.py index 712eefe3f8..62ec580770 100644 --- a/gyp/pylib/gyp/xcodeproj_file.py +++ b/gyp/pylib/gyp/xcodeproj_file.py @@ -314,7 +314,7 @@ def Copy(self): """ that = self.__class__(id=self.id, parent=self.parent) - for key, value in self._properties.iteritems(): + for key, value in self._properties.items(): is_strong = self._schema[key][2] if isinstance(value, XCObject): @@ -475,7 +475,7 @@ def Children(self): """Returns a list of all of this object's owned (strong) children.""" children = [] - for property, attributes in self._schema.iteritems(): + for property, attributes in self._schema.items(): (is_list, property_type, is_strong) = attributes[0:3] if is_strong and property in self._properties: if not is_list: @@ -622,7 +622,7 @@ def _XCPrintableValue(self, tabs, value, flatten_list=False): printable += end_tabs + ')' elif isinstance(value, dict): printable = '{' + sep - for item_key, item_value in sorted(value.iteritems()): + for item_key, item_value in sorted(value.items()): printable += element_tabs + \ self._XCPrintableValue(tabs + 1, item_key, flatten_list) + ' = ' + \ self._XCPrintableValue(tabs + 1, item_value, flatten_list) + ';' + \ @@ -730,7 +730,7 @@ def Print(self, file=sys.stdout): self._XCKVPrint(file, 3, 'isa', self.__class__.__name__) # The remaining elements of an object dictionary are sorted alphabetically. 
- for property, value in sorted(self._properties.iteritems()): + for property, value in sorted(self._properties.items()): self._XCKVPrint(file, 3, property, value) # End the object. @@ -752,7 +752,7 @@ def UpdateProperties(self, properties, do_copy=False): if properties is None: return - for property, value in properties.iteritems(): + for property, value in properties.items(): # Make sure the property is in the schema. if not property in self._schema: raise KeyError(property + ' not in ' + self.__class__.__name__) @@ -865,7 +865,7 @@ def VerifyHasRequiredProperties(self): # TODO(mark): A stronger verification mechanism is needed. Some # subclasses need to perform validation beyond what the schema can enforce. - for property, attributes in self._schema.iteritems(): + for property, attributes in self._schema.items(): (is_list, property_type, is_strong, is_required) = attributes[0:4] if is_required and not property in self._properties: raise KeyError(self.__class__.__name__ + ' requires ' + property) @@ -875,7 +875,7 @@ def _SetDefaultsFromSchema(self): overwrite properties that have already been set.""" defaults = {} - for property, attributes in self._schema.iteritems(): + for property, attributes in self._schema.items(): (is_list, property_type, is_strong, is_required) = attributes[0:4] if is_required and len(attributes) >= 5 and \ not property in self._properties: @@ -2844,7 +2844,7 @@ def CompareProducts(x, y, remote_products): # determine the sort order. return cmp(x_index, y_index) - for other_pbxproject, ref_dict in self._other_pbxprojects.iteritems(): + for other_pbxproject, ref_dict in self._other_pbxprojects.items(): # Build up a list of products in the remote project file, ordered the # same as the targets that produce them. remote_products = [] @@ -2889,7 +2889,7 @@ def Print(self, file=sys.stdout): self._XCPrint(file, 0, '{ ') else: self._XCPrint(file, 0, '{\n') - for property, value in sorted(self._properties.iteritems(), + for property, value in sorted(self._properties.items(), cmp=lambda x, y: cmp(x, y)): if property == 'objects': self._PrintObjects(file) From fe1e89255bdc0d868375446437ecc5f5c0ead956 Mon Sep 17 00:00:00 2001 From: Craig Rodrigues Date: Sun, 19 Mar 2017 13:42:33 -0700 Subject: [PATCH 4/6] Replace deprecated xrange() with range() for Python 3. --- gyp/PRESUBMIT.py | 3 +-- gyp/pylib/gyp/__init__.py | 2 +- gyp/pylib/gyp/generator/make.py | 2 +- gyp/pylib/gyp/generator/xcode.py | 6 +++--- gyp/pylib/gyp/input.py | 14 +++++++------- gyp/pylib/gyp/xcodeproj_file.py | 8 ++++---- 6 files changed, 17 insertions(+), 18 deletions(-) diff --git a/gyp/PRESUBMIT.py b/gyp/PRESUBMIT.py index f6c8a357af..e52f9d2d22 100644 --- a/gyp/PRESUBMIT.py +++ b/gyp/PRESUBMIT.py @@ -76,8 +76,7 @@ def _LicenseHeader(input_api): # Accept any year number from 2009 to the current year. current_year = int(input_api.time.strftime('%Y')) - allowed_years = (str(s) for s in reversed(xrange(2009, current_year + 1))) - + allowed_years = (str(s) for s in reversed(range(2009, current_year + 1))) years_re = '(' + '|'.join(allowed_years) + ')' # The (c) is deprecated, but tolerate it until it's removed from all files. 
diff --git a/gyp/pylib/gyp/__init__.py b/gyp/pylib/gyp/__init__.py index 7137cb77d2..5f82f2ef2c 100755 --- a/gyp/pylib/gyp/__init__.py +++ b/gyp/pylib/gyp/__init__.py @@ -434,7 +434,7 @@ def gyp_main(args): build_file_dir = os.path.abspath(os.path.dirname(build_file)) build_file_dir_components = build_file_dir.split(os.path.sep) components_len = len(build_file_dir_components) - for index in xrange(components_len - 1, -1, -1): + for index in range(components_len - 1, -1, -1): if build_file_dir_components[index] == 'src': options.depth = os.path.sep.join(build_file_dir_components) break diff --git a/gyp/pylib/gyp/generator/make.py b/gyp/pylib/gyp/generator/make.py index 0ac0104701..cfc7b05433 100644 --- a/gyp/pylib/gyp/generator/make.py +++ b/gyp/pylib/gyp/generator/make.py @@ -1547,7 +1547,7 @@ def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, # Postbuilds expect to be run in the gyp file's directory, so insert an # implicit postbuild to cd to there. postbuilds.insert(0, gyp.common.EncodePOSIXShellList(['cd', self.path])) - for i in xrange(len(postbuilds)): + for i in range(len(postbuilds)): if not postbuilds[i].startswith('$'): postbuilds[i] = EscapeShellArgument(postbuilds[i]) self.WriteLn('%s: builddir := $(abs_builddir)' % QuoteSpaces(self.output)) diff --git a/gyp/pylib/gyp/generator/xcode.py b/gyp/pylib/gyp/generator/xcode.py index a2626b689e..694a28afb1 100644 --- a/gyp/pylib/gyp/generator/xcode.py +++ b/gyp/pylib/gyp/generator/xcode.py @@ -1014,7 +1014,7 @@ def GenerateOutput(target_list, target_dicts, data, params): # target. makefile.write('all: \\\n') for concrete_output_index in \ - xrange(0, len(concrete_outputs_by_rule_source)): + range(0, len(concrete_outputs_by_rule_source)): # Only list the first (index [0]) concrete output of each input # in the "all" target. Otherwise, a parallel make (-j > 1) would # attempt to process each input multiple times simultaneously. @@ -1037,7 +1037,7 @@ def GenerateOutput(target_list, target_dicts, data, params): # rule source. Collect the names of the directories that are # required. concrete_output_dirs = [] - for concrete_output_index in xrange(0, len(concrete_outputs)): + for concrete_output_index in range(0, len(concrete_outputs)): concrete_output = concrete_outputs[concrete_output_index] if concrete_output_index == 0: bol = '' @@ -1056,7 +1056,7 @@ def GenerateOutput(target_list, target_dicts, data, params): # the set of additional rule inputs, if any. prerequisites = [rule_source] prerequisites.extend(rule.get('inputs', [])) - for prerequisite_index in xrange(0, len(prerequisites)): + for prerequisite_index in range(0, len(prerequisites)): prerequisite = prerequisites[prerequisite_index] if prerequisite_index == len(prerequisites) - 1: eol = '' diff --git a/gyp/pylib/gyp/input.py b/gyp/pylib/gyp/input.py index 7f0bd78a52..68e51131d8 100644 --- a/gyp/pylib/gyp/input.py +++ b/gyp/pylib/gyp/input.py @@ -1028,7 +1028,7 @@ def ExpandVariables(input, phase, variables, build_file): # Convert all strings that are canonically-represented integers into integers. 
if type(output) is list: - for index in xrange(0, len(output)): + for index in range(0, len(output)): if IsStrCanonicalInt(output[index]): output[index] = int(output[index]) elif IsStrCanonicalInt(output): @@ -1385,7 +1385,7 @@ def QualifyDependencies(targets): toolset = target_dict['toolset'] for dependency_key in all_dependency_sections: dependencies = target_dict.get(dependency_key, []) - for index in xrange(0, len(dependencies)): + for index in range(0, len(dependencies)): dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget( target_build_file, dependencies[index], toolset) if not multiple_toolsets: @@ -1426,7 +1426,7 @@ def ExpandWildcardDependencies(targets, data): for dependency_key in dependency_sections: dependencies = target_dict.get(dependency_key, []) - # Loop this way instead of "for dependency in" or "for index in xrange" + # Loop this way instead of "for dependency in" or "for index in range" # because the dependencies list will be modified within the loop body. index = 0 while index < len(dependencies): @@ -2399,7 +2399,7 @@ def ProcessListFiltersInDict(name, the_dict): exclude_key = list_key + '!' if exclude_key in the_dict: for exclude_item in the_dict[exclude_key]: - for index in xrange(0, len(the_list)): + for index in range(0, len(the_list)): if exclude_item == the_list[index]: # This item matches the exclude_item, so set its action to 0 # (exclude). @@ -2425,7 +2425,7 @@ def ProcessListFiltersInDict(name, the_dict): raise ValueError('Unrecognized action ' + action + ' in ' + name + \ ' key ' + regex_key) - for index in xrange(0, len(the_list)): + for index in range(0, len(the_list)): list_item = the_list[index] if list_actions[index] == action_value: # Even if the regex matches, nothing will change so continue (regex @@ -2456,7 +2456,7 @@ def ProcessListFiltersInDict(name, the_dict): # the indices of items that haven't been seen yet don't shift. That means # that things need to be prepended to excluded_list to maintain them in the # same order that they existed in the_list. - for index in xrange(len(list_actions) - 1, -1, -1): + for index in range(len(list_actions) - 1, -1, -1): if list_actions[index] == 0: # Dump anything with action 0 (exclude). Keep anything with action 1 # (include) or -1 (no include or exclude seen for the item). @@ -2665,7 +2665,7 @@ def TurnIntIntoStrInDict(the_dict): def TurnIntIntoStrInList(the_list): """Given list the_list, recursively converts all integers into strings. 
""" - for index in xrange(0, len(the_list)): + for index in range(0, len(the_list)): item = the_list[index] if type(item) is int: the_list[index] = str(item) diff --git a/gyp/pylib/gyp/xcodeproj_file.py b/gyp/pylib/gyp/xcodeproj_file.py index 62ec580770..9bd582e477 100644 --- a/gyp/pylib/gyp/xcodeproj_file.py +++ b/gyp/pylib/gyp/xcodeproj_file.py @@ -452,7 +452,7 @@ def _HashUpdate(hash, data): digest_int_count = hash.digest_size / 4 digest_ints = struct.unpack('>' + 'I' * digest_int_count, hash.digest()) id_ints = [0, 0, 0] - for index in xrange(0, digest_int_count): + for index in range(0, digest_int_count): id_ints[index % 3] ^= digest_ints[index] self.id = '%08X%08X%08X' % tuple(id_ints) @@ -1426,7 +1426,7 @@ def PathHashables(self): xche = self while xche != None and isinstance(xche, XCHierarchicalElement): xche_hashables = xche.Hashables() - for index in xrange(0, len(xche_hashables)): + for index in range(0, len(xche_hashables)): hashables.insert(index, xche_hashables[index]) xche = xche.parent return hashables @@ -2401,7 +2401,7 @@ def HeadersPhase(self): # The headers phase should come before the resources, sources, and # frameworks phases, if any. insert_at = len(self._properties['buildPhases']) - for index in xrange(0, len(self._properties['buildPhases'])): + for index in range(0, len(self._properties['buildPhases'])): phase = self._properties['buildPhases'][index] if isinstance(phase, PBXResourcesBuildPhase) or \ isinstance(phase, PBXSourcesBuildPhase) or \ @@ -2422,7 +2422,7 @@ def ResourcesPhase(self): # The resources phase should come before the sources and frameworks # phases, if any. insert_at = len(self._properties['buildPhases']) - for index in xrange(0, len(self._properties['buildPhases'])): + for index in range(0, len(self._properties['buildPhases'])): phase = self._properties['buildPhases'][index] if isinstance(phase, PBXSourcesBuildPhase) or \ isinstance(phase, PBXFrameworksBuildPhase): From 517f5e583244a3d9e2263e67954af9be5d040169 Mon Sep 17 00:00:00 2001 From: Craig Rodrigues Date: Sun, 19 Mar 2017 13:59:05 -0700 Subject: [PATCH 5/6] Replace basestring with str, but only on Python 3. On Python 2, basestring is (unicode, str). On Python 3, basestring and unicode are gone, and there is only str. 
---
 gyp/pylib/gyp/MSVSSettings.py | 10 ++++++----
 gyp/pylib/gyp/__init__.py     |  9 ++++++++-
 2 files changed, 14 insertions(+), 5 deletions(-)

diff --git a/gyp/pylib/gyp/MSVSSettings.py b/gyp/pylib/gyp/MSVSSettings.py
index 00721eb237..8073f92b8d 100644
--- a/gyp/pylib/gyp/MSVSSettings.py
+++ b/gyp/pylib/gyp/MSVSSettings.py
@@ -16,6 +16,8 @@

 from __future__ import print_function

+from gyp import string_types
+
 import sys
 import re

@@ -108,11 +110,11 @@ class _String(_Type):
   """A setting that's just a string."""

   def ValidateMSVS(self, value):
-    if not isinstance(value, basestring):
+    if not isinstance(value, string_types):
       raise ValueError('expected string; got %r' % value)

   def ValidateMSBuild(self, value):
-    if not isinstance(value, basestring):
+    if not isinstance(value, string_types):
       raise ValueError('expected string; got %r' % value)

   def ConvertToMSBuild(self, value):
@@ -124,11 +126,11 @@ class _StringList(_Type):
   """A settings that's a list of strings."""

   def ValidateMSVS(self, value):
-    if not isinstance(value, basestring) and not isinstance(value, list):
+    if not isinstance(value, string_types) and not isinstance(value, list):
       raise ValueError('expected string list; got %r' % value)

   def ValidateMSBuild(self, value):
-    if not isinstance(value, basestring) and not isinstance(value, list):
+    if not isinstance(value, string_types) and not isinstance(value, list):
       raise ValueError('expected string list; got %r' % value)

   def ConvertToMSBuild(self, value):
diff --git a/gyp/pylib/gyp/__init__.py b/gyp/pylib/gyp/__init__.py
index 5f82f2ef2c..d5fa9ecf50 100755
--- a/gyp/pylib/gyp/__init__.py
+++ b/gyp/pylib/gyp/__init__.py
@@ -16,6 +16,13 @@
 import traceback
 from gyp.common import GypError

+try:
+  # Python 2
+  string_types = basestring
+except NameError:
+  # Python 3
+  string_types = str
+
 # Default debug modes for GYP
 debug = {}

@@ -412,7 +419,7 @@ def gyp_main(args):
     for option, value in sorted(options.__dict__.items()):
       if option[0] == '_':
         continue
-      if isinstance(value, basestring):
+      if isinstance(value, string_types):
         DebugOutput(DEBUG_GENERAL, "  %s: '%s'", option, value)
       else:
         DebugOutput(DEBUG_GENERAL, "  %s: %s", option, value)
From 63e6bc7d5ca7d1a89d52f3033de7b4bb26c75957 Mon Sep 17 00:00:00 2001
From: Craig Rodrigues
Date: Sun, 19 Mar 2017 14:04:36 -0700
Subject: [PATCH 6/6] _winreg module was renamed to winreg in Python 3.

---
 gyp/pylib/gyp/MSVSVersion.py | 12 +++++++++---
 1 file changed, 9 insertions(+), 3 deletions(-)

diff --git a/gyp/pylib/gyp/MSVSVersion.py b/gyp/pylib/gyp/MSVSVersion.py
index c31ff3e114..293b4145c1 100644
--- a/gyp/pylib/gyp/MSVSVersion.py
+++ b/gyp/pylib/gyp/MSVSVersion.py
@@ -176,12 +176,18 @@ def _RegistryGetValueUsingWinReg(key, value):
     contents of the registry key's value, or None on failure.  Throws
     ImportError if _winreg is unavailable.
   """
-  import _winreg
+  try:
+    # Python 2
+    from _winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx
+  except ImportError:
+    # Python 3
+    from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx
+
   try:
     root, subkey = key.split('\\', 1)
     assert root == 'HKLM'  # Only need HKLM for now.
-    with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, subkey) as hkey:
-      return _winreg.QueryValueEx(hkey, value)[0]
+    with OpenKey(HKEY_LOCAL_MACHINE, subkey) as hkey:
+      return QueryValueEx(hkey, value)[0]
   except WindowsError:
     return None
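
For reference, a minimal standalone sketch (Windows-only; the helper name and
the sample registry key are illustrative examples, not part of gyp) of the
version-agnostic registry read that this last patch arrives at:

    try:
      # Python 2
      from _winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx
    except ImportError:
      # Python 3
      from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx

    def read_hklm_value(subkey, value_name):
      # Return the registry value, or None if the key or value is missing,
      # mirroring the patched _RegistryGetValueUsingWinReg above.
      try:
        with OpenKey(HKEY_LOCAL_MACHINE, subkey) as hkey:
          return QueryValueEx(hkey, value_name)[0]
      except WindowsError:
        return None

    print(read_hklm_value(r'SOFTWARE\Microsoft\Windows NT\CurrentVersion',
                          'ProductName'))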