Chromium Code Reviews
| Index: pylib/gyp/input.py |
| diff --git a/pylib/gyp/input.py b/pylib/gyp/input.py |
| index 22eb333d0524ab11918a36212f90390b522ba11e..124393707b73007cb986fbdbdabc73759c6c3227 100644 |
| --- a/pylib/gyp/input.py |
| +++ b/pylib/gyp/input.py |
| @@ -2,14 +2,9 @@ |
| # Use of this source code is governed by a BSD-style license that can be |
| # found in the LICENSE file. |
| -from compiler.ast import Const |
| -from compiler.ast import Dict |
| -from compiler.ast import Discard |
| -from compiler.ast import List |
| -from compiler.ast import Module |
| -from compiler.ast import Node |
| -from compiler.ast import Stmt |
| -import compiler |
| +from __future__ import print_function |
| + |
| +import ast |
| import gyp.common |
| import gyp.simple_copy |
| import multiprocessing |
| @@ -182,43 +177,38 @@ def CheckedEval(file_contents): |
| Note that this is slower than eval() is. |
| """ |
| - ast = compiler.parse(file_contents) |
| - assert isinstance(ast, Module) |
| - c1 = ast.getChildren() |
| - assert c1[0] is None |
| - assert isinstance(c1[1], Stmt) |
| - c2 = c1[1].getChildren() |
| - assert isinstance(c2[0], Discard) |
| - c3 = c2[0].getChildren() |
| - assert len(c3) == 1 |
| - return CheckNode(c3[0], []) |
| + syntax_tree = ast.parse(file_contents) |
| + assert isinstance(syntax_tree, ast.Module) |
| + c1 = syntax_tree.body |
| + assert len(c1) == 1 |
| + c2 = c1[0] |
| + assert isinstance(c2, ast.Expr) |
| + return CheckNode(c2.value, []) |
| def CheckNode(node, keypath): |
| - if isinstance(node, Dict): |
| - c = node.getChildren() |
| + if isinstance(node, ast.Dict): |
| dict = {} |
| - for n in range(0, len(c), 2): |
| - assert isinstance(c[n], Const) |
| - key = c[n].getChildren()[0] |
| + for key, value in zip(node.keys, node.values): |
| + assert isinstance(key, ast.Str) |
| + key = key.s |
| if key in dict: |
| raise GypError("Key '" + key + "' repeated at level " + |
| repr(len(keypath) + 1) + " with key path '" + |
| '.'.join(keypath) + "'") |
| kp = list(keypath) # Make a copy of the list for descending this node. |
| kp.append(key) |
| - dict[key] = CheckNode(c[n + 1], kp) |
| + dict[key] = CheckNode(value, kp) |
| return dict |
| - elif isinstance(node, List): |
| - c = node.getChildren() |
| + elif isinstance(node, ast.List): |
| children = [] |
| - for index, child in enumerate(c): |
| + for index, child in enumerate(node.elts): |
| kp = list(keypath) # Copy list. |
| kp.append(repr(index)) |
| children.append(CheckNode(child, kp)) |
| return children |
| - elif isinstance(node, Const): |
| - return node.getChildren()[0] |
| + elif isinstance(node, ast.Str): |
| + return node.s |
| else: |
| raise TypeError("Unknown AST node at key path '" + '.'.join(keypath) + |
| "': " + repr(node)) |
| @@ -241,10 +231,10 @@ def LoadOneBuildFile(build_file_path, data, aux_data, includes, |
| else: |
| build_file_data = eval(build_file_contents, {'__builtins__': None}, |
| None) |
| - except SyntaxError, e: |
| + except SyntaxError as e: |
| e.filename = build_file_path |
| raise |
| - except Exception, e: |
| + except Exception as e: |
| gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path) |
| raise |
| @@ -264,7 +254,7 @@ def LoadOneBuildFile(build_file_path, data, aux_data, includes, |
| else: |
| LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data, |
| aux_data, None, check) |
| - except Exception, e: |
| + except Exception as e: |
| gyp.common.ExceptionAppend(e, |
| 'while reading includes of ' + build_file_path) |
| raise |
| @@ -301,7 +291,7 @@ def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data, |
| subdict_path, include) |
| # Recurse into subdictionaries. |
| - for k, v in subdict.iteritems(): |
| + for k, v in subdict.items(): |
| if type(v) is dict: |
| LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, |
| None, check) |
| @@ -466,7 +456,7 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, |
| try: |
| LoadTargetBuildFile(dependency, data, aux_data, variables, |
| includes, depth, check, load_dependencies) |
| - except Exception, e: |
| + except Exception as e: |
| gyp.common.ExceptionAppend( |
| e, 'while loading dependencies of %s' % build_file_path) |
| raise |
| @@ -487,7 +477,7 @@ def CallLoadTargetBuildFile(global_flags, |
| signal.signal(signal.SIGINT, signal.SIG_IGN) |
| # Apply globals so that the worker process behaves the same. |
| - for key, value in global_flags.iteritems(): |
| + for key, value in global_flags.items(): |
| globals()[key] = value |
| SetGeneratorGlobals(generator_input_info) |
| @@ -509,12 +499,12 @@ def CallLoadTargetBuildFile(global_flags, |
| return (build_file_path, |
| build_file_data, |
| dependencies) |
| - except GypError, e: |
| + except GypError as e: |
| sys.stderr.write("gyp: %s\n" % e) |
| return None |
| - except Exception, e: |
| - print >>sys.stderr, 'Exception:', e |
| - print >>sys.stderr, traceback.format_exc() |
| + except Exception as e: |
| + print('Exception:', e, file=sys.stderr) |
| + print(traceback.format_exc(), file=sys.stderr) |
| return None |
| @@ -604,7 +594,7 @@ def LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth, |
| args = (global_flags, dependency, |
| variables, includes, depth, check, generator_input_info), |
| callback = parallel_state.LoadTargetBuildFileCallback) |
| - except KeyboardInterrupt, e: |
| + except KeyboardInterrupt as e: |
| parallel_state.pool.terminate() |
| raise e |
| @@ -903,8 +893,9 @@ def ExpandVariables(input, phase, variables, build_file): |
| stdout=subprocess.PIPE, |
| stderr=subprocess.PIPE, |
| stdin=subprocess.PIPE, |
| - cwd=build_file_dir) |
| - except Exception, e: |
| + cwd=build_file_dir, |
| + universal_newlines=True) |
| + except Exception as e: |
| raise GypError("%s while executing command '%s' in %s" % |
| (e, contents, build_file)) |
| @@ -1018,9 +1009,9 @@ def ExpandVariables(input, phase, variables, build_file): |
| # Convert all strings that are canonically-represented integers into integers. |
| if type(output) is list: |
| - for index in xrange(0, len(output)): |
| - if IsStrCanonicalInt(output[index]): |
| - output[index] = int(output[index]) |
| + for index, outstr in enumerate(output): |
| + if IsStrCanonicalInt(outstr): |
| + output[index] = int(outstr) |
| elif IsStrCanonicalInt(output): |
| output = int(output) |
| @@ -1089,13 +1080,13 @@ def EvalSingleCondition( |
| if eval(ast_code, {'__builtins__': None}, variables): |
| return true_dict |
| return false_dict |
| - except SyntaxError, e: |
| + except SyntaxError as e: |
| syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s ' |
| 'at character %d.' % |
| (str(e.args[0]), e.text, build_file, e.offset), |
| e.filename, e.lineno, e.offset, e.text) |
| raise syntax_error |
| - except NameError, e: |
| + except NameError as e: |
| gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' % |
| (cond_expr_expanded, build_file)) |
| raise GypError(e) |
| @@ -1150,7 +1141,7 @@ def ProcessConditionsInDict(the_dict, phase, variables, build_file): |
| def LoadAutomaticVariablesFromDict(variables, the_dict): |
| # Any keys with plain string values in the_dict become automatic variables. |
| # The variable name is the key name with a "_" character prepended. |
| - for key, value in the_dict.iteritems(): |
| + for key, value in the_dict.items(): |
| if type(value) in (str, int, list): |
| variables['_' + key] = value |
| @@ -1163,7 +1154,7 @@ def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key): |
| # the_dict in the_dict's parent dict. If the_dict's parent is not a dict |
| # (it could be a list or it could be parentless because it is a root dict), |
| # the_dict_key will be None. |
| - for key, value in the_dict.get('variables', {}).iteritems(): |
| + for key, value in the_dict.get('variables', {}).items(): |
| if type(value) not in (str, int, list): |
| continue |
| @@ -1202,7 +1193,7 @@ def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in, |
| # list before we process them so that you can reference one |
| # variable from another. They will be fully expanded by recursion |
| # in ExpandVariables. |
| - for key, value in the_dict['variables'].iteritems(): |
| + for key, value in the_dict['variables'].items(): |
| variables[key] = value |
| # Handle the associated variables dict first, so that any variable |
| @@ -1215,7 +1206,7 @@ def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in, |
| LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) |
| - for key, value in the_dict.iteritems(): |
| + for key, value in the_dict.items(): |
|
[Review comment thread — interleaved with the diff at this point]
Nico (2016/07/29 22:22:06): Did you measure if this change makes gyp slower wh… [comment truncated in capture]
AWhetter (2016/11/05 23:59:50): Just with a few runs of all of the tests, I got th… [comment truncated in capture]
|
| # Skip "variables", which was already processed if present. |
| if key != 'variables' and type(value) is str: |
| expanded = ExpandVariables(value, phase, variables, build_file) |
| @@ -1273,7 +1264,7 @@ def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in, |
| # Recurse into child dicts, or process child lists which may result in |
| # further recursion into descendant dicts. |
| - for key, value in the_dict.iteritems(): |
| + for key, value in the_dict.items(): |
| # Skip "variables" and string values, which were already processed if |
| # present. |
| if key == 'variables' or type(value) is str: |
| @@ -1370,14 +1361,14 @@ def QualifyDependencies(targets): |
| for dep in dependency_sections |
| for op in ('', '!', '/')] |
| - for target, target_dict in targets.iteritems(): |
| + for target, target_dict in targets.items(): |
| target_build_file = gyp.common.BuildFile(target) |
| toolset = target_dict['toolset'] |
| for dependency_key in all_dependency_sections: |
| dependencies = target_dict.get(dependency_key, []) |
| - for index in xrange(0, len(dependencies)): |
| + for index, dep in enumerate(dependencies): |
| dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget( |
| - target_build_file, dependencies[index], toolset) |
| + target_build_file, dep, toolset) |
| if not multiple_toolsets: |
| # Ignore toolset specification in the dependency if it is specified. |
| dep_toolset = toolset |
| @@ -1410,7 +1401,7 @@ def ExpandWildcardDependencies(targets, data): |
| dependency list, must be qualified when this function is called. |
| """ |
| - for target, target_dict in targets.iteritems(): |
| + for target, target_dict in targets.items(): |
| toolset = target_dict['toolset'] |
| target_build_file = gyp.common.BuildFile(target) |
| for dependency_key in dependency_sections: |
| @@ -1472,7 +1463,7 @@ def Unify(l): |
| def RemoveDuplicateDependencies(targets): |
| """Makes sure every dependency appears only once in all targets's dependency |
| lists.""" |
| - for target_name, target_dict in targets.iteritems(): |
| + for target_name, target_dict in targets.items(): |
| for dependency_key in dependency_sections: |
| dependencies = target_dict.get(dependency_key, []) |
| if dependencies: |
| @@ -1488,7 +1479,7 @@ def Filter(l, item): |
| def RemoveSelfDependencies(targets): |
| """Remove self dependencies from targets that have the prune_self_dependency |
| variable set.""" |
| - for target_name, target_dict in targets.iteritems(): |
| + for target_name, target_dict in targets.items(): |
| for dependency_key in dependency_sections: |
| dependencies = target_dict.get(dependency_key, []) |
| if dependencies: |
| @@ -1501,7 +1492,7 @@ def RemoveSelfDependencies(targets): |
| def RemoveLinkDependenciesFromNoneTargets(targets): |
| """Remove dependencies having the 'link_dependency' attribute from the 'none' |
| targets.""" |
| - for target_name, target_dict in targets.iteritems(): |
| + for target_name, target_dict in targets.items(): |
| for dependency_key in dependency_sections: |
| dependencies = target_dict.get(dependency_key, []) |
| if dependencies: |
| @@ -1792,14 +1783,14 @@ def BuildDependencyList(targets): |
| # Create a DependencyGraphNode for each target. Put it into a dict for easy |
| # access. |
| dependency_nodes = {} |
| - for target, spec in targets.iteritems(): |
| + for target, spec in targets.items(): |
| if target not in dependency_nodes: |
| dependency_nodes[target] = DependencyGraphNode(target) |
| # Set up the dependency links. Targets that have no dependencies are treated |
| # as dependent on root_node. |
| root_node = DependencyGraphNode(None) |
| - for target, spec in targets.iteritems(): |
| + for target, spec in targets.items(): |
| target_node = dependency_nodes[target] |
| target_build_file = gyp.common.BuildFile(target) |
| dependencies = spec.get('dependencies') |
| @@ -1823,7 +1814,7 @@ def BuildDependencyList(targets): |
| if not root_node.dependents: |
| # If all targets have dependencies, add the first target as a dependent |
| # of root_node so that the cycle can be discovered from root_node. |
| - target = targets.keys()[0] |
| + target = next(iter(targets)) |
| target_node = dependency_nodes[target] |
| target_node.dependencies.append(root_node) |
| root_node.dependents.append(target_node) |
| @@ -1842,20 +1833,20 @@ def VerifyNoGYPFileCircularDependencies(targets): |
| # Create a DependencyGraphNode for each gyp file containing a target. Put |
| # it into a dict for easy access. |
| dependency_nodes = {} |
| - for target in targets.iterkeys(): |
| + for target in targets.keys(): |
| build_file = gyp.common.BuildFile(target) |
| if not build_file in dependency_nodes: |
| dependency_nodes[build_file] = DependencyGraphNode(build_file) |
| # Set up the dependency links. |
| - for target, spec in targets.iteritems(): |
| + for target, spec in targets.items(): |
| build_file = gyp.common.BuildFile(target) |
| build_file_node = dependency_nodes[build_file] |
| target_dependencies = spec.get('dependencies', []) |
| for dependency in target_dependencies: |
| try: |
| dependency_build_file = gyp.common.BuildFile(dependency) |
| - except GypError, e: |
| + except GypError as e: |
| gyp.common.ExceptionAppend( |
| e, 'while computing dependencies of .gyp file %s' % build_file) |
| raise |
| @@ -1873,7 +1864,7 @@ def VerifyNoGYPFileCircularDependencies(targets): |
| # Files that have no dependencies are treated as dependent on root_node. |
| root_node = DependencyGraphNode(None) |
| - for build_file_node in dependency_nodes.itervalues(): |
| + for build_file_node in dependency_nodes.values(): |
| if len(build_file_node.dependencies) == 0: |
| build_file_node.dependencies.append(root_node) |
| root_node.dependents.append(build_file_node) |
| @@ -1886,7 +1877,7 @@ def VerifyNoGYPFileCircularDependencies(targets): |
| if not root_node.dependents: |
| # If all files have dependencies, add the first file as a dependent |
| # of root_node so that the cycle can be discovered from root_node. |
| - file_node = dependency_nodes.values()[0] |
| + file_node = next(iter(dependency_nodes.values())) |
| file_node.dependencies.append(root_node) |
| root_node.dependents.append(file_node) |
| cycles = [] |
| @@ -2113,7 +2104,7 @@ def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True): |
| def MergeDicts(to, fro, to_file, fro_file): |
| # I wanted to name the parameter "from" but it's a Python keyword... |
| - for k, v in fro.iteritems(): |
| + for k, v in fro.items(): |
| # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give |
| # copy semantics. Something else may want to merge from the |fro| dict |
| # later, and having the same dict ref pointed to twice in the tree isn't |
| @@ -2248,13 +2239,13 @@ def SetUpConfigurations(target, target_dict): |
| if not 'configurations' in target_dict: |
| target_dict['configurations'] = {'Default': {}} |
| if not 'default_configuration' in target_dict: |
| - concrete = [i for (i, config) in target_dict['configurations'].iteritems() |
| + concrete = [i for (i, config) in target_dict['configurations'].items() |
| if not config.get('abstract')] |
| target_dict['default_configuration'] = sorted(concrete)[0] |
| merged_configurations = {} |
| configs = target_dict['configurations'] |
| - for (configuration, old_configuration_dict) in configs.iteritems(): |
| + for (configuration, old_configuration_dict) in configs.items(): |
| # Skip abstract configurations (saves work only). |
| if old_configuration_dict.get('abstract'): |
| continue |
| @@ -2262,7 +2253,7 @@ def SetUpConfigurations(target, target_dict): |
| # Get the inheritance relationship right by making a copy of the target |
| # dict. |
| new_configuration_dict = {} |
| - for (key, target_val) in target_dict.iteritems(): |
| + for (key, target_val) in target_dict.items(): |
| key_ext = key[-1:] |
| if key_ext in key_suffixes: |
| key_base = key[:-1] |
| @@ -2283,10 +2274,9 @@ def SetUpConfigurations(target, target_dict): |
| merged_configurations[configuration]) |
| # Now drop all the abstract ones. |
| - for configuration in target_dict['configurations'].keys(): |
| - old_configuration_dict = target_dict['configurations'][configuration] |
| - if old_configuration_dict.get('abstract'): |
| - del target_dict['configurations'][configuration] |
| + configs = target_dict['configurations'] |
| + target_dict['configurations'] = \ |
| + {k: v for k, v in configs.items() if not v.get('abstract')} |
| # Now that all of the target's configurations have been built, go through |
| # the target dict's keys and remove everything that's been moved into a |
| @@ -2346,7 +2336,7 @@ def ProcessListFiltersInDict(name, the_dict): |
| lists = [] |
| del_lists = [] |
| - for key, value in the_dict.iteritems(): |
| + for key, value in the_dict.items(): |
| operation = key[-1] |
| if operation != '!' and operation != '/': |
| continue |
| @@ -2394,8 +2384,8 @@ def ProcessListFiltersInDict(name, the_dict): |
| exclude_key = list_key + '!' |
| if exclude_key in the_dict: |
| for exclude_item in the_dict[exclude_key]: |
| - for index in xrange(0, len(the_list)): |
| - if exclude_item == the_list[index]: |
| + for index, list_item in enumerate(the_list): |
| + if exclude_item == list_item: |
| # This item matches the exclude_item, so set its action to 0 |
| # (exclude). |
| list_actions[index] = 0 |
| @@ -2420,8 +2410,7 @@ def ProcessListFiltersInDict(name, the_dict): |
| raise ValueError('Unrecognized action ' + action + ' in ' + name + \ |
| ' key ' + regex_key) |
| - for index in xrange(0, len(the_list)): |
| - list_item = the_list[index] |
| + for index, list_item in enumerate(the_list): |
| if list_actions[index] == action_value: |
| # Even if the regex matches, nothing will change so continue (regex |
| # searches are expensive). |
| @@ -2451,7 +2440,7 @@ def ProcessListFiltersInDict(name, the_dict): |
| # the indices of items that haven't been seen yet don't shift. That means |
| # that things need to be prepended to excluded_list to maintain them in the |
| # same order that they existed in the_list. |
| - for index in xrange(len(list_actions) - 1, -1, -1): |
| + for index in range(len(list_actions) - 1, -1, -1): |
| if list_actions[index] == 0: |
| # Dump anything with action 0 (exclude). Keep anything with action 1 |
| # (include) or -1 (no include or exclude seen for the item). |
| @@ -2464,7 +2453,7 @@ def ProcessListFiltersInDict(name, the_dict): |
| the_dict[excluded_key] = excluded_list |
| # Now recurse into subdicts and lists that may contain dicts. |
| - for key, value in the_dict.iteritems(): |
| + for key, value in the_dict.items(): |
| if type(value) is dict: |
| ProcessListFiltersInDict(key, value) |
| elif type(value) is list: |
| @@ -2521,7 +2510,7 @@ def ValidateSourcesInTarget(target, target_dict, build_file, |
| basenames.setdefault(basename, []).append(source) |
| error = '' |
| - for basename, files in basenames.iteritems(): |
| + for basename, files in basenames.items(): |
| if len(files) > 1: |
| error += ' %s: %s\n' % (basename, ' '.join(files)) |
| @@ -2660,8 +2649,7 @@ def TurnIntIntoStrInDict(the_dict): |
| def TurnIntIntoStrInList(the_list): |
| """Given list the_list, recursively converts all integers into strings. |
| """ |
| - for index in xrange(0, len(the_list)): |
| - item = the_list[index] |
| + for index, item in enumerate(the_list): |
| if type(item) is int: |
| the_list[index] = str(item) |
| elif type(item) is dict: |
| @@ -2778,7 +2766,7 @@ def Load(build_files, variables, includes, depth, generator_input_info, check, |
| try: |
| LoadTargetBuildFile(build_file, data, aux_data, |
| variables, includes, depth, check, True) |
| - except Exception, e: |
| + except Exception as e: |
| gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file) |
| raise |
| @@ -2800,7 +2788,7 @@ def Load(build_files, variables, includes, depth, generator_input_info, check, |
| RemoveLinkDependenciesFromNoneTargets(targets) |
| # Apply exclude (!) and regex (/) list filters only for dependency_sections. |
| - for target_name, target_dict in targets.iteritems(): |
| + for target_name, target_dict in targets.items(): |
| tmp_dict = {} |
| for key_base in dependency_sections: |
| for op in ('', '!', '/'): |