Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 # Copyright (c) 2012 Google Inc. All rights reserved. | 1 # Copyright (c) 2012 Google Inc. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 from compiler.ast import Const | 5 from __future__ import print_function |
| 6 from compiler.ast import Dict | 6 |
| 7 from compiler.ast import Discard | 7 import ast |
| 8 from compiler.ast import List | |
| 9 from compiler.ast import Module | |
| 10 from compiler.ast import Node | |
| 11 from compiler.ast import Stmt | |
| 12 import compiler | |
| 13 import gyp.common | 8 import gyp.common |
| 14 import gyp.simple_copy | 9 import gyp.simple_copy |
| 15 import multiprocessing | 10 import multiprocessing |
| 16 import optparse | 11 import optparse |
| 17 import os.path | 12 import os.path |
| 18 import re | 13 import re |
| 19 import shlex | 14 import shlex |
| 20 import signal | 15 import signal |
| 21 import subprocess | 16 import subprocess |
| 22 import sys | 17 import sys |
| (...skipping 152 matching lines...) | |
| 175 | 170 |
| 176 def CheckedEval(file_contents): | 171 def CheckedEval(file_contents): |
| 177 """Return the eval of a gyp file. | 172 """Return the eval of a gyp file. |
| 178 | 173 |
| 179 The gyp file is restricted to dictionaries and lists only, and | 174 The gyp file is restricted to dictionaries and lists only, and |
| 180 repeated keys are not allowed. | 175 repeated keys are not allowed. |
| 181 | 176 |
| 182 Note that this is slower than eval() is. | 177 Note that this is slower than eval() is. |
| 183 """ | 178 """ |
| 184 | 179 |
| 185 ast = compiler.parse(file_contents) | 180 syntax_tree = ast.parse(file_contents) |
| 186 assert isinstance(ast, Module) | 181 assert isinstance(syntax_tree, ast.Module) |
| 187 c1 = ast.getChildren() | 182 c1 = syntax_tree.body |
| 188 assert c1[0] is None | 183 assert len(c1) == 1 |
| 189 assert isinstance(c1[1], Stmt) | 184 c2 = c1[0] |
| 190 c2 = c1[1].getChildren() | 185 assert isinstance(c2, ast.Expr) |
| 191 assert isinstance(c2[0], Discard) | 186 return CheckNode(c2.value, []) |
| 192 c3 = c2[0].getChildren() | |
| 193 assert len(c3) == 1 | |
| 194 return CheckNode(c3[0], []) | |
| 195 | 187 |
| 196 | 188 |
| 197 def CheckNode(node, keypath): | 189 def CheckNode(node, keypath): |
| 198 if isinstance(node, Dict): | 190 if isinstance(node, ast.Dict): |
| 199 c = node.getChildren() | |
| 200 dict = {} | 191 dict = {} |
| 201 for n in range(0, len(c), 2): | 192 for key, value in zip(node.keys, node.values): |
| 202 assert isinstance(c[n], Const) | 193 assert isinstance(key, ast.Str) |
| 203 key = c[n].getChildren()[0] | 194 key = key.s |
| 204 if key in dict: | 195 if key in dict: |
| 205 raise GypError("Key '" + key + "' repeated at level " + | 196 raise GypError("Key '" + key + "' repeated at level " + |
| 206 repr(len(keypath) + 1) + " with key path '" + | 197 repr(len(keypath) + 1) + " with key path '" + |
| 207 '.'.join(keypath) + "'") | 198 '.'.join(keypath) + "'") |
| 208 kp = list(keypath) # Make a copy of the list for descending this node. | 199 kp = list(keypath) # Make a copy of the list for descending this node. |
| 209 kp.append(key) | 200 kp.append(key) |
| 210 dict[key] = CheckNode(c[n + 1], kp) | 201 dict[key] = CheckNode(value, kp) |
| 211 return dict | 202 return dict |
| 212 elif isinstance(node, List): | 203 elif isinstance(node, ast.List): |
| 213 c = node.getChildren() | |
| 214 children = [] | 204 children = [] |
| 215 for index, child in enumerate(c): | 205 for index, child in enumerate(node.elts): |
| 216 kp = list(keypath) # Copy list. | 206 kp = list(keypath) # Copy list. |
| 217 kp.append(repr(index)) | 207 kp.append(repr(index)) |
| 218 children.append(CheckNode(child, kp)) | 208 children.append(CheckNode(child, kp)) |
| 219 return children | 209 return children |
| 220 elif isinstance(node, Const): | 210 elif isinstance(node, ast.Str): |
| 221 return node.getChildren()[0] | 211 return node.s |
| 222 else: | 212 else: |
| 223 raise TypeError("Unknown AST node at key path '" + '.'.join(keypath) + | 213 raise TypeError("Unknown AST node at key path '" + '.'.join(keypath) + |
| 224 "': " + repr(node)) | 214 "': " + repr(node)) |
| 225 | 215 |
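
The heart of this change: the Python 2-only `compiler` package does not exist on Python 3, so `CheckedEval`/`CheckNode` are rebuilt on the stdlib `ast` module, where `ast.parse` yields a `Module` whose `body` is a list of statements, and a gyp file must be exactly one expression statement wrapping a dict literal. Below is a minimal standalone sketch of the same walk; note it substitutes `ast.Constant` for the patch's `ast.Str`, since `ast.Str` was deprecated in Python 3.8 and removed in 3.12.

```python
import ast

def check_node(node, keypath):
    # Mirrors the patched CheckNode, but with ast.Constant (Python 3.8+)
    # where the 2016 patch used ast.Str (removed in Python 3.12).
    if isinstance(node, ast.Dict):
        result = {}
        for key_node, value_node in zip(node.keys, node.values):
            assert isinstance(key_node, ast.Constant) and isinstance(key_node.value, str)
            key = key_node.value
            if key in result:
                raise ValueError("Key %r repeated at key path %r" %
                                 (key, '.'.join(keypath)))
            result[key] = check_node(value_node, keypath + [key])
        return result
    elif isinstance(node, ast.List):
        return [check_node(child, keypath + [repr(i)])
                for i, child in enumerate(node.elts)]
    elif isinstance(node, ast.Constant) and isinstance(node.value, str):
        return node.value
    raise TypeError("Unknown AST node at key path %r: %r" %
                    ('.'.join(keypath), node))

def checked_eval(file_contents):
    syntax_tree = ast.parse(file_contents)   # Module(body=[...])
    assert isinstance(syntax_tree, ast.Module)
    assert len(syntax_tree.body) == 1        # a gyp file is one expression...
    expr = syntax_tree.body[0]
    assert isinstance(expr, ast.Expr)        # ...an expression statement...
    return check_node(expr.value, [])        # ...wrapping a dict literal

print(checked_eval("{'targets': [{'target_name': 'foo'}]}"))
# {'targets': [{'target_name': 'foo'}]}
```
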
| 226 | 216 |
| 227 def LoadOneBuildFile(build_file_path, data, aux_data, includes, | 217 def LoadOneBuildFile(build_file_path, data, aux_data, includes, |
| 228 is_target, check): | 218 is_target, check): |
| 229 if build_file_path in data: | 219 if build_file_path in data: |
| 230 return data[build_file_path] | 220 return data[build_file_path] |
| 231 | 221 |
| 232 if os.path.exists(build_file_path): | 222 if os.path.exists(build_file_path): |
| 233 build_file_contents = open(build_file_path).read() | 223 build_file_contents = open(build_file_path).read() |
| 234 else: | 224 else: |
| 235 raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd())) | 225 raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd())) |
| 236 | 226 |
| 237 build_file_data = None | 227 build_file_data = None |
| 238 try: | 228 try: |
| 239 if check: | 229 if check: |
| 240 build_file_data = CheckedEval(build_file_contents) | 230 build_file_data = CheckedEval(build_file_contents) |
| 241 else: | 231 else: |
| 242 build_file_data = eval(build_file_contents, {'__builtins__': None}, | 232 build_file_data = eval(build_file_contents, {'__builtins__': None}, |
| 243 None) | 233 None) |
| 244 except SyntaxError, e: | 234 except SyntaxError as e: |
| 245 e.filename = build_file_path | 235 e.filename = build_file_path |
| 246 raise | 236 raise |
| 247 except Exception, e: | 237 except Exception as e: |
| 248 gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path) | 238 gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path) |
| 249 raise | 239 raise |
| 250 | 240 |
| 251 if type(build_file_data) is not dict: | 241 if type(build_file_data) is not dict: |
| 252 raise GypError("%s does not evaluate to a dictionary." % build_file_path) | 242 raise GypError("%s does not evaluate to a dictionary." % build_file_path) |
| 253 | 243 |
| 254 data[build_file_path] = build_file_data | 244 data[build_file_path] = build_file_data |
| 255 aux_data[build_file_path] = {} | 245 aux_data[build_file_path] = {} |
| 256 | 246 |
| 257 # Scan for includes and merge them in. | 247 # Scan for includes and merge them in. |
| 258 if ('skip_includes' not in build_file_data or | 248 if ('skip_includes' not in build_file_data or |
| 259 not build_file_data['skip_includes']): | 249 not build_file_data['skip_includes']): |
| 260 try: | 250 try: |
| 261 if is_target: | 251 if is_target: |
| 262 LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data, | 252 LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data, |
| 263 aux_data, includes, check) | 253 aux_data, includes, check) |
| 264 else: | 254 else: |
| 265 LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data, | 255 LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data, |
| 266 aux_data, None, check) | 256 aux_data, None, check) |
| 267 except Exception, e: | 257 except Exception as e: |
| 268 gyp.common.ExceptionAppend(e, | 258 gyp.common.ExceptionAppend(e, |
| 269 'while reading includes of ' + build_file_path) | 259 'while reading includes of ' + build_file_path) |
| 270 raise | 260 raise |
| 271 | 261 |
| 272 return build_file_data | 262 return build_file_data |
| 273 | 263 |
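
Context for the `eval` fallback above (used when `check` is off): passing `{'__builtins__': None}` as the globals strips the builtins namespace, so a .gyp file can evaluate only literal data. A rough illustration, with a made-up gyp snippet:

```python
contents = "{'variables': {'foo%': 'bar'}, 'targets': []}"

# The same restricted environment LoadOneBuildFile uses when check=False:
# literals evaluate fine, but builtins are unreachable.
data = eval(contents, {'__builtins__': None}, None)
assert type(data) is dict

try:
    eval("__import__('os').system('id')", {'__builtins__': None}, None)
except Exception as e:
    print(type(e).__name__)   # name lookup fails without builtins
```
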
| 274 | 264 |
| 275 def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data, | 265 def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data, |
| 276 includes, check): | 266 includes, check): |
| 277 includes_list = [] | 267 includes_list = [] |
| (...skipping 16 matching lines...) | |
| 294 aux_data[subdict_path]['included'] = [] | 284 aux_data[subdict_path]['included'] = [] |
| 295 aux_data[subdict_path]['included'].append(include) | 285 aux_data[subdict_path]['included'].append(include) |
| 296 | 286 |
| 297 gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include) | 287 gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include) |
| 298 | 288 |
| 299 MergeDicts(subdict, | 289 MergeDicts(subdict, |
| 300 LoadOneBuildFile(include, data, aux_data, None, False, check), | 290 LoadOneBuildFile(include, data, aux_data, None, False, check), |
| 301 subdict_path, include) | 291 subdict_path, include) |
| 302 | 292 |
| 303 # Recurse into subdictionaries. | 293 # Recurse into subdictionaries. |
| 304 for k, v in subdict.iteritems(): | 294 for k, v in subdict.items(): |
| 305 if type(v) is dict: | 295 if type(v) is dict: |
| 306 LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, | 296 LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, |
| 307 None, check) | 297 None, check) |
| 308 elif type(v) is list: | 298 elif type(v) is list: |
| 309 LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data, | 299 LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data, |
| 310 check) | 300 check) |
| 311 | 301 |
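
Most hunks in this file are the same mechanical substitution: `iteritems()`/`iterkeys()`/`itervalues()` no longer exist on Python 3 dicts, and `items()`/`keys()`/`values()` return cheap views there (lists on Python 2, so the cost of the rename is bounded). A quick sketch of the difference:

```python
d = {'a': 1, 'b': 2}

# Python 3 spelling used throughout the patch:
for k, v in d.items():
    print(k, v)

# The Python 2-only method this patch removes:
try:
    d.iteritems()
except AttributeError as e:
    print(e)          # 'dict' object has no attribute 'iteritems'

# items() returns a live view on Python 3, not a snapshot:
view = d.items()
d['c'] = 3
print(('c', 3) in view)   # True
```
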
| 312 | 302 |
| 313 # This recurses into lists so that it can look for dicts. | 303 # This recurses into lists so that it can look for dicts. |
| 314 def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data, check): | 304 def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data, check): |
| (...skipping 144 matching lines...) | |
| 459 continue | 449 continue |
| 460 for dependency in target_dict['dependencies']: | 450 for dependency in target_dict['dependencies']: |
| 461 dependencies.append( | 451 dependencies.append( |
| 462 gyp.common.ResolveTarget(build_file_path, dependency, None)[0]) | 452 gyp.common.ResolveTarget(build_file_path, dependency, None)[0]) |
| 463 | 453 |
| 464 if load_dependencies: | 454 if load_dependencies: |
| 465 for dependency in dependencies: | 455 for dependency in dependencies: |
| 466 try: | 456 try: |
| 467 LoadTargetBuildFile(dependency, data, aux_data, variables, | 457 LoadTargetBuildFile(dependency, data, aux_data, variables, |
| 468 includes, depth, check, load_dependencies) | 458 includes, depth, check, load_dependencies) |
| 469 except Exception, e: | 459 except Exception as e: |
| 470 gyp.common.ExceptionAppend( | 460 gyp.common.ExceptionAppend( |
| 471 e, 'while loading dependencies of %s' % build_file_path) | 461 e, 'while loading dependencies of %s' % build_file_path) |
| 472 raise | 462 raise |
| 473 else: | 463 else: |
| 474 return (build_file_path, dependencies) | 464 return (build_file_path, dependencies) |
| 475 | 465 |
| 476 def CallLoadTargetBuildFile(global_flags, | 466 def CallLoadTargetBuildFile(global_flags, |
| 477 build_file_path, variables, | 467 build_file_path, variables, |
| 478 includes, depth, check, | 468 includes, depth, check, |
| 479 generator_input_info): | 469 generator_input_info): |
| 480 """Wrapper around LoadTargetBuildFile for parallel processing. | 470 """Wrapper around LoadTargetBuildFile for parallel processing. |
| 481 | 471 |
| 482 This wrapper is used when LoadTargetBuildFile is executed in | 472 This wrapper is used when LoadTargetBuildFile is executed in |
| 483 a worker process. | 473 a worker process. |
| 484 """ | 474 """ |
| 485 | 475 |
| 486 try: | 476 try: |
| 487 signal.signal(signal.SIGINT, signal.SIG_IGN) | 477 signal.signal(signal.SIGINT, signal.SIG_IGN) |
| 488 | 478 |
| 489 # Apply globals so that the worker process behaves the same. | 479 # Apply globals so that the worker process behaves the same. |
| 490 for key, value in global_flags.iteritems(): | 480 for key, value in global_flags.items(): |
| 491 globals()[key] = value | 481 globals()[key] = value |
| 492 | 482 |
| 493 SetGeneratorGlobals(generator_input_info) | 483 SetGeneratorGlobals(generator_input_info) |
| 494 result = LoadTargetBuildFile(build_file_path, per_process_data, | 484 result = LoadTargetBuildFile(build_file_path, per_process_data, |
| 495 per_process_aux_data, variables, | 485 per_process_aux_data, variables, |
| 496 includes, depth, check, False) | 486 includes, depth, check, False) |
| 497 if not result: | 487 if not result: |
| 498 return result | 488 return result |
| 499 | 489 |
| 500 (build_file_path, dependencies) = result | 490 (build_file_path, dependencies) = result |
| 501 | 491 |
| 502 # We can safely pop the build_file_data from per_process_data because it | 492 # We can safely pop the build_file_data from per_process_data because it |
| 503 # will never be referenced by this process again, so we don't need to keep | 493 # will never be referenced by this process again, so we don't need to keep |
| 504 # it in the cache. | 494 # it in the cache. |
| 505 build_file_data = per_process_data.pop(build_file_path) | 495 build_file_data = per_process_data.pop(build_file_path) |
| 506 | 496 |
| 507 # This gets serialized and sent back to the main process via a pipe. | 497 # This gets serialized and sent back to the main process via a pipe. |
| 508 # It's handled in LoadTargetBuildFileCallback. | 498 # It's handled in LoadTargetBuildFileCallback. |
| 509 return (build_file_path, | 499 return (build_file_path, |
| 510 build_file_data, | 500 build_file_data, |
| 511 dependencies) | 501 dependencies) |
| 512 except GypError, e: | 502 except GypError as e: |
| 513 sys.stderr.write("gyp: %s\n" % e) | 503 sys.stderr.write("gyp: %s\n" % e) |
| 514 return None | 504 return None |
| 515 except Exception, e: | 505 except Exception as e: |
| 516 print >>sys.stderr, 'Exception:', e | 506 print('Exception:', e, file=sys.stderr) |
| 517 print >>sys.stderr, traceback.format_exc() | 507 print(traceback.format_exc(), file=sys.stderr) |
| 518 return None | 508 return None |
| 519 | 509 |
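
The `print >>sys.stderr` statements become function calls, which is what the `from __future__ import print_function` added at the top of the file enables on Python 2 as well, so the code only needs one spelling. Roughly:

```python
from __future__ import print_function  # a no-op on Python 3

import sys
import traceback

def report(e):
    # Python 2-only form removed by the patch:
    #   print >>sys.stderr, 'Exception:', e
    print('Exception:', e, file=sys.stderr)
    print(traceback.format_exc(), file=sys.stderr)

try:
    raise ValueError('boom')
except ValueError as e:
    report(e)
```
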
| 520 | 510 |
| 521 class ParallelProcessingError(Exception): | 511 class ParallelProcessingError(Exception): |
| 522 pass | 512 pass |
| 523 | 513 |
| 524 | 514 |
| 525 class ParallelState(object): | 515 class ParallelState(object): |
| 526 """Class to keep track of state when processing input files in parallel. | 516 """Class to keep track of state when processing input files in parallel. |
| 527 | 517 |
| (...skipping 69 matching lines...) | |
| 597 'non_configuration_keys': globals()['non_configuration_keys'], | 587 'non_configuration_keys': globals()['non_configuration_keys'], |
| 598 'multiple_toolsets': globals()['multiple_toolsets']} | 588 'multiple_toolsets': globals()['multiple_toolsets']} |
| 599 | 589 |
| 600 if not parallel_state.pool: | 590 if not parallel_state.pool: |
| 601 parallel_state.pool = multiprocessing.Pool(multiprocessing.cpu_count()) | 591 parallel_state.pool = multiprocessing.Pool(multiprocessing.cpu_count()) |
| 602 parallel_state.pool.apply_async( | 592 parallel_state.pool.apply_async( |
| 603 CallLoadTargetBuildFile, | 593 CallLoadTargetBuildFile, |
| 604 args = (global_flags, dependency, | 594 args = (global_flags, dependency, |
| 605 variables, includes, depth, check, generator_input_info), | 595 variables, includes, depth, check, generator_input_info), |
| 606 callback = parallel_state.LoadTargetBuildFileCallback) | 596 callback = parallel_state.LoadTargetBuildFileCallback) |
| 607 except KeyboardInterrupt, e: | 597 except KeyboardInterrupt as e: |
| 608 parallel_state.pool.terminate() | 598 parallel_state.pool.terminate() |
| 609 raise e | 599 raise e |
| 610 | 600 |
| 611 parallel_state.condition.release() | 601 parallel_state.condition.release() |
| 612 | 602 |
| 613 parallel_state.pool.close() | 603 parallel_state.pool.close() |
| 614 parallel_state.pool.join() | 604 parallel_state.pool.join() |
| 615 parallel_state.pool = None | 605 parallel_state.pool = None |
| 616 | 606 |
| 617 if parallel_state.error: | 607 if parallel_state.error: |
| (...skipping 278 matching lines...) | |
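
For readers skimming past the elided lines: the parallel path hands each build file to a pool worker via `apply_async` and gathers results with a parent-side callback, with workers ignoring SIGINT so Ctrl-C is handled once, in the parent. A stripped-down sketch of that arrangement (the worker body here is hypothetical):

```python
import multiprocessing
import signal

def load_one(path):
    # Workers ignore SIGINT so only the parent handles Ctrl-C,
    # mirroring CallLoadTargetBuildFile.
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    return (path, ['dep_of_%s' % path])   # hypothetical result tuple

results = []

def on_done(result):
    # Runs in the parent process, like LoadTargetBuildFileCallback.
    results.append(result)

if __name__ == '__main__':
    pool = multiprocessing.Pool(multiprocessing.cpu_count())
    for path in ['a.gyp', 'b.gyp']:
        pool.apply_async(load_one, args=(path,), callback=on_done)
    pool.close()
    pool.join()
    print(results)
```
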
| 896 raise GypError("Unknown command string '%s' in '%s'." % | 886 raise GypError("Unknown command string '%s' in '%s'." % |
| 897 (command_string, contents)) | 887 (command_string, contents)) |
| 898 else: | 888 else: |
| 899 # Fix up command with platform specific workarounds. | 889 # Fix up command with platform specific workarounds. |
| 900 contents = FixupPlatformCommand(contents) | 890 contents = FixupPlatformCommand(contents) |
| 901 try: | 891 try: |
| 902 p = subprocess.Popen(contents, shell=use_shell, | 892 p = subprocess.Popen(contents, shell=use_shell, |
| 903 stdout=subprocess.PIPE, | 893 stdout=subprocess.PIPE, |
| 904 stderr=subprocess.PIPE, | 894 stderr=subprocess.PIPE, |
| 905 stdin=subprocess.PIPE, | 895 stdin=subprocess.PIPE, |
| 906 cwd=build_file_dir) | 896 cwd=build_file_dir, |
| 907 except Exception, e: | 897 universal_newlines=True) |
| 898 except Exception as e: | |
| 908 raise GypError("%s while executing command '%s' in %s" % | 899 raise GypError("%s while executing command '%s' in %s" % |
| 909 (e, contents, build_file)) | 900 (e, contents, build_file)) |
| 910 | 901 |
| 911 p_stdout, p_stderr = p.communicate('') | 902 p_stdout, p_stderr = p.communicate('') |
| 912 | 903 |
| 913 if p.wait() != 0 or p_stderr: | 904 if p.wait() != 0 or p_stderr: |
| 914 sys.stderr.write(p_stderr) | 905 sys.stderr.write(p_stderr) |
| 915 # Simulate check_call behavior, since check_call only exists | 906 # Simulate check_call behavior, since check_call only exists |
| 916 # in python 2.5 and later. | 907 # in python 2.5 and later. |
| 917 raise GypError("Call to '%s' returned exit status %d while in %s." % | 908 raise GypError("Call to '%s' returned exit status %d while in %s." % |
| (...skipping 93 matching lines...) | |
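
The one behavioral addition in this hunk is `universal_newlines=True`: on Python 3 a plain `Popen` pipe carries `bytes`, so `p.communicate('')` (a `str`) and `sys.stderr.write(p_stderr)` would both break; with the flag, stdin/stdout/stderr are text streams on both interpreters. A sketch:

```python
import subprocess
import sys

p = subprocess.Popen([sys.executable, '-c', 'print("hi")'],
                     stdout=subprocess.PIPE,
                     stderr=subprocess.PIPE,
                     stdin=subprocess.PIPE,
                     universal_newlines=True)  # text mode: str in, str out
out, err = p.communicate('')   # '' is str; without the flag Py3 expects bytes
assert isinstance(out, str) and out == 'hi\n'
```
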
| 1011 new_output = [] | 1002 new_output = [] |
| 1012 for item in output: | 1003 for item in output: |
| 1013 new_output.append( | 1004 new_output.append( |
| 1014 ExpandVariables(item, phase, variables, build_file)) | 1005 ExpandVariables(item, phase, variables, build_file)) |
| 1015 output = new_output | 1006 output = new_output |
| 1016 else: | 1007 else: |
| 1017 output = ExpandVariables(output, phase, variables, build_file) | 1008 output = ExpandVariables(output, phase, variables, build_file) |
| 1018 | 1009 |
| 1019 # Convert all strings that are canonically-represented integers into integers. | 1010 # Convert all strings that are canonically-represented integers into integers. |
| 1020 if type(output) is list: | 1011 if type(output) is list: |
| 1021 for index in xrange(0, len(output)): | 1012 for index, outstr in enumerate(output): |
| 1022 if IsStrCanonicalInt(output[index]): | 1013 if IsStrCanonicalInt(outstr): |
| 1023 output[index] = int(output[index]) | 1014 output[index] = int(outstr) |
| 1024 elif IsStrCanonicalInt(output): | 1015 elif IsStrCanonicalInt(output): |
| 1025 output = int(output) | 1016 output = int(output) |
| 1026 | 1017 |
| 1027 return output | 1018 return output |
| 1028 | 1019 |
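
`xrange` does not exist on Python 3, and `enumerate` replaces the index loop while still permitting in-place assignment back into the list. Assuming a simplified stand-in for `IsStrCanonicalInt`, the rewritten loop behaves like:

```python
def is_str_canonical_int(s):
    # Simplified stand-in for gyp's IsStrCanonicalInt.
    return (isinstance(s, str) and s.lstrip('-').isdigit() and
            (s == '0' or not s.lstrip('-').startswith('0')))

output = ['12', 'abc', '-3', '007']
for index, outstr in enumerate(output):
    if is_str_canonical_int(outstr):
        output[index] = int(outstr)   # safe: enumerate gives a stable index
print(output)   # [12, 'abc', -3, '007']  ('007' is not canonical)
```
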
| 1029 # The same condition is often evaluated over and over again so it | 1020 # The same condition is often evaluated over and over again so it |
| 1030 # makes sense to cache as much as possible between evaluations. | 1021 # makes sense to cache as much as possible between evaluations. |
| 1031 cached_conditions_asts = {} | 1022 cached_conditions_asts = {} |
| 1032 | 1023 |
| 1033 def EvalCondition(condition, conditions_key, phase, variables, build_file): | 1024 def EvalCondition(condition, conditions_key, phase, variables, build_file): |
| (...skipping 48 matching lines...) | |
| 1082 | 1073 |
| 1083 try: | 1074 try: |
| 1084 if cond_expr_expanded in cached_conditions_asts: | 1075 if cond_expr_expanded in cached_conditions_asts: |
| 1085 ast_code = cached_conditions_asts[cond_expr_expanded] | 1076 ast_code = cached_conditions_asts[cond_expr_expanded] |
| 1086 else: | 1077 else: |
| 1087 ast_code = compile(cond_expr_expanded, '<string>', 'eval') | 1078 ast_code = compile(cond_expr_expanded, '<string>', 'eval') |
| 1088 cached_conditions_asts[cond_expr_expanded] = ast_code | 1079 cached_conditions_asts[cond_expr_expanded] = ast_code |
| 1089 if eval(ast_code, {'__builtins__': None}, variables): | 1080 if eval(ast_code, {'__builtins__': None}, variables): |
| 1090 return true_dict | 1081 return true_dict |
| 1091 return false_dict | 1082 return false_dict |
| 1092 except SyntaxError, e: | 1083 except SyntaxError as e: |
| 1093 syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s ' | 1084 syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s ' |
| 1094 'at character %d.' % | 1085 'at character %d.' % |
| 1095 (str(e.args[0]), e.text, build_file, e.offset), | 1086 (str(e.args[0]), e.text, build_file, e.offset), |
| 1096 e.filename, e.lineno, e.offset, e.text) | 1087 e.filename, e.lineno, e.offset, e.text) |
| 1097 raise syntax_error | 1088 raise syntax_error |
| 1098 except NameError, e: | 1089 except NameError as e: |
| 1099 gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' % | 1090 gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' % |
| 1100 (cond_expr_expanded, build_file)) | 1091 (cond_expr_expanded, build_file)) |
| 1101 raise GypError(e) | 1092 raise GypError(e) |
| 1102 | 1093 |
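
As the comment above the cache notes, the same condition string is evaluated for many targets, so `EvalCondition` compiles it once and caches the code object; later calls skip parsing entirely. The pattern in isolation:

```python
cached_asts = {}

def eval_condition(expr, variables):
    code = cached_asts.get(expr)
    if code is None:
        code = compile(expr, '<string>', 'eval')   # parse once...
        cached_asts[expr] = code
    return eval(code, {'__builtins__': None}, variables)  # ...eval many times

print(eval_condition('OS == "linux" and bits == 64',
                     {'OS': 'linux', 'bits': 64}))   # True
```
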
| 1103 | 1094 |
| 1104 def ProcessConditionsInDict(the_dict, phase, variables, build_file): | 1095 def ProcessConditionsInDict(the_dict, phase, variables, build_file): |
| 1105 # Process a 'conditions' or 'target_conditions' section in the_dict, | 1096 # Process a 'conditions' or 'target_conditions' section in the_dict, |
| 1106 # depending on phase. | 1097 # depending on phase. |
| 1107 # early -> conditions | 1098 # early -> conditions |
| 1108 # late -> target_conditions | 1099 # late -> target_conditions |
| (...skipping 34 matching lines...) | |
| 1143 # merging it. | 1134 # merging it. |
| 1144 ProcessVariablesAndConditionsInDict(merge_dict, phase, | 1135 ProcessVariablesAndConditionsInDict(merge_dict, phase, |
| 1145 variables, build_file) | 1136 variables, build_file) |
| 1146 | 1137 |
| 1147 MergeDicts(the_dict, merge_dict, build_file, build_file) | 1138 MergeDicts(the_dict, merge_dict, build_file, build_file) |
| 1148 | 1139 |
| 1149 | 1140 |
| 1150 def LoadAutomaticVariablesFromDict(variables, the_dict): | 1141 def LoadAutomaticVariablesFromDict(variables, the_dict): |
| 1151 # Any keys with plain string values in the_dict become automatic variables. | 1142 # Any keys with plain string values in the_dict become automatic variables. |
| 1152 # The variable name is the key name with a "_" character prepended. | 1143 # The variable name is the key name with a "_" character prepended. |
| 1153 for key, value in the_dict.iteritems(): | 1144 for key, value in the_dict.items(): |
| 1154 if type(value) in (str, int, list): | 1145 if type(value) in (str, int, list): |
| 1155 variables['_' + key] = value | 1146 variables['_' + key] = value |
| 1156 | 1147 |
| 1157 | 1148 |
| 1158 def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key): | 1149 def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key): |
| 1159 # Any keys in the_dict's "variables" dict, if it has one, becomes a | 1150 # Any keys in the_dict's "variables" dict, if it has one, becomes a |
| 1160 # variable. The variable name is the key name in the "variables" dict. | 1151 # variable. The variable name is the key name in the "variables" dict. |
| 1161 # Variables that end with the % character are set only if they are unset in | 1152 # Variables that end with the % character are set only if they are unset in |
| 1162 # the variables dict. the_dict_key is the name of the key that accesses | 1153 # the variables dict. the_dict_key is the name of the key that accesses |
| 1163 # the_dict in the_dict's parent dict. If the_dict's parent is not a dict | 1154 # the_dict in the_dict's parent dict. If the_dict's parent is not a dict |
| 1164 # (it could be a list or it could be parentless because it is a root dict), | 1155 # (it could be a list or it could be parentless because it is a root dict), |
| 1165 # the_dict_key will be None. | 1156 # the_dict_key will be None. |
| 1166 for key, value in the_dict.get('variables', {}).iteritems(): | 1157 for key, value in the_dict.get('variables', {}).items(): |
| 1167 if type(value) not in (str, int, list): | 1158 if type(value) not in (str, int, list): |
| 1168 continue | 1159 continue |
| 1169 | 1160 |
| 1170 if key.endswith('%'): | 1161 if key.endswith('%'): |
| 1171 variable_name = key[:-1] | 1162 variable_name = key[:-1] |
| 1172 if variable_name in variables: | 1163 if variable_name in variables: |
| 1173 # If the variable is already set, don't set it. | 1164 # If the variable is already set, don't set it. |
| 1174 continue | 1165 continue |
| 1175 if the_dict_key is 'variables' and variable_name in the_dict: | 1166 if the_dict_key is 'variables' and variable_name in the_dict: |
| 1176 # If the variable is set without a % in the_dict, and the_dict is a | 1167 # If the variable is set without a % in the_dict, and the_dict is a |
| (...skipping 18 matching lines...) | |
| 1195 # Make a copy of the variables_in dict that can be modified during the | 1186 # Make a copy of the variables_in dict that can be modified during the |
| 1196 # loading of automatics and the loading of the variables dict. | 1187 # loading of automatics and the loading of the variables dict. |
| 1197 variables = variables_in.copy() | 1188 variables = variables_in.copy() |
| 1198 LoadAutomaticVariablesFromDict(variables, the_dict) | 1189 LoadAutomaticVariablesFromDict(variables, the_dict) |
| 1199 | 1190 |
| 1200 if 'variables' in the_dict: | 1191 if 'variables' in the_dict: |
| 1201 # Make sure all the local variables are added to the variables | 1192 # Make sure all the local variables are added to the variables |
| 1202 # list before we process them so that you can reference one | 1193 # list before we process them so that you can reference one |
| 1203 # variable from another. They will be fully expanded by recursion | 1194 # variable from another. They will be fully expanded by recursion |
| 1204 # in ExpandVariables. | 1195 # in ExpandVariables. |
| 1205 for key, value in the_dict['variables'].iteritems(): | 1196 for key, value in the_dict['variables'].items(): |
| 1206 variables[key] = value | 1197 variables[key] = value |
| 1207 | 1198 |
| 1208 # Handle the associated variables dict first, so that any variable | 1199 # Handle the associated variables dict first, so that any variable |
| 1209 # references within can be resolved prior to using them as variables. | 1200 # references within can be resolved prior to using them as variables. |
| 1210 # Pass a copy of the variables dict to avoid having it be tainted. | 1201 # Pass a copy of the variables dict to avoid having it be tainted. |
| 1211 # Otherwise, it would have extra automatics added for everything that | 1202 # Otherwise, it would have extra automatics added for everything that |
| 1212 # should just be an ordinary variable in this scope. | 1203 # should just be an ordinary variable in this scope. |
| 1213 ProcessVariablesAndConditionsInDict(the_dict['variables'], phase, | 1204 ProcessVariablesAndConditionsInDict(the_dict['variables'], phase, |
| 1214 variables, build_file, 'variables') | 1205 variables, build_file, 'variables') |
| 1215 | 1206 |
| 1216 LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) | 1207 LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) |
| 1217 | 1208 |
| 1218 for key, value in the_dict.iteritems(): | 1209 for key, value in the_dict.items(): |
|
> **Nico** (2016/07/29 22:22:06): Did you measure if this change makes gyp slower wh…
>
> **AWhetter** (2016/11/05 23:59:50): Just with a few runs of all of the tests, I got th…
| 1219 # Skip "variables", which was already processed if present. | 1210 # Skip "variables", which was already processed if present. |
| 1220 if key != 'variables' and type(value) is str: | 1211 if key != 'variables' and type(value) is str: |
| 1221 expanded = ExpandVariables(value, phase, variables, build_file) | 1212 expanded = ExpandVariables(value, phase, variables, build_file) |
| 1222 if type(expanded) not in (str, int): | 1213 if type(expanded) not in (str, int): |
| 1223 raise ValueError( | 1214 raise ValueError( |
| 1224 'Variable expansion in this context permits str and int ' + \ | 1215 'Variable expansion in this context permits str and int ' + \ |
| 1225 'only, found ' + expanded.__class__.__name__ + ' for ' + key) | 1216 'only, found ' + expanded.__class__.__name__ + ' for ' + key) |
| 1226 the_dict[key] = expanded | 1217 the_dict[key] = expanded |
| 1227 | 1218 |
| 1228 # Variable expansion may have resulted in changes to automatics. Reload. | 1219 # Variable expansion may have resulted in changes to automatics. Reload. |
| (...skipping 37 matching lines...) | |
| 1266 ProcessConditionsInDict(the_dict, phase, variables, build_file) | 1257 ProcessConditionsInDict(the_dict, phase, variables, build_file) |
| 1267 | 1258 |
| 1268 # Conditional processing may have resulted in changes to automatics or the | 1259 # Conditional processing may have resulted in changes to automatics or the |
| 1269 # variables dict. Reload. | 1260 # variables dict. Reload. |
| 1270 variables = variables_in.copy() | 1261 variables = variables_in.copy() |
| 1271 LoadAutomaticVariablesFromDict(variables, the_dict) | 1262 LoadAutomaticVariablesFromDict(variables, the_dict) |
| 1272 LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) | 1263 LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) |
| 1273 | 1264 |
| 1274 # Recurse into child dicts, or process child lists which may result in | 1265 # Recurse into child dicts, or process child lists which may result in |
| 1275 # further recursion into descendant dicts. | 1266 # further recursion into descendant dicts. |
| 1276 for key, value in the_dict.iteritems(): | 1267 for key, value in the_dict.items(): |
| 1277 # Skip "variables" and string values, which were already processed if | 1268 # Skip "variables" and string values, which were already processed if |
| 1278 # present. | 1269 # present. |
| 1279 if key == 'variables' or type(value) is str: | 1270 if key == 'variables' or type(value) is str: |
| 1280 continue | 1271 continue |
| 1281 if type(value) is dict: | 1272 if type(value) is dict: |
| 1282 # Pass a copy of the variables dict so that subdicts can't influence | 1273 # Pass a copy of the variables dict so that subdicts can't influence |
| 1283 # parents. | 1274 # parents. |
| 1284 ProcessVariablesAndConditionsInDict(value, phase, variables, | 1275 ProcessVariablesAndConditionsInDict(value, phase, variables, |
| 1285 build_file, key) | 1276 build_file, key) |
| 1286 elif type(value) is list: | 1277 elif type(value) is list: |
| (...skipping 76 matching lines...) | |
| 1363 links are examined, and any dependencies referenced will be rewritten | 1354 links are examined, and any dependencies referenced will be rewritten |
| 1364 so that they are fully-qualified and relative to the current directory. | 1355 so that they are fully-qualified and relative to the current directory. |
| 1365 All rewritten dependencies are suitable for use as keys to |targets| or a | 1356 All rewritten dependencies are suitable for use as keys to |targets| or a |
| 1366 similar dict. | 1357 similar dict. |
| 1367 """ | 1358 """ |
| 1368 | 1359 |
| 1369 all_dependency_sections = [dep + op | 1360 all_dependency_sections = [dep + op |
| 1370 for dep in dependency_sections | 1361 for dep in dependency_sections |
| 1371 for op in ('', '!', '/')] | 1362 for op in ('', '!', '/')] |
| 1372 | 1363 |
| 1373 for target, target_dict in targets.iteritems(): | 1364 for target, target_dict in targets.items(): |
| 1374 target_build_file = gyp.common.BuildFile(target) | 1365 target_build_file = gyp.common.BuildFile(target) |
| 1375 toolset = target_dict['toolset'] | 1366 toolset = target_dict['toolset'] |
| 1376 for dependency_key in all_dependency_sections: | 1367 for dependency_key in all_dependency_sections: |
| 1377 dependencies = target_dict.get(dependency_key, []) | 1368 dependencies = target_dict.get(dependency_key, []) |
| 1378 for index in xrange(0, len(dependencies)): | 1369 for index, dep in enumerate(dependencies): |
| 1379 dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget( | 1370 dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget( |
| 1380 target_build_file, dependencies[index], toolset) | 1371 target_build_file, dep, toolset) |
| 1381 if not multiple_toolsets: | 1372 if not multiple_toolsets: |
| 1382 # Ignore toolset specification in the dependency if it is specified. | 1373 # Ignore toolset specification in the dependency if it is specified. |
| 1383 dep_toolset = toolset | 1374 dep_toolset = toolset |
| 1384 dependency = gyp.common.QualifiedTarget(dep_file, | 1375 dependency = gyp.common.QualifiedTarget(dep_file, |
| 1385 dep_target, | 1376 dep_target, |
| 1386 dep_toolset) | 1377 dep_toolset) |
| 1387 dependencies[index] = dependency | 1378 dependencies[index] = dependency |
| 1388 | 1379 |
| 1389 # Make sure anything appearing in a list other than "dependencies" also | 1380 # Make sure anything appearing in a list other than "dependencies" also |
| 1390 # appears in the "dependencies" list. | 1381 # appears in the "dependencies" list. |
| (...skipping 12 matching lines...) | |
| 1403 build_file. The |data| dict provides access to build file dicts. | 1394 build_file. The |data| dict provides access to build file dicts. |
| 1404 | 1395 |
| 1405 Any target that does not wish to be included by wildcard can provide an | 1396 Any target that does not wish to be included by wildcard can provide an |
| 1406 optional "suppress_wildcard" key in its target dict. When present and | 1397 optional "suppress_wildcard" key in its target dict. When present and |
| 1407 true, a wildcard dependency link will not include such targets. | 1398 true, a wildcard dependency link will not include such targets. |
| 1408 | 1399 |
| 1409 All dependency names, including the keys to |targets| and the values in each | 1400 All dependency names, including the keys to |targets| and the values in each |
| 1410 dependency list, must be qualified when this function is called. | 1401 dependency list, must be qualified when this function is called. |
| 1411 """ | 1402 """ |
| 1412 | 1403 |
| 1413 for target, target_dict in targets.iteritems(): | 1404 for target, target_dict in targets.items(): |
| 1414 toolset = target_dict['toolset'] | 1405 toolset = target_dict['toolset'] |
| 1415 target_build_file = gyp.common.BuildFile(target) | 1406 target_build_file = gyp.common.BuildFile(target) |
| 1416 for dependency_key in dependency_sections: | 1407 for dependency_key in dependency_sections: |
| 1417 dependencies = target_dict.get(dependency_key, []) | 1408 dependencies = target_dict.get(dependency_key, []) |
| 1418 | 1409 |
| 1419 # Loop this way instead of "for dependency in" or "for index in xrange" | 1410 # Loop this way instead of "for dependency in" or "for index in xrange" |
| 1420 # because the dependencies list will be modified within the loop body. | 1411 # because the dependencies list will be modified within the loop body. |
| 1421 index = 0 | 1412 index = 0 |
| 1422 while index < len(dependencies): | 1413 while index < len(dependencies): |
| 1423 (dependency_build_file, dependency_target, dependency_toolset) = \ | 1414 (dependency_build_file, dependency_target, dependency_toolset) = \ |
| (...skipping 41 matching lines...) | |
| 1465 | 1456 |
| 1466 def Unify(l): | 1457 def Unify(l): |
| 1467 """Removes duplicate elements from l, keeping the first element.""" | 1458 """Removes duplicate elements from l, keeping the first element.""" |
| 1468 seen = {} | 1459 seen = {} |
| 1469 return [seen.setdefault(e, e) for e in l if e not in seen] | 1460 return [seen.setdefault(e, e) for e in l if e not in seen] |
| 1470 | 1461 |
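
`Unify` deserves a second look: `seen.setdefault(e, e)` stores an element on first sight and returns the stored value either way, while the `if e not in seen` filter drops later duplicates, giving an order-preserving dedup:

```python
def unify(l):
    """Removes duplicate elements from l, keeping the first element."""
    seen = {}
    return [seen.setdefault(e, e) for e in l if e not in seen]

print(unify(['a', 'b', 'a', 'c', 'b']))   # ['a', 'b', 'c']
```

On Python 3.7+ the same result is available as `list(dict.fromkeys(l))`, relying on insertion-ordered dicts.
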
| 1471 | 1462 |
| 1472 def RemoveDuplicateDependencies(targets): | 1463 def RemoveDuplicateDependencies(targets): |
| 1473 """Makes sure every dependency appears only once in all targets's dependency | 1464 """Makes sure every dependency appears only once in all targets's dependency |
| 1474 lists.""" | 1465 lists.""" |
| 1475 for target_name, target_dict in targets.iteritems(): | 1466 for target_name, target_dict in targets.items(): |
| 1476 for dependency_key in dependency_sections: | 1467 for dependency_key in dependency_sections: |
| 1477 dependencies = target_dict.get(dependency_key, []) | 1468 dependencies = target_dict.get(dependency_key, []) |
| 1478 if dependencies: | 1469 if dependencies: |
| 1479 target_dict[dependency_key] = Unify(dependencies) | 1470 target_dict[dependency_key] = Unify(dependencies) |
| 1480 | 1471 |
| 1481 | 1472 |
| 1482 def Filter(l, item): | 1473 def Filter(l, item): |
| 1483 """Removes item from l.""" | 1474 """Removes item from l.""" |
| 1484 res = {} | 1475 res = {} |
| 1485 return [res.setdefault(e, e) for e in l if e != item] | 1476 return [res.setdefault(e, e) for e in l if e != item] |
| 1486 | 1477 |
| 1487 | 1478 |
| 1488 def RemoveSelfDependencies(targets): | 1479 def RemoveSelfDependencies(targets): |
| 1489 """Remove self dependencies from targets that have the prune_self_dependency | 1480 """Remove self dependencies from targets that have the prune_self_dependency |
| 1490 variable set.""" | 1481 variable set.""" |
| 1491 for target_name, target_dict in targets.iteritems(): | 1482 for target_name, target_dict in targets.items(): |
| 1492 for dependency_key in dependency_sections: | 1483 for dependency_key in dependency_sections: |
| 1493 dependencies = target_dict.get(dependency_key, []) | 1484 dependencies = target_dict.get(dependency_key, []) |
| 1494 if dependencies: | 1485 if dependencies: |
| 1495 for t in dependencies: | 1486 for t in dependencies: |
| 1496 if t == target_name: | 1487 if t == target_name: |
| 1497 if targets[t].get('variables', {}).get('prune_self_dependency', 0): | 1488 if targets[t].get('variables', {}).get('prune_self_dependency', 0): |
| 1498 target_dict[dependency_key] = Filter(dependencies, target_name) | 1489 target_dict[dependency_key] = Filter(dependencies, target_name) |
| 1499 | 1490 |
| 1500 | 1491 |
| 1501 def RemoveLinkDependenciesFromNoneTargets(targets): | 1492 def RemoveLinkDependenciesFromNoneTargets(targets): |
| 1502 """Remove dependencies having the 'link_dependency' attribute from the 'none' | 1493 """Remove dependencies having the 'link_dependency' attribute from the 'none' |
| 1503 targets.""" | 1494 targets.""" |
| 1504 for target_name, target_dict in targets.iteritems(): | 1495 for target_name, target_dict in targets.items(): |
| 1505 for dependency_key in dependency_sections: | 1496 for dependency_key in dependency_sections: |
| 1506 dependencies = target_dict.get(dependency_key, []) | 1497 dependencies = target_dict.get(dependency_key, []) |
| 1507 if dependencies: | 1498 if dependencies: |
| 1508 for t in dependencies: | 1499 for t in dependencies: |
| 1509 if target_dict.get('type', None) == 'none': | 1500 if target_dict.get('type', None) == 'none': |
| 1510 if targets[t].get('variables', {}).get('link_dependency', 0): | 1501 if targets[t].get('variables', {}).get('link_dependency', 0): |
| 1511 target_dict[dependency_key] = \ | 1502 target_dict[dependency_key] = \ |
| 1512 Filter(target_dict[dependency_key], t) | 1503 Filter(target_dict[dependency_key], t) |
| 1513 | 1504 |
| 1514 | 1505 |
| (...skipping 270 matching lines...) | |
| 1785 """ | 1776 """ |
| 1786 Returns a list of dependency targets that are linked into this target. | 1777 Returns a list of dependency targets that are linked into this target. |
| 1787 """ | 1778 """ |
| 1788 return self._LinkDependenciesInternal(targets, True) | 1779 return self._LinkDependenciesInternal(targets, True) |
| 1789 | 1780 |
| 1790 | 1781 |
| 1791 def BuildDependencyList(targets): | 1782 def BuildDependencyList(targets): |
| 1792 # Create a DependencyGraphNode for each target. Put it into a dict for easy | 1783 # Create a DependencyGraphNode for each target. Put it into a dict for easy |
| 1793 # access. | 1784 # access. |
| 1794 dependency_nodes = {} | 1785 dependency_nodes = {} |
| 1795 for target, spec in targets.iteritems(): | 1786 for target, spec in targets.items(): |
| 1796 if target not in dependency_nodes: | 1787 if target not in dependency_nodes: |
| 1797 dependency_nodes[target] = DependencyGraphNode(target) | 1788 dependency_nodes[target] = DependencyGraphNode(target) |
| 1798 | 1789 |
| 1799 # Set up the dependency links. Targets that have no dependencies are treated | 1790 # Set up the dependency links. Targets that have no dependencies are treated |
| 1800 # as dependent on root_node. | 1791 # as dependent on root_node. |
| 1801 root_node = DependencyGraphNode(None) | 1792 root_node = DependencyGraphNode(None) |
| 1802 for target, spec in targets.iteritems(): | 1793 for target, spec in targets.items(): |
| 1803 target_node = dependency_nodes[target] | 1794 target_node = dependency_nodes[target] |
| 1804 target_build_file = gyp.common.BuildFile(target) | 1795 target_build_file = gyp.common.BuildFile(target) |
| 1805 dependencies = spec.get('dependencies') | 1796 dependencies = spec.get('dependencies') |
| 1806 if not dependencies: | 1797 if not dependencies: |
| 1807 target_node.dependencies = [root_node] | 1798 target_node.dependencies = [root_node] |
| 1808 root_node.dependents.append(target_node) | 1799 root_node.dependents.append(target_node) |
| 1809 else: | 1800 else: |
| 1810 for dependency in dependencies: | 1801 for dependency in dependencies: |
| 1811 dependency_node = dependency_nodes.get(dependency) | 1802 dependency_node = dependency_nodes.get(dependency) |
| 1812 if not dependency_node: | 1803 if not dependency_node: |
| 1813 raise GypError("Dependency '%s' not found while " | 1804 raise GypError("Dependency '%s' not found while " |
| 1814 "trying to load target %s" % (dependency, target)) | 1805 "trying to load target %s" % (dependency, target)) |
| 1815 target_node.dependencies.append(dependency_node) | 1806 target_node.dependencies.append(dependency_node) |
| 1816 dependency_node.dependents.append(target_node) | 1807 dependency_node.dependents.append(target_node) |
| 1817 | 1808 |
| 1818 flat_list = root_node.FlattenToList() | 1809 flat_list = root_node.FlattenToList() |
| 1819 | 1810 |
| 1820 # If there's anything left unvisited, there must be a circular dependency | 1811 # If there's anything left unvisited, there must be a circular dependency |
| 1821 # (cycle). | 1812 # (cycle). |
| 1822 if len(flat_list) != len(targets): | 1813 if len(flat_list) != len(targets): |
| 1823 if not root_node.dependents: | 1814 if not root_node.dependents: |
| 1824 # If all targets have dependencies, add the first target as a dependent | 1815 # If all targets have dependencies, add the first target as a dependent |
| 1825 # of root_node so that the cycle can be discovered from root_node. | 1816 # of root_node so that the cycle can be discovered from root_node. |
| 1826 target = targets.keys()[0] | 1817 target = next(iter(targets)) |
| 1827 target_node = dependency_nodes[target] | 1818 target_node = dependency_nodes[target] |
| 1828 target_node.dependencies.append(root_node) | 1819 target_node.dependencies.append(root_node) |
| 1829 root_node.dependents.append(target_node) | 1820 root_node.dependents.append(target_node) |
| 1830 | 1821 |
| 1831 cycles = [] | 1822 cycles = [] |
| 1832 for cycle in root_node.FindCycles(): | 1823 for cycle in root_node.FindCycles(): |
| 1833 paths = [node.ref for node in cycle] | 1824 paths = [node.ref for node in cycle] |
| 1834 cycles.append('Cycle: %s' % ' -> '.join(paths)) | 1825 cycles.append('Cycle: %s' % ' -> '.join(paths)) |
| 1835 raise DependencyGraphNode.CircularException( | 1826 raise DependencyGraphNode.CircularException( |
| 1836 'Cycles in dependency graph detected:\n' + '\n'.join(cycles)) | 1827 'Cycles in dependency graph detected:\n' + '\n'.join(cycles)) |
| 1837 | 1828 |
| 1838 return [dependency_nodes, flat_list] | 1829 return [dependency_nodes, flat_list] |
| 1839 | 1830 |
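
`targets.keys()[0]` only worked because Python 2's `keys()` returned a list; on Python 3 it is a non-indexable view, hence `next(iter(targets))`, which grabs an arbitrary first key without materializing anything:

```python
targets = {'//a:foo': {}, '//b:bar': {}}

try:
    targets.keys()[0]                # the Python 2 idiom this patch removes
except TypeError as e:
    print(e)                         # 'dict_keys' object is not subscriptable

first = next(iter(targets))          # works on both Python 2 and 3
print(first)                         # //a:foo (insertion order on 3.7+)
```
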
| 1840 | 1831 |
| 1841 def VerifyNoGYPFileCircularDependencies(targets): | 1832 def VerifyNoGYPFileCircularDependencies(targets): |
| 1842 # Create a DependencyGraphNode for each gyp file containing a target. Put | 1833 # Create a DependencyGraphNode for each gyp file containing a target. Put |
| 1843 # it into a dict for easy access. | 1834 # it into a dict for easy access. |
| 1844 dependency_nodes = {} | 1835 dependency_nodes = {} |
| 1845 for target in targets.iterkeys(): | 1836 for target in targets.keys(): |
| 1846 build_file = gyp.common.BuildFile(target) | 1837 build_file = gyp.common.BuildFile(target) |
| 1847 if not build_file in dependency_nodes: | 1838 if not build_file in dependency_nodes: |
| 1848 dependency_nodes[build_file] = DependencyGraphNode(build_file) | 1839 dependency_nodes[build_file] = DependencyGraphNode(build_file) |
| 1849 | 1840 |
| 1850 # Set up the dependency links. | 1841 # Set up the dependency links. |
| 1851 for target, spec in targets.iteritems(): | 1842 for target, spec in targets.items(): |
| 1852 build_file = gyp.common.BuildFile(target) | 1843 build_file = gyp.common.BuildFile(target) |
| 1853 build_file_node = dependency_nodes[build_file] | 1844 build_file_node = dependency_nodes[build_file] |
| 1854 target_dependencies = spec.get('dependencies', []) | 1845 target_dependencies = spec.get('dependencies', []) |
| 1855 for dependency in target_dependencies: | 1846 for dependency in target_dependencies: |
| 1856 try: | 1847 try: |
| 1857 dependency_build_file = gyp.common.BuildFile(dependency) | 1848 dependency_build_file = gyp.common.BuildFile(dependency) |
| 1858 except GypError, e: | 1849 except GypError as e: |
| 1859 gyp.common.ExceptionAppend( | 1850 gyp.common.ExceptionAppend( |
| 1860 e, 'while computing dependencies of .gyp file %s' % build_file) | 1851 e, 'while computing dependencies of .gyp file %s' % build_file) |
| 1861 raise | 1852 raise |
| 1862 | 1853 |
| 1863 if dependency_build_file == build_file: | 1854 if dependency_build_file == build_file: |
| 1864 # A .gyp file is allowed to refer back to itself. | 1855 # A .gyp file is allowed to refer back to itself. |
| 1865 continue | 1856 continue |
| 1866 dependency_node = dependency_nodes.get(dependency_build_file) | 1857 dependency_node = dependency_nodes.get(dependency_build_file) |
| 1867 if not dependency_node: | 1858 if not dependency_node: |
| 1868 raise GypError("Dependancy '%s' not found" % dependency_build_file) | 1859 raise GypError("Dependancy '%s' not found" % dependency_build_file) |
| 1869 if dependency_node not in build_file_node.dependencies: | 1860 if dependency_node not in build_file_node.dependencies: |
| 1870 build_file_node.dependencies.append(dependency_node) | 1861 build_file_node.dependencies.append(dependency_node) |
| 1871 dependency_node.dependents.append(build_file_node) | 1862 dependency_node.dependents.append(build_file_node) |
| 1872 | 1863 |
| 1873 | 1864 |
| 1874 # Files that have no dependencies are treated as dependent on root_node. | 1865 # Files that have no dependencies are treated as dependent on root_node. |
| 1875 root_node = DependencyGraphNode(None) | 1866 root_node = DependencyGraphNode(None) |
| 1876 for build_file_node in dependency_nodes.itervalues(): | 1867 for build_file_node in dependency_nodes.values(): |
| 1877 if len(build_file_node.dependencies) == 0: | 1868 if len(build_file_node.dependencies) == 0: |
| 1878 build_file_node.dependencies.append(root_node) | 1869 build_file_node.dependencies.append(root_node) |
| 1879 root_node.dependents.append(build_file_node) | 1870 root_node.dependents.append(build_file_node) |
| 1880 | 1871 |
| 1881 flat_list = root_node.FlattenToList() | 1872 flat_list = root_node.FlattenToList() |
| 1882 | 1873 |
| 1883 # If there's anything left unvisited, there must be a circular dependency | 1874 # If there's anything left unvisited, there must be a circular dependency |
| 1884 # (cycle). | 1875 # (cycle). |
| 1885 if len(flat_list) != len(dependency_nodes): | 1876 if len(flat_list) != len(dependency_nodes): |
| 1886 if not root_node.dependents: | 1877 if not root_node.dependents: |
| 1887 # If all files have dependencies, add the first file as a dependent | 1878 # If all files have dependencies, add the first file as a dependent |
| 1888 # of root_node so that the cycle can be discovered from root_node. | 1879 # of root_node so that the cycle can be discovered from root_node. |
| 1889 file_node = dependency_nodes.values()[0] | 1880 file_node = next(iter(dependency_nodes.values())) |
| 1890 file_node.dependencies.append(root_node) | 1881 file_node.dependencies.append(root_node) |
| 1891 root_node.dependents.append(file_node) | 1882 root_node.dependents.append(file_node) |
| 1892 cycles = [] | 1883 cycles = [] |
| 1893 for cycle in root_node.FindCycles(): | 1884 for cycle in root_node.FindCycles(): |
| 1894 paths = [node.ref for node in cycle] | 1885 paths = [node.ref for node in cycle] |
| 1895 cycles.append('Cycle: %s' % ' -> '.join(paths)) | 1886 cycles.append('Cycle: %s' % ' -> '.join(paths)) |
| 1896 raise DependencyGraphNode.CircularException( | 1887 raise DependencyGraphNode.CircularException( |
| 1897 'Cycles in .gyp file dependency graph detected:\n' + '\n'.join(cycles)) | 1888 'Cycles in .gyp file dependency graph detected:\n' + '\n'.join(cycles)) |
| 1898 | 1889 |
| 1899 | 1890 |
| (...skipping 206 matching lines...) | |
| 2106 # items to the list in reverse order, which would be an unwelcome | 2097 # items to the list in reverse order, which would be an unwelcome |
| 2107 # surprise. | 2098 # surprise. |
| 2108 to.insert(prepend_index, to_item) | 2099 to.insert(prepend_index, to_item) |
| 2109 if is_hashable(to_item): | 2100 if is_hashable(to_item): |
| 2110 hashable_to_set.add(to_item) | 2101 hashable_to_set.add(to_item) |
| 2111 prepend_index = prepend_index + 1 | 2102 prepend_index = prepend_index + 1 |
| 2112 | 2103 |
| 2113 | 2104 |
| 2114 def MergeDicts(to, fro, to_file, fro_file): | 2105 def MergeDicts(to, fro, to_file, fro_file): |
| 2115 # I wanted to name the parameter "from" but it's a Python keyword... | 2106 # I wanted to name the parameter "from" but it's a Python keyword... |
| 2116 for k, v in fro.iteritems(): | 2107 for k, v in fro.items(): |
| 2117 # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give | 2108 # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give |
| 2118 # copy semantics. Something else may want to merge from the |fro| dict | 2109 # copy semantics. Something else may want to merge from the |fro| dict |
| 2119 # later, and having the same dict ref pointed to twice in the tree isn't | 2110 # later, and having the same dict ref pointed to twice in the tree isn't |
| 2120 # what anyone wants considering that the dicts may subsequently be | 2111 # what anyone wants considering that the dicts may subsequently be |
| 2121 # modified. | 2112 # modified. |
| 2122 if k in to: | 2113 if k in to: |
| 2123 bad_merge = False | 2114 bad_merge = False |
| 2124 if type(v) in (str, int): | 2115 if type(v) in (str, int): |
| 2125 if type(to[k]) not in (str, int): | 2116 if type(to[k]) not in (str, int): |
| 2126 bad_merge = True | 2117 bad_merge = True |
| (...skipping 114 matching lines...) | |
| 2241 key_suffixes = ['=', '+', '?', '!', '/'] | 2232 key_suffixes = ['=', '+', '?', '!', '/'] |
| 2242 | 2233 |
| 2243 build_file = gyp.common.BuildFile(target) | 2234 build_file = gyp.common.BuildFile(target) |
| 2244 | 2235 |
| 2245 # Provide a single configuration by default if none exists. | 2236 # Provide a single configuration by default if none exists. |
| 2246 # TODO(mark): Signal an error if default_configurations exists but | 2237 # TODO(mark): Signal an error if default_configurations exists but |
| 2247 # configurations does not. | 2238 # configurations does not. |
| 2248 if not 'configurations' in target_dict: | 2239 if not 'configurations' in target_dict: |
| 2249 target_dict['configurations'] = {'Default': {}} | 2240 target_dict['configurations'] = {'Default': {}} |
| 2250 if not 'default_configuration' in target_dict: | 2241 if not 'default_configuration' in target_dict: |
| 2251 concrete = [i for (i, config) in target_dict['configurations'].iteritems() | 2242 concrete = [i for (i, config) in target_dict['configurations'].items() |
| 2252 if not config.get('abstract')] | 2243 if not config.get('abstract')] |
| 2253 target_dict['default_configuration'] = sorted(concrete)[0] | 2244 target_dict['default_configuration'] = sorted(concrete)[0] |
| 2254 | 2245 |
| 2255 merged_configurations = {} | 2246 merged_configurations = {} |
| 2256 configs = target_dict['configurations'] | 2247 configs = target_dict['configurations'] |
| 2257 for (configuration, old_configuration_dict) in configs.iteritems(): | 2248 for (configuration, old_configuration_dict) in configs.items(): |
| 2258 # Skip abstract configurations (saves work only). | 2249 # Skip abstract configurations (saves work only). |
| 2259 if old_configuration_dict.get('abstract'): | 2250 if old_configuration_dict.get('abstract'): |
| 2260 continue | 2251 continue |
| 2261 # Configurations inherit (most) settings from the enclosing target scope. | 2252 # Configurations inherit (most) settings from the enclosing target scope. |
| 2262 # Get the inheritance relationship right by making a copy of the target | 2253 # Get the inheritance relationship right by making a copy of the target |
| 2263 # dict. | 2254 # dict. |
| 2264 new_configuration_dict = {} | 2255 new_configuration_dict = {} |
| 2265 for (key, target_val) in target_dict.iteritems(): | 2256 for (key, target_val) in target_dict.items(): |
| 2266 key_ext = key[-1:] | 2257 key_ext = key[-1:] |
| 2267 if key_ext in key_suffixes: | 2258 if key_ext in key_suffixes: |
| 2268 key_base = key[:-1] | 2259 key_base = key[:-1] |
| 2269 else: | 2260 else: |
| 2270 key_base = key | 2261 key_base = key |
| 2271 if not key_base in non_configuration_keys: | 2262 if not key_base in non_configuration_keys: |
| 2272 new_configuration_dict[key] = gyp.simple_copy.deepcopy(target_val) | 2263 new_configuration_dict[key] = gyp.simple_copy.deepcopy(target_val) |
| 2273 | 2264 |
| 2274 # Merge in configuration (with all its parents first). | 2265 # Merge in configuration (with all its parents first). |
| 2275 MergeConfigWithInheritance(new_configuration_dict, build_file, | 2266 MergeConfigWithInheritance(new_configuration_dict, build_file, |
| 2276 target_dict, configuration, []) | 2267 target_dict, configuration, []) |
| 2277 | 2268 |
| 2278 merged_configurations[configuration] = new_configuration_dict | 2269 merged_configurations[configuration] = new_configuration_dict |
| 2279 | 2270 |
| 2280 # Put the new configurations back into the target dict as a configuration. | 2271 # Put the new configurations back into the target dict as a configuration. |
| 2281 for configuration in merged_configurations.keys(): | 2272 for configuration in merged_configurations.keys(): |
| 2282 target_dict['configurations'][configuration] = ( | 2273 target_dict['configurations'][configuration] = ( |
| 2283 merged_configurations[configuration]) | 2274 merged_configurations[configuration]) |
| 2284 | 2275 |
| 2285 # Now drop all the abstract ones. | 2276 # Now drop all the abstract ones. |
| 2286 for configuration in target_dict['configurations'].keys(): | 2277 configs = target_dict['configurations'] |
| 2287 old_configuration_dict = target_dict['configurations'][configuration] | 2278 target_dict['configurations'] = \ |
| 2288 if old_configuration_dict.get('abstract'): | 2279 {k: v for k, v in configs.items() if not v.get('abstract')} |
| 2289 del target_dict['configurations'][configuration] | |
| 2290 | 2280 |
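
The rewritten abstract-configuration cleanup above avoids a genuine Python 3 hazard: deleting keys while iterating a live `keys()` view raises at the next step, so the patch rebuilds the dict with a comprehension instead. The difference:

```python
configs = {'Base': {'abstract': 1}, 'Debug': {}, 'Release': {}}

try:
    for name in configs.keys():      # a live view on Python 3
        if configs[name].get('abstract'):
            del configs[name]
except RuntimeError as e:
    print(e)                         # dictionary changed size during iteration

configs = {'Base': {'abstract': 1}, 'Debug': {}, 'Release': {}}
configs = {k: v for k, v in configs.items() if not v.get('abstract')}
print(sorted(configs))               # ['Debug', 'Release']
```
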
| 2291 # Now that all of the target's configurations have been built, go through | 2281 # Now that all of the target's configurations have been built, go through |
| 2292 # the target dict's keys and remove everything that's been moved into a | 2282 # the target dict's keys and remove everything that's been moved into a |
| 2293 # "configurations" section. | 2283 # "configurations" section. |
| 2294 delete_keys = [] | 2284 delete_keys = [] |
| 2295 for key in target_dict: | 2285 for key in target_dict: |
| 2296 key_ext = key[-1:] | 2286 key_ext = key[-1:] |
| 2297 if key_ext in key_suffixes: | 2287 if key_ext in key_suffixes: |
| 2298 key_base = key[:-1] | 2288 key_base = key[:-1] |
| 2299 else: | 2289 else: |
| (...skipping 39 matching lines...) | |
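The hunk above merges each concrete configuration with the settings it inherits from the enclosing target, then drops the abstract configurations. The new code replaces a delete-while-iterating loop with a dict comprehension: in Python 2, `dict.keys()` returned a fresh list, so deleting entries mid-loop was safe, but in Python 3 it returns a live view, and mutating the dict during iteration raises `RuntimeError`. A minimal sketch of the filtering step (the configuration names here are made up for illustration):

```python
# Toy 'configurations' dict in the shape gyp uses; names are illustrative.
configs = {
    'Base': {'abstract': 1, 'defines': ['COMMON']},
    'Debug': {'defines': ['DEBUG']},
    'Release': {'defines': ['NDEBUG']},
}

# The Python 3-safe filter used by the new code: build a fresh dict rather
# than deleting keys from 'configs' while iterating over it.
concrete = {k: v for k, v in configs.items() if not v.get('abstract')}
assert sorted(concrete) == ['Debug', 'Release']
```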
| 2339 # Look through the dictionary for any lists whose keys end in "!" or "/". | 2329 # Look through the dictionary for any lists whose keys end in "!" or "/". |
| 2340 # These are lists that will be treated as exclude lists and regular | 2330 # These are lists that will be treated as exclude lists and regular |
| 2341 # expression-based exclude/include lists. Collect the lists that are | 2331 # expression-based exclude/include lists. Collect the lists that are |
| 2342 # needed first, looking for the lists that they operate on, and assemble | 2332 # needed first, looking for the lists that they operate on, and assemble |
| 2343 # them into |lists|. This is done in a separate loop up front, because | 2333 # them into |lists|. This is done in a separate loop up front, because |
| 2344 # the _included and _excluded keys need to be added to the_dict, and that | 2334 # the _included and _excluded keys need to be added to the_dict, and that |
| 2345 # can't be done while iterating through it. | 2335 # can't be done while iterating through it. |
| 2346 | 2336 |
| 2347 lists = [] | 2337 lists = [] |
| 2348 del_lists = [] | 2338 del_lists = [] |
| 2349 for key, value in the_dict.iteritems(): | 2339 for key, value in the_dict.items(): |
| 2350 operation = key[-1] | 2340 operation = key[-1] |
| 2351 if operation != '!' and operation != '/': | 2341 if operation != '!' and operation != '/': |
| 2352 continue | 2342 continue |
| 2353 | 2343 |
| 2354 if type(value) is not list: | 2344 if type(value) is not list: |
| 2355 raise ValueError(name + ' key ' + key + ' must be list, not ' + \ | 2345 raise ValueError(name + ' key ' + key + ' must be list, not ' + \ |
| 2356 value.__class__.__name__) | 2346 value.__class__.__name__) |
| 2357 | 2347 |
| 2358 list_key = key[:-1] | 2348 list_key = key[:-1] |
| 2359 if list_key not in the_dict: | 2349 if list_key not in the_dict: |
| (...skipping 27 matching lines...) | |
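The comments above describe the two filter syntaxes this function handles: a key ending in `!` holds a plain exclude list, and a key ending in `/` holds `(action, regex)` pairs. Both are collected up front because the derived `_excluded` keys cannot be added to `the_dict` while it is being iterated. A hedged sketch of the input shape these filters operate on (the keys and file names are hypothetical):

```python
# Illustrative gyp-style dict before filtering.
the_dict = {
    'sources': ['a.cc', 'b.cc', 'b_win.cc'],
    'sources!': ['b.cc'],                     # plain exclude list
    'sources/': [['exclude', r'_win\.cc$']],  # regex (action, pattern) pairs
}
# After ProcessListFiltersInDict runs, 'sources' is ['a.cc'], the removed
# items land in 'sources_excluded', and the '!' and '/' keys are deleted.
```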
| 2387 # no exclusion or inclusion has been applied (yet) have value -1, items | 2377 # no exclusion or inclusion has been applied (yet) have value -1, items |
| 2388 # excluded have value 0, and items included have value 1. Includes and | 2378 # excluded have value 0, and items included have value 1. Includes and |
| 2389 # excludes override previous actions. All items in list_actions are | 2379 # excludes override previous actions. All items in list_actions are |
| 2390 # initialized to -1 because no excludes or includes have been processed | 2380 # initialized to -1 because no excludes or includes have been processed |
| 2391 # yet. | 2381 # yet. |
| 2392 list_actions = list((-1,) * len(the_list)) | 2382 list_actions = list((-1,) * len(the_list)) |
| 2393 | 2383 |
| 2394 exclude_key = list_key + '!' | 2384 exclude_key = list_key + '!' |
| 2395 if exclude_key in the_dict: | 2385 if exclude_key in the_dict: |
| 2396 for exclude_item in the_dict[exclude_key]: | 2386 for exclude_item in the_dict[exclude_key]: |
| 2397 for index in xrange(0, len(the_list)): | 2387 for index, list_item in enumerate(the_list): |
| 2398 if exclude_item == the_list[index]: | 2388 if exclude_item == list_item: |
| 2399 # This item matches the exclude_item, so set its action to 0 | 2389 # This item matches the exclude_item, so set its action to 0 |
| 2400 # (exclude). | 2390 # (exclude). |
| 2401 list_actions[index] = 0 | 2391 list_actions[index] = 0 |
| 2402 | 2392 |
| 2403 # The "whatever!" list is no longer needed, dump it. | 2393 # The "whatever!" list is no longer needed, dump it. |
| 2404 del the_dict[exclude_key] | 2394 del the_dict[exclude_key] |
| 2405 | 2395 |
| 2406 regex_key = list_key + '/' | 2396 regex_key = list_key + '/' |
| 2407 if regex_key in the_dict: | 2397 if regex_key in the_dict: |
| 2408 for regex_item in the_dict[regex_key]: | 2398 for regex_item in the_dict[regex_key]: |
| 2409 [action, pattern] = regex_item | 2399 [action, pattern] = regex_item |
| 2410 pattern_re = re.compile(pattern) | 2400 pattern_re = re.compile(pattern) |
| 2411 | 2401 |
| 2412 if action == 'exclude': | 2402 if action == 'exclude': |
| 2413 # This item matches an exclude regex, so set its value to 0 (exclude). | 2403 # This item matches an exclude regex, so set its value to 0 (exclude). |
| 2414 action_value = 0 | 2404 action_value = 0 |
| 2415 elif action == 'include': | 2405 elif action == 'include': |
| 2416 # This item matches an include regex, so set its value to 1 (include). | 2406 # This item matches an include regex, so set its value to 1 (include). |
| 2417 action_value = 1 | 2407 action_value = 1 |
| 2418 else: | 2408 else: |
| 2419 # This is an action that doesn't make any sense. | 2409 # This is an action that doesn't make any sense. |
| 2420 raise ValueError('Unrecognized action ' + action + ' in ' + name + \ | 2410 raise ValueError('Unrecognized action ' + action + ' in ' + name + \ |
| 2421 ' key ' + regex_key) | 2411 ' key ' + regex_key) |
| 2422 | 2412 |
| 2423 for index in xrange(0, len(the_list)): | 2413 for index, list_item in enumerate(the_list): |
| 2424 list_item = the_list[index] | |
| 2425 if list_actions[index] == action_value: | 2414 if list_actions[index] == action_value: |
| 2426 # Even if the regex matches, nothing will change so continue (regex | 2415 # Even if the regex matches, nothing will change so continue (regex |
| 2427 # searches are expensive). | 2416 # searches are expensive). |
| 2428 continue | 2417 continue |
| 2429 if pattern_re.search(list_item): | 2418 if pattern_re.search(list_item): |
| 2430 # Regular expression match. | 2419 # Regular expression match. |
| 2431 list_actions[index] = action_value | 2420 list_actions[index] = action_value |
| 2432 | 2421 |
| 2433 # The "whatever/" list is no longer needed, dump it. | 2422 # The "whatever/" list is no longer needed, dump it. |
| 2434 del the_dict[regex_key] | 2423 del the_dict[regex_key] |
| 2435 | 2424 |
| 2436 # Add excluded items to the excluded list. | 2425 # Add excluded items to the excluded list. |
| 2437 # | 2426 # |
| 2438 # Note that exclude_key ("sources!") is different from excluded_key | 2427 # Note that exclude_key ("sources!") is different from excluded_key |
| 2439 # ("sources_excluded"). The exclude_key list is input and it was already | 2428 # ("sources_excluded"). The exclude_key list is input and it was already |
| 2440 # processed and deleted; the excluded_key list is output and it's about | 2429 # processed and deleted; the excluded_key list is output and it's about |
| 2441 # to be created. | 2430 # to be created. |
| 2442 excluded_key = list_key + '_excluded' | 2431 excluded_key = list_key + '_excluded' |
| 2443 if excluded_key in the_dict: | 2432 if excluded_key in the_dict: |
| 2444 raise GypError(name + ' key ' + excluded_key + | 2433 raise GypError(name + ' key ' + excluded_key + |
| 2445 ' must not be present prior' | 2434 ' must not be present prior' |
| 2446 ' to applying exclusion/regex filters for ' + list_key) | 2435 ' to applying exclusion/regex filters for ' + list_key) |
| 2447 | 2436 |
| 2448 excluded_list = [] | 2437 excluded_list = [] |
| 2449 | 2438 |
| 2450 # Go backwards through the list_actions list so that as items are deleted, | 2439 # Go backwards through the list_actions list so that as items are deleted, |
| 2451 # the indices of items that haven't been seen yet don't shift. That means | 2440 # the indices of items that haven't been seen yet don't shift. That means |
| 2452 # that things need to be prepended to excluded_list to maintain them in the | 2441 # that things need to be prepended to excluded_list to maintain them in the |
| 2453 # same order that they existed in the_list. | 2442 # same order that they existed in the_list. |
| 2454 for index in xrange(len(list_actions) - 1, -1, -1): | 2443 for index in range(len(list_actions) - 1, -1, -1): |
| 2455 if list_actions[index] == 0: | 2444 if list_actions[index] == 0: |
| 2456 # Dump anything with action 0 (exclude). Keep anything with action 1 | 2445 # Dump anything with action 0 (exclude). Keep anything with action 1 |
| 2457 # (include) or -1 (no include or exclude seen for the item). | 2446 # (include) or -1 (no include or exclude seen for the item). |
| 2458 excluded_list.insert(0, the_list[index]) | 2447 excluded_list.insert(0, the_list[index]) |
| 2459 del the_list[index] | 2448 del the_list[index] |
| 2460 | 2449 |
| 2461 # If anything was excluded, put the excluded list into the_dict at | 2450 # If anything was excluded, put the excluded list into the_dict at |
| 2462 # excluded_key. | 2451 # excluded_key. |
| 2463 if len(excluded_list) > 0: | 2452 if len(excluded_list) > 0: |
| 2464 the_dict[excluded_key] = excluded_list | 2453 the_dict[excluded_key] = excluded_list |
| 2465 | 2454 |
| 2466 # Now recurse into subdicts and lists that may contain dicts. | 2455 # Now recurse into subdicts and lists that may contain dicts. |
| 2467 for key, value in the_dict.iteritems(): | 2456 for key, value in the_dict.items(): |
| 2468 if type(value) is dict: | 2457 if type(value) is dict: |
| 2469 ProcessListFiltersInDict(key, value) | 2458 ProcessListFiltersInDict(key, value) |
| 2470 elif type(value) is list: | 2459 elif type(value) is list: |
| 2471 ProcessListFiltersInList(key, value) | 2460 ProcessListFiltersInList(key, value) |
| 2472 | 2461 |
| 2473 | 2462 |
| 2474 def ProcessListFiltersInList(name, the_list): | 2463 def ProcessListFiltersInList(name, the_list): |
| 2475 for item in the_list: | 2464 for item in the_list: |
| 2476 if type(item) is dict: | 2465 if type(item) is dict: |
| 2477 ProcessListFiltersInDict(name, item) | 2466 ProcessListFiltersInDict(name, item) |
| (...skipping 36 matching lines...) | |
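The long hunk above implements the three-state marking scheme the comments describe: every item starts at -1 (untouched), plain excludes set matching items to 0, and regex rules flip items to 0 or 1, with later rules overriding earlier ones; finally the list is walked backwards so deletions do not shift indices that have not been visited yet. A self-contained sketch of that scheme, assuming a standalone function (the name `apply_filters` is invented here) captures it faithfully:

```python
import re

def apply_filters(the_list, excludes, regex_rules):
    """Mark items -1 (untouched), 0 (exclude), or 1 (include); later rules
    win. Prunes the_list in place and returns the excluded items."""
    actions = [-1] * len(the_list)
    for exclude_item in excludes:
        for index, item in enumerate(the_list):
            if item == exclude_item:
                actions[index] = 0
    for action, pattern in regex_rules:
        action_value = {'exclude': 0, 'include': 1}[action]
        pattern_re = re.compile(pattern)
        for index, item in enumerate(the_list):
            # Skip items already in the target state (regex searches are
            # expensive, as the original comment notes).
            if actions[index] != action_value and pattern_re.search(item):
                actions[index] = action_value
    excluded = []
    # Walk backwards so deletions don't shift unseen indices; prepend so the
    # excluded items keep their original relative order.
    for index in range(len(actions) - 1, -1, -1):
        if actions[index] == 0:
            excluded.insert(0, the_list[index])
            del the_list[index]
    return excluded

sources = ['a.cc', 'b.cc', 'b_test.cc']
dropped = apply_filters(sources, ['b.cc'], [('exclude', r'_test\.cc$')])
assert sources == ['a.cc'] and dropped == ['b.cc', 'b_test.cc']
```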
| 2514 for source in sources: | 2503 for source in sources: |
| 2515 name, ext = os.path.splitext(source) | 2504 name, ext = os.path.splitext(source) |
| 2516 is_compiled_file = ext in [ | 2505 is_compiled_file = ext in [ |
| 2517 '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S'] | 2506 '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S'] |
| 2518 if not is_compiled_file: | 2507 if not is_compiled_file: |
| 2519 continue | 2508 continue |
| 2520 basename = os.path.basename(name) # Don't include extension. | 2509 basename = os.path.basename(name) # Don't include extension. |
| 2521 basenames.setdefault(basename, []).append(source) | 2510 basenames.setdefault(basename, []).append(source) |
| 2522 | 2511 |
| 2523 error = '' | 2512 error = '' |
| 2524 for basename, files in basenames.iteritems(): | 2513 for basename, files in basenames.items(): |
| 2525 if len(files) > 1: | 2514 if len(files) > 1: |
| 2526 error += ' %s: %s\n' % (basename, ' '.join(files)) | 2515 error += ' %s: %s\n' % (basename, ' '.join(files)) |
| 2527 | 2516 |
| 2528 if error: | 2517 if error: |
| 2529 print('static library %s has several files with the same basename:\n' % | 2518 print('static library %s has several files with the same basename:\n' % |
| 2530 target + error + 'libtool on Mac cannot handle that. Use ' | 2519 target + error + 'libtool on Mac cannot handle that. Use ' |
| 2531 '--no-duplicate-basename-check to disable this validation.') | 2520 '--no-duplicate-basename-check to disable this validation.') |
| 2532 raise GypError('Duplicate basenames in sources section, see list above') | 2521 raise GypError('Duplicate basenames in sources section, see list above') |
| 2533 | 2522 |
| 2534 | 2523 |
| (...skipping 118 matching lines...) | |
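The basename check above groups every compiled source by its extension-stripped basename with `setdefault`, then reports any basename that maps to more than one file, because Mac `libtool` cannot archive two objects with the same basename. A small demonstration of the grouping idiom (the file names are made up):

```python
import os.path

sources = ['foo/util.cc', 'bar/util.cc', 'baz/other.cc', 'README.md']
compiled_exts = ['.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']

basenames = {}
for source in sources:
    name, ext = os.path.splitext(source)
    if ext in compiled_exts:
        # Group by basename without the extension, as the hunk above does.
        basenames.setdefault(os.path.basename(name), []).append(source)

duplicates = {b: f for b, f in basenames.items() if len(f) > 1}
assert duplicates == {'util': ['foo/util.cc', 'bar/util.cc']}
```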
| 2653 TurnIntIntoStrInList(v) | 2642 TurnIntIntoStrInList(v) |
| 2654 | 2643 |
| 2655 if type(k) is int: | 2644 if type(k) is int: |
| 2656 del the_dict[k] | 2645 del the_dict[k] |
| 2657 the_dict[str(k)] = v | 2646 the_dict[str(k)] = v |
| 2658 | 2647 |
| 2659 | 2648 |
| 2660 def TurnIntIntoStrInList(the_list): | 2649 def TurnIntIntoStrInList(the_list): |
| 2661 """Given list the_list, recursively converts all integers into strings. | 2650 """Given list the_list, recursively converts all integers into strings. |
| 2662 """ | 2651 """ |
| 2663 for index in xrange(0, len(the_list)): | 2652 for index, item in enumerate(the_list): |
| 2664 item = the_list[index] | |
| 2665 if type(item) is int: | 2653 if type(item) is int: |
| 2666 the_list[index] = str(item) | 2654 the_list[index] = str(item) |
| 2667 elif type(item) is dict: | 2655 elif type(item) is dict: |
| 2668 TurnIntIntoStrInDict(item) | 2656 TurnIntIntoStrInDict(item) |
| 2669 elif type(item) is list: | 2657 elif type(item) is list: |
| 2670 TurnIntIntoStrInList(item) | 2658 TurnIntIntoStrInList(item) |
| 2671 | 2659 |
| 2672 | 2660 |
| 2673 def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets, | 2661 def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets, |
| 2674 data): | 2662 data): |
| (...skipping 96 matching lines...) | |
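`TurnIntIntoStrInList` and its dict counterpart walk arbitrarily nested lists and dicts, replacing every `int` with its `str` form in place; the diff's only change here is swapping the `xrange` index loop for `enumerate`. A simplified standalone version (values only, dict keys omitted; the function name is invented):

```python
def ints_to_strs(the_list):
    """Recursively stringify ints in nested lists/dicts, in place.
    Simplified: converts values only, not dict keys."""
    for index, item in enumerate(the_list):
        if type(item) is int:
            the_list[index] = str(item)
        elif type(item) is dict:
            for key, value in item.items():
                if type(value) is int:
                    item[key] = str(value)  # same-key update is safe mid-loop
                elif type(value) is list:
                    ints_to_strs(value)
        elif type(item) is list:
            ints_to_strs(item)

data = ['a', 3, {'n': 7, 'xs': [1, 'b']}]
ints_to_strs(data)
assert data == ['a', '3', {'n': '7', 'xs': ['1', 'b']}]
```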
| 2771 build_files = set(map(os.path.normpath, build_files)) | 2759 build_files = set(map(os.path.normpath, build_files)) |
| 2772 if parallel: | 2760 if parallel: |
| 2773 LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth, | 2761 LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth, |
| 2774 check, generator_input_info) | 2762 check, generator_input_info) |
| 2775 else: | 2763 else: |
| 2776 aux_data = {} | 2764 aux_data = {} |
| 2777 for build_file in build_files: | 2765 for build_file in build_files: |
| 2778 try: | 2766 try: |
| 2779 LoadTargetBuildFile(build_file, data, aux_data, | 2767 LoadTargetBuildFile(build_file, data, aux_data, |
| 2780 variables, includes, depth, check, True) | 2768 variables, includes, depth, check, True) |
| 2781 except Exception, e: | 2769 except Exception as e: |
| 2782 gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file) | 2770 gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file) |
| 2783 raise | 2771 raise |
| 2784 | 2772 |
| 2785 # Build a dict to access each target's subdict by qualified name. | 2773 # Build a dict to access each target's subdict by qualified name. |
| 2786 targets = BuildTargetsDict(data) | 2774 targets = BuildTargetsDict(data) |
| 2787 | 2775 |
| 2788 # Fully qualify all dependency links. | 2776 # Fully qualify all dependency links. |
| 2789 QualifyDependencies(targets) | 2777 QualifyDependencies(targets) |
| 2790 | 2778 |
| 2791 # Remove self-dependencies from targets that have 'prune_self_dependencies' | 2779 # Remove self-dependencies from targets that have 'prune_self_dependencies' |
| 2792 # set to 1. | 2780 # set to 1. |
| 2793 RemoveSelfDependencies(targets) | 2781 RemoveSelfDependencies(targets) |
| 2794 | 2782 |
| 2795 # Expand dependencies specified as build_file:*. | 2783 # Expand dependencies specified as build_file:*. |
| 2796 ExpandWildcardDependencies(targets, data) | 2784 ExpandWildcardDependencies(targets, data) |
| 2797 | 2785 |
| 2798 # Remove all dependencies marked as 'link_dependency' from the targets of | 2786 # Remove all dependencies marked as 'link_dependency' from the targets of |
| 2799 # type 'none'. | 2787 # type 'none'. |
| 2800 RemoveLinkDependenciesFromNoneTargets(targets) | 2788 RemoveLinkDependenciesFromNoneTargets(targets) |
| 2801 | 2789 |
| 2802 # Apply exclude (!) and regex (/) list filters only for dependency_sections. | 2790 # Apply exclude (!) and regex (/) list filters only for dependency_sections. |
| 2803 for target_name, target_dict in targets.iteritems(): | 2791 for target_name, target_dict in targets.items(): |
| 2804 tmp_dict = {} | 2792 tmp_dict = {} |
| 2805 for key_base in dependency_sections: | 2793 for key_base in dependency_sections: |
| 2806 for op in ('', '!', '/'): | 2794 for op in ('', '!', '/'): |
| 2807 key = key_base + op | 2795 key = key_base + op |
| 2808 if key in target_dict: | 2796 if key in target_dict: |
| 2809 tmp_dict[key] = target_dict[key] | 2797 tmp_dict[key] = target_dict[key] |
| 2810 del target_dict[key] | 2798 del target_dict[key] |
| 2811 ProcessListFiltersInDict(target_name, tmp_dict) | 2799 ProcessListFiltersInDict(target_name, tmp_dict) |
| 2812 # Write the results back to |target_dict|. | 2800 # Write the results back to |target_dict|. |
| 2813 for key in tmp_dict: | 2801 for key in tmp_dict: |
| (...skipping 76 matching lines...) | |
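The syntax change in this hunk, `except Exception, e` to `except Exception as e`, is the one change here that is mandatory rather than stylistic: the comma form is a `SyntaxError` in Python 3, while the `as` form also parses on Python 2.6+. A quick illustration of the pattern (the helper below is a simplified stand-in for `gyp.common.ExceptionAppend`, not its real implementation):

```python
def exception_append(e, msg):
    # Simplified stand-in: append context to the exception's message.
    e.args = (str(e.args[0]) + ' ' + msg,) + e.args[1:]

try:
    raise ValueError('invalid token')
except Exception as e:  # 'except Exception, e' would not parse on Python 3
    exception_append(e, 'while trying to load foo.gyp')
    assert str(e) == 'invalid token while trying to load foo.gyp'
```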
| 2890 ValidateRunAsInTarget(target, target_dict, build_file) | 2878 ValidateRunAsInTarget(target, target_dict, build_file) |
| 2891 ValidateActionsInTarget(target, target_dict, build_file) | 2879 ValidateActionsInTarget(target, target_dict, build_file) |
| 2892 | 2880 |
| 2893 # Generators might not expect ints. Turn them into strs. | 2881 # Generators might not expect ints. Turn them into strs. |
| 2894 TurnIntIntoStrInDict(data) | 2882 TurnIntIntoStrInDict(data) |
| 2895 | 2883 |
| 2896 # TODO(mark): Return |data| for now because the generator needs a list of | 2884 # TODO(mark): Return |data| for now because the generator needs a list of |
| 2897 # build files that came in. In the future, maybe it should just accept | 2885 # build files that came in. In the future, maybe it should just accept |
| 2898 # a list, and not the whole data dict. | 2886 # a list, and not the whole data dict. |
| 2899 return [flat_list, targets, data] | 2887 return [flat_list, targets, data] |
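The remaining edits in this section are the mechanical `iteritems()`-to-`items()` and `xrange()`-to-`range()` renames. Both rely on the fact that the Python 2 spellings were removed in Python 3, while the Python 3 spellings also exist in Python 2, where `items()` builds a list and `range()` is eager; the extra cost is negligible at gyp's scale:

```python
d = {'a': 1, 'b': 2}

# Works on both Python 2 and 3; on 2 this copies into a list, on 3 it is a
# lazy view over the dict.
for key, value in d.items():
    assert d[key] == value

# range() likewise exists in both; only the laziness differs.
assert list(range(3)) == [0, 1, 2]
```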