OLD | NEW |
---|---|
1 # Copyright (c) 2012 Google Inc. All rights reserved. | 1 # Copyright (c) 2012 Google Inc. All rights reserved. |
2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
4 | 4 |
5 from compiler.ast import Const | 5 from compiler.ast import Const |
6 from compiler.ast import Dict | 6 from compiler.ast import Dict |
7 from compiler.ast import Discard | 7 from compiler.ast import Discard |
8 from compiler.ast import List | 8 from compiler.ast import List |
9 from compiler.ast import Module | 9 from compiler.ast import Module |
10 from compiler.ast import Node | 10 from compiler.ast import Node |
11 from compiler.ast import Stmt | 11 from compiler.ast import Stmt |
12 import compiler | 12 import compiler |
13 import copy | 13 import copy |
14 import gyp.common | 14 import gyp.common |
15 import multiprocessing | |
15 import optparse | 16 import optparse |
16 import os.path | 17 import os.path |
17 import re | 18 import re |
18 import shlex | 19 import shlex |
19 import subprocess | 20 import subprocess |
20 import sys | 21 import sys |
22 import threading | |
23 import time | |
21 from gyp.common import GypError | 24 from gyp.common import GypError |
22 | 25 |
23 | 26 |
24 # A list of types that are treated as linkable. | 27 # A list of types that are treated as linkable. |
25 linkable_types = ['executable', 'shared_library', 'loadable_module'] | 28 linkable_types = ['executable', 'shared_library', 'loadable_module'] |
26 | 29 |
27 # A list of sections that contain links to other targets. | 30 # A list of sections that contain links to other targets. |
28 dependency_sections = ['dependencies', 'export_dependent_settings'] | 31 dependency_sections = ['dependencies', 'export_dependent_settings'] |
29 | 32 |
30 # base_path_sections is a list of sections defined by GYP that contain | 33 # base_path_sections is a list of sections defined by GYP that contain |
(...skipping 292 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
323 for condition in data['conditions']: | 326 for condition in data['conditions']: |
324 if isinstance(condition, list): | 327 if isinstance(condition, list): |
325 for condition_dict in condition[1:]: | 328 for condition_dict in condition[1:]: |
326 ProcessToolsetsInDict(condition_dict) | 329 ProcessToolsetsInDict(condition_dict) |
327 | 330 |
328 | 331 |
329 # TODO(mark): I don't love this name. It just means that it's going to load | 332 # TODO(mark): I don't love this name. It just means that it's going to load |
330 # a build file that contains targets and is expected to provide a targets dict | 333 # a build file that contains targets and is expected to provide a targets dict |
331 # that contains the targets... | 334 # that contains the targets... |
332 def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, | 335 def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, |
333 depth, check): | 336 depth, check, load_dependencies): |
334 # If depth is set, predefine the DEPTH variable to be a relative path from | 337 # If depth is set, predefine the DEPTH variable to be a relative path from |
335 # this build file's directory to the directory identified by depth. | 338 # this build file's directory to the directory identified by depth. |
336 if depth: | 339 if depth: |
337 # TODO(dglazkov) The backslash/forward-slash replacement at the end is a | 340 # TODO(dglazkov) The backslash/forward-slash replacement at the end is a |
338 # temporary measure. This should really be addressed by keeping all paths | 341 # temporary measure. This should really be addressed by keeping all paths |
339 # in POSIX until actual project generation. | 342 # in POSIX until actual project generation. |
340 d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path)) | 343 d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path)) |
341 if d == '': | 344 if d == '': |
342 variables['DEPTH'] = '.' | 345 variables['DEPTH'] = '.' |
343 else: | 346 else: |
344 variables['DEPTH'] = d.replace('\\', '/') | 347 variables['DEPTH'] = d.replace('\\', '/') |
345 | 348 |
346 # If the generator needs absolute paths, then do so. | 349 # If the generator needs absolute paths, then do so. |
347 if absolute_build_file_paths: | 350 if absolute_build_file_paths: |
348 build_file_path = os.path.abspath(build_file_path) | 351 build_file_path = os.path.abspath(build_file_path) |
349 | 352 |
350 if build_file_path in data['target_build_files']: | 353 if build_file_path in data['target_build_files']: |
351 # Already loaded. | 354 # Already loaded. |
352 return | 355 return False |
353 data['target_build_files'].add(build_file_path) | 356 data['target_build_files'].add(build_file_path) |
354 | 357 |
355 gyp.DebugOutput(gyp.DEBUG_INCLUDES, | 358 gyp.DebugOutput(gyp.DEBUG_INCLUDES, |
356 "Loading Target Build File '%s'" % build_file_path) | 359 "Loading Target Build File '%s'" % build_file_path) |
357 | 360 |
358 build_file_data = LoadOneBuildFile(build_file_path, data, aux_data, variables, | 361 build_file_data = LoadOneBuildFile(build_file_path, data, aux_data, variables, |
359 includes, True, check) | 362 includes, True, check) |
360 | 363 |
361 # Store DEPTH for later use in generators. | 364 # Store DEPTH for later use in generators. |
362 build_file_data['_DEPTH'] = depth | 365 build_file_data['_DEPTH'] = depth |
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
412 index += 1 | 415 index += 1 |
413 | 416 |
414 # No longer needed. | 417 # No longer needed. |
415 del build_file_data['target_defaults'] | 418 del build_file_data['target_defaults'] |
416 | 419 |
417 # Look for dependencies. This means that dependency resolution occurs | 420 # Look for dependencies. This means that dependency resolution occurs |
418 # after "pre" conditionals and variable expansion, but before "post" - | 421 # after "pre" conditionals and variable expansion, but before "post" - |
419 # in other words, you can't put a "dependencies" section inside a "post" | 422 # in other words, you can't put a "dependencies" section inside a "post" |
420 # conditional within a target. | 423 # conditional within a target. |
421 | 424 |
425 dependencies = [] | |
422 if 'targets' in build_file_data: | 426 if 'targets' in build_file_data: |
423 for target_dict in build_file_data['targets']: | 427 for target_dict in build_file_data['targets']: |
424 if 'dependencies' not in target_dict: | 428 if 'dependencies' not in target_dict: |
425 continue | 429 continue |
426 for dependency in target_dict['dependencies']: | 430 for dependency in target_dict['dependencies']: |
427 other_build_file = \ | 431 dependencies.append( |
428 gyp.common.ResolveTarget(build_file_path, dependency, None)[0] | 432 gyp.common.ResolveTarget(build_file_path, dependency, None)[0]) |
429 try: | |
430 LoadTargetBuildFile(other_build_file, data, aux_data, variables, | |
431 includes, depth, check) | |
432 except Exception, e: | |
433 gyp.common.ExceptionAppend( | |
434 e, 'while loading dependencies of %s' % build_file_path) | |
435 raise | |
436 | 433 |
437 return data | 434 if load_dependencies: |
435 for dependency in dependencies: | |
436 try: | |
437 LoadTargetBuildFile(dependency, data, aux_data, variables, | |
438 includes, depth, check, load_dependencies) | |
439 except Exception, e: | |
440 gyp.common.ExceptionAppend( | |
441 e, 'while loading dependencies of %s' % build_file_path) | |
442 raise | |
443 else: | |
444 return (build_file_path, dependencies) | |
445 | |
446 | |
def CallLoadTargetBuildFile(global_flags,
                            build_file_path, data,
                            aux_data, variables,
                            includes, depth, check):
  """Wrapper around LoadTargetBuildFile for when it is executed in a worker
  process.

  Returns False if the build file was already loaded, or a tuple
  (build_file_path, data_out, aux_data_out, dependencies) containing only
  the dict entries added by this call, so the result can be serialized and
  sent back to the main process via a pipe (see LoadTargetBuildFileCallback).
  """
  # Apply globals so that the worker process behaves the same.
  for key, value in global_flags.iteritems():
    globals()[key] = value

  # Save the keys so we can return data that changed.
  data_keys = set(data)
  aux_data_keys = set(aux_data)

  # load_dependencies=False: dependencies are returned to the caller rather
  # than recursed into, so the main process can schedule them as new jobs.
  result = LoadTargetBuildFile(build_file_path, data,
                               aux_data, variables,
                               includes, depth, check, False)
  if not result:
    # Build file was already loaded; nothing new to report back.
    return result

  (build_file_path, dependencies) = result

  # Copy out only the entries that LoadTargetBuildFile added, keeping the
  # payload sent back over the pipe as small as possible.
  data_out = {}
  for key in data:
    if key == 'target_build_files':
      continue
    if key not in data_keys:
      data_out[key] = data[key]
  aux_data_out = {}
  for key in aux_data:
    if key not in aux_data_keys:
      aux_data_out[key] = aux_data[key]

  # This gets serialized and sent back to the main process via a pipe.
  # It's handled in LoadTargetBuildFileCallback.
  return (build_file_path,
          data_out,
          aux_data_out,
          dependencies)
486 | |
487 | |
class ParallelState(object):
  """Tracks state while build files are loaded in parallel.

  Keeps track of state during farming out and processing of parallel jobs.
  It's stored in a global so that the pool callback function can have
  access to it.
  """

  def __init__(self):
    # The multiprocessing pool.
    self.pool = None
    # The condition variable used to protect this object and notify
    # the main loop when there might be more data to process.
    self.condition = None
    # The "data" dict that was passed to LoadTargetBuildFileParallel.
    self.data = None
    # The "aux_data" dict that was passed to LoadTargetBuildFileParallel.
    self.aux_data = None
    # The number of parallel calls outstanding; decremented when a response
    # was received.
    self.pending = 0
    # The set of all build files that have been scheduled, so we don't
    # schedule the same one twice.
    self.scheduled = set()
    # A list of dependency build file paths that haven't been scheduled yet.
    self.dependencies = []
510 | |
511 | |
def LoadTargetBuildFileCallback(result):
  """Handle the result of a LoadTargetBuildFile call made in a worker
  process.

  Runs on a pool handler thread in the main process. Merges the worker's
  returned data into the shared ParallelState under its condition variable
  and queues any newly-discovered dependencies for scheduling.
  """
  (build_file_path0, data0, aux_data0, dependencies0) = result
  global parallel_state
  # All ParallelState mutation happens under the condition variable, since
  # this callback runs concurrently with the main scheduling loop.
  parallel_state.condition.acquire()
  parallel_state.data['target_build_files'].add(build_file_path0)
  for key in data0:
    parallel_state.data[key] = data0[key]
  for key in aux_data0:
    parallel_state.aux_data[key] = aux_data0[key]
  # Queue dependencies not seen before; 'scheduled' prevents loading the
  # same build file twice.
  for new_dependency in dependencies0:
    if new_dependency not in parallel_state.scheduled:
      parallel_state.scheduled.add(new_dependency)
      parallel_state.dependencies.append(new_dependency)
  parallel_state.pending -= 1
  # Wake the main loop: there may be new dependencies to farm out, or this
  # may have been the last outstanding job.
  parallel_state.condition.notify()
  parallel_state.condition.release()
530 | |
531 | |
def LoadTargetBuildFileParallel(build_file_path, data, aux_data,
                                variables, includes, depth, check):
  """Load build_file_path and all of its dependencies using a pool of
  worker processes.

  Drop-in parallel alternative to calling LoadTargetBuildFile with
  load_dependencies=True: farms each build file out to a worker via
  CallLoadTargetBuildFile and merges results back through
  LoadTargetBuildFileCallback, which fills in 'data' and 'aux_data'
  in place.
  """
  global parallel_state
  parallel_state = ParallelState()
  parallel_state.condition = threading.Condition()
  parallel_state.dependencies = [build_file_path]
  parallel_state.scheduled = set([build_file_path])
  parallel_state.pending = 0
  parallel_state.data = data
  parallel_state.aux_data = aux_data

  # Main scheduling loop: run until every discovered build file has been
  # farmed out and every outstanding job has reported back.
  parallel_state.condition.acquire()
  while parallel_state.dependencies or parallel_state.pending:
    if not parallel_state.dependencies:
      # Nothing to schedule right now; wait() releases the lock so the
      # pool callback can deliver results and notify us.
      parallel_state.condition.wait()
      continue

    dependency = parallel_state.dependencies.pop()

    parallel_state.pending += 1
    # Give the worker only the shared 'target_build_files' set; the rest of
    # 'data'/'aux_data' stays in the main process and is merged on callback.
    data_in = {}
    data_in['target_build_files'] = data['target_build_files']
    aux_data_in = {}
    # Module-level flags the worker must replicate (applied to globals() in
    # CallLoadTargetBuildFile) so it behaves like the main process.
    global_flags = {
      'path_sections': globals()['path_sections'],
      'non_configuration_keys': globals()['non_configuration_keys'],
      'absolute_build_file_paths': globals()['absolute_build_file_paths'],
      'multiple_toolsets': globals()['multiple_toolsets']}

    # Create the pool lazily on first use.
    # NOTE(review): pool size is hard-coded to 8 workers; consider
    # multiprocessing.cpu_count() instead — confirm intent.
    if not parallel_state.pool:
      parallel_state.pool = multiprocessing.Pool(8)
    parallel_state.pool.apply_async(
        CallLoadTargetBuildFile,
        args = (global_flags, dependency,
                data_in, aux_data_in,
                variables, includes, depth, check),
        callback = LoadTargetBuildFileCallback)

  parallel_state.condition.release()
438 | 571 |
439 | 572 |
440 # Look for the bracket that matches the first bracket seen in a | 573 # Look for the bracket that matches the first bracket seen in a |
441 # string, and return the start and end as a tuple. For example, if | 574 # string, and return the start and end as a tuple. For example, if |
442 # the input is something like "<(foo <(bar)) blah", then it would | 575 # the input is something like "<(foo <(bar)) blah", then it would |
443 # return (1, 13), indicating the entire string except for the leading | 576 # return (1, 13), indicating the entire string except for the leading |
444 # "<" and trailing " blah". | 577 # "<" and trailing " blah". |
445 def FindEnclosingBracketGroup(input): | 578 def FindEnclosingBracketGroup(input): |
446 brackets = { '}': '{', | 579 brackets = { '}': '{', |
447 ']': '[', | 580 ']': '[', |
(...skipping 1880 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
2328 # Prepare a key like 'path/to:target_name'. | 2461 # Prepare a key like 'path/to:target_name'. |
2329 key = subdir + ':' + name | 2462 key = subdir + ':' + name |
2330 if key in used: | 2463 if key in used: |
2331 # Complain if this target is already used. | 2464 # Complain if this target is already used. |
2332 raise GypError('Duplicate target name "%s" in directory "%s" used both ' | 2465 raise GypError('Duplicate target name "%s" in directory "%s" used both ' |
2333 'in "%s" and "%s".' % (name, subdir, gyp, used[key])) | 2466 'in "%s" and "%s".' % (name, subdir, gyp, used[key])) |
2334 used[key] = gyp | 2467 used[key] = gyp |
2335 | 2468 |
2336 | 2469 |
2337 def Load(build_files, variables, includes, depth, generator_input_info, check, | 2470 def Load(build_files, variables, includes, depth, generator_input_info, check, |
2338 circular_check): | 2471 circular_check, parallel): |
2339 # Set up path_sections and non_configuration_keys with the default data plus | 2472 # Set up path_sections and non_configuration_keys with the default data plus |
2340 # the generator-specific data. | 2473 # the generator-specific data. |
2341 global path_sections | 2474 global path_sections |
2342 path_sections = base_path_sections[:] | 2475 path_sections = base_path_sections[:] |
2343 path_sections.extend(generator_input_info['path_sections']) | 2476 path_sections.extend(generator_input_info['path_sections']) |
2344 | 2477 |
2345 global non_configuration_keys | 2478 global non_configuration_keys |
2346 non_configuration_keys = base_non_configuration_keys[:] | 2479 non_configuration_keys = base_non_configuration_keys[:] |
2347 non_configuration_keys.extend(generator_input_info['non_configuration_keys']) | 2480 non_configuration_keys.extend(generator_input_info['non_configuration_keys']) |
2348 | 2481 |
(...skipping 20 matching lines...) Expand all Loading... | |
2369 # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as | 2502 # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as |
2370 # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps | 2503 # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps |
2371 # track of the keys corresponding to "target" files. | 2504 # track of the keys corresponding to "target" files. |
2372 data = {'target_build_files': set()} | 2505 data = {'target_build_files': set()} |
2373 aux_data = {} | 2506 aux_data = {} |
2374 for build_file in build_files: | 2507 for build_file in build_files: |
2375 # Normalize paths everywhere. This is important because paths will be | 2508 # Normalize paths everywhere. This is important because paths will be |
2376 # used as keys to the data dict and for references between input files. | 2509 # used as keys to the data dict and for references between input files. |
2377 build_file = os.path.normpath(build_file) | 2510 build_file = os.path.normpath(build_file) |
2378 try: | 2511 try: |
2379 LoadTargetBuildFile(build_file, data, aux_data, variables, includes, | 2512 if parallel: |
2380 depth, check) | 2513 print >>sys.stderr, 'Using parallel processing (experimental).' |
2514 LoadTargetBuildFileParallel(build_file, data, aux_data, | |
2515 variables, includes, depth, check) | |
2516 else: | |
2517 LoadTargetBuildFile(build_file, data, aux_data, | |
2518 variables, includes, depth, check, True) | |
2381 except Exception, e: | 2519 except Exception, e: |
2382 gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file) | 2520 gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file) |
2383 raise | 2521 raise |
2384 | 2522 |
2385 # Build a dict to access each target's subdict by qualified name. | 2523 # Build a dict to access each target's subdict by qualified name. |
2386 targets = BuildTargetsDict(data) | 2524 targets = BuildTargetsDict(data) |
2387 | 2525 |
2388 # Fully qualify all dependency links. | 2526 # Fully qualify all dependency links. |
2389 QualifyDependencies(targets) | 2527 QualifyDependencies(targets) |
2390 | 2528 |
(...skipping 87 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
2478 ValidateRunAsInTarget(target, target_dict, build_file) | 2616 ValidateRunAsInTarget(target, target_dict, build_file) |
2479 ValidateActionsInTarget(target, target_dict, build_file) | 2617 ValidateActionsInTarget(target, target_dict, build_file) |
2480 | 2618 |
2481 # Generators might not expect ints. Turn them into strs. | 2619 # Generators might not expect ints. Turn them into strs. |
2482 TurnIntIntoStrInDict(data) | 2620 TurnIntIntoStrInDict(data) |
2483 | 2621 |
2484 # TODO(mark): Return |data| for now because the generator needs a list of | 2622 # TODO(mark): Return |data| for now because the generator needs a list of |
2485 # build files that came in. In the future, maybe it should just accept | 2623 # build files that came in. In the future, maybe it should just accept |
2486 # a list, and not the whole data dict. | 2624 # a list, and not the whole data dict. |
2487 return [flat_list, targets, data] | 2625 return [flat_list, targets, data] |
OLD | NEW |