Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(288)

Side by Side Diff: pylib/gyp/input.py

Issue 11098023: Make child process errors kill gyp when parallel processing is on. (Closed) Base URL: http://git.chromium.org/external/gyp.git@master
Patch Set: Also fix syntax error handling Created 8 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « pylib/gyp/generator/ninja.py ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 # Copyright (c) 2012 Google Inc. All rights reserved. 1 # Copyright (c) 2012 Google Inc. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be 2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file. 3 # found in the LICENSE file.
4 4
5 from compiler.ast import Const 5 from compiler.ast import Const
6 from compiler.ast import Dict 6 from compiler.ast import Dict
7 from compiler.ast import Discard 7 from compiler.ast import Discard
8 from compiler.ast import List 8 from compiler.ast import List
9 from compiler.ast import Module 9 from compiler.ast import Module
10 from compiler.ast import Node 10 from compiler.ast import Node
11 from compiler.ast import Stmt 11 from compiler.ast import Stmt
12 import compiler 12 import compiler
13 import copy 13 import copy
14 import gyp.common 14 import gyp.common
15 import multiprocessing 15 import multiprocessing
16 import optparse 16 import optparse
17 import os.path 17 import os.path
18 import re 18 import re
19 import shlex 19 import shlex
20 import signal
20 import subprocess 21 import subprocess
21 import sys 22 import sys
22 import threading 23 import threading
23 import time 24 import time
24 from gyp.common import GypError 25 from gyp.common import GypError
25 26
26 27
27 # A list of types that are treated as linkable. 28 # A list of types that are treated as linkable.
28 linkable_types = ['executable', 'shared_library', 'loadable_module'] 29 linkable_types = ['executable', 'shared_library', 'loadable_module']
29 30
(...skipping 419 matching lines...) Expand 10 before | Expand all | Expand 10 after
def CallLoadTargetBuildFile(global_flags,
                            build_file_path, data,
                            aux_data, variables,
                            includes, depth, check):
  """Wrapper around LoadTargetBuildFile for parallel processing.

  This wrapper is used when LoadTargetBuildFile is executed in
  a worker process.

  Arguments mirror LoadTargetBuildFile, except for:
    global_flags: dict of module-level state from the parent process that
        is replicated into this worker's globals() so the worker behaves
        the same as the parent would.

  Returns a picklable (build_file_path, data_out, aux_data_out,
  dependencies) tuple on success, or None on any failure.  The None is
  detected by LoadTargetBuildFileCallback in the parent process, which
  then aborts the whole run.
  """

  try:
    # SIGINT is handled by the parent; ignoring it here keeps a Ctrl-C
    # from producing a traceback spew from every worker process.
    signal.signal(signal.SIGINT, signal.SIG_IGN)

    # Apply globals so that the worker process behaves the same.
    for key, value in global_flags.items():
      globals()[key] = value

    # Save the keys so we can return only data that changed.
    data_keys = set(data)
    aux_data_keys = set(aux_data)

    result = LoadTargetBuildFile(build_file_path, data,
                                 aux_data, variables,
                                 includes, depth, check, False)
    if not result:
      return result

    (build_file_path, dependencies) = result

    # Only ship back keys that were added by this load; the parent
    # already has everything that was in |data| when we started.
    # 'target_build_files' is excluded because the parent tracks it
    # itself via the returned build_file_path.
    data_out = {}
    for key in data:
      if key == 'target_build_files':
        continue
      if key not in data_keys:
        data_out[key] = data[key]
    aux_data_out = {}
    for key in aux_data:
      if key not in aux_data_keys:
        aux_data_out[key] = aux_data[key]

    # This gets serialized and sent back to the main process via a pipe.
    # It's handled in LoadTargetBuildFileCallback.
    return (build_file_path,
            data_out,
            aux_data_out,
            dependencies)
  except Exception as e:
    # Print the full traceback, not just the exception message: this
    # runs in a child process, so it is the only diagnostic the user
    # will ever see for the failure.  Errors belong on stderr.
    import traceback
    sys.stderr.write('Exception: %s\n' % e)
    traceback.print_exc()
    return None
499
500
class ParallelProcessingError(Exception):
  """Error encountered while processing input files in parallel.

  NOTE(review): not raised anywhere in the code visible here --
  presumably reserved for callers wanting a typed error instead of the
  bare error flag on ParallelState; confirm before removing.
  """
492 503
493 504
class ParallelState(object):
  """Bookkeeping for loading input files across multiple processes.

  When build files are loaded in parallel, one instance of this class
  tracks state while jobs are farmed out and their results collected.
  It lives in a global so the pool callback can reach it.
  """

  def __init__(self):
    # The multiprocessing pool (created lazily by the scheduling loop).
    self.pool = None
    # Condition variable guarding this object; also used to wake the
    # main loop whenever new results or dependencies may be available.
    self.condition = None
    # The "data" dict handed to LoadTargetBuildFileParallel.
    self.data = None
    # The "aux_data" dict handed to LoadTargetBuildFileParallel.
    self.aux_data = None
    # Count of outstanding parallel calls; drops by one per response.
    self.pending = 0
    # Every build file ever scheduled, so none is scheduled twice.
    self.scheduled = set()
    # Dependency build file paths not yet handed to the pool.
    self.dependencies = []
    # Set to True when a child process reports a failure.
    self.error = False

  def LoadTargetBuildFileCallback(self, result):
    """Handle the results of running LoadTargetBuildFile in another process.
    """
    with self.condition:
      if not result:
        # A falsy result means the child failed; flag the error and
        # wake the main loop so it can abort.  |pending| is deliberately
        # left untouched -- the run is over anyway.
        self.error = True
        self.condition.notify()
        return
      (build_file, new_data, new_aux_data, new_deps) = result
      self.data['target_build_files'].add(build_file)
      self.data.update(new_data)
      self.aux_data.update(new_aux_data)
      # Queue any dependencies we have not already scheduled.
      for dep in new_deps:
        if dep not in self.scheduled:
          self.scheduled.add(dep)
          self.dependencies.append(dep)
      self.pending -= 1
      self.condition.notify()
538 556
539 557
def LoadTargetBuildFileParallel(build_file_path, data, aux_data,
                                variables, includes, depth, check):
  """Load build_file_path and all of its dependencies using a process pool.

  Parallel counterpart of LoadTargetBuildFile: schedules each build file
  on a multiprocessing pool via CallLoadTargetBuildFile and merges the
  results back into |data|/|aux_data| through LoadTargetBuildFileCallback.
  Exits the process (non-zero) if any child reports an error.
  """
  parallel_state = ParallelState()
  parallel_state.condition = threading.Condition()
  parallel_state.dependencies = [build_file_path]
  parallel_state.scheduled = set([build_file_path])
  parallel_state.pending = 0
  parallel_state.data = data
  parallel_state.aux_data = aux_data

  try:
    parallel_state.condition.acquire()
    while parallel_state.dependencies or parallel_state.pending:
      if parallel_state.error:
        break
      if not parallel_state.dependencies:
        # Nothing to schedule; wait for a callback to deliver results
        # or new dependencies.
        parallel_state.condition.wait()
        continue

      dependency = parallel_state.dependencies.pop()

      parallel_state.pending += 1
      data_in = {}
      data_in['target_build_files'] = data['target_build_files']
      aux_data_in = {}
      global_flags = {
        'path_sections': globals()['path_sections'],
        'non_configuration_keys': globals()['non_configuration_keys'],
        'absolute_build_file_paths': globals()['absolute_build_file_paths'],
        'multiple_toolsets': globals()['multiple_toolsets']}

      if not parallel_state.pool:
        parallel_state.pool = multiprocessing.Pool(8)
      parallel_state.pool.apply_async(
          CallLoadTargetBuildFile,
          args = (global_flags, dependency,
                  data_in, aux_data_in,
                  variables, includes, depth, check),
          callback = parallel_state.LoadTargetBuildFileCallback)
  except KeyboardInterrupt as e:
    # The pool is created lazily, so it may still be None if the
    # interrupt arrived before the first job was scheduled.
    if parallel_state.pool:
      parallel_state.pool.terminate()
    raise e

  parallel_state.condition.release()
  if parallel_state.error:
    # Kill any workers that are still running, then exit with a
    # non-zero status so callers (and the shell) see the failure.
    if parallel_state.pool:
      parallel_state.pool.terminate()
    sys.exit(1)
578 604
579 605
580 # Look for the bracket that matches the first bracket seen in a 606 # Look for the bracket that matches the first bracket seen in a
581 # string, and return the start and end as a tuple. For example, if 607 # string, and return the start and end as a tuple. For example, if
582 # the input is something like "<(foo <(bar)) blah", then it would 608 # the input is something like "<(foo <(bar)) blah", then it would
583 # return (1, 13), indicating the entire string except for the leading 609 # return (1, 13), indicating the entire string except for the leading
584 # "<" and trailing " blah". 610 # "<" and trailing " blah".
585 def FindEnclosingBracketGroup(input): 611 def FindEnclosingBracketGroup(input):
586 brackets = { '}': '{', 612 brackets = { '}': '{',
587 ']': '[', 613 ']': '[',
(...skipping 2040 matching lines...) Expand 10 before | Expand all | Expand 10 after
2628 ValidateRunAsInTarget(target, target_dict, build_file) 2654 ValidateRunAsInTarget(target, target_dict, build_file)
2629 ValidateActionsInTarget(target, target_dict, build_file) 2655 ValidateActionsInTarget(target, target_dict, build_file)
2630 2656
2631 # Generators might not expect ints. Turn them into strs. 2657 # Generators might not expect ints. Turn them into strs.
2632 TurnIntIntoStrInDict(data) 2658 TurnIntIntoStrInDict(data)
2633 2659
2634 # TODO(mark): Return |data| for now because the generator needs a list of 2660 # TODO(mark): Return |data| for now because the generator needs a list of
2635 # build files that came in. In the future, maybe it should just accept 2661 # build files that came in. In the future, maybe it should just accept
2636 # a list, and not the whole data dict. 2662 # a list, and not the whole data dict.
2637 return [flat_list, targets, data] 2663 return [flat_list, targets, data]
OLDNEW
« no previous file with comments | « pylib/gyp/generator/ninja.py ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698