Chromium Code Reviews

Unified Diff: pylib/gyp/input.py

Issue 773883002: Cache data for included files in the multiprocess load codepath (Closed) Base URL: http://gyp.googlecode.com/svn/trunk
Patch Set: Created 6 years ago
 # Copyright (c) 2012 Google Inc. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.

 from compiler.ast import Const
 from compiler.ast import Dict
 from compiler.ast import Discard
 from compiler.ast import List
 from compiler.ast import Module
 from compiler.ast import Node
(...skipping 344 matching lines...)
   if depth:
     # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
     # temporary measure. This should really be addressed by keeping all paths
     # in POSIX until actual project generation.
     d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path))
     if d == '':
       variables['DEPTH'] = '.'
     else:
       variables['DEPTH'] = d.replace('\\', '/')

-  if build_file_path in data['target_build_files']:
-    # Already loaded.
-    return False
-  data['target_build_files'].add(build_file_path)
+  # The 'target_build_files' key is only set when loading target build files in
+  # the non-parallel code path, where LoadTargetBuildFile is called
+  # recursively. In the parallel code path, we don't need to check whether the
+  # |build_file_path| has already been loaded, because the 'scheduled' set in
+  # ParallelState guarantees that we never load the same |build_file_path|
+  # twice.
+  if 'target_build_files' in data:
+    if build_file_path in data['target_build_files']:
+      # Already loaded.
+      return False
+    data['target_build_files'].add(build_file_path)

   gyp.DebugOutput(gyp.DEBUG_INCLUDES,
                   "Loading Target Build File '%s'", build_file_path)

   build_file_data = LoadOneBuildFile(build_file_path, data, aux_data,
                                      includes, True, check)

   # Store DEPTH for later use in generators.
   build_file_data['_DEPTH'] = depth

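As an aside, the dedup contract described in the new comment reduces to the sketch below. This is an illustration only; the names (load_serial, schedule_parallel, find_deps, to_load) are hypothetical, not gyp's API.

def load_serial(path, data, find_deps):
  # Non-parallel path: dedup at load time via the shared
  # 'target_build_files' set, since the function recurses freely.
  if path in data['target_build_files']:
    return  # Already loaded.
  data['target_build_files'].add(path)
  for dep in find_deps(path):
    load_serial(dep, data, find_deps)

def schedule_parallel(path, scheduled, to_load):
  # Parallel path: dedup at schedule time. Workers can skip the
  # 'target_build_files' check because each path is scheduled at most once.
  if path not in scheduled:
    scheduled.add(path)
    to_load.append(path)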
(...skipping 70 matching lines...)
       try:
         LoadTargetBuildFile(dependency, data, aux_data, variables,
                             includes, depth, check, load_dependencies)
       except Exception, e:
         gyp.common.ExceptionAppend(
           e, 'while loading dependencies of %s' % build_file_path)
         raise
   else:
     return (build_file_path, dependencies)

+per_process_data = {}

scottmg 2014/12/02 22:50:52: please move these up to the top

+per_process_aux_data = {}

-def CallLoadTargetBuildFile(global_flags,
-                            build_file_path, data,
-                            aux_data, variables,
-                            includes, depth, check,
-                            generator_input_info):
+def CallLoadTargetBuildFile(global_flags,
+                            build_file_path, variables,
+                            includes, depth, check,
+                            generator_input_info):
465 """Wrapper around LoadTargetBuildFile for parallel processing. 472 """Wrapper around LoadTargetBuildFile for parallel processing.
466 473
467 This wrapper is used when LoadTargetBuildFile is executed in 474 This wrapper is used when LoadTargetBuildFile is executed in
468 a worker process. 475 a worker process.
469 """ 476 """
470 477
scottmg 2014/12/02 22:50:52: can you add global per_process_data, global per_process_aux_data
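A note on the request above: the 'global' statements would be for readability rather than correctness, since Python only requires 'global' when a function rebinds a module-level name; mutating the dicts in place works without it. A minimal illustration:

cache = {}  # module-level, like per_process_data

def mutates_in_place():
  cache['k'] = 1   # Fine without 'global': mutates the existing dict.

def rebinds_locally():
  cache = {}       # New local name; the module-level dict is untouched.

def rebinds_module_level():
  global cache
  cache = {}       # Rebinds the module-level name.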
   try:
     signal.signal(signal.SIGINT, signal.SIG_IGN)

     # Apply globals so that the worker process behaves the same.
     for key, value in global_flags.iteritems():
       globals()[key] = value

-    # Save the keys so we can return data that changed.
-    data_keys = set(data)
-    aux_data_keys = set(aux_data)
-
     SetGeneratorGlobals(generator_input_info)
-    result = LoadTargetBuildFile(build_file_path, data,
-                                 aux_data, variables,
-                                 includes, depth, check, False)
+    result = LoadTargetBuildFile(build_file_path, per_process_data,
+                                 per_process_aux_data, variables,
+                                 includes, depth, check, False)
     if not result:
       return result

     (build_file_path, dependencies) = result

-    data_out = {}
-    for key in data:
-      if key == 'target_build_files':
-        continue
-      if key not in data_keys:
-        data_out[key] = data[key]
-    aux_data_out = {}
-    for key in aux_data:
-      if key not in aux_data_keys:
-        aux_data_out[key] = aux_data[key]
+    # We can safely delete the build_file_data from per_process_data because it
+    # will never be referenced by this process again, so we don't need to keep
+    # it in the cache.
+    build_file_data = per_process_data[build_file_path]
+    del per_process_data[build_file_path]

scottmg 2014/12/02 22:50:52: build_file_data = per_process_data.pop(build_file_path)

     # This gets serialized and sent back to the main process via a pipe.
     # It's handled in LoadTargetBuildFileCallback.
     return (build_file_path,
-            data_out,
-            aux_data_out,
+            build_file_data,
             dependencies)
   except GypError, e:
     sys.stderr.write("gyp: %s\n" % e)
     return None
   except Exception, e:
     print >>sys.stderr, 'Exception:', e
     print >>sys.stderr, traceback.format_exc()
     return None
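Spelling out the one-liner suggested in the review comment above: dict.pop does the lookup and the removal in a single call, so the pair of statements in the patch

    build_file_data = per_process_data[build_file_path]
    del per_process_data[build_file_path]

is equivalent to

    build_file_data = per_process_data.pop(build_file_path)

Both forms raise KeyError if the key is absent.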
(...skipping 10 matching lines...)
527 """ 524 """
528 525
529 def __init__(self): 526 def __init__(self):
530 # The multiprocessing pool. 527 # The multiprocessing pool.
531 self.pool = None 528 self.pool = None
532 # The condition variable used to protect this object and notify 529 # The condition variable used to protect this object and notify
533 # the main loop when there might be more data to process. 530 # the main loop when there might be more data to process.
534 self.condition = None 531 self.condition = None
535 # The "data" dict that was passed to LoadTargetBuildFileParallel 532 # The "data" dict that was passed to LoadTargetBuildFileParallel
536 self.data = None 533 self.data = None
537 # The "aux_data" dict that was passed to LoadTargetBuildFileParallel
538 self.aux_data = None
     # The number of parallel calls outstanding; decremented when a response
     # was received.
     self.pending = 0
     # The set of all build files that have been scheduled, so we don't
     # schedule the same one twice.
     self.scheduled = set()
     # A list of dependency build file paths that haven't been scheduled yet.
     self.dependencies = []
     # Flag to indicate if there was an error in a child process.
     self.error = False

   def LoadTargetBuildFileCallback(self, result):
     """Handle the results of running LoadTargetBuildFile in another process.
     """
     self.condition.acquire()
     if not result:
       self.error = True
       self.condition.notify()
       self.condition.release()
       return
-    (build_file_path0, data0, aux_data0, dependencies0) = result
+    (build_file_path0, build_file_data0, dependencies0) = result
+    self.data[build_file_path0] = build_file_data0
     self.data['target_build_files'].add(build_file_path0)
-    for key in data0:
-      self.data[key] = data0[key]
-    for key in aux_data0:
-      self.aux_data[key] = aux_data0[key]
     for new_dependency in dependencies0:
       if new_dependency not in self.scheduled:
         self.scheduled.add(new_dependency)
         self.dependencies.append(new_dependency)
     self.pending -= 1
     self.condition.notify()
     self.condition.release()

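The callback above runs on the pool's result-handler thread, which is why every access is bracketed by condition.acquire()/release() and followed by notify(). Reduced to its essentials, the producer/consumer handshake looks like the sketch below (gyp-independent; the names work, pending, and submit are illustrative):

import threading

condition = threading.Condition()
work = []       # queued items, protected by |condition|
pending = [0]   # number of outstanding async calls

def on_result(item):
  # Runs on the pool's callback thread.
  with condition:
    work.append(item)
    pending[0] -= 1
    condition.notify()

def main_loop(submit):
  with condition:
    while work or pending[0]:
      if not work:
        condition.wait()    # Sleeps until on_result() notifies.
        continue
      pending[0] += 1
      submit(work.pop())    # Schedules async work; on_result() fires later.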
 def LoadTargetBuildFilesParallel(build_files, data, aux_data,
(...skipping 13 matching lines...)
     while parallel_state.dependencies or parallel_state.pending:
       if parallel_state.error:
         break
       if not parallel_state.dependencies:
         parallel_state.condition.wait()
         continue

       dependency = parallel_state.dependencies.pop()

       parallel_state.pending += 1
-      data_in = {}
-      data_in['target_build_files'] = data['target_build_files']
-      aux_data_in = {}
       global_flags = {
         'path_sections': globals()['path_sections'],
         'non_configuration_keys': globals()['non_configuration_keys'],
         'multiple_toolsets': globals()['multiple_toolsets']}

       if not parallel_state.pool:
         parallel_state.pool = multiprocessing.Pool(multiprocessing.cpu_count())
       parallel_state.pool.apply_async(
           CallLoadTargetBuildFile,
           args = (global_flags, dependency,
-                  data_in, aux_data_in,
                   variables, includes, depth, check, generator_input_info),
           callback = parallel_state.LoadTargetBuildFileCallback)
   except KeyboardInterrupt, e:
     parallel_state.pool.terminate()
     raise e

   parallel_state.condition.release()

   parallel_state.pool.close()
   parallel_state.pool.join()
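For reference, the multiprocessing shape used here, stripped to its essentials (the worker function and paths below are made up): apply_async pickles the arguments, runs the function in a child process, pickles the returned tuple back over a pipe, and invokes the callback in the parent. This is why CallLoadTargetBuildFile must return plain picklable data rather than live objects.

import multiprocessing

def load_one(path):
  # Runs in a child process; the return value must be picklable.
  return (path, ['a.gyp', 'b.gyp'])  # (build file, its dependencies)

if __name__ == '__main__':
  results = []
  pool = multiprocessing.Pool(multiprocessing.cpu_count())
  pool.apply_async(load_one, args=('foo.gyp',), callback=results.append)
  pool.close()
  pool.join()
  print results  # [('foo.gyp', ['a.gyp', 'b.gyp'])]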
(...skipping 2235 matching lines...)
     ValidateRunAsInTarget(target, target_dict, build_file)
     ValidateActionsInTarget(target, target_dict, build_file)

   # Generators might not expect ints. Turn them into strs.
   TurnIntIntoStrInDict(data)

   # TODO(mark): Return |data| for now because the generator needs a list of
   # build files that came in. In the future, maybe it should just accept
   # a list, and not the whole data dict.
   return [flat_list, targets, data]