| Index: tools/isolate_driver.py
|
| diff --git a/tools/isolate_driver.py b/tools/isolate_driver.py
|
| index bf1103a4df01d79276887cce479228a50a19afe7..4db823e444935fec4f6d95d4951246b6f2cbdd23 100755
|
| --- a/tools/isolate_driver.py
|
| +++ b/tools/isolate_driver.py
|
| @@ -20,21 +20,170 @@
|
| """
|
|
|
| import StringIO
|
| +import glob
|
| import logging
|
| import os
|
| import posixpath
|
| import subprocess
|
| import sys
|
| +import time
|
|
|
| TOOLS_DIR = os.path.dirname(os.path.abspath(__file__))
|
| SWARMING_CLIENT_DIR = os.path.join(TOOLS_DIR, 'swarming_client')
|
| SRC_DIR = os.path.dirname(TOOLS_DIR)
|
|
|
| -import ninja_parser
|
| -
|
| sys.path.insert(0, SWARMING_CLIENT_DIR)
|
|
|
| import isolate_format
|
| +
|
| +
|
| +def load_ninja_recursively(build_dir, ninja_path, build_steps):
|
| + """Crudely extracts all the subninja and build referenced in ninja_path.
|
| +
|
| + In particular, it ignores rule and variable declarations. The goal is to be
|
| + performant (well, as much as Python can be), which currently means under
|
| + 200ms for a complete Chromium tree. As such, the code is laid out for
|
| + performance instead of readability.
|
| + """
|
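| + # For illustration, a hypothetical build.ninja could contain:
|
| + #   subninja obj/base.ninja
|
| + #   build foo.stamp: stamp foo_main
|
| + #   build foo_run: phony foo.stamp
|
| + # which would record {'foo.stamp': 'stamp foo_main',
|
| + # 'foo_run': 'phony foo.stamp'} in build_steps and then also load
|
| + # obj/base.ninja.
|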
| + logging.debug('Loading %s', ninja_path)
|
| + try:
|
| + with open(os.path.join(build_dir, ninja_path), 'rb') as f:
|
| + line = None
|
| + merge_line = ''
|
| + subninja = []
|
| + for line in f:
|
| + line = line.rstrip()
|
| + if not line:
|
| + continue
|
| +
|
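| + # Ninja wraps long lines with a trailing '$', e.g. (hypothetical):
|
| + #   build foo_run: phony $
|
| + #       foo.stamp
|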
| + if line[-1] == '$':
|
| + # The next line needs to be merged in.
|
| + merge_line += line[:-1]
|
| + continue
|
| +
|
| + if merge_line:
|
| + line = merge_line + line
|
| + merge_line = ''
|
| +
|
| + statement = line[:line.find(' ')]
|
| + if statement == 'build':
|
| + # Save the dependency list as a raw string. Only the lines needed will
|
| + # be processed with raw_build_to_deps(). This saves a good 70ms of
|
| + # processing time.
|
| + build_target, dependencies = line[6:].split(': ', 1)
|
| + # Interestingly, trying to be smart and only saving the build steps
|
| + # with the intended extensions ('', '.stamp', '.so') slows down
|
| + # parsing, even though 90% of the build rules can be skipped.
|
| + # On Windows, a single step may generate two targets, so split items
|
| + # accordingly. So far this has only been seen for .exe/.exe.pdb combos.
|
| + for i in build_target.strip().split():
|
| + build_steps[i] = dependencies
|
| + elif statement == 'subninja':
|
| + subninja.append(line[9:])
|
| + except IOError:
|
| + print >> sys.stderr, 'Failed to open %s' % ninja_path
|
| + raise
|
| +
|
| + total = 1
|
| + for rel_path in subninja:
|
| + try:
|
| + # Load each of the files referenced.
|
| + # TODO(maruel): Skip the files known to not be needed. It saves an awful
|
| + # lot of processing time.
|
| + total += load_ninja_recursively(build_dir, rel_path, build_steps)
|
| + except IOError:
|
| + print >> sys.stderr, '... as referenced by %s' % ninja_path
|
| + raise
|
| + return total
|
| +
|
| +
|
| +def load_ninja(build_dir):
|
| + """Loads the tree of .ninja files in build_dir."""
|
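| + # A minimal usage sketch, assuming a hypothetical output directory:
|
| + #   build_steps = load_ninja('out/Release')
|
| + # returns a dict like {'foo_run': 'phony foo.stamp', ...}.
|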
| + build_steps = {}
|
| + total = load_ninja_recursively(build_dir, 'build.ninja', build_steps)
|
| + logging.info('Loaded %d ninja files, %d build steps', total, len(build_steps))
|
| + return build_steps
|
| +
|
| +
|
| +def using_blacklist(item):
|
| + """Returns True if an item should be analyzed.
|
| +
|
| + Ignores many file extensions that are assumed not to depend on a dynamic
|
| + library. If the assumption no longer holds for an extension, remove it from
|
| + this list. This is simply an optimization.
|
| + """
|
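| + # For example (hypothetical items): 'obj/foo.o' and 'chrome.lib' are
|
| + # rejected, while 'libbase.so' and 'chrome.dll.lib' are kept.
|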
| + IGNORED = (
|
| + '.a', '.cc', '.css', '.def', '.h', '.html', '.js', '.json', '.manifest',
|
| + '.o', '.obj', '.pak', '.png', '.pdb', '.strings', '.txt',
|
| + )
|
| + # ninja files use native path format.
|
| + ext = os.path.splitext(item)[1]
|
| + if ext in IGNORED:
|
| + return False
|
| + # Special case for Windows: keep .dll.lib but discard .lib.
|
| + if item.endswith('.dll.lib'):
|
| + return True
|
| + if ext == '.lib':
|
| + return False
|
| + return item not in ('', '|', '||')
|
| +
|
| +
|
| +def raw_build_to_deps(item):
|
| + """Converts a raw ninja build statement into the list of interesting
|
| + dependencies.
|
| + """
|
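| + # E.g. the raw string 'link obj/foo.o libbar.so || obj/baz.stamp'
|
| + # (hypothetical) yields ['libbar.so', 'obj/baz.stamp'].
|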
| + # TODO(maruel): Use a whitelist instead? .stamp, .so.TOC, .dylib.TOC,
|
| + # .dll.lib, .exe and empty.
|
| + # The first item is the build rule, e.g. 'link', 'cxx', 'phony', etc.
|
| + return filter(using_blacklist, item.split(' ')[1:])
|
| +
|
| +
|
| +def recurse(target, build_steps, rules_seen):
|
| + """Recursively returns all the interesting dependencies for root_item."""
|
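| + # E.g. recurse('foo_run', build_steps, None) walks build_steps and
|
| + # returns every interesting transitive dependency of the hypothetical
|
| + # target 'foo_run', e.g. ['foo.stamp', 'libbase.so'].
|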
| + out = []
|
| + if rules_seen is None:
|
| + rules_seen = set()
|
| + if target in rules_seen:
|
| + # TODO(maruel): Figure out how it happens.
|
| + logging.warning('Circular dependency for %s!', target)
|
| + return []
|
| + rules_seen.add(target)
|
| + try:
|
| + dependencies = raw_build_to_deps(build_steps[target])
|
| + except KeyError:
|
| + logging.info('Failed to find a build step to generate: %s', target)
|
| + return []
|
| + logging.debug('recurse(%s) -> %s', target, dependencies)
|
| + for dependency in dependencies:
|
| + out.append(dependency)
|
| + dependency_raw_dependencies = build_steps.get(dependency)
|
| + if dependency_raw_dependencies:
|
| + for i in raw_build_to_deps(dependency_raw_dependencies):
|
| + out.extend(recurse(i, build_steps, rules_seen))
|
| + else:
|
| + logging.info('Failed to find a build step to generate: %s', dependency)
|
| + return out
|
| +
|
| +
|
| +def post_process_deps(build_dir, dependencies):
|
| + """Processes the dependency list with OS specific rules."""
|
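| + # E.g. (hypothetical list) ['libbase.so.TOC', 'chrome.dll.lib', 'foo']
|
| + # becomes ['libbase.so', 'chrome.dll', 'foo'], then entries that are not
|
| + # executable files under build_dir are dropped.
|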
| + def filter_item(i):
|
| + if i.endswith('.so.TOC'):
|
| + # Remove only the suffix .TOC, not the .so!
|
| + return i[:-4]
|
| + if i.endswith('.dylib.TOC'):
|
| + # Remove only the suffix .TOC, not the .dylib!
|
| + return i[:-4]
|
| + if i.endswith('.dll.lib'):
|
| + # Remove only the suffix .lib, not the .dll!
|
| + return i[:-4]
|
| + return i
|
| +
|
| + # Check for execute access. This gets rid of all the phony rules.
|
| + return [
|
| + i for i in map(filter_item, dependencies)
|
| + if os.access(os.path.join(build_dir, i), os.X_OK)
|
| + ]
|
|
|
|
|
| def create_wrapper(args, isolate_index, isolated_index):
|
| @@ -69,9 +218,8 @@
|
| # primary target '_run'. Fix accordingly if this doesn't hold true, e.g.
|
| # complain to maruel@.
|
| target = isolate[:-len('.isolate')] + '_run'
|
| - build_steps = ninja_parser.load_ninja(build_dir)
|
| - binary_deps = ninja_parser.post_process_deps(
|
| - build_dir, ninja_parser.recurse(build_dir, target, build_steps))
|
| + build_steps = load_ninja(build_dir)
|
| + binary_deps = post_process_deps(build_dir, recurse(target, build_steps, None))
|
| logging.debug(
|
| 'Binary dependencies:%s', ''.join('\n ' + i for i in binary_deps))
|
|
|
| @@ -107,16 +255,20 @@
|
| args = sys.argv[1:]
|
| isolate = None
|
| isolated = None
|
| + is_component = False
|
| for i, arg in enumerate(args):
|
| if arg == '--isolate':
|
| isolate = i + 1
|
| if arg == '--isolated':
|
| isolated = i + 1
|
| + if arg == 'component=shared_library':
|
| + is_component = True
|
| if isolate is None or isolated is None:
|
| print >> sys.stderr, 'Internal failure'
|
| return 1
|
|
|
| - create_wrapper(args, isolate, isolated)
|
| + if is_component:
|
| + create_wrapper(args, isolate, isolated)
|
|
|
| swarming_client = os.path.join(SRC_DIR, 'tools', 'swarming_client')
|
| sys.stdout.flush()
|
|
|