Index: trunk/src/tools/isolate_driver.py
===================================================================
--- trunk/src/tools/isolate_driver.py (revision 263069)
+++ trunk/src/tools/isolate_driver.py (working copy)
@@ -8,16 +8,16 @@
 Creates a wrapping .isolate which 'includes' the original one, that can be
 consumed by tools/swarming_client/isolate.py. Path variables are determined
 based on the current working directory. The relative_cwd in the .isolated file
-is determined based on the .isolate file that declare the 'command' variable to
-be used so the wrapping .isolate doesn't affect this value.
+is determined based on *the .isolate file that declares the 'command' variable
+to be used* so the wrapping .isolate doesn't affect this value.
-This script loads build.ninja and processes it to determine all the executables
-referenced by the isolated target. It adds them in the wrapping .isolate file.
+It packages all the dynamic libraries found in the build directory into this
+wrapping .isolate. This is inefficient and non-deterministic. In the very near
+future, it will parse build.ninja, locate the root target and find all the
+dynamic libraries that are marked as dependencies of this target.
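+
+For illustration, the generated wrapping .isolate looks roughly like this
+(paths and file names below are hypothetical; the tracked key is
+isolate_format.KEY_TRACKED):
+
+  {
+    'includes': [
+      '../../base/base_unittests.isolate',
+    ],
+    'variables': {
+      'isolate_dependency_tracked': [
+        '<(PRODUCT_DIR)/lib/libbase.so',
+      ],
+    },
+  }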
""" |
-import StringIO |
import glob |
-import logging |
import os |
import posixpath |
import subprocess |
@@ -33,151 +33,56 @@ |
import isolate_format |
-def load_ninja_recursively(build_dir, ninja_path, build_steps):
-  """Crudely extracts all the subninja and build referenced in ninja_path.
-  In particular, it ignores rule and variable declarations. The goal is to be
-  performant (well, as much as python can be performant) which is currently in
-  the <200ms range for a complete chromium tree. As such the code is laid out
-  for performance instead of readability.
-  """
-  logging.debug('Loading %s', ninja_path)
-  try:
-    with open(os.path.join(build_dir, ninja_path), 'rb') as f:
-      line = None
-      merge_line = ''
-      subninja = []
-      for line in f:
-        line = line.rstrip()
-        if not line:
-          continue
+# Glob pattern for the dynamic libraries to package, keyed by sys.platform.
+DYNAMIC_LIBRARIES = {
+  'darwin': '*.dylib',
+  'linux2': 'lib/*.so',
+  'win32': '*.dll',
+}
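+# For example, on Linux (sys.platform == 'linux2') with a hypothetical
+# build_dir of 'out/Release', get_dynamic_libs() below globs
+# 'out/Release/lib/*.so'.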
-        if line[-1] == '$':
-          # The next line needs to be merged in.
-          merge_line += line[:-1]
-          continue
-        if merge_line:
-          line = merge_line + line
-          merge_line = ''
+def get_dynamic_libs(build_dir):
+  """Finds all the dynamic libs to map.
-        statement = line[:line.find(' ')]
-        if statement == 'build':
-          # Save the dependency list as a raw string. Only the lines needed will
-          # be processed with raw_build_to_deps(). This saves a good 70ms of
-          # processing time.
-          build_target, dependencies = line[6:].split(': ', 1)
-          # Interestingly, trying to be smart and only saving the build steps
-          # with the intended extensions ('', '.stamp', '.so') slows down
-          # parsing even if 90% of the build rules can be skipped.
-          # On Windows, a single step may generate two target, so split items
-          # accordingly. It has only been seen for .exe/.exe.pdb combos.
-          for i in build_target.strip().split():
-            build_steps[i] = dependencies
-        elif statement == 'subninja':
-          subninja.append(line[9:])
-  except IOError:
-    print >> sys.stderr, 'Failed to open %s' % ninja_path
-    raise
-
-  total = 1
-  for rel_path in subninja:
+  Returns:
+    list of relative paths, e.g. ['../out/Debug/lib/libuser_prefs.so'].
+  """
+  items = set()
+  root = os.path.join(build_dir, DYNAMIC_LIBRARIES[sys.platform])
+  for i in glob.iglob(root):
     try:
-      # Load each of the files referenced.
-      # TODO(maruel): Skip the files known to not be needed. It saves an aweful
-      # lot of processing time.
-      total += load_ninja_recursively(build_dir, rel_path, build_steps)
+      # Will raise IOError on Windows if another process is writing to this
+      # file.
+      open(i).close()
+      items.add((i, os.stat(i).st_size))
     except IOError:
-      print >> sys.stderr, '... as referenced by %s' % ninja_path
-      raise
-  return total
+      continue
+  # The following sleep value was carefully selected via random guessing. The
+  # goal is to detect files that are still being linked by checking their file
+  # size after a small delay.
+  #
+  # This happens because other build targets can be built simultaneously. For
+  # example, base_unittests.isolated is being processed while the dynamic
+  # libraries for chrome are still being linked.
+  #
+  # TODO(maruel): Obviously, this must go away and be replaced with a proper
+  # ninja parser but we need something now. http://crbug.com/333473
+  time.sleep(10)
-def load_ninja(build_dir):
-  """Loads the tree of .ninja files in build_dir."""
-  build_steps = {}
-  total = load_ninja_recursively(build_dir, 'build.ninja', build_steps)
-  logging.info('Loaded %d ninja files, %d build steps', total, len(build_steps))
-  return build_steps
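+  # Second pass: re-open and re-stat each candidate after the sleep. A file
+  # whose size changed, or that can no longer be opened, is assumed to still
+  # be in the middle of linking and is dropped.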
+  for item in sorted(items):
+    file_name, file_size = item
+    try:
+      open(file_name).close()
+      if os.stat(file_name).st_size != file_size:
+        items.remove(item)
+    except IOError:
+      items.remove(item)
+      continue
+  return [i[0].replace(os.path.sep, '/') for i in items]
-def using_blacklist(item):
-  """Returns True if an item should be analyzed.
-  Ignores many rules that are assumed to not depend on a dynamic library. If
-  the assumption doesn't hold true anymore for a file format, remove it from
-  this list. This is simply an optimization.
-  """
-  IGNORED = (
-      '.a', '.cc', '.css', '.def', '.h', '.html', '.js', '.json', '.manifest',
-      '.o', '.obj', '.pak', '.png', '.pdb', '.strings', '.txt',
-  )
-  # ninja files use native path format.
-  ext = os.path.splitext(item)[1]
-  if ext in IGNORED:
-    return False
-  # Special case Windows, keep .dll.lib but discard .lib.
-  if item.endswith('.dll.lib'):
-    return True
-  if ext == '.lib':
-    return False
-  return item not in ('', '|', '||')
-
-
-def raw_build_to_deps(item):
-  """Converts a raw ninja build statement into the list of interesting
-  dependencies.
-  """
-  # TODO(maruel): Use a whitelist instead? .stamp, .so.TOC, .dylib.TOC,
-  # .dll.lib, .exe and empty.
-  # The first item is the build rule, e.g. 'link', 'cxx', 'phony', etc.
-  return filter(using_blacklist, item.split(' ')[1:])
-
-
-def recurse(target, build_steps, rules_seen):
-  """Recursively returns all the interesting dependencies for root_item."""
-  out = []
-  if rules_seen is None:
-    rules_seen = set()
-  if target in rules_seen:
-    # TODO(maruel): Figure out how it happens.
-    logging.warning('Circular dependency for %s!', target)
-    return []
-  rules_seen.add(target)
-  try:
-    dependencies = raw_build_to_deps(build_steps[target])
-  except KeyError:
-    logging.info('Failed to find a build step to generate: %s', target)
-    return []
-  logging.debug('recurse(%s) -> %s', target, dependencies)
-  for dependency in dependencies:
-    out.append(dependency)
-    dependency_raw_dependencies = build_steps.get(dependency)
-    if dependency_raw_dependencies:
-      for i in raw_build_to_deps(dependency_raw_dependencies):
-        out.extend(recurse(i, build_steps, rules_seen))
-    else:
-      logging.info('Failed to find a build step to generate: %s', dependency)
-  return out
-
-
-def post_process_deps(dependencies):
-  """Processes the dependency list with OS specific rules."""
-  def filter_item(i):
-    if i.endswith('.so.TOC'):
-      # Remove only the suffix .TOC, not the .so!
-      return i[:-4]
-    if i.endswith('.dylib.TOC'):
-      # Remove only the suffix .TOC, not the .dylib!
-      return i[:-4]
-    if i.endswith('.dll.lib'):
-      # Remove only the suffix .lib, not the .dll!
-      return i[:-4]
-    return i
-
-  return map(filter_item, dependencies)
-
-
 def create_wrapper(args, isolate_index, isolated_index):
   """Creates a wrapper .isolate that adds dynamic libs.
@@ -206,24 +111,19 @@
   isolate_relpath = os.path.relpath(
       '.', temp_isolate_dir).replace(os.path.sep, '/')
-  # It's a big assumption here that the name of the isolate file matches the
-  # primary target. Fix accordingly if this doesn't hold true.
-  target = isolate[:-len('.isolate')]
-  build_steps = load_ninja(build_dir)
-  binary_deps = post_process_deps(recurse(target, build_steps, None))
-  logging.debug(
-      'Binary dependencies:%s', ''.join('\n  ' + i for i in binary_deps))
+  # Will look like ['<(PRODUCT_DIR)/lib/libuser_prefs.so'].
+  rebased_libs = [
+      '<(PRODUCT_DIR)/%s' % i[len(build_dir)+1:]
+      for i in get_dynamic_libs(build_dir)
+  ]
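+  # E.g. (hypothetical): 'out/Release/lib/libbase.so' is rebased to
+  # '<(PRODUCT_DIR)/lib/libbase.so'.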
   # Now do actual wrapping .isolate.
-  isolate_dict = {
+  out = {
     'includes': [
       posixpath.join(isolate_relpath, isolate),
     ],
     'variables': {
-      # Will look like ['<(PRODUCT_DIR)/lib/flibuser_prefs.so'].
-      isolate_format.KEY_TRACKED: sorted(
-          '<(PRODUCT_DIR)/%s' % i.replace(os.path.sep, '/')
-          for i in binary_deps),
+      isolate_format.KEY_TRACKED: rebased_libs,
     },
   }
   if not os.path.isdir(temp_isolate_dir):
@@ -231,18 +131,14 @@
   comment = (
       '# Warning: this file was AUTOGENERATED.\n'
      '# DO NOT EDIT.\n')
-  out = StringIO.StringIO()
-  isolate_format.print_all(comment, isolate_dict, out)
-  isolate_content = out.getvalue()
   with open(temp_isolate, 'wb') as f:
-    f.write(isolate_content)
-  logging.info('Added %d dynamic libs', len(binary_deps))
-  logging.debug('%s', isolate_content)
+    isolate_format.print_all(comment, out, f)
+  if '--verbose' in args:
+    print('Added %d dynamic libs' % len(rebased_libs))
   args[isolate_index] = temp_isolate
 def main():
-  logging.basicConfig(level=logging.ERROR, format='%(levelname)7s %(message)s')
   args = sys.argv[1:]
   isolate = None
   isolated = None
@@ -258,7 +154,9 @@
     print >> sys.stderr, 'Internal failure'
     return 1
-  if is_component:
+  # Disabled ('and False') until this is replaced with a proper ninja parser.
+  # http://crbug.com/360223
+  if is_component and False:
     create_wrapper(args, isolate, isolated)
   swarming_client = os.path.join(SRC_DIR, 'tools', 'swarming_client')