OLD | NEW |
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 # Copyright 2015 the V8 project authors. All rights reserved. | 2 # Copyright 2015 the V8 project authors. All rights reserved. |
| 3 # Copyright 2014 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 4 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 5 # found in the LICENSE file. |
5 | 6 |
6 """Adaptor script called through build/isolate.gypi. | 7 """Adaptor script called through build/isolate.gypi. |
7 | 8 |
8 Slimmed down version of chromium's isolate driver that doesn't process dynamic | 9 Creates a wrapping .isolate which 'includes' the original one, that can be |
9 dependencies. | 10 consumed by tools/swarming_client/isolate.py. Path variables are determined |
| 11 based on the current working directory. The relative_cwd in the .isolated file |
| 12 is determined based on the .isolate file that declares the 'command' variable to |
| 13 be used so the wrapping .isolate doesn't affect this value. |
| 14 |
| 15 This script loads build.ninja and processes it to determine all the executables |
| 16 referenced by the isolated target. It adds them to the wrapping .isolate file. |
| 17 |
| 18 WARNING: The target to use for build.ninja analysis is the base name of the |
| 19 .isolate file plus '_run'. For example, 'foo_test.isolate' would have the target |
| 20 'foo_test_run' analysed. |
10 """ | 21 """ |
11 | 22 |
| 23 import errno |
| 24 import glob |
12 import json | 25 import json |
13 import logging | 26 import logging |
14 import os | 27 import os |
| 28 import posixpath |
| 29 import StringIO |
15 import subprocess | 30 import subprocess |
16 import sys | 31 import sys |
| 32 import time |
17 | 33 |
18 TOOLS_DIR = os.path.dirname(os.path.abspath(__file__)) | 34 TOOLS_DIR = os.path.dirname(os.path.abspath(__file__)) |
| 35 SWARMING_CLIENT_DIR = os.path.join(TOOLS_DIR, 'swarming_client') |
| 36 SRC_DIR = os.path.dirname(TOOLS_DIR) |
| 37 |
| 38 sys.path.insert(0, SWARMING_CLIENT_DIR) |
| 39 |
| 40 import isolate_format |
| 41 |
| 42 |
| 43 def load_ninja_recursively(build_dir, ninja_path, build_steps): |
| 44 """Crudely extracts all the subninja and build statements referenced in ninja_path. |
| 45 |
| 46 In particular, it ignores rule and variable declarations. The goal is to be |
| 47 performant (well, as much as python can be performant), which is currently in |
| 48 the <200ms range for a complete chromium tree. As such the code is laid out |
| 49 for performance instead of readability. |
| 50 """ |
| 51 logging.debug('Loading %s', ninja_path) |
| 52 try: |
| 53 with open(os.path.join(build_dir, ninja_path), 'rb') as f: |
| 54 line = None |
| 55 merge_line = '' |
| 56 subninja = [] |
| 57 for line in f: |
| 58 line = line.rstrip() |
| 59 if not line: |
| 60 continue |
| 61 |
| 62 if line[-1] == '$': |
| 63 # The next line needs to be merged in. |
| 64 merge_line += line[:-1] |
| 65 continue |
| 66 |
| 67 if merge_line: |
| 68 line = merge_line + line |
| 69 merge_line = '' |
| 70 |
| 71 statement = line[:line.find(' ')] |
| 72 if statement == 'build': |
| 73 # Save the dependency list as a raw string. Only the lines needed will |
| 74 # be processed with raw_build_to_deps(). This saves a good 70ms of |
| 75 # processing time. |
| 76 build_target, dependencies = line[6:].split(': ', 1) |
| 77 # Interestingly, trying to be smart and only saving the build steps |
| 78 # with the intended extensions ('', '.stamp', '.so') slows down |
| 79 # parsing even if 90% of the build rules can be skipped. |
| 80 # On Windows, a single step may generate two targets, so split items |
| 81 # accordingly. It has only been seen for .exe/.exe.pdb combos. |
| 82 for i in build_target.strip().split(): |
| 83 build_steps[i] = dependencies |
| 84 elif statement == 'subninja': |
| 85 subninja.append(line[9:]) |
| 86 except IOError: |
| 87 print >> sys.stderr, 'Failed to open %s' % ninja_path |
| 88 raise |
| 89 |
| 90 total = 1 |
| 91 for rel_path in subninja: |
| 92 try: |
| 93 # Load each of the files referenced. |
| 94 # TODO(maruel): Skip the files known to not be needed. It saves an awful |
| 95 # lot of processing time. |
| 96 total += load_ninja_recursively(build_dir, rel_path, build_steps) |
| 97 except IOError: |
| 98 print >> sys.stderr, '... as referenced by %s' % ninja_path |
| 99 raise |
| 100 return total |
| 101 |
| 102 |
| 103 def load_ninja(build_dir): |
| 104 """Loads the tree of .ninja files in build_dir.""" |
| 105 build_steps = {} |
| 106 total = load_ninja_recursively(build_dir, 'build.ninja', build_steps) |
| 107 logging.info('Loaded %d ninja files, %d build steps', total, len(build_steps)) |
| 108 return build_steps |
| 109 |
| 110 |
| 111 def using_blacklist(item): |
| 112 """Returns True if an item should be analyzed. |
| 113 |
| 114 Ignores many rules that are assumed to not depend on a dynamic library. If |
| 115 the assumption doesn't hold true anymore for a file format, remove it from |
| 116 this list. This is simply an optimization. |
| 117 """ |
| 118 # *.json is ignored below; *.isolated.gen.json is an exception, as it is produced |
| 119 # by isolate_driver.py in 'test_isolation_mode==prepare'. |
| 120 if item.endswith('.isolated.gen.json'): |
| 121 return True |
| 122 IGNORED = ( |
| 123 '.a', '.cc', '.css', '.dat', '.def', '.frag', '.h', '.html', '.isolate', |
| 124 '.js', '.json', '.manifest', '.o', '.obj', '.pak', '.png', '.pdb', '.py', |
| 125 '.strings', '.test', '.txt', '.vert', |
| 126 ) |
| 127 # ninja files use native path format. |
| 128 ext = os.path.splitext(item)[1] |
| 129 if ext in IGNORED: |
| 130 return False |
| 131 # Special case Windows, keep .dll.lib but discard .lib. |
| 132 if item.endswith('.dll.lib'): |
| 133 return True |
| 134 if ext == '.lib': |
| 135 return False |
| 136 return item not in ('', '|', '||') |
| 137 |
| 138 |
| 139 def raw_build_to_deps(item): |
| 140 """Converts a raw ninja build statement into the list of interesting |
| 141 dependencies. |
| 142 """ |
| 143 # TODO(maruel): Use a whitelist instead? .stamp, .so.TOC, .dylib.TOC, |
| 144 # .dll.lib, .exe and empty. |
| 145 # The first item is the build rule, e.g. 'link', 'cxx', 'phony', etc. |
| 146 return filter(using_blacklist, item.split(' ')[1:]) |
| 147 |
| 148 |
| 149 def collect_deps(target, build_steps, dependencies_added, rules_seen): |
| 150 """Recursively adds all the interesting dependencies for |target| |
| 151 into |dependencies_added|. |
| 152 """ |
| 153 if rules_seen is None: |
| 154 rules_seen = set() |
| 155 if target in rules_seen: |
| 156 # TODO(maruel): Figure out how it happens. |
| 157 logging.warning('Circular dependency for %s!', target) |
| 158 return |
| 159 rules_seen.add(target) |
| 160 try: |
| 161 dependencies = raw_build_to_deps(build_steps[target]) |
| 162 except KeyError: |
| 163 logging.info('Failed to find a build step to generate: %s', target) |
| 164 return |
| 165 logging.debug('collect_deps(%s) -> %s', target, dependencies) |
| 166 for dependency in dependencies: |
| 167 dependencies_added.add(dependency) |
| 168 collect_deps(dependency, build_steps, dependencies_added, rules_seen) |
| 169 |
| 170 |
| 171 def post_process_deps(build_dir, dependencies): |
| 172 """Processes the dependency list with OS specific rules.""" |
| 173 def filter_item(i): |
| 174 if i.endswith('.so.TOC'): |
| 175 # Remove only the suffix .TOC, not the .so! |
| 176 return i[:-4] |
| 177 if i.endswith('.dylib.TOC'): |
| 178 # Remove only the suffix .TOC, not the .dylib! |
| 179 return i[:-4] |
| 180 if i.endswith('.dll.lib'): |
| 181 # Remove only the suffix .lib, not the .dll! |
| 182 return i[:-4] |
| 183 return i |
| 184 |
| 185 def is_exe(i): |
| 186 # This script is only for adding new binaries that are created as part of |
| 187 # the component build. |
| 188 ext = os.path.splitext(i)[1] |
| 189 # On POSIX, executables have no extension. |
| 190 if ext not in ('', '.dll', '.dylib', '.exe', '.nexe', '.so'): |
| 191 return False |
| 192 if os.path.isabs(i): |
| 193 # In some rare cases, there's a dependency set explicitly on files outside |
| 194 # the checkout. |
| 195 return False |
| 196 |
| 197 # Check for execute access and strip directories. This gets rid of all the |
| 198 # phony rules. |
| 199 p = os.path.join(build_dir, i) |
| 200 return os.access(p, os.X_OK) and not os.path.isdir(p) |
| 201 |
| 202 return filter(is_exe, map(filter_item, dependencies)) |
| 203 |
| 204 |
| 205 def create_wrapper(args, isolate_index, isolated_index): |
| 206 """Creates a wrapper .isolate that adds dynamic libs. |
| 207 |
| 208 The original .isolate is not modified. |
| 209 """ |
| 210 cwd = os.getcwd() |
| 211 isolate = args[isolate_index] |
| 212 # The code assumes the .isolate file is always specified path-less in cwd. Fix |
| 213 # if this assumption doesn't hold true. |
| 214 assert os.path.basename(isolate) == isolate, isolate |
| 215 |
| 216 # This will look like ../out/Debug. This is based against cwd. Note that this |
| 217 # must equal the value provided as PRODUCT_DIR. |
| 218 build_dir = os.path.dirname(args[isolated_index]) |
| 219 |
| 220 # This will look like chrome/unit_tests.isolate. It is based against SRC_DIR. |
| 221 # It's used to calculate temp_isolate. |
| 222 src_isolate = os.path.relpath(os.path.join(cwd, isolate), SRC_DIR) |
| 223 |
| 224 # The wrapping .isolate. This will look like |
| 225 # ../out/Debug/gen/chrome/unit_tests.isolate. |
| 226 temp_isolate = os.path.join(build_dir, 'gen', src_isolate) |
| 227 temp_isolate_dir = os.path.dirname(temp_isolate) |
| 228 |
| 229 # Relative path between the new and old .isolate file. |
| 230 isolate_relpath = os.path.relpath( |
| 231 '.', temp_isolate_dir).replace(os.path.sep, '/') |
| 232 |
| 233 # It's a big assumption here that the name of the isolate file matches the |
| 234 # primary target plus '_run'. Fix accordingly if this doesn't hold true, e.g. |
| 235 # complain to maruel@. |
| 236 target = isolate[:-len('.isolate')] + '_run' |
| 237 build_steps = load_ninja(build_dir) |
| 238 binary_deps = set() |
| 239 collect_deps(target, build_steps, binary_deps, None) |
| 240 binary_deps = post_process_deps(build_dir, binary_deps) |
| 241 logging.debug( |
| 242 'Binary dependencies:%s', ''.join('\n ' + i for i in binary_deps)) |
| 243 |
| 244 # Now do actual wrapping .isolate. |
| 245 isolate_dict = { |
| 246 'includes': [ |
| 247 posixpath.join(isolate_relpath, isolate), |
| 248 ], |
| 249 'variables': { |
| 250 # Will look like ['<(PRODUCT_DIR)/lib/flibuser_prefs.so']. |
| 251 'files': sorted( |
| 252 '<(PRODUCT_DIR)/%s' % i.replace(os.path.sep, '/') |
| 253 for i in binary_deps), |
| 254 }, |
| 255 } |
| 256 # Some .isolate files have the same temp directory and the build system may |
| 257 # run this script in parallel, so make directories safely here. |
| 258 try: |
| 259 os.makedirs(temp_isolate_dir) |
| 260 except OSError as e: |
| 261 if e.errno != errno.EEXIST: |
| 262 raise |
| 263 comment = ( |
| 264 '# Warning: this file was AUTOGENERATED.\n' |
| 265 '# DO NOT EDIT.\n') |
| 266 out = StringIO.StringIO() |
| 267 isolate_format.print_all(comment, isolate_dict, out) |
| 268 isolate_content = out.getvalue() |
| 269 with open(temp_isolate, 'wb') as f: |
| 270 f.write(isolate_content) |
| 271 logging.info('Added %d dynamic libs', len(binary_deps)) |
| 272 logging.debug('%s', isolate_content) |
| 273 args[isolate_index] = temp_isolate |
19 | 274 |
20 | 275 |
21 def prepare_isolate_call(args, output): | 276 def prepare_isolate_call(args, output): |
22 """Gathers all information required to run isolate.py later. | 277 """Gathers all information required to run isolate.py later. |
23 | 278 |
24 Dumps it as JSON to |output| file. | 279 Dumps it as JSON to |output| file. |
25 """ | 280 """ |
26 with open(output, 'wb') as f: | 281 with open(output, 'wb') as f: |
27 json.dump({ | 282 json.dump({ |
28 'args': args, | 283 'args': args, |
29 'dir': os.getcwd(), | 284 'dir': os.getcwd(), |
30 'version': 1, | 285 'version': 1, |
31 }, f, indent=2, sort_keys=True) | 286 }, f, indent=2, sort_keys=True) |
32 | 287 |
| 288 |
33 def rebase_directories(args, abs_base): | 289 def rebase_directories(args, abs_base): |
34 """Rebases all paths to be relative to abs_base.""" | 290 """Rebases all paths to be relative to abs_base.""" |
35 def replace(index): | 291 def replace(index): |
36 args[index] = os.path.relpath(os.path.abspath(args[index]), abs_base) | 292 args[index] = os.path.relpath(os.path.abspath(args[index]), abs_base) |
37 for i, arg in enumerate(args): | 293 for i, arg in enumerate(args): |
38 if arg in ['--isolate', '--isolated']: | 294 if arg in ['--isolate', '--isolated']: |
39 replace(i + 1) | 295 replace(i + 1) |
40 if arg == '--path-variable': | 296 if arg == '--path-variable': |
41 # Path variables have a triple form: --path-variable NAME <path>. | 297 # Path variables have a triple form: --path-variable NAME <path>. |
42 replace(i + 2) | 298 replace(i + 2) |
43 | 299 |
| 300 |
44 def main(): | 301 def main(): |
45 logging.basicConfig(level=logging.ERROR, format='%(levelname)7s %(message)s') | 302 logging.basicConfig(level=logging.ERROR, format='%(levelname)7s %(message)s') |
46 if len(sys.argv) < 2: | |
47 print >> sys.stderr, 'Internal failure; mode required' | |
48 return 1 | |
49 mode = sys.argv[1] | |
50 args = sys.argv[1:] | 303 args = sys.argv[1:] |
| 304 mode = args[0] if args else None |
51 isolate = None | 305 isolate = None |
52 isolated = None | 306 isolated = None |
53 for i, arg in enumerate(args): | 307 for i, arg in enumerate(args): |
54 if arg == '--isolate': | 308 if arg == '--isolate': |
55 isolate = i + 1 | 309 isolate = i + 1 |
56 if arg == '--isolated': | 310 if arg == '--isolated': |
57 isolated = i + 1 | 311 isolated = i + 1 |
58 if not isolate or not isolated: | 312 if isolate is None or isolated is None or not mode: |
59 print >> sys.stderr, 'Internal failure' | 313 print >> sys.stderr, 'Internal failure' |
60 return 1 | 314 return 1 |
61 | 315 |
62 # Make sure all paths are relative to the isolate file. This is an | 316 # Make sure all paths are relative to the isolate file. This is an |
63 # expectation of the go binaries. In gn, this script is not called | 317 # expectation of the go binaries. In gn, this script is not called |
64 # relative to the isolate file, but relative to the product dir. | 318 # relative to the isolate file, but relative to the product dir. |
65 new_base = os.path.abspath(os.path.dirname(args[isolate])) | 319 new_base = os.path.abspath(os.path.dirname(args[isolate])) |
66 rebase_directories(args, new_base) | 320 rebase_directories(args, new_base) |
67 assert args[isolate] == os.path.basename(args[isolate]) | 321 assert args[isolate] == os.path.basename(args[isolate]) |
68 os.chdir(new_base) | 322 os.chdir(new_base) |
69 | 323 |
| 324 create_wrapper(args, isolate, isolated) |
| 325 |
70 # In 'prepare' mode just collect all required information for postponed | 326 # In 'prepare' mode just collect all required information for postponed |
71 # isolated.py invocation later, store it in *.isolated.gen.json file. | 327 # isolated.py invocation later, store it in *.isolated.gen.json file. |
72 if mode == 'prepare': | 328 if mode == 'prepare': |
73 prepare_isolate_call(args[1:], args[isolated] + '.gen.json') | 329 prepare_isolate_call(args[1:], args[isolated] + '.gen.json') |
74 return 0 | 330 return 0 |
75 | 331 |
76 swarming_client = os.path.join(TOOLS_DIR, 'swarming_client') | 332 swarming_client = os.path.join(SRC_DIR, 'tools', 'swarming_client') |
77 sys.stdout.flush() | 333 sys.stdout.flush() |
78 return subprocess.call( | 334 result = subprocess.call( |
79 [sys.executable, os.path.join(swarming_client, 'isolate.py')] + args) | 335 [sys.executable, os.path.join(swarming_client, 'isolate.py')] + args) |
| 336 return result |
80 | 337 |
81 | 338 |
82 if __name__ == '__main__': | 339 if __name__ == '__main__': |
83 sys.exit(main()) | 340 sys.exit(main()) |
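For reference, the wrapping .isolate written by create_wrapper() above would look roughly like the sketch below. The paths and the single shared library are illustrative only, assuming a hypothetical foo_test.isolate living in chrome/ with an out/Release build directory (foo_test and flibuser_prefs.so reuse the examples from the docstring and comments above); the real 'files' list is whatever post_process_deps() keeps from the ninja dependency walk.

# Warning: this file was AUTOGENERATED.
# DO NOT EDIT.
{
  'includes': [
    '../../../../chrome/foo_test.isolate',
  ],
  'variables': {
    'files': [
      '<(PRODUCT_DIR)/lib/flibuser_prefs.so',
    ],
  },
}

The generated wrapper only adds the 'includes' entry and the 'variables'/'files' list, so the command and relative_cwd still come from the original .isolate it includes, as described in the module docstring.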