Chromium Code Reviews
|
Michael Achenbach (2015/09/25 10:26:26):
Bluntly copied from chromium/src/tools. crbug.com/

M-A Ruel (2015/09/25 12:50:11):
I don't think you need this script as you don't ha

Michael Achenbach (2015/09/25 13:21:35):
Right, thanks for the hint. I removed the file aga

The proposed file, copied from chromium/src/tools:

```python
#!/usr/bin/env python
# Copyright 2015 the V8 project authors. All rights reserved.
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Adaptor script called through build/isolate.gypi.

Creates a wrapping .isolate which 'includes' the original one and can be
consumed by tools/swarming_client/isolate.py. Path variables are determined
based on the current working directory. The relative_cwd in the .isolated file
is determined based on the .isolate file that declares the 'command' variable
to be used, so the wrapping .isolate doesn't affect this value.

This script loads build.ninja and processes it to determine all the executables
referenced by the isolated target. It adds them to the wrapping .isolate file.

WARNING: The target to use for build.ninja analysis is the base name of the
.isolate file plus '_run'. For example, 'foo_test.isolate' would have the target
'foo_test_run' analysed.
"""

import glob
import json
import logging
import os
import posixpath
import StringIO
import subprocess
import sys
import time

TOOLS_DIR = os.path.dirname(os.path.abspath(__file__))
SWARMING_CLIENT_DIR = os.path.join(TOOLS_DIR, 'swarming_client')
SRC_DIR = os.path.dirname(TOOLS_DIR)

sys.path.insert(0, SWARMING_CLIENT_DIR)

import isolate_format


def load_ninja_recursively(build_dir, ninja_path, build_steps):
  """Crudely extracts all the subninja and build statements in ninja_path.

  In particular, it ignores rule and variable declarations. The goal is to be
  performant (well, as much as python can be performant) which is currently in
  the <200ms range for a complete chromium tree. As such the code is laid out
  for performance instead of readability.
  """
  logging.debug('Loading %s', ninja_path)
  try:
    with open(os.path.join(build_dir, ninja_path), 'rb') as f:
      line = None
      merge_line = ''
      subninja = []
      for line in f:
        line = line.rstrip()
        if not line:
          continue

        if line[-1] == '$':
          # The next line needs to be merged in.
          merge_line += line[:-1]
          continue

        if merge_line:
          line = merge_line + line
          merge_line = ''

        statement = line[:line.find(' ')]
        if statement == 'build':
          # Save the dependency list as a raw string. Only the lines needed
          # will be processed with raw_build_to_deps(). This saves a good
          # 70ms of processing time.
          build_target, dependencies = line[6:].split(': ', 1)
          # Interestingly, trying to be smart and only saving the build steps
          # with the intended extensions ('', '.stamp', '.so') slows down
          # parsing even if 90% of the build rules can be skipped.
          # On Windows, a single step may generate two targets, so split
          # items accordingly. It has only been seen for .exe/.exe.pdb combos.
          for i in build_target.strip().split():
            build_steps[i] = dependencies
        elif statement == 'subninja':
          subninja.append(line[9:])
  except IOError:
    print >> sys.stderr, 'Failed to open %s' % ninja_path
    raise

  total = 1
  for rel_path in subninja:
    try:
      # Load each of the files referenced.
      # TODO(maruel): Skip the files known to not be needed. It saves an
      # awful lot of processing time.
      total += load_ninja_recursively(build_dir, rel_path, build_steps)
    except IOError:
      print >> sys.stderr, '... as referenced by %s' % ninja_path
      raise
  return total


def load_ninja(build_dir):
  """Loads the tree of .ninja files in build_dir."""
  build_steps = {}
  total = load_ninja_recursively(build_dir, 'build.ninja', build_steps)
  logging.info('Loaded %d ninja files, %d build steps', total, len(build_steps))
  return build_steps


def using_blacklist(item):
  """Returns True if an item should be analyzed.

  Ignores many rules that are assumed to not depend on a dynamic library. If
  the assumption doesn't hold true anymore for a file format, remove it from
  this list. This is simply an optimization.
  """
  # *.json is ignored below; *.isolated.gen.json is an exception, since it is
  # produced by isolate_driver.py in 'test_isolation_mode==prepare'.
  if item.endswith('.isolated.gen.json'):
    return True
  IGNORED = (
      '.a', '.cc', '.css', '.dat', '.def', '.frag', '.h', '.html', '.isolate',
      '.js', '.json', '.manifest', '.o', '.obj', '.pak', '.png', '.pdb', '.py',
      '.strings', '.test', '.txt', '.vert',
  )
  # ninja files use native path format.
  ext = os.path.splitext(item)[1]
  if ext in IGNORED:
    return False
  # Special case Windows, keep .dll.lib but discard .lib.
  if item.endswith('.dll.lib'):
    return True
  if ext == '.lib':
    return False
  return item not in ('', '|', '||')


def raw_build_to_deps(item):
  """Converts a raw ninja build statement into the list of interesting
  dependencies.
  """
  # TODO(maruel): Use a whitelist instead? .stamp, .so.TOC, .dylib.TOC,
  # .dll.lib, .exe and empty.
  # The first item is the build rule, e.g. 'link', 'cxx', 'phony', etc.
  return filter(using_blacklist, item.split(' ')[1:])


def collect_deps(target, build_steps, dependencies_added, rules_seen):
  """Recursively adds all the interesting dependencies for |target|
  into |dependencies_added|.
  """
  if rules_seen is None:
    rules_seen = set()
  if target in rules_seen:
    # TODO(maruel): Figure out how it happens.
    logging.warning('Circular dependency for %s!', target)
    return
  rules_seen.add(target)
  try:
    dependencies = raw_build_to_deps(build_steps[target])
  except KeyError:
    logging.info('Failed to find a build step to generate: %s', target)
    return
  logging.debug('collect_deps(%s) -> %s', target, dependencies)
  for dependency in dependencies:
    dependencies_added.add(dependency)
    collect_deps(dependency, build_steps, dependencies_added, rules_seen)


def post_process_deps(build_dir, dependencies):
  """Processes the dependency list with OS specific rules."""
  def filter_item(i):
    if i.endswith('.so.TOC'):
      # Remove only the suffix .TOC, not the .so!
      return i[:-4]
    if i.endswith('.dylib.TOC'):
      # Remove only the suffix .TOC, not the .dylib!
      return i[:-4]
    if i.endswith('.dll.lib'):
      # Remove only the suffix .lib, not the .dll!
      return i[:-4]
    return i

  def is_exe(i):
    # This script is only for adding new binaries that are created as part of
    # the component build.
    ext = os.path.splitext(i)[1]
    # On POSIX, executables have no extension.
    if ext not in ('', '.dll', '.dylib', '.exe', '.nexe', '.so'):
      return False
    if os.path.isabs(i):
      # In some rare cases, a dependency is set explicitly on files outside
      # the checkout.
      return False

    # Check for execute access and strip directories. This gets rid of all the
    # phony rules.
    p = os.path.join(build_dir, i)
    return os.access(p, os.X_OK) and not os.path.isdir(p)

  return filter(is_exe, map(filter_item, dependencies))


def create_wrapper(args, isolate_index, isolated_index):
  """Creates a wrapper .isolate that adds dynamic libs.

  The original .isolate is not modified.
  """
  cwd = os.getcwd()
  isolate = args[isolate_index]
  # The code assumes the .isolate file is always specified path-less in cwd.
  # Fix if this assumption doesn't hold true.
  assert os.path.basename(isolate) == isolate, isolate

  # This will look like ../out/Debug. This is based against cwd. Note that this
  # must equal the value provided as PRODUCT_DIR.
  build_dir = os.path.dirname(args[isolated_index])

  # This will look like chrome/unit_tests.isolate. It is based against SRC_DIR.
  # It's used to calculate temp_isolate.
  src_isolate = os.path.relpath(os.path.join(cwd, isolate), SRC_DIR)

  # The wrapping .isolate. This will look like
  # ../out/Debug/gen/chrome/unit_tests.isolate.
  temp_isolate = os.path.join(build_dir, 'gen', src_isolate)
  temp_isolate_dir = os.path.dirname(temp_isolate)

  # Relative path between the new and old .isolate file.
  isolate_relpath = os.path.relpath(
      '.', temp_isolate_dir).replace(os.path.sep, '/')

  # It's a big assumption here that the name of the isolate file matches the
  # primary target plus '_run'. Fix accordingly if this doesn't hold true,
  # e.g. complain to maruel@.
  target = isolate[:-len('.isolate')] + '_run'
  build_steps = load_ninja(build_dir)
  binary_deps = set()
  collect_deps(target, build_steps, binary_deps, None)
  binary_deps = post_process_deps(build_dir, binary_deps)
  logging.debug(
      'Binary dependencies:%s', ''.join('\n ' + i for i in binary_deps))

  # Now write the actual wrapping .isolate.
  isolate_dict = {
    'includes': [
      posixpath.join(isolate_relpath, isolate),
    ],
    'variables': {
      # Will look like ['<(PRODUCT_DIR)/lib/flibuser_prefs.so'].
      'files': sorted(
          '<(PRODUCT_DIR)/%s' % i.replace(os.path.sep, '/')
          for i in binary_deps),
    },
  }
  if not os.path.isdir(temp_isolate_dir):
    os.makedirs(temp_isolate_dir)
  comment = (
      '# Warning: this file was AUTOGENERATED.\n'
      '# DO NOT EDIT.\n')
  out = StringIO.StringIO()
  isolate_format.print_all(comment, isolate_dict, out)
  isolate_content = out.getvalue()
  with open(temp_isolate, 'wb') as f:
    f.write(isolate_content)
  logging.info('Added %d dynamic libs', len(binary_deps))
  logging.debug('%s', isolate_content)
  args[isolate_index] = temp_isolate


def prepare_isolate_call(args, output):
  """Gathers all information required to run isolate.py later.

  Dumps it as JSON to |output| file.
  """
  with open(output, 'wb') as f:
    json.dump({
      'args': args,
      'dir': os.getcwd(),
      'version': 1,
    }, f, indent=2, sort_keys=True)


def main():
  logging.basicConfig(level=logging.ERROR, format='%(levelname)7s %(message)s')
  args = sys.argv[1:]
  mode = args[0] if args else None
  isolate = None
  isolated = None
  for i, arg in enumerate(args):
    if arg == '--isolate':
      isolate = i + 1
    if arg == '--isolated':
      isolated = i + 1
  if isolate is None or isolated is None or not mode:
    print >> sys.stderr, 'Internal failure'
    return 1

  create_wrapper(args, isolate, isolated)

  # In 'prepare' mode just collect all required information for a postponed
  # isolate.py invocation and store it in a *.isolated.gen.json file.
  if mode == 'prepare':
    prepare_isolate_call(args[1:], args[isolated] + '.gen.json')
    return 0

  swarming_client = os.path.join(SRC_DIR, 'tools', 'swarming_client')
  sys.stdout.flush()
  result = subprocess.call(
      [sys.executable, os.path.join(swarming_client, 'isolate.py')] + args)
  return result


if __name__ == '__main__':
  sys.exit(main())
```
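
For reference, here is a minimal sketch of the wrapping .isolate that create_wrapper() writes under <build_dir>/gen/. The paths are hypothetical (a unit_tests.isolate built in ../out/Debug), and the single library entry simply reuses the example from the code comment; the real contents depend on which binaries are reachable from the '<name>_run' target in build.ninja.

```python
# Illustrative only: roughly what the generated wrapper at
# ../out/Debug/gen/chrome/unit_tests.isolate could contain.
wrapper_isolate = {
    'includes': [
        # Relative path from the generated file back to the original
        # chrome/unit_tests.isolate next to the sources.
        '../../../../chrome/unit_tests.isolate',
    ],
    'variables': {
        # Binary dependencies collected from build.ninja, expressed relative
        # to PRODUCT_DIR.
        'files': [
            '<(PRODUCT_DIR)/lib/flibuser_prefs.so',
        ],
    },
}
```

isolate_format.print_all() serializes such a dictionary, prefixed with the AUTOGENERATED warning comment, and args[isolate_index] is then pointed at the generated file, so the downstream isolate.py call sees the extra binaries while the original .isolate stays untouched.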
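
main() expects the isolate.py subcommand (or 'prepare') as the first argument, followed by flags that include --isolate and --isolated; build/isolate.gypi normally supplies all of these. Below is a hedged sketch of an equivalent manual invocation; the script location, the 'check' subcommand and the file names are assumptions for illustration, not taken from the review.

```python
# Minimal sketch of driving the adaptor by hand, run from the directory that
# contains foo_test.isolate (the .isolate must be passed path-less, see the
# assert in create_wrapper()). All paths here are made up.
import subprocess
import sys

cmd = [
    sys.executable, '../tools/isolate_driver.py',
    'check',                          # any mode but 'prepare' is forwarded to isolate.py
    '--isolate', 'foo_test.isolate',
    '--isolated', '../out/Release/foo_test.isolated',  # its dirname is used as build_dir
]
sys.exit(subprocess.call(cmd))
```

With 'prepare' as the first argument, the same command would instead record the rewritten argument list in ../out/Release/foo_test.isolated.gen.json via prepare_isolate_call(), to be replayed by a later isolate.py run.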