Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(380)

Side by Side Diff: tools/isolate_driver.py

Issue 2538493004: Remove isolate_driver.py. (Closed)
Patch Set: Created 4 years ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « tools/OWNERS ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
(Empty)
1 #!/usr/bin/env python
2 # Copyright 2014 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 """Adaptor script called through build/isolate.gypi.
7
8 Creates a wrapping .isolate which 'includes' the original one, that can be
9 consumed by tools/swarming_client/isolate.py. Path variables are determined
10 based on the current working directory. The relative_cwd in the .isolated file
11 is determined based on the .isolate file that declare the 'command' variable to
12 be used so the wrapping .isolate doesn't affect this value.
13
14 This script loads build.ninja and processes it to determine all the executables
15 referenced by the isolated target. It adds them in the wrapping .isolate file.
16
17 WARNING: The target to use for build.ninja analysis is the base name of the
18 .isolate file plus '_run'. For example, 'foo_test.isolate' would have the target
19 'foo_test_run' analysed.
20 """
21
22 import errno
23 import glob
24 import json
25 import logging
26 import os
27 import posixpath
28 import StringIO
29 import subprocess
30 import sys
31 import time
32
33 TOOLS_DIR = os.path.dirname(os.path.abspath(__file__))
34 SWARMING_CLIENT_DIR = os.path.join(TOOLS_DIR, 'swarming_client')
35 SRC_DIR = os.path.dirname(TOOLS_DIR)
36
37 sys.path.insert(0, SWARMING_CLIENT_DIR)
38
39 import isolate_format
40
41
def load_ninja_recursively(build_dir, ninja_path, build_steps):
  """Crudely extracts all the subninja and build referenced in ninja_path.

  In particular, it ignores rule and variable declarations. The goal is to be
  performant (well, as much as python can be performant) which is currently in
  the <200ms range for a complete chromium tree. As such the code is laid out
  for performance instead of readability.

  Args:
    build_dir: directory that ninja_path is relative to.
    ninja_path: path of the .ninja file to parse, relative to build_dir.
    build_steps: dict mutated in place; maps each build target name to the
        raw (unparsed) dependency string of its 'build' statement.

  Returns:
    Total number of .ninja files parsed, including this one and every
    transitively reached subninja.

  Raises:
    IOError: if this file or any referenced subninja cannot be opened.
  """
  logging.debug('Loading %s', ninja_path)
  try:
    with open(os.path.join(build_dir, ninja_path), 'rb') as f:
      line = None
      merge_line = ''
      subninja = []
      for line in f:
        line = line.rstrip()
        if not line:
          continue

        if line[-1] == '$':
          # The next line needs to be merged in.
          merge_line += line[:-1]
          continue

        if merge_line:
          line = merge_line + line
          merge_line = ''

        # First token of the statement. NOTE(review): if the line contains no
        # space, find() returns -1 and the last character is dropped; this
        # appears harmless for the two statements matched below — confirm.
        statement = line[:line.find(' ')]
        if statement == 'build':
          # Save the dependency list as a raw string. Only the lines needed will
          # be processed with raw_build_to_deps(). This saves a good 70ms of
          # processing time.
          build_target, dependencies = line[6:].split(': ', 1)
          # Interestingly, trying to be smart and only saving the build steps
          # with the intended extensions ('', '.stamp', '.so') slows down
          # parsing even if 90% of the build rules can be skipped.
          # On Windows, a single step may generate two targets, so split items
          # accordingly. It has only been seen for .exe/.exe.pdb combos.
          for i in build_target.strip().split():
            build_steps[i] = dependencies
        elif statement == 'subninja':
          # 'subninja ' is 9 characters; the rest of the line is the path.
          subninja.append(line[9:])
  except IOError:
    print >> sys.stderr, 'Failed to open %s' % ninja_path
    raise

  total = 1
  for rel_path in subninja:
    try:
      # Load each of the files referenced.
      # TODO(maruel): Skip the files known to not be needed. It saves an awful
      # lot of processing time.
      total += load_ninja_recursively(build_dir, rel_path, build_steps)
    except IOError:
      print >> sys.stderr, '... as referenced by %s' % ninja_path
      raise
  return total
100
101
def load_ninja(build_dir):
  """Parses the whole tree of .ninja files rooted at build_dir/build.ninja.

  Returns a dict mapping build target names to their raw dependency strings.
  """
  steps = {}
  file_count = load_ninja_recursively(build_dir, 'build.ninja', steps)
  logging.info('Loaded %d ninja files, %d build steps', file_count, len(steps))
  return steps
108
109
def using_blacklist(item):
  """Returns True if an item should be analyzed.

  Ignores many rules that are assumed to not depend on a dynamic library. If
  the assumption doesn't hold true anymore for a file format, remove it from
  this list. This is simply an optimization.
  """
  # *.json files are normally ignored, but *.isolated.gen.json is the
  # exception: it is produced by isolate_driver.py in
  # 'test_isolation_mode==prepare'.
  if item.endswith('.isolated.gen.json'):
    return True
  # Special case Windows: keep .dll.lib import libraries even though bare
  # .lib files are discarded below.
  if item.endswith('.dll.lib'):
    return True
  IGNORED = (
      '.a', '.cc', '.css', '.dat', '.def', '.frag', '.h', '.html', '.isolate',
      '.js', '.json', '.manifest', '.o', '.obj', '.pak', '.png', '.pdb', '.py',
      '.strings', '.test', '.txt', '.vert',
  )
  # ninja files use native path format.
  extension = os.path.splitext(item)[1]
  if extension in IGNORED or extension == '.lib':
    return False
  # Remaining noise from the raw dependency string: empty tokens and ninja's
  # implicit/order-only dependency separators.
  return item not in ('', '|', '||')
136
137
def raw_build_to_deps(item):
  """Converts a raw ninja build statement into the list of interesting
  dependencies.
  """
  # TODO(maruel): Use a whitelist instead? .stamp, .so.TOC, .dylib.TOC,
  # .dll.lib, .exe and empty.
  # The leading token is the build rule name ('link', 'cxx', 'phony', ...);
  # everything after it is a dependency candidate.
  tokens = item.split(' ')
  return filter(using_blacklist, tokens[1:])
146
147
def collect_deps(target, build_steps, dependencies_added, rules_seen):
  """Recursively accumulates every interesting dependency of |target|.

  Results are added to the |dependencies_added| set. |rules_seen| tracks
  targets already visited; pass None for the top-level call.
  """
  rules_seen = set() if rules_seen is None else rules_seen
  if target in rules_seen:
    # TODO(maruel): Figure out how it happens.
    logging.warning('Circular dependency for %s!', target)
    return
  rules_seen.add(target)

  raw_step = build_steps.get(target)
  if raw_step is None:
    logging.info('Failed to find a build step to generate: %s', target)
    return
  deps = raw_build_to_deps(raw_step)
  logging.debug('collect_deps(%s) -> %s', target, deps)
  for dep in deps:
    dependencies_added.add(dep)
    collect_deps(dep, build_steps, dependencies_added, rules_seen)
168
169
def post_process_deps(build_dir, dependencies):
  """Processes the dependency list with OS specific rules.

  Maps ninja-level artifact names to their runtime counterparts, then keeps
  only entries that are actual executables/libraries inside build_dir.
  """
  def strip_marker_suffix(path):
    # A .TOC file is ninja's table-of-contents stand-in for a shared library
    # and a .dll.lib is the import library for a .dll; the runtime dependency
    # is the underlying binary. In all three cases the marker suffix is
    # exactly 4 characters ('.TOC' or '.lib'), so strip only that.
    if path.endswith(('.so.TOC', '.dylib.TOC', '.dll.lib')):
      return path[:-4]
    return path

  def keep_binary(path):
    # This script is only for adding new binaries that are created as part of
    # the component build.
    extension = os.path.splitext(path)[1]
    # On POSIX, executables have no extension.
    if extension not in ('', '.dll', '.dylib', '.exe', '.nexe', '.so'):
      return False
    if os.path.isabs(path):
      # In some rare case, there's dependency set explicitly on files outside
      # the checkout.
      return False
    # Requiring the execute bit (and excluding directories) filters out all
    # the phony rules and non-existent targets.
    full = os.path.join(build_dir, path)
    return os.access(full, os.X_OK) and not os.path.isdir(full)

  return filter(keep_binary, map(strip_marker_suffix, dependencies))
202
203
def create_wrapper(args, isolate_index, isolated_index):
  """Creates a wrapper .isolate that adds dynamic libs.

  The original .isolate is not modified.

  Args:
    args: command line arguments; mutated in place so that
        args[isolate_index] points to the generated wrapping .isolate.
    isolate_index: index in args of the .isolate file path. The file is
        assumed to be path-less relative to cwd.
    isolated_index: index in args of the .isolated file path; its directory
        is used as the ninja build directory.
  """
  cwd = os.getcwd()
  isolate = args[isolate_index]
  # The code assumes the .isolate file is always specified path-less in cwd. Fix
  # if this assumption doesn't hold true.
  assert os.path.basename(isolate) == isolate, isolate

  # This will look like ../out/Debug. This is based against cwd. Note that this
  # must equal the value provided as PRODUCT_DIR.
  build_dir = os.path.dirname(args[isolated_index])

  # This will look like chrome/unit_tests.isolate. It is based against SRC_DIR.
  # It's used to calculate temp_isolate.
  src_isolate = os.path.relpath(os.path.join(cwd, isolate), SRC_DIR)

  # The wrapping .isolate. This will look like
  # ../out/Debug/gen/chrome/unit_tests.isolate.
  temp_isolate = os.path.join(build_dir, 'gen', src_isolate)
  temp_isolate_dir = os.path.dirname(temp_isolate)

  # Relative path between the new and old .isolate file.
  isolate_relpath = os.path.relpath(
      '.', temp_isolate_dir).replace(os.path.sep, '/')

  # It's a big assumption here that the name of the isolate file matches the
  # primary target '_run'. Fix accordingly if this doesn't hold true, e.g.
  # complain to maruel@.
  target = isolate[:-len('.isolate')] + '_run'
  build_steps = load_ninja(build_dir)
  binary_deps = set()
  collect_deps(target, build_steps, binary_deps, None)
  binary_deps = post_process_deps(build_dir, binary_deps)
  logging.debug(
      'Binary dependencies:%s', ''.join('\n  ' + i for i in binary_deps))

  # Now do actual wrapping .isolate.
  isolate_dict = {
    'includes': [
      posixpath.join(isolate_relpath, isolate),
    ],
    'variables': {
      # Will look like ['<(PRODUCT_DIR)/lib/flibuser_prefs.so'].
      'files': sorted(
          '<(PRODUCT_DIR)/%s' % i.replace(os.path.sep, '/')
          for i in binary_deps),
    },
  }
  # Some .isolate files have the same temp directory and the build system may
  # run this script in parallel so make directories safely here.
  try:
    os.makedirs(temp_isolate_dir)
  except OSError as e:
    if e.errno != errno.EEXIST:
      raise
  # Fixed typo in the generated banner: was 'DO NO EDIT'.
  comment = (
      '# Warning: this file was AUTOGENERATED.\n'
      '# DO NOT EDIT.\n')
  out = StringIO.StringIO()
  isolate_format.print_all(comment, isolate_dict, out)
  isolate_content = out.getvalue()
  with open(temp_isolate, 'wb') as f:
    f.write(isolate_content)
  logging.info('Added %d dynamic libs', len(binary_deps))
  logging.debug('%s', isolate_content)
  # Redirect the caller to the generated wrapper.
  args[isolate_index] = temp_isolate
273
274
def prepare_isolate_call(args, output):
  """Gathers all information required to run isolate.py later.

  Dumps it as JSON to |output| file.

  Args:
    args: arguments to forward to isolate.py on the postponed invocation.
    output: path of the *.isolated.gen.json file to write.
  """
  # Open in text mode: json.dump() emits text, not bytes. The previous 'wb'
  # mode was a no-op on POSIX but is incorrect for text output (and breaks
  # outright under Python 3).
  with open(output, 'w') as f:
    json.dump({
      'args': args,
      'dir': os.getcwd(),
      'version': 1,
    }, f, indent=2, sort_keys=True)
286
287
def main():
  logging.basicConfig(level=logging.ERROR, format='%(levelname)7s %(message)s')
  args = sys.argv[1:]
  mode = args[0] if args else None

  # Locate the values of --isolate and --isolated; each value immediately
  # follows its flag on the command line.
  isolate = None
  isolated = None
  for index, arg in enumerate(args):
    if arg == '--isolate':
      isolate = index + 1
    elif arg == '--isolated':
      isolated = index + 1
  if not mode or isolate is None or isolated is None:
    sys.stderr.write('Internal failure\n')
    return 1

  create_wrapper(args, isolate, isolated)

  # In 'prepare' mode just collect all required information for postponed
  # isolated.py invocation later, store it in *.isolated.gen.json file.
  if mode == 'prepare':
    prepare_isolate_call(args[1:], args[isolated] + '.gen.json')
    return 0

  # Any other mode: delegate to swarming_client's isolate.py with the
  # (possibly rewritten) arguments.
  swarming_client = os.path.join(SRC_DIR, 'tools', 'swarming_client')
  sys.stdout.flush()
  return subprocess.call(
      [sys.executable, os.path.join(swarming_client, 'isolate.py')] + args)
316
317
if __name__ == '__main__':
  # Propagate main()'s return value (0/1 or isolate.py's exit code).
  sys.exit(main())
OLDNEW
« no previous file with comments | « tools/OWNERS ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698