OLD | NEW |
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 # Copyright 2014 The Chromium Authors. All rights reserved. | 2 # Copyright 2014 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 """Adaptor script called through build/isolate.gypi. | 6 """Adaptor script called through build/isolate.gypi. |
7 | 7 |
8 Creates a wrapping .isolate which 'includes' the original one, that can be | 8 Creates a wrapping .isolate which 'includes' the original one, that can be |
9 consumed by tools/swarming_client/isolate.py. Path variables are determined | 9 consumed by tools/swarming_client/isolate.py. Path variables are determined |
10 based on the current working directory. The relative_cwd in the .isolated file | 10 based on the current working directory. The relative_cwd in the .isolated file |
11 is determined based on the .isolate file that declares the 'command' variable to | 11 is determined based on *the .isolate file that declares the 'command' variable to |
12 be used so the wrapping .isolate doesn't affect this value. | 12 be used* so the wrapping .isolate doesn't affect this value. |
13 | 13 |
14 This script loads build.ninja and processes it to determine all the executables | 14 It packages all the dynamic libraries found in this wrapping .isolate. This is |
15 referenced by the isolated target. It adds them in the wrapping .isolate file. | 15 inefficient and non-deterministic. In the very near future, it will parse |
| 16 build.ninja, find back the root target and find all the dynamic libraries that |
| 17 are marked as a dependency to this target. |
16 """ | 18 """ |
17 | 19 |
18 import StringIO | |
19 import glob | 20 import glob |
20 import logging | |
21 import os | 21 import os |
22 import posixpath | 22 import posixpath |
23 import subprocess | 23 import subprocess |
24 import sys | 24 import sys |
25 import time | 25 import time |
26 | 26 |
27 TOOLS_DIR = os.path.dirname(os.path.abspath(__file__)) | 27 TOOLS_DIR = os.path.dirname(os.path.abspath(__file__)) |
28 SWARMING_CLIENT_DIR = os.path.join(TOOLS_DIR, 'swarming_client') | 28 SWARMING_CLIENT_DIR = os.path.join(TOOLS_DIR, 'swarming_client') |
29 SRC_DIR = os.path.dirname(TOOLS_DIR) | 29 SRC_DIR = os.path.dirname(TOOLS_DIR) |
30 | 30 |
31 sys.path.insert(0, SWARMING_CLIENT_DIR) | 31 sys.path.insert(0, SWARMING_CLIENT_DIR) |
32 | 32 |
33 import isolate_format | 33 import isolate_format |
34 | 34 |
35 | 35 |
36 def load_ninja_recursively(build_dir, ninja_path, build_steps): | |
37 """Crudely extracts all the subninja and build referenced in ninja_path. | |
38 | 36 |
39 In particular, it ignores rule and variable declarations. The goal is to be | 37 # Location to grab binaries based on the OS. |
40 performant (well, as much as python can be performant) which is currently in | 38 DYNAMIC_LIBRARIES = { |
41 the <200ms range for a complete chromium tree. As such the code is laid out | 39 'darwin': '*.dylib', |
42 for performance instead of readability. | 40 'linux2': 'lib/*.so', |
43 """ | 41 'win32': '*.dll', |
44 logging.debug('Loading %s', ninja_path) | 42 } |
45 try: | |
46 with open(os.path.join(build_dir, ninja_path), 'rb') as f: | |
47 line = None | |
48 merge_line = '' | |
49 subninja = [] | |
50 for line in f: | |
51 line = line.rstrip() | |
52 if not line: | |
53 continue | |
54 | |
55 if line[-1] == '$': | |
56 # The next line needs to be merged in. | |
57 merge_line += line[:-1] | |
58 continue | |
59 | |
60 if merge_line: | |
61 line = merge_line + line | |
62 merge_line = '' | |
63 | |
64 statement = line[:line.find(' ')] | |
65 if statement == 'build': | |
66 # Save the dependency list as a raw string. Only the lines needed will | |
67 # be processed with raw_build_to_deps(). This saves a good 70ms of | |
68 # processing time. | |
69 build_target, dependencies = line[6:].split(': ', 1) | |
70 # Interestingly, trying to be smart and only saving the build steps | |
71 # with the intended extensions ('', '.stamp', '.so') slows down | |
72 # parsing even if 90% of the build rules can be skipped. | |
73 # On Windows, a single step may generate two target, so split items | |
74 # accordingly. It has only been seen for .exe/.exe.pdb combos. | |
75 for i in build_target.strip().split(): | |
76 build_steps[i] = dependencies | |
77 elif statement == 'subninja': | |
78 subninja.append(line[9:]) | |
79 except IOError: | |
80 print >> sys.stderr, 'Failed to open %s' % ninja_path | |
81 raise | |
82 | |
83 total = 1 | |
84 for rel_path in subninja: | |
85 try: | |
86 # Load each of the files referenced. | |
87 # TODO(maruel): Skip the files known to not be needed. It saves an awful |
88 # lot of processing time. | |
89 total += load_ninja_recursively(build_dir, rel_path, build_steps) | |
90 except IOError: | |
91 print >> sys.stderr, '... as referenced by %s' % ninja_path | |
92 raise | |
93 return total | |
94 | 43 |
95 | 44 |
96 def load_ninja(build_dir): | 45 def get_dynamic_libs(build_dir): |
97 """Loads the tree of .ninja files in build_dir.""" | 46 """Finds all the dynamic libs to map. |
98 build_steps = {} | |
99 total = load_ninja_recursively(build_dir, 'build.ninja', build_steps) | |
100 logging.info('Loaded %d ninja files, %d build steps', total, len(build_steps)) | |
101 return build_steps | |
102 | 47 |
| 48 Returns: |
| 49 list of relative paths, e.g. [../out/Debug/lib/libuser_prefs.so]. |
| 50 """ |
| 51 items = set() |
| 52 root = os.path.join(build_dir, DYNAMIC_LIBRARIES[sys.platform]) |
| 53 for i in glob.iglob(root): |
| 54 try: |
| 55 # Will throw on Windows if another process is writing to this file. |
| 56 open(i).close() |
| 57 items.add((i, os.stat(i).st_size)) |
| 58 except IOError: |
| 59 continue |
103 | 60 |
104 def using_blacklist(item): | 61 # The following sleep value was carefully selected via random guessing. The |
105 """Returns True if an item should be analyzed. | 62 # goal is to detect files that are being linked by checking their file size |
| 63 # after a small delay. |
| 64 # |
| 65 # This happens as other build targets can be built simultaneously. For |
| 66 # example, base_unittests.isolated is being processed but dynamic libraries |
| 67 # for chrome are currently being linked. |
| 68 # |
| 69 # TODO(maruel): Obviously, this must go away and be replaced with a proper |
| 70 # ninja parser but we need something now. http://crbug.com/333473 |
| 71 time.sleep(10) |
106 | 72 |
107 Ignores many rules that are assumed to not depend on a dynamic library. If | 73 for item in sorted(items): |
108 the assumption doesn't hold true anymore for a file format, remove it from | 74 file_name, file_size = item |
109 this list. This is simply an optimization. | 75 try: |
110 """ | 76 open(file_name).close() |
111 IGNORED = ( | 77 if os.stat(file_name).st_size != file_size: |
112 '.a', '.cc', '.css', '.def', '.h', '.html', '.js', '.json', '.manifest', | 78 items.remove(item) |
113 '.o', '.obj', '.pak', '.png', '.pdb', '.strings', '.txt', | 79 except IOError: |
114 ) | 80 items.remove(item) |
115 # ninja files use native path format. | 81 continue |
116 ext = os.path.splitext(item)[1] | |
117 if ext in IGNORED: | |
118 return False | |
119 # Special case Windows, keep .dll.lib but discard .lib. | |
120 if item.endswith('.dll.lib'): | |
121 return True | |
122 if ext == '.lib': | |
123 return False | |
124 return item not in ('', '|', '||') | |
125 | 82 |
126 | 83 return [i[0].replace(os.path.sep, '/') for i in items] |
127 def raw_build_to_deps(item): | |
128 """Converts a raw ninja build statement into the list of interesting | |
129 dependencies. | |
130 """ | |
131 # TODO(maruel): Use a whitelist instead? .stamp, .so.TOC, .dylib.TOC, | |
132 # .dll.lib, .exe and empty. | |
133 # The first item is the build rule, e.g. 'link', 'cxx', 'phony', etc. | |
134 return filter(using_blacklist, item.split(' ')[1:]) | |
135 | |
136 | |
137 def recurse(target, build_steps, rules_seen): | |
138 """Recursively returns all the interesting dependencies for root_item.""" | |
139 out = [] | |
140 if rules_seen is None: | |
141 rules_seen = set() | |
142 if target in rules_seen: | |
143 # TODO(maruel): Figure out how it happens. | |
144 logging.warning('Circular dependency for %s!', target) | |
145 return [] | |
146 rules_seen.add(target) | |
147 try: | |
148 dependencies = raw_build_to_deps(build_steps[target]) | |
149 except KeyError: | |
150 logging.info('Failed to find a build step to generate: %s', target) | |
151 return [] | |
152 logging.debug('recurse(%s) -> %s', target, dependencies) | |
153 for dependency in dependencies: | |
154 out.append(dependency) | |
155 dependency_raw_dependencies = build_steps.get(dependency) | |
156 if dependency_raw_dependencies: | |
157 for i in raw_build_to_deps(dependency_raw_dependencies): | |
158 out.extend(recurse(i, build_steps, rules_seen)) | |
159 else: | |
160 logging.info('Failed to find a build step to generate: %s', dependency) | |
161 return out | |
162 | |
163 | |
164 def post_process_deps(dependencies): | |
165 """Processes the dependency list with OS specific rules.""" | |
166 def filter_item(i): | |
167 if i.endswith('.so.TOC'): | |
168 # Remove only the suffix .TOC, not the .so! | |
169 return i[:-4] | |
170 if i.endswith('.dylib.TOC'): | |
171 # Remove only the suffix .TOC, not the .dylib! | |
172 return i[:-4] | |
173 if i.endswith('.dll.lib'): | |
174 # Remove only the suffix .lib, not the .dll! | |
175 return i[:-4] | |
176 return i | |
177 | |
178 return map(filter_item, dependencies) | |
179 | 84 |
180 | 85 |
181 def create_wrapper(args, isolate_index, isolated_index): | 86 def create_wrapper(args, isolate_index, isolated_index): |
182 """Creates a wrapper .isolate that adds dynamic libs. | 87 """Creates a wrapper .isolate that adds dynamic libs. |
183 | 88 |
184 The original .isolate is not modified. | 89 The original .isolate is not modified. |
185 """ | 90 """ |
186 cwd = os.getcwd() | 91 cwd = os.getcwd() |
187 isolate = args[isolate_index] | 92 isolate = args[isolate_index] |
188 # The code assumes the .isolate file is always specified path-less in cwd. Fix | 93 # The code assumes the .isolate file is always specified path-less in cwd. Fix |
(...skipping 10 matching lines...) Expand all Loading... |
199 | 104 |
200 # The wrapping .isolate. This will look like | 105 # The wrapping .isolate. This will look like |
201 # ../out/Debug/gen/chrome/unit_tests.isolate. | 106 # ../out/Debug/gen/chrome/unit_tests.isolate. |
202 temp_isolate = os.path.join(build_dir, 'gen', src_isolate) | 107 temp_isolate = os.path.join(build_dir, 'gen', src_isolate) |
203 temp_isolate_dir = os.path.dirname(temp_isolate) | 108 temp_isolate_dir = os.path.dirname(temp_isolate) |
204 | 109 |
205 # Relative path between the new and old .isolate file. | 110 # Relative path between the new and old .isolate file. |
206 isolate_relpath = os.path.relpath( | 111 isolate_relpath = os.path.relpath( |
207 '.', temp_isolate_dir).replace(os.path.sep, '/') | 112 '.', temp_isolate_dir).replace(os.path.sep, '/') |
208 | 113 |
209 # It's a big assumption here that the name of the isolate file matches the | 114 # Will look like ['<(PRODUCT_DIR)/lib/libuser_prefs.so']. |
210 # primary target. Fix accordingly if this doesn't hold true. | 115 rebased_libs = [ |
211 target = isolate[:-len('.isolate')] | 116 '<(PRODUCT_DIR)/%s' % i[len(build_dir)+1:] |
212 build_steps = load_ninja(build_dir) | 117 for i in get_dynamic_libs(build_dir) |
213 binary_deps = post_process_deps(recurse(target, build_steps, None)) | 118 ] |
214 logging.debug( | |
215 'Binary dependencies:%s', ''.join('\n ' + i for i in binary_deps)) | |
216 | 119 |
217 # Now do actual wrapping .isolate. | 120 # Now do actual wrapping .isolate. |
218 isolate_dict = { | 121 out = { |
219 'includes': [ | 122 'includes': [ |
220 posixpath.join(isolate_relpath, isolate), | 123 posixpath.join(isolate_relpath, isolate), |
221 ], | 124 ], |
222 'variables': { | 125 'variables': { |
223 # Will look like ['<(PRODUCT_DIR)/lib/libuser_prefs.so']. | 126 isolate_format.KEY_TRACKED: rebased_libs, |
224 isolate_format.KEY_TRACKED: sorted( | |
225 '<(PRODUCT_DIR)/%s' % i.replace(os.path.sep, '/') | |
226 for i in binary_deps), | |
227 }, | 127 }, |
228 } | 128 } |
229 if not os.path.isdir(temp_isolate_dir): | 129 if not os.path.isdir(temp_isolate_dir): |
230 os.makedirs(temp_isolate_dir) | 130 os.makedirs(temp_isolate_dir) |
231 comment = ( | 131 comment = ( |
232 '# Warning: this file was AUTOGENERATED.\n' | 132 '# Warning: this file was AUTOGENERATED.\n' |
233 '# DO NO EDIT.\n') | 133 '# DO NO EDIT.\n') |
234 out = StringIO.StringIO() | |
235 isolate_format.print_all(comment, isolate_dict, out) | |
236 isolate_content = out.getvalue() | |
237 with open(temp_isolate, 'wb') as f: | 134 with open(temp_isolate, 'wb') as f: |
238 f.write(isolate_content) | 135 isolate_format.print_all(comment, out, f) |
239 logging.info('Added %d dynamic libs', len(binary_deps)) | 136 if '--verbose' in args: |
240 logging.debug('%s', isolate_content) | 137 print('Added %d dynamic libs' % len(rebased_libs)) |
241 args[isolate_index] = temp_isolate | 138 args[isolate_index] = temp_isolate |
242 | 139 |
243 | 140 |
244 def main(): | 141 def main(): |
245 logging.basicConfig(level=logging.ERROR, format='%(levelname)7s %(message)s') | |
246 args = sys.argv[1:] | 142 args = sys.argv[1:] |
247 isolate = None | 143 isolate = None |
248 isolated = None | 144 isolated = None |
249 is_component = False | 145 is_component = False |
250 for i, arg in enumerate(args): | 146 for i, arg in enumerate(args): |
251 if arg == '--isolate': | 147 if arg == '--isolate': |
252 isolate = i + 1 | 148 isolate = i + 1 |
253 if arg == '--isolated': | 149 if arg == '--isolated': |
254 isolated = i + 1 | 150 isolated = i + 1 |
255 if arg == 'component=shared_library': | 151 if arg == 'component=shared_library': |
256 is_component = True | 152 is_component = True |
257 if isolate is None or isolated is None: | 153 if isolate is None or isolated is None: |
258 print >> sys.stderr, 'Internal failure' | 154 print >> sys.stderr, 'Internal failure' |
259 return 1 | 155 return 1 |
260 | 156 |
261 if is_component: | 157 # Implement a ninja parser. |
| 158 # http://crbug.com/360223 |
| 159 if is_component and False: |
262 create_wrapper(args, isolate, isolated) | 160 create_wrapper(args, isolate, isolated) |
263 | 161 |
264 swarming_client = os.path.join(SRC_DIR, 'tools', 'swarming_client') | 162 swarming_client = os.path.join(SRC_DIR, 'tools', 'swarming_client') |
265 sys.stdout.flush() | 163 sys.stdout.flush() |
266 result = subprocess.call( | 164 result = subprocess.call( |
267 [sys.executable, os.path.join(swarming_client, 'isolate.py')] + args) | 165 [sys.executable, os.path.join(swarming_client, 'isolate.py')] + args) |
268 return result | 166 return result |
269 | 167 |
270 | 168 |
271 if __name__ == '__main__': | 169 if __name__ == '__main__': |
272 sys.exit(main()) | 170 sys.exit(main()) |
OLD | NEW |