Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(988)

Side by Side Diff: tools/isolate_driver.py

Issue 272113002: Revert of Enable the ninja parsing code all the time. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Created 6 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « chrome/unit_tests.isolate ('k') | tools/ninja_parser.py » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # Copyright 2014 The Chromium Authors. All rights reserved. 2 # Copyright 2014 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be 3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file. 4 # found in the LICENSE file.
5 5
6 """Adaptor script called through build/isolate.gypi. 6 """Adaptor script called through build/isolate.gypi.
7 7
8 Creates a wrapping .isolate which 'includes' the original one, that can be 8 Creates a wrapping .isolate which 'includes' the original one, that can be
9 consumed by tools/swarming_client/isolate.py. Path variables are determined 9 consumed by tools/swarming_client/isolate.py. Path variables are determined
10 based on the current working directory. The relative_cwd in the .isolated file 10 based on the current working directory. The relative_cwd in the .isolated file
11 is determined based on the .isolate file that declares the 'command' variable to 11 is determined based on the .isolate file that declares the 'command' variable to
12 be used so the wrapping .isolate doesn't affect this value. 12 be used so the wrapping .isolate doesn't affect this value.
13 13
14 This script loads build.ninja and processes it to determine all the executables 14 This script loads build.ninja and processes it to determine all the executables
15 referenced by the isolated target. It adds them in the wrapping .isolate file. 15 referenced by the isolated target. It adds them in the wrapping .isolate file.
16 16
17 WARNING: The target to use for build.ninja analysis is the base name of the 17 WARNING: The target to use for build.ninja analysis is the base name of the
18 .isolate file plus '_run'. For example, 'foo_test.isolate' would have the target 18 .isolate file plus '_run'. For example, 'foo_test.isolate' would have the target
19 'foo_test_run' analysed. 19 'foo_test_run' analysed.
20 """ 20 """
21 21
22 import StringIO 22 import StringIO
23 import glob
23 import logging 24 import logging
24 import os 25 import os
25 import posixpath 26 import posixpath
26 import subprocess 27 import subprocess
27 import sys 28 import sys
29 import time
28 30
29 TOOLS_DIR = os.path.dirname(os.path.abspath(__file__)) 31 TOOLS_DIR = os.path.dirname(os.path.abspath(__file__))
30 SWARMING_CLIENT_DIR = os.path.join(TOOLS_DIR, 'swarming_client') 32 SWARMING_CLIENT_DIR = os.path.join(TOOLS_DIR, 'swarming_client')
31 SRC_DIR = os.path.dirname(TOOLS_DIR) 33 SRC_DIR = os.path.dirname(TOOLS_DIR)
32 34
33 import ninja_parser
34
35 sys.path.insert(0, SWARMING_CLIENT_DIR) 35 sys.path.insert(0, SWARMING_CLIENT_DIR)
36 36
37 import isolate_format 37 import isolate_format
38 38
39 39
def load_ninja_recursively(build_dir, ninja_path, build_steps):
  """Crudely extracts all the subninja and build referenced in ninja_path.

  In particular, it ignores rule and variable declarations. The goal is to be
  performant (well, as much as python can be performant) which is currently in
  the <200ms range for a complete chromium tree. As such the code is laid out
  for performance instead of readability.

  Arguments:
    build_dir: root directory containing the .ninja files.
    ninja_path: path of the .ninja file to load, relative to build_dir.
    build_steps: dict mapping build target -> raw dependency string; updated
        in place.

  Returns:
    Total number of .ninja files processed, including this one.

  Raises:
    IOError: when a referenced .ninja file cannot be opened.
  """
  logging.debug('Loading %s', ninja_path)
  try:
    # Text mode (not 'rb') so the same string operations work on both
    # python 2 and python 3.
    with open(os.path.join(build_dir, ninja_path), 'r') as f:
      line = None
      merge_line = ''
      subninja = []
      for line in f:
        line = line.rstrip()
        if not line:
          continue

        if line[-1] == '$':
          # A trailing '$' means the statement continues on the next line;
          # accumulate until a line without a continuation marker is found.
          merge_line += line[:-1]
          continue

        if merge_line:
          line = merge_line + line
          merge_line = ''

        # First whitespace-delimited token is the statement keyword. Using
        # split() instead of line[:line.find(' ')] avoids silently chopping
        # the last character off lines that contain no space at all.
        statement = line.split(' ', 1)[0]
        if statement == 'build':
          # Save the dependency list as a raw string. Only the lines needed
          # will be processed with raw_build_to_deps(). This saves a good 70ms
          # of processing time.
          build_target, dependencies = line[6:].split(': ', 1)
          # Interestingly, trying to be smart and only saving the build steps
          # with the intended extensions ('', '.stamp', '.so') slows down
          # parsing even if 90% of the build rules can be skipped.
          # On Windows, a single step may generate two targets, so split items
          # accordingly. It has only been seen for .exe/.exe.pdb combos.
          for i in build_target.strip().split():
            build_steps[i] = dependencies
        elif statement == 'subninja':
          subninja.append(line[9:])
  except IOError:
    # sys.stderr.write works on both python 2 and 3, unlike 'print >>'.
    sys.stderr.write('Failed to open %s\n' % ninja_path)
    raise

  total = 1
  for rel_path in subninja:
    try:
      # Load each of the files referenced.
      # TODO(maruel): Skip the files known to not be needed. It saves an awful
      # lot of processing time.
      total += load_ninja_recursively(build_dir, rel_path, build_steps)
    except IOError:
      sys.stderr.write('... as referenced by %s\n' % ninja_path)
      raise
  return total
98
99
def load_ninja(build_dir):
  """Parses the whole tree of .ninja files rooted at build_dir/build.ninja.

  Returns:
    Dict mapping each build target to its raw (unparsed) dependency string.
  """
  steps = {}
  file_count = load_ninja_recursively(build_dir, 'build.ninja', steps)
  logging.info('Loaded %d ninja files, %d build steps', file_count, len(steps))
  return steps
106
107
def using_blacklist(item):
  """Returns True if an item should be analyzed.

  Ignores many rules that are assumed to not depend on a dynamic library. If
  the assumption doesn't hold true anymore for a file format, remove it from
  this list. This is simply an optimization.
  """
  # Extensions assumed to never pull in a dynamic library.
  IGNORED = frozenset([
      '.a', '.cc', '.css', '.def', '.h', '.html', '.js', '.json', '.manifest',
      '.o', '.obj', '.pak', '.png', '.pdb', '.strings', '.txt',
  ])
  # Special case Windows: keep .dll.lib import libraries but discard plain
  # .lib static libraries.
  if item.endswith('.dll.lib'):
    return True
  # ninja files use native path format.
  extension = os.path.splitext(item)[1]
  if extension in IGNORED or extension == '.lib':
    return False
  # Ninja separators and empty tokens are not real targets.
  return item not in ('', '|', '||')
129
130
def raw_build_to_deps(item):
  """Converts a raw ninja build statement into the list of interesting
  dependencies.
  """
  # TODO(maruel): Use a whitelist instead? .stamp, .so.TOC, .dylib.TOC,
  # .dll.lib, .exe and empty.
  # The first token is the build rule, e.g. 'link', 'cxx', 'phony', etc.; it
  # is dropped before filtering.
  tokens = item.split(' ')
  return [dep for dep in tokens[1:] if using_blacklist(dep)]
139
140
def recurse(target, build_steps, rules_seen):
  """Recursively returns all the interesting dependencies for target.

  Arguments:
    target: build target whose transitive dependencies are wanted.
    build_steps: dict target -> raw dependency string, as built by
        load_ninja().
    rules_seen: set of targets already expanded, or None on the initial call.

  Returns:
    List of dependency names. Each target is expanded at most once thanks to
    rules_seen.
  """
  if rules_seen is None:
    rules_seen = set()
  if target in rules_seen:
    # TODO(maruel): Figure out how it happens.
    logging.warning('Circular dependency for %s!', target)
    return []
  rules_seen.add(target)
  try:
    dependencies = raw_build_to_deps(build_steps[target])
  except KeyError:
    logging.info('Failed to find a build step to generate: %s', target)
    return []
  logging.debug('recurse(%s) -> %s', target, dependencies)
  out = []
  for dependency in dependencies:
    out.append(dependency)
    # BUG FIX: the previous code expanded the dependencies of |dependency|
    # here and recursed on each of *those*, so the grandchildren (and every
    # even depth below them) were walked but never added to the result list.
    # Recursing directly on the dependency collects every level of the tree;
    # the KeyError branch above reproduces the old "Failed to find" logging
    # for dependencies without a build step.
    out.extend(recurse(dependency, build_steps, rules_seen))
  return out
166
167
def post_process_deps(build_dir, dependencies):
  """Processes the dependency list with OS specific rules."""
  def strip_suffix(dep):
    # .so.TOC/.dylib.TOC tables of contents and .dll.lib import libraries all
    # map back to the actual binary by dropping only the 4-character trailing
    # suffix ('.TOC' or '.lib'), keeping the .so/.dylib/.dll part.
    for suffix in ('.so.TOC', '.dylib.TOC', '.dll.lib'):
      if dep.endswith(suffix):
        return dep[:-4]
    return dep

  # Check for execute access. This gets rid of all the phony rules.
  out = []
  for dep in map(strip_suffix, dependencies):
    if os.access(os.path.join(build_dir, dep), os.X_OK):
      out.append(dep)
  return out
187
188
40 def create_wrapper(args, isolate_index, isolated_index): 189 def create_wrapper(args, isolate_index, isolated_index):
41 """Creates a wrapper .isolate that add dynamic libs. 190 """Creates a wrapper .isolate that add dynamic libs.
42 191
43 The original .isolate is not modified. 192 The original .isolate is not modified.
44 """ 193 """
45 cwd = os.getcwd() 194 cwd = os.getcwd()
46 isolate = args[isolate_index] 195 isolate = args[isolate_index]
47 # The code assumes the .isolate file is always specified path-less in cwd. Fix 196 # The code assumes the .isolate file is always specified path-less in cwd. Fix
48 # if this assumption doesn't hold true. 197 # if this assumption doesn't hold true.
49 assert os.path.basename(isolate) == isolate, isolate 198 assert os.path.basename(isolate) == isolate, isolate
(...skipping 12 matching lines...) Expand all
62 temp_isolate_dir = os.path.dirname(temp_isolate) 211 temp_isolate_dir = os.path.dirname(temp_isolate)
63 212
64 # Relative path between the new and old .isolate file. 213 # Relative path between the new and old .isolate file.
65 isolate_relpath = os.path.relpath( 214 isolate_relpath = os.path.relpath(
66 '.', temp_isolate_dir).replace(os.path.sep, '/') 215 '.', temp_isolate_dir).replace(os.path.sep, '/')
67 216
68 # It's a big assumption here that the name of the isolate file matches the 217 # It's a big assumption here that the name of the isolate file matches the
69 # primary target '_run'. Fix accordingly if this doesn't hold true, e.g. 218 # primary target '_run'. Fix accordingly if this doesn't hold true, e.g.
70 # complain to maruel@. 219 # complain to maruel@.
71 target = isolate[:-len('.isolate')] + '_run' 220 target = isolate[:-len('.isolate')] + '_run'
72 build_steps = ninja_parser.load_ninja(build_dir) 221 build_steps = load_ninja(build_dir)
73 binary_deps = ninja_parser.post_process_deps( 222 binary_deps = post_process_deps(build_dir, recurse(target, build_steps, None))
74 build_dir, ninja_parser.recurse(build_dir, target, build_steps))
75 logging.debug( 223 logging.debug(
76 'Binary dependencies:%s', ''.join('\n ' + i for i in binary_deps)) 224 'Binary dependencies:%s', ''.join('\n ' + i for i in binary_deps))
77 225
78 # Now do actual wrapping .isolate. 226 # Now do actual wrapping .isolate.
79 isolate_dict = { 227 isolate_dict = {
80 'includes': [ 228 'includes': [
81 posixpath.join(isolate_relpath, isolate), 229 posixpath.join(isolate_relpath, isolate),
82 ], 230 ],
83 'variables': { 231 'variables': {
84 # Will look like ['<(PRODUCT_DIR)/lib/flibuser_prefs.so']. 232 # Will look like ['<(PRODUCT_DIR)/lib/flibuser_prefs.so'].
(...skipping 15 matching lines...) Expand all
100 logging.info('Added %d dynamic libs', len(binary_deps)) 248 logging.info('Added %d dynamic libs', len(binary_deps))
101 logging.debug('%s', isolate_content) 249 logging.debug('%s', isolate_content)
102 args[isolate_index] = temp_isolate 250 args[isolate_index] = temp_isolate
103 251
104 252
def main():
  """Parses the command line, optionally wraps the .isolate file for component
  builds, then hands off to swarming_client's isolate.py.
  """
  logging.basicConfig(level=logging.ERROR, format='%(levelname)7s %(message)s')
  args = sys.argv[1:]
  isolate = None
  isolated = None
  is_component = False
  # Record the index of the *value* following each flag, and detect component
  # (shared library) builds from the gyp defines passed through.
  for index, arg in enumerate(args):
    if arg == '--isolate':
      isolate = index + 1
    elif arg == '--isolated':
      isolated = index + 1
    elif arg == 'component=shared_library':
      is_component = True
  if isolate is None or isolated is None:
    sys.stderr.write('Internal failure\n')
    return 1

  # Only component builds produce dynamic libraries worth injecting into the
  # wrapping .isolate.
  if is_component:
    create_wrapper(args, isolate, isolated)

  swarming_client = os.path.join(SRC_DIR, 'tools', 'swarming_client')
  sys.stdout.flush()
  return subprocess.call(
      [sys.executable, os.path.join(swarming_client, 'isolate.py')] + args)
126 278
127 279
if __name__ == '__main__':
  # Propagate the wrapped isolate.py invocation's exit code to the caller.
  sys.exit(main())
OLDNEW
« no previous file with comments | « chrome/unit_tests.isolate ('k') | tools/ninja_parser.py » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698