OLD | NEW |
| (Empty) |
1 #!/usr/bin/env python | |
2 # Copyright 2017 The Chromium Authors. All rights reserved. | |
3 # Use of this source code is governed by a BSD-style license that can be | |
4 # found in the LICENSE file. | |
5 | |
6 """Tool for finding the cause of APK bloat. | |
7 | |
8 Run diagnose_apk_bloat.py -h for detailed usage help. | |
9 """ | |
10 | |
11 import argparse | |
12 import collections | |
13 from contextlib import contextmanager | |
14 import distutils.spawn | |
15 import json | |
16 import multiprocessing | |
17 import os | |
18 import re | |
19 import shutil | |
20 import subprocess | |
21 import sys | |
22 import tempfile | |
23 import zipfile | |
24 | |
# Ask for confirmation before processing a commit range at least this long.
_COMMIT_COUNT_WARN_THRESHOLD = 15
# Build failures tolerated in a row before giving up entirely.
_ALLOWED_CONSECUTIVE_FAILURES = 2
# Max detail lines echoed to the console by _PrintAndWriteToFile().
_DIFF_DETAILS_LINES_THRESHOLD = 100
# Perf builder page referenced in download-failure messages.
_BUILDER_URL = \
    'https://build.chromium.org/p/chromium.perf/builders/Android%20Builder'
# Chromium source root; this file lives two directories below it.
_SRC_ROOT = os.path.abspath(
    os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
# Default location for archived build artifacts and diff results.
_DEFAULT_ARCHIVE_DIR = os.path.join(_SRC_ROOT, 'binary-size-bloat')
# Default ninja output directory for local builds.
_DEFAULT_OUT_DIR = os.path.join(_SRC_ROOT, 'out', 'diagnose-apk-bloat')
# APK target built when --target isn't given (Android only).
_DEFAULT_ANDROID_TARGET = 'monochrome_public_apk'

# Set by _SetRestoreFunc(); checks out the originally current git branch.
_global_restore_checkout_func = None
37 | |
38 | |
def _SetRestoreFunc(subrepo):
  """Remember |subrepo|'s current branch so it can be checked out later."""
  global _global_restore_checkout_func
  current_branch = _GitCmd(['rev-parse', '--abbrev-ref', 'HEAD'], subrepo)

  def _RestoreCheckout():
    _GitCmd(['checkout', current_branch], subrepo)

  _global_restore_checkout_func = _RestoreCheckout
43 | |
44 | |
# A single diff statistic, e.g. ('Native Library Delta', 1024, 'bytes').
_DiffResult = collections.namedtuple('DiffResult', ['name', 'value', 'units'])
46 | |
47 | |
class BaseDiff(object):
  """Base class capturing binary size diffs."""

  def __init__(self, name):
    stars = '*' * 30
    self.name = name
    self.banner = '\n%s%s%s' % (stars, name, stars)

  def AppendResults(self, logfile):
    """Print and write diff results to an open |logfile|."""
    _PrintAndWriteToFile(logfile, self.banner)
    _PrintAndWriteToFile(logfile, 'Summary:')
    _PrintAndWriteToFile(logfile, self.Summary())
    _PrintAndWriteToFile(logfile, '\nDetails:')
    _PrintAndWriteToFile(logfile, self.DetailedResults())

  @property
  def summary_stat(self):
    """A single _DiffResult summarizing this diff; None unless overridden."""
    return None

  def Summary(self):
    """A short description that summarizes the source of binary size bloat."""
    raise NotImplementedError()

  def DetailedResults(self):
    """An iterable description of the cause of binary size bloat."""
    raise NotImplementedError()

  def ProduceDiff(self, before_dir, after_dir):
    """Prepare a binary size diff with ready to print results."""
    raise NotImplementedError()

  def RunDiff(self, logfile, before_dir, after_dir):
    """Produce the diff between two archive dirs, then log it."""
    self.ProduceDiff(before_dir, after_dir)
    self.AppendResults(logfile)
81 | |
82 | |
class NativeDiff(BaseDiff):
  """Diffs native library sizes between two archives via supersize."""
  # Captures the section-size summary paragraph of the supersize output.
  _RE_SUMMARY = re.compile(
      r'.*(Section Sizes .*? object files added, \d+ removed).*',
      flags=re.DOTALL)
  # Captures the total size delta value/units from the section-size header.
  _RE_SUMMARY_STAT = re.compile(
      r'Section Sizes \(Total=(?P<value>\d+) (?P<units>\w+)\)')
  _SUMMARY_STAT_NAME = 'Native Library Delta'

  def __init__(self, size_name, supersize_path):
    self._size_name = size_name  # Basename of the .size file in archive dirs.
    self._supersize_path = supersize_path  # Path to supersize executable.
    # Diff output text, set by ProduceDiff(). Initialized to '' (not a list)
    # since every consumer treats it as a string (regex search, splitlines),
    # so pre-diff access no longer raises TypeError.
    self._diff = ''
    super(NativeDiff, self).__init__('Native Diff')

  @property
  def summary_stat(self):
    """Returns a _DiffResult with the total native delta, or None."""
    m = NativeDiff._RE_SUMMARY_STAT.search(self._diff)
    if m:
      return _DiffResult(
          NativeDiff._SUMMARY_STAT_NAME, m.group('value'), m.group('units'))
    return None

  def DetailedResults(self):
    return self._diff.splitlines()

  def Summary(self):
    return NativeDiff._RE_SUMMARY.match(self._diff).group(1)

  def ProduceDiff(self, before_dir, after_dir):
    before_size = os.path.join(before_dir, self._size_name)
    after_size = os.path.join(after_dir, self._size_name)
    cmd = [self._supersize_path, 'diff', before_size, after_size]
    # Escape braces so later str.format() calls in _PrintAndWriteToFile
    # don't treat them as format fields.
    self._diff = _RunCmd(cmd)[0].replace('{', '{{').replace('}', '}}')
116 | |
117 | |
class ResourceSizesDiff(BaseDiff):
  """Diffs the chartjson output of build/android/resource_sizes.py."""
  _RESOURCE_SIZES_PATH = os.path.join(
      _SRC_ROOT, 'build', 'android', 'resource_sizes.py')

  def __init__(self, apk_name, slow_options=False):
    self._apk_name = apk_name
    self._slow_options = slow_options
    self._diff = None  # Set by |ProduceDiff()|
    super(ResourceSizesDiff, self).__init__('Resource Sizes Diff')

  @property
  def summary_stat(self):
    """Returns the 'normalized' APK size entry from the diff, or None."""
    return next((s for s in self._diff if 'normalized' in s.name), None)

  def DetailedResults(self):
    rows = []
    for name, value, units in self._diff:
      rows.append('{:>+10,} {} {}'.format(value, units, name))
    return rows

  def Summary(self):
    stat = self.summary_stat
    return 'Normalized APK size: {:+,} {}'.format(stat.value, stat.units)

  def ProduceDiff(self, before_dir, after_dir):
    before = self._RunResourceSizes(before_dir)
    after = self._RunResourceSizes(after_dir)
    diff = []
    for section, section_dict in after.iteritems():
      for subsection, v in section_dict.iteritems():
        # Ignore entries when resource_sizes.py chartjson format has changed.
        comparable = (section in before and
                      subsection in before[section] and
                      v['units'] == before[section][subsection]['units'])
        if not comparable:
          _Print('Found differing dict structures for resource_sizes.py, '
                 'skipping {} {}', section, subsection)
          continue
        delta = v['value'] - before[section][subsection]['value']
        diff.append(
            _DiffResult('%s %s' % (section, subsection), delta, v['units']))
    # Largest absolute deltas first.
    self._diff = sorted(diff, key=lambda x: abs(x.value), reverse=True)

  def _RunResourceSizes(self, archive_dir):
    """Runs resource_sizes.py on the archived APK and returns its charts."""
    apk_path = os.path.join(archive_dir, self._apk_name)
    chartjson_file = os.path.join(archive_dir, 'results-chart.json')
    cmd = [self._RESOURCE_SIZES_PATH, apk_path, '--output-dir', archive_dir,
           '--no-output-dir', '--chartjson']
    if self._slow_options:
      cmd.append('--estimate-patch-size')
    else:
      cmd.append('--no-static-initializer-check')
    _RunCmd(cmd)
    with open(chartjson_file) as f:
      return json.load(f)['charts']
176 | |
177 | |
class _BuildHelper(object):
  """Helper class for generating and building targets."""

  def __init__(self, args):
    self.cloud = args.cloud
    self.enable_chrome_android_internal = args.enable_chrome_android_internal
    self.extra_gn_args_str = ''
    self.max_jobs = args.max_jobs
    self.max_load_average = args.max_load_average
    self.output_directory = args.output_directory
    self.target = args.target
    self.target_os = args.target_os
    self.use_goma = args.use_goma
    self._SetDefaults()

  @property
  def abs_apk_path(self):
    return os.path.join(self.output_directory, self.apk_path)

  @property
  def apk_name(self):
    # Only works on apk targets that follow: my_great_apk naming convention.
    words = self.target.split('_')[:-1]
    name = '%s.apk' % ''.join(w.title() for w in words)
    return name.replace('Webview', 'WebView')

  @property
  def apk_path(self):
    return os.path.join('apks', self.apk_name)

  @property
  def main_lib_path(self):
    # TODO(estevenson): Get this from GN instead of hardcoding.
    if self.IsLinux():
      return 'chrome'
    if 'monochrome' in self.target:
      return 'lib.unstripped/libmonochrome.so'
    return 'lib.unstripped/libchrome.so'

  @property
  def abs_main_lib_path(self):
    return os.path.join(self.output_directory, self.main_lib_path)

  @property
  def download_bucket(self):
    return 'gs://chrome-perf/%s Builder/' % self.target_os.title()

  @property
  def download_output_dir(self):
    if self.IsAndroid():
      return 'out/Release'
    return 'full-build-linux'

  @property
  def map_file_path(self):
    return '%s.map.gz' % self.main_lib_path

  @property
  def size_name(self):
    lib_name = os.path.basename(self.main_lib_path)
    return os.path.splitext(lib_name)[0] + '.size'

  def _SetDefaults(self):
    """Fills in unset options from the local environment."""
    goma_dir = os.path.join(os.path.expanduser('~'), 'goma')
    self.use_goma = self.use_goma or os.path.exists(goma_dir)
    if not self.max_load_average:
      self.max_load_average = str(multiprocessing.cpu_count())
    if not self.max_jobs:
      self.max_jobs = '10000' if self.use_goma else '500'

    internal_dir = os.path.join(os.path.dirname(_SRC_ROOT), 'src-internal')
    if os.path.exists(internal_dir):
      self.extra_gn_args_str = ' is_chrome_branded=true'
    else:
      self.extra_gn_args_str = (' exclude_unwind_tables=true '
          'ffmpeg_branding="Chrome" proprietary_codecs=true')
    if not self.IsAndroid():
      self.target = 'chrome'

  def _GenGnCmd(self):
    parts = [
        'is_official_build=true symbol_level=1',
        'use_goma=%s' % str(self.use_goma).lower(),
        'target_os="%s"' % self.target_os,
        'enable_chrome_android_internal=%s' %
            str(self.enable_chrome_android_internal).lower(),
    ]
    gn_args = ' '.join(parts) + self.extra_gn_args_str
    return ['gn', 'gen', self.output_directory, '--args=%s' % gn_args]

  def _GenNinjaCmd(self):
    cmd = ['ninja', '-C', self.output_directory]
    if self.max_jobs:
      cmd += ['-j', self.max_jobs]
    if self.max_load_average:
      cmd += ['-l', self.max_load_average]
    cmd.append(self.target)
    return cmd

  def Run(self):
    """Run GN gen/ninja build and return the process returncode."""
    _Print('Building: {}.', self.target)
    gn_retcode = _RunCmd(
        self._GenGnCmd(), print_stdout=True, exit_on_failure=False)[1]
    if gn_retcode:
      return gn_retcode
    return _RunCmd(
        self._GenNinjaCmd(), print_stdout=True, exit_on_failure=False)[1]

  def DownloadUrl(self, rev):
    return self.download_bucket + 'full-build-linux_%s.zip' % rev

  def IsAndroid(self):
    return self.target_os == 'android'

  def IsLinux(self):
    return self.target_os == 'linux'

  def IsCloud(self):
    return self.cloud
288 | |
289 | |
class _BuildArchive(object):
  """Class for managing a directory with build results and build metadata."""

  def __init__(self, rev, base_archive_dir, build, subrepo):
    self.build = build
    self.rev = rev
    self.dir = os.path.join(base_archive_dir, rev)
    metadata_path = os.path.join(self.dir, 'metadata.txt')
    self.metadata = _GenerateMetadata([self], build, metadata_path, subrepo)

  def ArchiveBuildResults(self, supersize_path):
    """Save build artifacts necessary for diffing."""
    _Print('Saving build results to: {}', self.dir)
    _EnsureDirsExist(self.dir)
    self._ArchiveFile(self.build.abs_main_lib_path)
    tool_prefix = _FindToolPrefix(self.build.output_directory)
    size_path = os.path.join(self.dir, self.build.size_name)
    supersize_cmd = [
        supersize_path, 'archive', size_path,
        '--elf-file', self.build.abs_main_lib_path,
        '--tool-prefix', tool_prefix,
        '--output-directory', self.build.output_directory,
        '--no-source-paths',
    ]
    if self.build.IsAndroid():
      supersize_cmd += ['--apk-file', self.build.abs_apk_path]
      self._ArchiveFile(self.build.abs_apk_path)

    _RunCmd(supersize_cmd)
    _WriteMetadata(self.metadata)

  def Exists(self):
    """True when matching metadata has already been archived."""
    return _MetadataExists(self.metadata)

  def _ArchiveFile(self, filename):
    # Copy one build artifact into the archive dir; die if it's missing.
    if not os.path.exists(filename):
      _Die('missing expected file: {}', filename)
    shutil.copy(filename, self.dir)
325 | |
326 | |
class _DiffArchiveManager(object):
  """Class for maintaining BuildArchives and their related diff artifacts."""

  def __init__(self, revs, archive_dir, diffs, build, subrepo):
    self.archive_dir = archive_dir
    self.build = build
    self.build_archives = [
        _BuildArchive(rev, archive_dir, build, subrepo) for rev in revs
    ]
    self.diffs = diffs
    self.subrepo = subrepo
    self._summary_stats = []

  def IterArchives(self):
    """Iterates the managed archives, oldest rev first."""
    return iter(self.build_archives)

  def MaybeDiff(self, before_id, after_id):
    """Perform diffs given two build archives."""
    before = self.build_archives[before_id]
    after = self.build_archives[after_id]
    diff_path = self._DiffFilePath(before, after)
    if not self._CanDiff(before, after):
      _Print('Skipping diff for {} due to missing build archives.', diff_path)
      return

    metadata_path = self._DiffMetadataPath(before, after)
    metadata = _GenerateMetadata(
        [before, after], self.build, metadata_path, self.subrepo)
    if _MetadataExists(metadata):
      _Print('Skipping diff for {} and {}. Matching diff already exists: {}',
             before.rev, after.rev, diff_path)
      return

    # Start from a fresh file; each diff type appends its section.
    if os.path.exists(diff_path):
      os.remove(diff_path)
    with open(diff_path, 'a') as diff_file:
      for d in self.diffs:
        d.RunDiff(diff_file, before.dir, after.dir)
    _Print('\nSee detailed diff results here: {}.', diff_path)
    _WriteMetadata(metadata)
    self._AddDiffSummaryStat(before, after)

  def Summarize(self):
    """Writes and prints a summary of all recorded diff stats."""
    if not self._summary_stats:
      return
    path = os.path.join(self.archive_dir, 'last_diff_summary.txt')
    with open(path, 'w') as f:
      ordered = sorted(
          self._summary_stats, key=lambda x: x[0].value, reverse=True)
      _PrintAndWriteToFile(f, '\nDiff Summary')
      for stat, before_rev, after_rev in ordered:
        _PrintAndWriteToFile(f, '{:>+10} {} {} for range: {}..{}',
                             stat.value, stat.units, stat.name,
                             before_rev, after_rev)

  def _AddDiffSummaryStat(self, before, after):
    # Record the headline stat for this platform's primary diff type.
    stat = None
    summary_diff_type = (
        ResourceSizesDiff if self.build.IsAndroid() else NativeDiff)
    for d in self.diffs:
      if isinstance(d, summary_diff_type):
        stat = d.summary_stat
    if stat:
      self._summary_stats.append((stat, before.rev, after.rev))

  def _CanDiff(self, before, after):
    return before.Exists() and after.Exists()

  def _DiffFilePath(self, before, after):
    return os.path.join(self._DiffDir(before, after), 'diff_results.txt')

  def _DiffMetadataPath(self, before, after):
    return os.path.join(self._DiffDir(before, after), 'metadata.txt')

  def _DiffDir(self, before, after):
    rev_range = '%s..%s' % (before.rev, after.rev)
    diff_path = os.path.join(self.archive_dir, 'diffs', rev_range)
    _EnsureDirsExist(diff_path)
    return diff_path
403 | |
404 | |
405 def _EnsureDirsExist(path): | |
406 if not os.path.exists(path): | |
407 os.makedirs(path) | |
408 | |
409 | |
410 def _GenerateMetadata(archives, build, path, subrepo): | |
411 return { | |
412 'revs': [a.rev for a in archives], | |
413 'archive_dirs': [a.dir for a in archives], | |
414 'target': build.target, | |
415 'target_os': build.target_os, | |
416 'is_cloud': build.IsCloud(), | |
417 'subrepo': subrepo, | |
418 'path': path, | |
419 'gn_args': { | |
420 'extra_gn_args_str': build.extra_gn_args_str, | |
421 'enable_chrome_android_internal': build.enable_chrome_android_internal, | |
422 } | |
423 } | |
424 | |
425 | |
426 def _WriteMetadata(metadata): | |
427 with open(metadata['path'], 'w') as f: | |
428 json.dump(metadata, f) | |
429 | |
430 | |
431 def _MetadataExists(metadata): | |
432 old_metadata = {} | |
433 path = metadata['path'] | |
434 if os.path.exists(path): | |
435 with open(path, 'r') as f: | |
436 old_metadata = json.load(f) | |
437 ret = len(metadata) == len(old_metadata) | |
438 ret &= all(v == old_metadata[k] | |
439 for k, v in metadata.items() if k != 'gn_args') | |
440 # GN args don't matter when artifacts are downloaded. For local builds | |
441 # they need to be the same so that diffs are accurate (differing GN args | |
442 # will change the final APK/native library). | |
443 if not metadata['is_cloud']: | |
444 ret &= metadata['gn_args'] == old_metadata['gn_args'] | |
445 return ret | |
446 return False | |
447 | |
448 | |
def _RunCmd(cmd, print_stdout=False, exit_on_failure=True):
  """Convenience function for running commands.

  Args:
    cmd: the command to run.
    print_stdout: if this is True, then the stdout of the process will be
        printed instead of returned.
    exit_on_failure: die if an error occurs when this is True.

  Returns:
    Tuple of (process stdout, process returncode).
  """
  cmd_str = ' '.join(cmd)
  _Print('Running: {}', cmd_str)
  if print_stdout:
    stdout_dst = sys.stdout
  else:
    stdout_dst = subprocess.PIPE

  proc = subprocess.Popen(cmd, stdout=stdout_dst, stderr=subprocess.PIPE)
  stdout, stderr = proc.communicate()

  if exit_on_failure and proc.returncode:
    _Die('command failed: {}\nstderr:\n{}', cmd_str, stderr)

  return (stdout.strip() if stdout else ''), proc.returncode
473 | |
474 | |
def _GitCmd(args, subrepo):
  """Runs a git command in |subrepo| and returns its stripped stdout."""
  stdout, _ = _RunCmd(['git', '-C', subrepo] + args)
  return stdout
477 | |
478 | |
def _GclientSyncCmd(rev, subrepo):
  """Runs 'gclient sync' to |rev| from within |subrepo|.

  The working directory is restored even when the sync command fails, so
  subsequent git/build commands still run from the expected location.
  """
  cwd = os.getcwd()
  os.chdir(subrepo)
  try:
    _RunCmd(['gclient', 'sync', '-r', 'src@' + rev], print_stdout=True)
  finally:
    os.chdir(cwd)
484 | |
485 | |
def _FindToolPrefix(output_directory):
  """Returns the android tool prefix recorded in build_vars.txt, or ''."""
  build_vars_path = os.path.join(output_directory, 'build_vars.txt')
  if not os.path.exists(build_vars_path):
    return ''
  with open(build_vars_path) as f:
    pairs = [l.rstrip().split('=', 1) for l in f if '=' in l]
  build_vars = dict(pairs)
  # Tool prefix is relative to output dir, rebase to source root.
  tool_prefix = build_vars['android_tool_prefix']
  while os.path.sep in tool_prefix:
    rebased_tool_prefix = os.path.join(_SRC_ROOT, tool_prefix)
    if os.path.exists(rebased_tool_prefix + 'readelf'):
      return rebased_tool_prefix
    # Drop the leading path component and retry.
    tool_prefix = tool_prefix[tool_prefix.find(os.path.sep) + 1:]
  return ''
499 | |
500 | |
def _SyncAndBuild(archive, build, subrepo):
  """Checks out archive.rev (syncing deps when needed) and builds.

  Returns:
    True when the build succeeded.
  """
  if subrepo == _SRC_ROOT:
    # Move to a detached state since gclient sync doesn't work with local
    # commits on a branch.
    _GitCmd(['checkout', '--detach'], subrepo)
    _GclientSyncCmd(archive.rev, subrepo)
  else:
    # Simply do a checkout if subrepo is used.
    _GitCmd(['checkout', archive.rev], subrepo)
  return build.Run() == 0
512 | |
513 | |
def _GenerateRevList(rev, reference_rev, all_in_range, subrepo):
  """Normalize and optionally generate a list of commits in the given range.

  Returns:
    A list of revisions ordered from oldest to newest.
  """
  stdout = _GitCmd(['rev-list', '%s^..%s' % (reference_rev, rev)], subrepo)
  # rev-list emits newest first; we want oldest first.
  all_revs = stdout.splitlines()
  all_revs.reverse()
  revs = all_revs if all_in_range else [all_revs[0], all_revs[-1]]
  if len(revs) >= _COMMIT_COUNT_WARN_THRESHOLD:
    _VerifyUserAccepts(
        'You\'ve provided a commit range that contains %d commits' % len(revs))
  return revs
531 | |
532 | |
def _ValidateRevs(rev, reference_rev, subrepo):
  """Validates that |rev| and |reference_rev| name distinct, ordered commits.

  Dies with a helpful message when either object is missing from the repo
  or when |reference_rev| isn't an ancestor of |rev|.

  Returns:
    Tuple of (rev, reference_rev), unchanged.
  """
  def git_fatal(args, message):
    # The with-block guarantees the devnull handle is closed even if call()
    # raises (it was previously leaked on every invocation).
    with open(os.devnull, 'wb') as devnull:
      retcode = subprocess.call(
          ['git', '-C', subrepo] + args,
          stdout=devnull, stderr=subprocess.STDOUT)
    if retcode:
      _Die(message)

  if rev == reference_rev:
    _Die('rev and reference-rev cannot be equal')
  no_obj_message = ('%s either doesn\'t exist or your local repo is out of '
                    'date, try "git fetch origin master"')
  git_fatal(['cat-file', '-e', rev], no_obj_message % rev)
  git_fatal(['cat-file', '-e', reference_rev], no_obj_message % reference_rev)
  git_fatal(['merge-base', '--is-ancestor', reference_rev, rev],
            'reference-rev is newer than rev')
  return rev, reference_rev
550 | |
551 | |
def _VerifyUserAccepts(message):
  """Prints |message| and exits (restoring the checkout) unless user agrees."""
  # Note the leading space: |message| callers don't end with a separator, so
  # without it the prompt ran directly into the message text.
  _Print(message + ' Do you want to proceed? [y/n]')
  if raw_input('> ').lower() != 'y':
    _global_restore_checkout_func()
    sys.exit()
557 | |
558 | |
def _EnsureDirectoryClean(subrepo):
  """Exits unless |subrepo|'s working tree has no uncommitted changes.

  Untracked files (porcelain status code '??') are allowed.
  """
  _Print('Checking source directory')
  stdout = _GitCmd(['status', '--porcelain'], subrepo)
  # Ignore untracked files. Inspect every status line (not just the first),
  # so a leading untracked entry can't mask real modifications further down.
  dirty = any(l and not l.startswith('??') for l in stdout.splitlines())
  if dirty:
    _Print('Failure: please ensure working directory is clean.')
    sys.exit()
566 | |
567 | |
def _Die(s, *args, **kwargs):
  """Prints a failure message, restores the original checkout, and exits."""
  failure_message = 'Failure: ' + s
  _Print(failure_message, *args, **kwargs)
  _global_restore_checkout_func()
  sys.exit(1)
572 | |
573 | |
def _DownloadBuildArtifacts(archive, build, supersize_path, depot_tools_path):
  """Download artifacts from arm32 chromium perf builder."""
  if depot_tools_path:
    gsutil_path = os.path.join(depot_tools_path, 'gsutil.py')
  else:
    # Fall back to whatever gsutil.py is on PATH.
    gsutil_path = distutils.spawn.find_executable('gsutil.py')
  if not gsutil_path:
    _Die('gsutil.py not found, please provide path to depot_tools via '
         '--depot-tools-path or add it to your PATH')

  download_dir = tempfile.mkdtemp(dir=_SRC_ROOT)
  try:
    _DownloadAndArchive(
        gsutil_path, archive, download_dir, build, supersize_path)
  finally:
    shutil.rmtree(download_dir)
591 | |
592 | |
def _DownloadAndArchive(gsutil_path, archive, dl_dir, build, supersize_path):
  """Downloads and unzips one revision's artifacts, then archives them."""
  dl_dst = os.path.join(dl_dir, archive.rev)
  _Print('Downloading build artifacts for {}', archive.rev)
  # gsutil writes stdout and stderr to stderr, so pipe stdout and stderr to
  # sys.stdout.
  retcode = subprocess.call(
      [gsutil_path, 'cp', build.DownloadUrl(archive.rev), dl_dst],
      stdout=sys.stdout, stderr=subprocess.STDOUT)
  if retcode:
    _Die('unexpected error while downloading {}. It may no longer exist on '
         'the server or it may not have been uploaded yet (check {}). '
         'Otherwise, you may not have the correct access permissions.',
         build.DownloadUrl(archive.rev), _BUILDER_URL)

  # Files needed for supersize and resource_sizes. Paths relative to out dir.
  to_extract = [build.main_lib_path, build.map_file_path, 'args.gn']
  if build.IsAndroid():
    to_extract += ['build_vars.txt', build.apk_path]
  extract_dir = dl_dst + '_' + 'unzipped'
  # Storage bucket stores entire output directory including out/Release prefix.
  _Print('Extracting build artifacts')
  with zipfile.ZipFile(dl_dst, 'r') as z:
    _ExtractFiles(to_extract, build.download_output_dir, extract_dir, z)

  # Temporarily point the build at the extracted artifacts; restore the real
  # output directory even when archiving fails, so |build| isn't left
  # pointing at a deleted temp dir.
  dl_out = os.path.join(extract_dir, build.download_output_dir)
  output_directory = build.output_directory
  build.output_directory = dl_out
  try:
    archive.ArchiveBuildResults(supersize_path)
  finally:
    build.output_directory = output_directory
620 | |
621 | |
622 def _ExtractFiles(to_extract, prefix, dst, z): | |
623 zip_infos = z.infolist() | |
624 assert all(info.filename.startswith(prefix) for info in zip_infos), ( | |
625 'Storage bucket folder structure doesn\'t start with %s' % prefix) | |
626 to_extract = [os.path.join(prefix, f) for f in to_extract] | |
627 for f in to_extract: | |
628 z.extract(f, path=dst) | |
629 | |
630 | |
631 def _Print(s, *args, **kwargs): | |
632 print s.format(*args, **kwargs) | |
633 | |
634 | |
def _PrintAndWriteToFile(logfile, s, *args, **kwargs):
  """Writes and prints |s|, throttling console output if |s| is a large list."""
  if isinstance(s, basestring):
    formatted = s.format(*args, **kwargs)
    _Print(formatted)
    logfile.write('%s\n' % formatted)
  else:
    # Echo at most _DIFF_DETAILS_LINES_THRESHOLD lines to the console, but
    # always write the full contents to the log file.
    for line in s[:_DIFF_DETAILS_LINES_THRESHOLD]:
      _Print(line)
    if len(s) > _DIFF_DETAILS_LINES_THRESHOLD:
      _Print('\nOutput truncated, see {} for more.', logfile.name)
    logfile.write('\n'.join(s))
647 | |
648 | |
@contextmanager
def _TmpCopyBinarySizeDir():
  """Recursively copy files to a temp dir and yield supersize path."""
  # Needs to be at same level of nesting as the real //tools/binary_size
  # since supersize uses this to find d3 in //third_party.
  tmp_dir = tempfile.mkdtemp(dir=_SRC_ROOT)
  try:
    tmp_bs_dir = os.path.join(tmp_dir, 'binary_size')
    real_bs_dir = os.path.join(_SRC_ROOT, 'tools', 'binary_size')
    shutil.copytree(real_bs_dir, tmp_bs_dir)
    yield os.path.join(tmp_bs_dir, 'supersize')
  finally:
    shutil.rmtree(tmp_dir)
661 | |
662 | |
def main():
  """Command-line entry point: parse args, build/download each rev, diff."""
  parser = argparse.ArgumentParser(
      description='Find the cause of APK size bloat.')
  parser.add_argument('--archive-directory',
                      default=_DEFAULT_ARCHIVE_DIR,
                      help='Where results are stored.')
  parser.add_argument('rev',
                      help='Find binary size bloat for this commit.')
  parser.add_argument('--reference-rev',
                      help='Older rev to diff against. If not supplied, '
                           'the previous commit to rev will be used.')
  parser.add_argument('--all',
                      action='store_true',
                      help='Build/download all revs from --reference-rev to '
                           'rev and diff the contiguous revisions.')
  parser.add_argument('--include-slow-options',
                      action='store_true',
                      help='Run some extra steps that take longer to complete. '
                           'This includes apk-patch-size estimation and '
                           'static-initializer counting.')
  parser.add_argument('--cloud',
                      action='store_true',
                      help='Download build artifacts from perf builders '
                           '(Android only, Googlers only).')
  parser.add_argument('--depot-tools-path',
                      help='Custom path to depot tools. Needed for --cloud if '
                           'depot tools isn\'t in your PATH.')
  parser.add_argument('--subrepo',
                      help='Specify a subrepo directory to use. Gclient sync '
                           'will be skipped if this option is used and all git '
                           'commands will be executed from the subrepo '
                           'directory. This option doesn\'t work with --cloud.')

  build_group = parser.add_argument_group('ninja', 'Args to use with ninja/gn')
  build_group.add_argument('-j',
                           dest='max_jobs',
                           help='Run N jobs in parallel.')
  build_group.add_argument('-l',
                           dest='max_load_average',
                           help='Do not start new jobs if the load average is '
                                'greater than N.')
  build_group.add_argument('--no-goma',
                           action='store_false',
                           dest='use_goma',
                           default=True,
                           help='Do not use goma when building with ninja.')
  build_group.add_argument('--target-os',
                           default='android',
                           choices=['android', 'linux'],
                           help='target_os gn arg. Default: android.')
  build_group.add_argument('--output-directory',
                           default=_DEFAULT_OUT_DIR,
                           help='ninja output directory. '
                                'Default: %s.' % _DEFAULT_OUT_DIR)
  build_group.add_argument('--enable-chrome-android-internal',
                           action='store_true',
                           help='Allow downstream targets to be built.')
  build_group.add_argument('--target',
                           default=_DEFAULT_ANDROID_TARGET,
                           help='GN APK target to build. Ignored for Linux. '
                                'Default %s.' % _DEFAULT_ANDROID_TARGET)
  # Show the full help text when invoked with no arguments at all.
  if len(sys.argv) == 1:
    parser.print_help()
    sys.exit()
  args = parser.parse_args()
  build = _BuildHelper(args)
  if build.IsCloud() and args.subrepo:
    parser.error('--subrepo doesn\'t work with --cloud')

  # Refuse to run on a dirty tree, then remember the current branch so it
  # can be restored after all the checkouts below.
  subrepo = args.subrepo or _SRC_ROOT
  _EnsureDirectoryClean(subrepo)
  _SetRestoreFunc(subrepo)
  if build.IsLinux():
    _VerifyUserAccepts('Linux diffs have known deficiencies (crbug/717550).')

  # Resolve and sanity-check the commit range before doing any work.
  rev, reference_rev = _ValidateRevs(
      args.rev, args.reference_rev or args.rev + '^', subrepo)
  revs = _GenerateRevList(rev, reference_rev, args.all, subrepo)
  with _TmpCopyBinarySizeDir() as supersize_path:
    diffs = [NativeDiff(build.size_name, supersize_path)]
    if build.IsAndroid():
      diffs +=  [
          ResourceSizesDiff(
              build.apk_name, slow_options=args.include_slow_options)
      ]
    diff_mngr = _DiffArchiveManager(
        revs, args.archive_directory, diffs, build, subrepo)
    consecutive_failures = 0
    # Build (or download) artifacts for each rev, oldest first, diffing each
    # newly completed archive against the previous one.
    for i, archive in enumerate(diff_mngr.IterArchives()):
      if archive.Exists():
        _Print('Found matching metadata for {}, skipping build step.',
               archive.rev)
      else:
        if build.IsCloud():
          _DownloadBuildArtifacts(
              archive, build, supersize_path, args.depot_tools_path)
        else:
          build_success = _SyncAndBuild(archive, build, subrepo)
          if not build_success:
            consecutive_failures += 1
            if consecutive_failures > _ALLOWED_CONSECUTIVE_FAILURES:
              _Die('{} builds failed in a row, last failure was {}.',
                   consecutive_failures, archive.rev)
          else:
            archive.ArchiveBuildResults(supersize_path)
            consecutive_failures = 0

      if i != 0:
        diff_mngr.MaybeDiff(i - 1, i)

    diff_mngr.Summarize()

  # Return to the branch that was checked out when the script started.
  _global_restore_checkout_func()
776 | |
if __name__ == '__main__':
  # main() returns None on success, which sys.exit() treats as exit code 0.
  sys.exit(main())
779 | |
OLD | NEW |