Chromium Code Reviews

Unified Diff: tools/binary_size/diagnose_apk_bloat.py

Issue 2837953002: diagnose_apk_bloat.py: add native diffs. (Closed)
Patch Set: console -> diff + log file format fix (created 3 years, 7 months ago)
 #!/usr/bin/env python
 # Copyright 2017 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.

 """Tool for finding the cause of APK bloat.

 Run diagnose_apk_bloat.py -h for detailed usage help.
 """

 import argparse
 import collections
 from contextlib import contextmanager
 import distutils.spawn
 import json
 import multiprocessing
 import os
+import re
 import shutil
 import subprocess
 import sys
 import tempfile
 import zipfile

 _COMMIT_COUNT_WARN_THRESHOLD = 15
 _ALLOWED_CONSECUTIVE_FAILURES = 2
+_DIFF_DETAILS_LINES_THRESHOLD = 100
 _BUILDER_URL = \
     'https://build.chromium.org/p/chromium.perf/builders/Android%20Builder'
 _CLOUD_OUT_DIR = os.path.join('out', 'Release')
 _SRC_ROOT = os.path.abspath(
     os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
 _DEFAULT_ARCHIVE_DIR = os.path.join(_SRC_ROOT, 'binary-size-bloat')
 _DEFAULT_OUT_DIR = os.path.join(_SRC_ROOT, 'out', 'diagnose-apk-bloat')
 _DEFAULT_TARGET = 'monochrome_public_apk'


(...skipping 11 matching lines...)
   def __init__(self, name):
     self.name = name
     self.banner = '\n' + '*' * 30 + name + '*' * 30

   def AppendResults(self, logfile):
     """Print and write diff results to an open |logfile|."""
     _PrintAndWriteToFile(logfile, self.banner)
     _PrintAndWriteToFile(logfile, 'Summary:')
     _PrintAndWriteToFile(logfile, self.Summary())
     _PrintAndWriteToFile(logfile, '\nDetails:')
-    for l in self.DetailedResults():
-      _PrintAndWriteToFile(logfile, l)
+    _PrintAndWriteToFile(logfile, self.DetailedResults())

   def Summary(self):
     """A short description that summarizes the source of binary size bloat."""
     raise NotImplementedError()

   def DetailedResults(self):
     """An iterable description of the cause of binary size bloat."""
     raise NotImplementedError()

   def ProduceDiff(self, archive_dirs):
     """Prepare a binary size diff with ready to print results."""
     raise NotImplementedError()

   def RunDiff(self, logfile, archive_dirs):
-    _Print('Creating {}', self.name)
     self.ProduceDiff(archive_dirs)
     self.AppendResults(logfile)


+class NativeDiff(BaseDiff):
+  _RE_SUMMARY = re.compile(
+      r'.*(Section Sizes .*? object files added, \d+ removed).*',
+      flags=re.DOTALL)
+
+  def __init__(self, size_name, supersize_path):
+    self._size_name = size_name
+    self._supersize_path = supersize_path
+    self._diff = []
+    super(NativeDiff, self).__init__('Native Diff')
+
+  def DetailedResults(self):
+    return self._diff.splitlines()
+
+  def Summary(self):
+    return NativeDiff._RE_SUMMARY.match(self._diff).group(1)
+
+  def ProduceDiff(self, archive_dirs):
+    size_files = [os.path.join(a, self._size_name)
+                  for a in reversed(archive_dirs)]
+    cmd = [self._supersize_path, 'diff'] + size_files
+    self._diff = _RunCmd(cmd)[0].replace('{', '{{').replace('}', '}}')
+
+
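An aside for reviewers: Summary() above depends on _RE_SUMMARY pulling the "Section Sizes ... object files added, N removed" block out of the raw supersize diff text. A minimal, self-contained sketch of that extraction, using made-up sample text rather than real supersize output:

import re

# Regex copied from NativeDiff above; the sample text below is illustrative
# and is not actual supersize output.
_RE_SUMMARY = re.compile(
    r'.*(Section Sizes .*? object files added, \d+ removed).*',
    flags=re.DOTALL)

sample = ('header line\n'
          'Section Sizes (before -> after):\n'
          '  .text: +2048 bytes\n'
          '3 object files added, 1 removed\n'
          'per-symbol detail lines follow...\n')
# Prints everything from 'Section Sizes' through '... added, 1 removed'.
print _RE_SUMMARY.match(sample).group(1)
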
 _ResourceSizesDiffResult = collections.namedtuple(
     'ResourceSizesDiffResult', ['section', 'value', 'units'])


 class ResourceSizesDiff(BaseDiff):
   _RESOURCE_SIZES_PATH = os.path.join(
       _SRC_ROOT, 'build', 'android', 'resource_sizes.py')

   def __init__(self, apk_name, slow_options=False):
     self._apk_name = apk_name
     self._slow_options = slow_options
     self._diff = None  # Set by |ProduceDiff()|
     super(ResourceSizesDiff, self).__init__('Resource Sizes Diff')

   def DetailedResults(self):
-    for section, value, units in self._diff:
-      yield '{:>+10,} {} {}'.format(value, units, section)
+    return ['{:>+10,} {} {}'.format(value, units, section)
+            for section, value, units in self._diff]

   def Summary(self):
     for s in self._diff:
       if 'normalized' in s.section:
         return 'Normalized APK size: {:+,} {}'.format(s.value, s.units)
     return ''

   def ProduceDiff(self, archive_dirs):
     chartjsons = self._RunResourceSizes(archive_dirs)
     diff = []
(...skipping 54 matching lines...)
   def apk_name(self):
     # Only works on apk targets that follow the my_great_apk naming convention.
     apk_name = ''.join(s.title() for s in self.target.split('_')[:-1]) + '.apk'
     return apk_name.replace('Webview', 'WebView')

   @property
   def apk_path(self):
     return os.path.join('apks', self.apk_name)

   @property
-  def main_lib_name(self):
+  def main_lib_path(self):
     # TODO(estevenson): Get this from GN instead of hardcoding.
     if self.IsLinux():
       return 'chrome'
     elif 'monochrome' in self.target:
       return 'lib.unstripped/libmonochrome.so'
     else:
       return 'lib.unstripped/libchrome.so'

   @property
-  def main_lib_path(self):
-    return os.path.join(self.output_directory, self.main_lib_name)
+  def abs_main_lib_path(self):
+    return os.path.join(self.output_directory, self.main_lib_path)

   @property
-  def map_file_name(self):
-    return self.main_lib_name + '.map.gz'
+  def map_file_path(self):
+    return self.main_lib_path + '.map.gz'
+
+  @property
+  def size_name(self):
+    return os.path.splitext(os.path.basename(self.main_lib_path))[0] + '.size'

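An aside for reviewers: after the renames above, main_lib_path and map_file_path stay relative to the output directory, and the new size_name is derived from the library basename. A small sketch of the resulting values, assuming the monochrome Android target:

import os

# Values follow the property definitions above for a monochrome Android build;
# this is an illustration, not code from the patch.
main_lib_path = 'lib.unstripped/libmonochrome.so'
map_file_path = main_lib_path + '.map.gz'
size_name = os.path.splitext(os.path.basename(main_lib_path))[0] + '.size'
print map_file_path  # lib.unstripped/libmonochrome.so.map.gz
print size_name      # libmonochrome.size
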
   def _SetDefaults(self):
     has_goma_dir = os.path.exists(os.path.join(os.path.expanduser('~'), 'goma'))
     self.use_goma = self.use_goma or has_goma_dir
     self.max_load_average = (self.max_load_average or
                              str(multiprocessing.cpu_count()))
     if not self.max_jobs:
       self.max_jobs = '10000' if self.use_goma else '500'

     if os.path.exists(os.path.join(os.path.dirname(_SRC_ROOT), 'src-internal')):
(...skipping 40 matching lines...)

 class _BuildArchive(object):
   """Class for managing a directory with build results and build metadata."""
   def __init__(self, rev, base_archive_dir, build, subrepo):
     self.build = build
     self.dir = os.path.join(base_archive_dir, rev)
     metadata_path = os.path.join(self.dir, 'metadata.txt')
     self.rev = rev
     self.metadata = _GenerateMetadata([self], build, metadata_path, subrepo)

-  def ArchiveBuildResults(self, bs_dir):
+  def ArchiveBuildResults(self, supersize_path):
     """Save build artifacts necessary for diffing."""
     _Print('Saving build results to: {}', self.dir)
     _EnsureDirsExist(self.dir)
     build = self.build
-    self._ArchiveFile(build.main_lib_path)
-    lib_name_noext = os.path.splitext(os.path.basename(build.main_lib_path))[0]
-    size_path = os.path.join(self.dir, lib_name_noext + '.size')
-    supersize_path = os.path.join(bs_dir, 'supersize')
+    self._ArchiveFile(build.abs_main_lib_path)
     tool_prefix = _FindToolPrefix(build.output_directory)
+    size_path = os.path.join(self.dir, build.size_name)
     supersize_cmd = [supersize_path, 'archive', size_path, '--elf-file',
-                     build.main_lib_path, '--tool-prefix', tool_prefix,
+                     build.abs_main_lib_path, '--tool-prefix', tool_prefix,
                      '--output-directory', build.output_directory,
                      '--no-source-paths']
     if build.IsAndroid():
       supersize_cmd += ['--apk-file', build.abs_apk_path]
       self._ArchiveFile(build.abs_apk_path)

     _RunCmd(supersize_cmd)
     _WriteMetadata(self.metadata)

   def Exists(self):
(...skipping 27 matching lines...)
       _Print('Skipping diff for {} due to missing build archives.', diff_path)
       return

     metadata_path = self._DiffMetadataPath(archives)
     metadata = _GenerateMetadata(
         archives, self.build, metadata_path, self.subrepo)
     if _MetadataExists(metadata):
       _Print('Skipping diff for {} and {}. Matching diff already exists: {}',
              archives[0].rev, archives[1].rev, diff_path)
     else:
+      if os.path.exists(diff_path):
+        os.remove(diff_path)
       archive_dirs = [archives[0].dir, archives[1].dir]
       with open(diff_path, 'a') as diff_file:
         for d in self.diffs:
           d.RunDiff(diff_file, archive_dirs)
+      _Print('See detailed diff results here: {}.', diff_path)
     _WriteMetadata(metadata)

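An aside for reviewers: MaybeDiff now deletes a stale diff_results.txt left over from an earlier run, then lets every diff in self.diffs append to the same file, so a single run yields one combined log. A stand-alone sketch of that file handling, with an illustrative path:

import os

# Illustrative path; in the patch the real path comes from _DiffFilePath().
diff_path = '/tmp/diff_results_example.txt'
if os.path.exists(diff_path):
  os.remove(diff_path)  # drop stale results from a previous run
with open(diff_path, 'a') as diff_file:
  for banner in ['Native Diff', 'Resource Sizes Diff']:
    diff_file.write(banner + '\n')  # each diff appends to the same log file
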
   def _CanDiff(self, archives):
     return all(a.Exists() for a in archives)

   def _DiffFilePath(self, archives):
     return os.path.join(self._DiffDir(archives), 'diff_results.txt')

   def _DiffMetadataPath(self, archives):
     return os.path.join(self._DiffDir(archives), 'metadata.txt')
(...skipping 154 matching lines...)
     _Print('Failure: please ensure working directory is clean.')
     sys.exit()


 def _Die(s, *args, **kwargs):
   _Print('Failure: ' + s, *args, **kwargs)
   _global_restore_checkout_func()
   sys.exit(1)


-def _DownloadBuildArtifacts(archive, build, bs_dir, depot_tools_path):
+def _DownloadBuildArtifacts(archive, build, supersize_path, depot_tools_path):
   """Download artifacts from arm32 chromium perf builder."""
   if depot_tools_path:
     gsutil_path = os.path.join(depot_tools_path, 'gsutil.py')
   else:
     gsutil_path = distutils.spawn.find_executable('gsutil.py')

   if not gsutil_path:
     _Die('gsutil.py not found, please provide path to depot_tools via '
          '--depot-tools-path or add it to your PATH')

   download_dir = tempfile.mkdtemp(dir=_SRC_ROOT)
   try:
-    _DownloadAndArchive(gsutil_path, archive, download_dir, build, bs_dir)
+    _DownloadAndArchive(
+        gsutil_path, archive, download_dir, build, supersize_path)
   finally:
     shutil.rmtree(download_dir)


-def _DownloadAndArchive(gsutil_path, archive, dl_dir, build, bs_dir):
+def _DownloadAndArchive(gsutil_path, archive, dl_dir, build, supersize_path):
   dl_file = 'full-build-linux_%s.zip' % archive.rev
   dl_url = 'gs://chrome-perf/Android Builder/%s' % dl_file
   dl_dst = os.path.join(dl_dir, dl_file)
   _Print('Downloading build artifacts for {}', archive.rev)
   # gsutil writes stdout and stderr to stderr, so pipe stdout and stderr to
   # sys.stdout.
   retcode = subprocess.call([gsutil_path, 'cp', dl_url, dl_dir],
                             stdout=sys.stdout, stderr=subprocess.STDOUT)
   if retcode:
     _Die('unexpected error while downloading {}. It may no longer exist on '
          'the server or it may not have been uploaded yet (check {}). '
          'Otherwise, you may not have the correct access permissions.',
          dl_url, _BUILDER_URL)

   # Files needed for supersize and resource_sizes. Paths relative to out dir.
-  to_extract = [build.main_lib_name, build.map_file_name, 'args.gn',
+  to_extract = [build.main_lib_path, build.map_file_path, 'args.gn',
                 'build_vars.txt', build.apk_path]
   extract_dir = os.path.join(os.path.splitext(dl_dst)[0], 'unzipped')
   # Storage bucket stores entire output directory including out/Release prefix.
   _Print('Extracting build artifacts')
   with zipfile.ZipFile(dl_dst, 'r') as z:
     _ExtractFiles(to_extract, _CLOUD_OUT_DIR, extract_dir, z)
   dl_out = os.path.join(extract_dir, _CLOUD_OUT_DIR)
   build.output_directory, output_directory = dl_out, build.output_directory
-  archive.ArchiveBuildResults(bs_dir)
+  archive.ArchiveBuildResults(supersize_path)
   build.output_directory = output_directory


 def _ExtractFiles(to_extract, prefix, dst, z):
   zip_infos = z.infolist()
   assert all(info.filename.startswith(prefix) for info in zip_infos), (
       'Storage bucket folder structure doesn\'t start with %s' % prefix)
   to_extract = [os.path.join(prefix, f) for f in to_extract]
   for f in to_extract:
     z.extract(f, path=dst)


 def _Print(s, *args, **kwargs):
   print s.format(*args, **kwargs)


 def _PrintAndWriteToFile(logfile, s):
-  """Print |s| to |logfile| and stdout."""
-  _Print(s)
-  logfile.write('%s\n' % s)
+  """Write and print |s|, throttling output if |s| is a large list."""
+  if isinstance(s, basestring):
+    _Print(s)
+    logfile.write('%s\n' % s)
+  else:
+    for l in s[:_DIFF_DETAILS_LINES_THRESHOLD]:
+      _Print(l)
+    if len(s) > _DIFF_DETAILS_LINES_THRESHOLD:
+      _Print('\nOutput truncated, see {} for more.', logfile.name)
+    logfile.write('\n'.join(s))


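An aside for reviewers: the new list branch in _PrintAndWriteToFile prints at most _DIFF_DETAILS_LINES_THRESHOLD lines to stdout while still writing the full list to the log file. A self-contained sketch of that behaviour with made-up data and an illustrative log path:

_DIFF_DETAILS_LINES_THRESHOLD = 100  # same constant defined at the top of the file

details = ['line %d' % i for i in range(250)]  # stand-in for long diff output
with open('/tmp/bloat_log_example.txt', 'w') as logfile:  # illustrative path
  for l in details[:_DIFF_DETAILS_LINES_THRESHOLD]:
    print l                    # only the first 100 lines reach stdout
  if len(details) > _DIFF_DETAILS_LINES_THRESHOLD:
    print '\nOutput truncated, see {} for more.'.format(logfile.name)
  logfile.write('\n'.join(details))  # the log file still gets every line
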
 @contextmanager
-def _TmpBinarySizeDir():
-  """Recursively copy files to a temp dir and yield the tmp binary_size dir."""
+def _TmpCopyBinarySizeDir():
+  """Recursively copy files to a temp dir and yield the supersize path."""
   # Needs to be at same level of nesting as the real //tools/binary_size
   # since supersize uses this to find d3 in //third_party.
   tmp_dir = tempfile.mkdtemp(dir=_SRC_ROOT)
   try:
     bs_dir = os.path.join(tmp_dir, 'binary_size')
     shutil.copytree(os.path.join(_SRC_ROOT, 'tools', 'binary_size'), bs_dir)
-    yield bs_dir
+    yield os.path.join(bs_dir, 'supersize')
   finally:
     shutil.rmtree(tmp_dir)


 def main():
   parser = argparse.ArgumentParser(
       description='Find the cause of APK size bloat.')
   parser.add_argument('--archive-directory',
                       default=_DEFAULT_ARCHIVE_DIR,
                       help='Where results are stored.')
(...skipping 63 matching lines...)
   if args.subrepo:
     parser.error('--subrepo doesn\'t work with --cloud')

   subrepo = args.subrepo or _SRC_ROOT
   _EnsureDirectoryClean(subrepo)
   _SetRestoreFunc(subrepo)
   revs = _GenerateRevList(args.rev,
                           args.reference_rev or args.rev + '^',
                           args.all,
                           subrepo)
-  diffs = []
-  if build.IsAndroid():
-    diffs += [
-        ResourceSizesDiff(
-            build.apk_name, slow_options=args.include_slow_options)
-    ]
-  diff_mngr = _DiffArchiveManager(
-      revs, args.archive_directory, diffs, build, subrepo)
-  consecutive_failures = 0
-  with _TmpBinarySizeDir() as bs_dir:
+  with _TmpCopyBinarySizeDir() as supersize_path:
+    diffs = [NativeDiff(build.size_name, supersize_path)]
+    if build.IsAndroid():
+      diffs += [
+          ResourceSizesDiff(
+              build.apk_name, slow_options=args.include_slow_options)
+      ]
+    diff_mngr = _DiffArchiveManager(
+        revs, args.archive_directory, diffs, build, subrepo)
+    consecutive_failures = 0
     for i, archive in enumerate(diff_mngr.IterArchives()):
       if archive.Exists():
         _Print('Found matching metadata for {}, skipping build step.',
                archive.rev)
       else:
         if build.IsCloud():
-          _DownloadBuildArtifacts(archive, build, bs_dir, args.depot_tools_path)
+          _DownloadBuildArtifacts(
+              archive, build, supersize_path, args.depot_tools_path)
         else:
           build_success = _SyncAndBuild(archive, build, subrepo)
           if not build_success:
             consecutive_failures += 1
             if consecutive_failures > _ALLOWED_CONSECUTIVE_FAILURES:
               _Die('{} builds failed in a row, last failure was {}.',
                    consecutive_failures, archive.rev)
           else:
-            archive.ArchiveBuildResults(bs_dir)
+            archive.ArchiveBuildResults(supersize_path)
             consecutive_failures = 0

       if i != 0:
         diff_mngr.MaybeDiff(i - 1, i)

   _global_restore_checkout_func()

 if __name__ == '__main__':
   sys.exit(main())
