Chromium Code Reviews

Unified Diff: tools/binary_size/diagnose_apk_bloat.py

Issue 2837953002: diagnose_apk_bloat.py: add native diffs. (Closed)
Patch Set: Remove silent flag (created 3 years, 8 months ago)
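
For context, the tool is driven from the command line inside a Chromium checkout. A hedged sketch of an invocation (only flags that are visible in the hunks below are used; the revision arguments are elided because their exact spelling does not appear in this diff):

    # Run from src/. Revision arguments omitted (not shown in this diff).
    tools/binary_size/diagnose_apk_bloat.py -h
    tools/binary_size/diagnose_apk_bloat.py --cloud --depot-tools-path ~/depot_tools ...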
 #!/usr/bin/env python
 # Copyright 2017 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.

 """Tool for finding the cause of APK bloat.

 Run diagnose_apk_bloat.py -h for detailed usage help.
 """

 import argparse
 import collections
 from contextlib import contextmanager
 import distutils.spawn
 import json
 import multiprocessing
 import os
+import re
 import shutil
 import subprocess
 import sys
 import tempfile
 import zipfile

 _COMMIT_COUNT_WARN_THRESHOLD = 15
 _ALLOWED_CONSECUTIVE_FAILURES = 2
+_DIFF_DETAILS_LINES_THRESHOLD = 100
 _BUILDER_URL = \
     'https://build.chromium.org/p/chromium.perf/builders/Android%20Builder'
 _CLOUD_OUT_DIR = os.path.join('out', 'Release')
 _SRC_ROOT = os.path.abspath(
     os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
 _DEFAULT_ARCHIVE_DIR = os.path.join(_SRC_ROOT, 'binary-size-bloat')
 _DEFAULT_OUT_DIR = os.path.join(_SRC_ROOT, 'out', 'diagnose-apk-bloat')
 _DEFAULT_TARGET = 'monochrome_public_apk'


(...skipping 11 matching lines...)
   def __init__(self, name):
     self.name = name
     self.banner = '\n' + '*' * 30 + name + '*' * 30

   def AppendResults(self, logfile):
     """Print and write diff results to an open |logfile|."""
     _PrintAndWriteToFile(logfile, self.banner)
     _PrintAndWriteToFile(logfile, 'Summary:')
     _PrintAndWriteToFile(logfile, self.Summary())
     _PrintAndWriteToFile(logfile, '\nDetails:')
-    for l in self.DetailedResults():
-      _PrintAndWriteToFile(logfile, l)
+    _PrintAndWriteToFile(logfile, self.DetailedResults())

   def Summary(self):
     """A short description that summarizes the source of binary size bloat."""
     raise NotImplementedError()

   def DetailedResults(self):
     """An iterable description of the cause of binary size bloat."""
     raise NotImplementedError()

   def ProduceDiff(self, archive_dirs):
     """Prepare a binary size diff with ready to print results."""
     raise NotImplementedError()

   def RunDiff(self, logfile, archive_dirs):
-    _Print('Creating {}', self.name)
     self.ProduceDiff(archive_dirs)
     self.AppendResults(logfile)


+class NativeDiff(BaseDiff):
+  _RE_SUMMARY = re.compile(
+      r'.*(Section Sizes .*? object files added, \d+ removed).*',
+      flags=re.DOTALL)
+
+  def __init__(self, size_name, supersize_path):
+    self._size_name = size_name
+    self._supersize_path = supersize_path
+    self._diff = []
+    super(NativeDiff, self).__init__('Native Diff')
+
+  def DetailedResults(self):
+    return self._diff.splitlines()
+
+  def Summary(self):
+    return NativeDiff._RE_SUMMARY.match(self._diff).group(1)
+
+  def ProduceDiff(self, archive_dirs):
+    size_files = [os.path.join(a, self._size_name)
+                  for a in reversed(archive_dirs)]
+    q = 'Print(Diff(size_info1, size_info2), use_pager=False);'
agrieve 2017/04/24 20:43:21 nit: shouldn't need to specify use_pager=False. It
estevenson 2017/04/24 22:39:40 Switched to diff instead of console. Done.
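For illustration, a minimal sketch (not part of this patch set) of how ProduceDiff might look after the switch from the console subcommand to a supersize diff subcommand mentioned in the reply above; the diff subcommand's exact interface is assumed here rather than taken from this CL:

    def ProduceDiff(self, archive_dirs):
      # Assumption: `supersize diff <before.size> <after.size>` prints the
      # textual diff to stdout; _RunCmd is the helper already used
      # elsewhere in this file.
      size_files = [os.path.join(a, self._size_name)
                    for a in reversed(archive_dirs)]
      cmd = [self._supersize_path, 'diff'] + size_files
      self._diff = _RunCmd(cmd)[0]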
+    cmd = [self._supersize_path, 'console'] + size_files + ['--query', q]
+    self._diff = _RunCmd(cmd)[0].replace('{', '{{').replace('}', '}}')
+
+
 _ResourceSizesDiffResult = collections.namedtuple(
     'ResourceSizesDiffResult', ['section', 'value', 'units'])


 class ResourceSizesDiff(BaseDiff):
   _RESOURCE_SIZES_PATH = os.path.join(
       _SRC_ROOT, 'build', 'android', 'resource_sizes.py')

   def __init__(self, apk_name, slow_options=False):
     self._apk_name = apk_name
     self._slow_options = slow_options
     self._diff = None  # Set by |ProduceDiff()|
     super(ResourceSizesDiff, self).__init__('Resource Sizes Diff')

   def DetailedResults(self):
-    for section, value, units in self._diff:
-      yield '{:>+10,} {} {}'.format(value, units, section)
+    return ['{:>+10,} {} {}'.format(value, units, section)
+            for section, value, units in self._diff]

   def Summary(self):
     for s in self._diff:
       if 'normalized' in s.section:
         return 'Normalized APK size: {:+,} {}'.format(s.value, s.units)
     return ''

   def ProduceDiff(self, archive_dirs):
     chartjsons = self._RunResourceSizes(archive_dirs)
     diff = []
(...skipping 54 matching lines...)
   def apk_name(self):
     # Only works on apk targets that follow: my_great_apk naming convention.
     apk_name = ''.join(s.title() for s in self.target.split('_')[:-1]) + '.apk'
     return apk_name.replace('Webview', 'WebView')

   @property
   def apk_path(self):
     return os.path.join('apks', self.apk_name)

   @property
-  def main_lib_name(self):
+  def main_lib_path(self):
     # TODO(estevenson): Get this from GN instead of hardcoding.
     if self.IsLinux():
       return 'chrome'
     elif 'monochrome' in self.target:
       return 'lib.unstripped/libmonochrome.so'
     else:
       return 'lib.unstripped/libchrome.so'

   @property
-  def main_lib_path(self):
-    return os.path.join(self.output_directory, self.main_lib_name)
+  def abs_main_lib_path(self):
+    return os.path.join(self.output_directory, self.main_lib_path)

   @property
-  def map_file_name(self):
-    return self.main_lib_name + '.map.gz'
+  def map_file_path(self):
+    return self.main_lib_path + '.map.gz'
+
+  @property
+  def size_name(self):
+    return os.path.splitext(os.path.basename(self.main_lib_path))[0] + '.size'

   def _SetDefaults(self):
     has_goma_dir = os.path.exists(os.path.join(os.path.expanduser('~'), 'goma'))
     self.use_goma = self.use_goma or has_goma_dir
     self.max_load_average = (self.max_load_average or
                              str(multiprocessing.cpu_count()))
     if not self.max_jobs:
       self.max_jobs = '10000' if self.use_goma else '500'

     if os.path.exists(os.path.join(os.path.dirname(_SRC_ROOT), 'src-internal')):
(...skipping 40 matching lines...)

 class _BuildArchive(object):
   """Class for managing a directory with build results and build metadata."""
   def __init__(self, rev, base_archive_dir, build, subrepo):
     self.build = build
     self.dir = os.path.join(base_archive_dir, rev)
     metadata_path = os.path.join(self.dir, 'metadata.txt')
     self.rev = rev
     self.metadata = _GenerateMetadata([self], build, metadata_path, subrepo)

-  def ArchiveBuildResults(self, bs_dir):
+  def ArchiveBuildResults(self, supersize_path):
     """Save build artifacts necessary for diffing."""
     _Print('Saving build results to: {}', self.dir)
     _EnsureDirsExist(self.dir)
     build = self.build
-    self._ArchiveFile(build.main_lib_path)
-    lib_name_noext = os.path.splitext(os.path.basename(build.main_lib_path))[0]
-    size_path = os.path.join(self.dir, lib_name_noext + '.size')
-    supersize_path = os.path.join(bs_dir, 'supersize')
+    self._ArchiveFile(build.abs_main_lib_path)
     tool_prefix = _FindToolPrefix(build.output_directory)
+    size_path = os.path.join(self.dir, build.size_name)
     supersize_cmd = [supersize_path, 'archive', size_path, '--elf-file',
-                     build.main_lib_path, '--tool-prefix', tool_prefix,
+                     build.abs_main_lib_path, '--tool-prefix', tool_prefix,
                      '--output-directory', build.output_directory,
                      '--no-source-paths']
     if build.IsAndroid():
       supersize_cmd += ['--apk-file', build.abs_apk_path]
       self._ArchiveFile(build.abs_apk_path)

     _RunCmd(supersize_cmd)
     _WriteMetadata(self.metadata)

   def Exists(self):
(...skipping 27 matching lines...)
       _Print('Skipping diff for {} due to missing build archives.', diff_path)
       return

     metadata_path = self._DiffMetadataPath(archives)
     metadata = _GenerateMetadata(
         archives, self.build, metadata_path, self.subrepo)
     if _MetadataExists(metadata):
       _Print('Skipping diff for {} and {}. Matching diff already exists: {}',
              archives[0].rev, archives[1].rev, diff_path)
     else:
+      if os.path.exists(diff_path):
+        os.remove(diff_path)
       archive_dirs = [archives[0].dir, archives[1].dir]
       with open(diff_path, 'a') as diff_file:
         for d in self.diffs:
           d.RunDiff(diff_file, archive_dirs)
+      _Print('See detailed diff results here: {}.', diff_path)
       _WriteMetadata(metadata)

   def _CanDiff(self, archives):
     return all(a.Exists() for a in archives)

   def _DiffFilePath(self, archives):
     return os.path.join(self._DiffDir(archives), 'diff_results.txt')

   def _DiffMetadataPath(self, archives):
     return os.path.join(self._DiffDir(archives), 'metadata.txt')
(...skipping 154 matching lines...)
     _Print('Failure: please ensure working directory is clean.')
     sys.exit()


 def _Die(s, *args, **kwargs):
   _Print('Failure: ' + s, *args, **kwargs)
   _global_restore_checkout_func()
   sys.exit(1)


-def _DownloadBuildArtifacts(archive, build, bs_dir, depot_tools_path):
+def _DownloadBuildArtifacts(archive, build, supersize_path, depot_tools_path):
   """Download artifacts from arm32 chromium perf builder."""
   if depot_tools_path:
     gsutil_path = os.path.join(depot_tools_path, 'gsutil.py')
   else:
     gsutil_path = distutils.spawn.find_executable('gsutil.py')

   if not gsutil_path:
     _Die('gsutil.py not found, please provide path to depot_tools via '
          '--depot-tools-path or add it to your PATH')

   download_dir = tempfile.mkdtemp(dir=_SRC_ROOT)
   try:
-    _DownloadAndArchive(gsutil_path, archive, download_dir, build, bs_dir)
+    _DownloadAndArchive(
+        gsutil_path, archive, download_dir, build, supersize_path)
   finally:
     shutil.rmtree(download_dir)


-def _DownloadAndArchive(gsutil_path, archive, dl_dir, build, bs_dir):
+def _DownloadAndArchive(gsutil_path, archive, dl_dir, build, supersize_path):
   dl_file = 'full-build-linux_%s.zip' % archive.rev
   dl_url = 'gs://chrome-perf/Android Builder/%s' % dl_file
   dl_dst = os.path.join(dl_dir, dl_file)
   _Print('Downloading build artifacts for {}', archive.rev)
   # gsutil writes stdout and stderr to stderr, so pipe stdout and stderr to
   # sys.stdout.
   retcode = subprocess.call([gsutil_path, 'cp', dl_url, dl_dir],
                             stdout=sys.stdout, stderr=subprocess.STDOUT)
   if retcode:
     _Die('unexpected error while downloading {}. It may no longer exist on '
          'the server or it may not have been uploaded yet (check {}). '
          'Otherwise, you may not have the correct access permissions.',
          dl_url, _BUILDER_URL)

   # Files needed for supersize and resource_sizes. Paths relative to out dir.
-  to_extract = [build.main_lib_name, build.map_file_name, 'args.gn',
+  to_extract = [build.main_lib_path, build.map_file_path, 'args.gn',
                 'build_vars.txt', build.apk_path]
   extract_dir = os.path.join(os.path.splitext(dl_dst)[0], 'unzipped')
   # Storage bucket stores entire output directory including out/Release prefix.
   _Print('Extracting build artifacts')
   with zipfile.ZipFile(dl_dst, 'r') as z:
     _ExtractFiles(to_extract, _CLOUD_OUT_DIR, extract_dir, z)
   dl_out = os.path.join(extract_dir, _CLOUD_OUT_DIR)
   build.output_directory, output_directory = dl_out, build.output_directory
-  archive.ArchiveBuildResults(bs_dir)
+  archive.ArchiveBuildResults(supersize_path)
   build.output_directory = output_directory


 def _ExtractFiles(to_extract, prefix, dst, z):
   zip_infos = z.infolist()
   assert all(info.filename.startswith(prefix) for info in zip_infos), (
       'Storage bucket folder structure doesn\'t start with %s' % prefix)
   to_extract = [os.path.join(prefix, f) for f in to_extract]
   for f in to_extract:
     z.extract(f, path=dst)


 def _Print(s, *args, **kwargs):
   print s.format(*args, **kwargs)

 def _PrintAndWriteToFile(logfile, s):
-  """Print |s| to |logfile| and stdout."""
-  _Print(s)
-  logfile.write('%s\n' % s)
+  """Write and print |s|, throttling output if |s| is a large list."""
+  if isinstance(s, basestring):
+    _Print(s)
+  else:
+    for l in s[:_DIFF_DETAILS_LINES_THRESHOLD]:
+      _Print(l)
+    if len(s) > _DIFF_DETAILS_LINES_THRESHOLD:
+      _Print('\nOutput truncated, see {} for more.', logfile.name)
+    logfile.write('\n'.join(s))


 @contextmanager
-def _TmpBinarySizeDir():
-  """Recursively copy files to a temp dir and yield the tmp binary_size dir."""
+def _TmpCopyBinarySizeDir():
+  """Recursively copy files to a temp dir and yield supersize path."""
   # Needs to be at same level of nesting as the real //tools/binary_size
   # since supersize uses this to find d3 in //third_party.
   tmp_dir = tempfile.mkdtemp(dir=_SRC_ROOT)
   try:
     bs_dir = os.path.join(tmp_dir, 'binary_size')
     shutil.copytree(os.path.join(_SRC_ROOT, 'tools', 'binary_size'), bs_dir)
-    yield bs_dir
+    yield os.path.join(bs_dir, 'supersize')
   finally:
     shutil.rmtree(tmp_dir)


 def main():
   parser = argparse.ArgumentParser(
       description='Find the cause of APK size bloat.')
   parser.add_argument('--archive-directory',
                       default=_DEFAULT_ARCHIVE_DIR,
                       help='Where results are stored.')
(...skipping 63 matching lines...)
     if args.subrepo:
       parser.error('--subrepo doesn\'t work with --cloud')

   subrepo = args.subrepo or _SRC_ROOT
   _EnsureDirectoryClean(subrepo)
   _SetRestoreFunc(subrepo)
   revs = _GenerateRevList(args.rev,
                           args.reference_rev or args.rev + '^',
                           args.all,
                           subrepo)
-  diffs = []
-  if build.IsAndroid():
-    diffs += [
-        ResourceSizesDiff(
-            build.apk_name, slow_options=args.include_slow_options)
-    ]
-  diff_mngr = _DiffArchiveManager(
-      revs, args.archive_directory, diffs, build, subrepo)
-  consecutive_failures = 0
-  with _TmpBinarySizeDir() as bs_dir:
+  with _TmpCopyBinarySizeDir() as supersize_path:
+    diffs = [NativeDiff(build.size_name, supersize_path)]
+    if build.IsAndroid():
+      diffs += [
+          ResourceSizesDiff(
+              build.apk_name, slow_options=args.include_slow_options)
+      ]
+    diff_mngr = _DiffArchiveManager(
+        revs, args.archive_directory, diffs, build, subrepo)
+    consecutive_failures = 0
     for i, archive in enumerate(diff_mngr.IterArchives()):
       if archive.Exists():
         _Print('Found matching metadata for {}, skipping build step.',
                archive.rev)
       else:
         if build.IsCloud():
-          _DownloadBuildArtifacts(archive, build, bs_dir, args.depot_tools_path)
+          _DownloadBuildArtifacts(
+              archive, build, supersize_path, args.depot_tools_path)
         else:
           build_success = _SyncAndBuild(archive, build, subrepo)
           if not build_success:
             consecutive_failures += 1
             if consecutive_failures > _ALLOWED_CONSECUTIVE_FAILURES:
               _Die('{} builds failed in a row, last failure was {}.',
                    consecutive_failures, archive.rev)
           else:
-            archive.ArchiveBuildResults(bs_dir)
+            archive.ArchiveBuildResults(supersize_path)
             consecutive_failures = 0

       if i != 0:
         diff_mngr.MaybeDiff(i - 1, i)

   _global_restore_checkout_func()

 if __name__ == '__main__':
   sys.exit(main())
