| OLD | NEW |
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright 2017 The Chromium Authors. All rights reserved. | 2 # Copyright 2017 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 """Tool for finding the cause of APK bloat. | 6 """Tool for finding the cause of APK bloat. |
| 7 | 7 |
| 8 Run diagnose_apk_bloat.py -h for detailed usage help. | 8 Run diagnose_apk_bloat.py -h for detailed usage help. |
| 9 """ | 9 """ |
| 10 | 10 |
| 11 import argparse | 11 import argparse |
| 12 import collections | 12 import collections |
| 13 import distutils.spawn | 13 import distutils.spawn |
| 14 import itertools | 14 import itertools |
| 15 import json | 15 import json |
| 16 import multiprocessing | 16 import multiprocessing |
| 17 import os | 17 import os |
| 18 import shutil | 18 import shutil |
| 19 import subprocess | 19 import subprocess |
| 20 import sys | 20 import sys |
| 21 import tempfile | 21 import tempfile |
| 22 import zipfile | 22 import zipfile |
| 23 | 23 |
| 24 _COMMIT_COUNT_WARN_THRESHOLD = 15 |
| 25 _ALLOWED_CONSECUTIVE_FAILURES = 2 |
| 24 _BUILDER_URL = \ | 26 _BUILDER_URL = \ |
| 25 'https://build.chromium.org/p/chromium.perf/builders/Android%20Builder' | 27 'https://build.chromium.org/p/chromium.perf/builders/Android%20Builder' |
| 26 _CLOUD_OUT_DIR = os.path.join('out', 'Release') | 28 _CLOUD_OUT_DIR = os.path.join('out', 'Release') |
| 27 _SRC_ROOT = os.path.abspath( | 29 _SRC_ROOT = os.path.abspath( |
| 28 os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) | 30 os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) |
| 29 _DEFAULT_ARCHIVE_DIR = os.path.join(_SRC_ROOT, 'binary-size-bloat') | 31 _DEFAULT_ARCHIVE_DIR = os.path.join(_SRC_ROOT, 'binary-size-bloat') |
| 30 _DEFAULT_OUT_DIR = os.path.join(_SRC_ROOT, 'out', 'diagnose-apk-bloat') | 32 _DEFAULT_OUT_DIR = os.path.join(_SRC_ROOT, 'out', 'diagnose-apk-bloat') |
| 31 _DEFAULT_TARGET = 'monochrome_public_apk' | 33 _DEFAULT_TARGET = 'monochrome_public_apk' |
| 32 | 34 |
| 33 # Global variable for storing the initial branch before the script was launched | 35 |
| 34 # so that it doesn't need to be passed everywhere in case we fail and exit. | 36 _global_restore_checkout_func = None |
| 35 _initial_branch = None | 37 |
| 38 |
| 39 def _RestoreFunc(subrepo): |
| 40 branch = _GitCmd(['rev-parse', '--abbrev-ref', 'HEAD'], subrepo) |
| 41 return lambda: _GitCmd(['checkout', branch], subrepo) |
| 36 | 42 |
| 37 | 43 |
| 38 class BaseDiff(object): | 44 class BaseDiff(object): |
| 39 """Base class capturing binary size diffs.""" | 45 """Base class capturing binary size diffs.""" |
| 40 def __init__(self, name): | 46 def __init__(self, name): |
| 41 self.name = name | 47 self.name = name |
| 42 self.banner = '\n' + '*' * 30 + name + '*' * 30 | 48 self.banner = '\n' + '*' * 30 + name + '*' * 30 |
| 43 self.RunDiff() | |
| 44 | 49 |
| 45 def AppendResults(self, logfile): | 50 def AppendResults(self, logfile): |
| 46 """Print and write diff results to an open |logfile|.""" | 51 """Print and write diff results to an open |logfile|.""" |
| 47 _PrintAndWriteToFile(logfile, self.banner) | 52 _PrintAndWriteToFile(logfile, self.banner) |
| 48 _PrintAndWriteToFile(logfile, 'Summary:') | 53 _PrintAndWriteToFile(logfile, 'Summary:') |
| 49 _PrintAndWriteToFile(logfile, self.Summary()) | 54 _PrintAndWriteToFile(logfile, self.Summary()) |
| 50 _PrintAndWriteToFile(logfile, '\nDetails:') | 55 _PrintAndWriteToFile(logfile, '\nDetails:') |
| 51 for l in self.DetailedResults(): | 56 for l in self.DetailedResults(): |
| 52 _PrintAndWriteToFile(logfile, l) | 57 _PrintAndWriteToFile(logfile, l) |
| 53 | 58 |
| 54 def Summary(self): | 59 def Summary(self): |
| 55 """A short description that summarizes the source of binary size bloat.""" | 60 """A short description that summarizes the source of binary size bloat.""" |
| 56 raise NotImplementedError() | 61 raise NotImplementedError() |
| 57 | 62 |
| 58 def DetailedResults(self): | 63 def DetailedResults(self): |
| 59 """An iterable description of the cause of binary size bloat.""" | 64 """An iterable description of the cause of binary size bloat.""" |
| 60 raise NotImplementedError() | 65 raise NotImplementedError() |
| 61 | 66 |
| 62 def ProduceDiff(self): | 67 def ProduceDiff(self, archive_dirs): |
| 63 """Prepare a binary size diff with ready to print results.""" | 68 """Prepare a binary size diff with ready to print results.""" |
| 64 raise NotImplementedError() | 69 raise NotImplementedError() |
| 65 | 70 |
| 66 def RunDiff(self): | 71 def RunDiff(self, logfile, archive_dirs): |
| 67 _Print('Creating {}', self.name) | 72 _Print('Creating {}', self.name) |
| 68 self.ProduceDiff() | 73 self.ProduceDiff(archive_dirs) |
| 74 self.AppendResults(logfile) |
| 69 | 75 |
| 70 | 76 |
| 71 _ResourceSizesDiffResult = collections.namedtuple( | 77 _ResourceSizesDiffResult = collections.namedtuple( |
| 72 'ResourceSizesDiffResult', ['section', 'value', 'units']) | 78 'ResourceSizesDiffResult', ['section', 'value', 'units']) |
| 73 | 79 |
| 74 | 80 |
| 75 class ResourceSizesDiff(BaseDiff): | 81 class ResourceSizesDiff(BaseDiff): |
| 76 _RESOURCE_SIZES_PATH = os.path.join( | 82 _RESOURCE_SIZES_PATH = os.path.join( |
| 77 _SRC_ROOT, 'build', 'android', 'resource_sizes.py') | 83 _SRC_ROOT, 'build', 'android', 'resource_sizes.py') |
| 78 | 84 |
| 79 def __init__(self, archive_dirs, apk_name, slow_options=False): | 85 def __init__(self, apk_name, slow_options=False): |
| 80 self._archive_dirs = archive_dirs | |
| 81 self._apk_name = apk_name | 86 self._apk_name = apk_name |
| 82 self._slow_options = slow_options | 87 self._slow_options = slow_options |
| 83 self._diff = None # Set by |ProduceDiff()| | 88 self._diff = None # Set by |ProduceDiff()| |
| 84 super(ResourceSizesDiff, self).__init__('Resource Sizes Diff') | 89 super(ResourceSizesDiff, self).__init__('Resource Sizes Diff') |
| 85 | 90 |
| 86 def DetailedResults(self): | 91 def DetailedResults(self): |
| 87 for section, value, units in self._diff: | 92 for section, value, units in self._diff: |
| 88 yield '{:>+10,} {} {}'.format(value, units, section) | 93 yield '{:>+10,} {} {}'.format(value, units, section) |
| 89 | 94 |
| 90 def Summary(self): | 95 def Summary(self): |
| 91 for s in self._diff: | 96 for s in self._diff: |
| 92 if 'normalized' in s.section: | 97 if 'normalized' in s.section: |
| 93 return 'Normalized APK size: {:+,} {}'.format(s.value, s.units) | 98 return 'Normalized APK size: {:+,} {}'.format(s.value, s.units) |
| 94 return '' | 99 return '' |
| 95 | 100 |
| 96 def ProduceDiff(self): | 101 def ProduceDiff(self, archive_dirs): |
| 97 chartjsons = self._RunResourceSizes() | 102 chartjsons = self._RunResourceSizes(archive_dirs) |
| 98 diff = [] | 103 diff = [] |
| 99 with_patch = chartjsons[0]['charts'] | 104 with_patch = chartjsons[0]['charts'] |
| 100 without_patch = chartjsons[1]['charts'] | 105 without_patch = chartjsons[1]['charts'] |
| 101 for section, section_dict in with_patch.iteritems(): | 106 for section, section_dict in with_patch.iteritems(): |
| 102 for subsection, v in section_dict.iteritems(): | 107 for subsection, v in section_dict.iteritems(): |
| 103 # Ignore entries when resource_sizes.py chartjson format has changed. | 108 # Ignore entries when resource_sizes.py chartjson format has changed. |
| 104 if (section not in without_patch or | 109 if (section not in without_patch or |
| 105 subsection not in without_patch[section] or | 110 subsection not in without_patch[section] or |
| 106 v['units'] != without_patch[section][subsection]['units']): | 111 v['units'] != without_patch[section][subsection]['units']): |
| 107 _Print('Found differing dict structures for resource_sizes.py, ' | 112 _Print('Found differing dict structures for resource_sizes.py, ' |
| 108 'skipping {} {}', section, subsection) | 113 'skipping {} {}', section, subsection) |
| 109 else: | 114 else: |
| 110 diff.append( | 115 diff.append( |
| 111 _ResourceSizesDiffResult( | 116 _ResourceSizesDiffResult( |
| 112 '%s %s' % (section, subsection), | 117 '%s %s' % (section, subsection), |
| 113 v['value'] - without_patch[section][subsection]['value'], | 118 v['value'] - without_patch[section][subsection]['value'], |
| 114 v['units'])) | 119 v['units'])) |
| 115 self._diff = sorted(diff, key=lambda x: abs(x.value), reverse=True) | 120 self._diff = sorted(diff, key=lambda x: abs(x.value), reverse=True) |
| 116 | 121 |
| 117 def _RunResourceSizes(self): | 122 def _RunResourceSizes(self, archive_dirs): |
| 118 chartjsons = [] | 123 chartjsons = [] |
| 119 for archive_dir in self._archive_dirs: | 124 for archive_dir in archive_dirs: |
| 120 apk_path = os.path.join(archive_dir, self._apk_name) | 125 apk_path = os.path.join(archive_dir, self._apk_name) |
| 121 chartjson_file = os.path.join(archive_dir, 'results-chart.json') | 126 chartjson_file = os.path.join(archive_dir, 'results-chart.json') |
| 122 cmd = [self._RESOURCE_SIZES_PATH, apk_path,'--output-dir', archive_dir, | 127 cmd = [self._RESOURCE_SIZES_PATH, apk_path,'--output-dir', archive_dir, |
| 123 '--no-output-dir', '--chartjson'] | 128 '--no-output-dir', '--chartjson'] |
| 124 if self._slow_options: | 129 if self._slow_options: |
| 125 cmd += ['--estimate-patch-size'] | 130 cmd += ['--estimate-patch-size'] |
| 126 else: | 131 else: |
| 127 cmd += ['--no-static-initializer-check'] | 132 cmd += ['--no-static-initializer-check'] |
| 128 _RunCmd(cmd) | 133 _RunCmd(cmd) |
| 129 with open(chartjson_file) as f: | 134 with open(chartjson_file) as f: |
| 130 chartjsons.append(json.load(f)) | 135 chartjsons.append(json.load(f)) |
| 131 return chartjsons | 136 return chartjsons |
| 132 | 137 |
| 133 | 138 |
| 134 class _BuildHelper(object): | 139 class _BuildHelper(object): |
| 135 """Helper class for generating and building targets.""" | 140 """Helper class for generating and building targets.""" |
| 136 def __init__(self, args): | 141 def __init__(self, args): |
| 142 self.cloud = args.cloud |
| 137 self.enable_chrome_android_internal = args.enable_chrome_android_internal | 143 self.enable_chrome_android_internal = args.enable_chrome_android_internal |
| 138 self.extra_gn_args_str = '' | 144 self.extra_gn_args_str = '' |
| 139 self.max_jobs = args.max_jobs | 145 self.max_jobs = args.max_jobs |
| 140 self.max_load_average = args.max_load_average | 146 self.max_load_average = args.max_load_average |
| 141 self.output_directory = args.output_directory | 147 self.output_directory = args.output_directory |
| 142 self.target = args.target | 148 self.target = args.target |
| 143 self.target_os = args.target_os | 149 self.target_os = args.target_os |
| 144 self.use_goma = args.use_goma | 150 self.use_goma = args.use_goma |
| 145 self._SetDefaults() | 151 self._SetDefaults() |
| 146 | 152 |
| (...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 200 return ['gn', 'gen', self.output_directory, '--args=%s' % gn_args] | 206 return ['gn', 'gen', self.output_directory, '--args=%s' % gn_args] |
| 201 | 207 |
| 202 def _GenNinjaCmd(self): | 208 def _GenNinjaCmd(self): |
| 203 cmd = ['ninja', '-C', self.output_directory] | 209 cmd = ['ninja', '-C', self.output_directory] |
| 204 cmd += ['-j', self.max_jobs] if self.max_jobs else [] | 210 cmd += ['-j', self.max_jobs] if self.max_jobs else [] |
| 205 cmd += ['-l', self.max_load_average] if self.max_load_average else [] | 211 cmd += ['-l', self.max_load_average] if self.max_load_average else [] |
| 206 cmd += [self.target] | 212 cmd += [self.target] |
| 207 return cmd | 213 return cmd |
| 208 | 214 |
| 209 def Run(self): | 215 def Run(self): |
| 216 """Run GN gen/ninja build and return the process returncode.""" |
| 210 _Print('Building: {}.', self.target) | 217 _Print('Building: {}.', self.target) |
| 211 _RunCmd(self._GenGnCmd(), print_stdout=True) | 218 retcode = _RunCmd( |
| 212 _RunCmd(self._GenNinjaCmd(), print_stdout=True) | 219 self._GenGnCmd(), print_stdout=True, exit_on_failure=False)[1] |
| 220 if retcode: |
| 221 return retcode |
| 222 return _RunCmd( |
| 223 self._GenNinjaCmd(), print_stdout=True, exit_on_failure=False)[1] |
| 213 | 224 |
| 214 def IsAndroid(self): | 225 def IsAndroid(self): |
| 215 return self.target_os == 'android' | 226 return self.target_os == 'android' |
| 216 | 227 |
| 217 def IsLinux(self): | 228 def IsLinux(self): |
| 218 return self.target_os == 'linux' | 229 return self.target_os == 'linux' |
| 219 | 230 |
| 231 def IsCloud(self): |
| 232 return self.cloud |
| 220 | 233 |
| 221 def _RunCmd(cmd, print_stdout=False): | 234 |
| 235 class _BuildArchive(object): |
| 236 """Class for managing a directory with build results and build metadata.""" |
| 237 def __init__(self, rev, base_archive_dir, build, subrepo): |
| 238 self.build = build |
| 239 self.dir = os.path.join(base_archive_dir, rev) |
| 240 metadata_path = os.path.join(self.dir, 'metadata.txt') |
| 241 self.rev = rev |
| 242 self.metadata = _GenerateMetadata([self], build, metadata_path, subrepo) |
| 243 |
| 244 def ArchiveBuildResults(self): |
| 245 """Save build artifacts necessary for diffing.""" |
| 246 _Print('Saving build results to: {}', self.dir) |
| 247 _EnsureDirsExist(self.dir) |
| 248 build = self.build |
| 249 self._ArchiveFile(build.main_lib_path) |
| 250 lib_name_noext = os.path.splitext(os.path.basename(build.main_lib_path))[0] |
| 251 size_path = os.path.join(self.dir, lib_name_noext + '.size') |
| 252 supersize_path = os.path.join(_SRC_ROOT, 'tools/binary_size/supersize') |
| 253 tool_prefix = _FindToolPrefix(build.output_directory) |
| 254 supersize_cmd = [supersize_path, 'archive', size_path, '--elf-file', |
| 255 build.main_lib_path, '--tool-prefix', tool_prefix, |
| 256 '--output-directory', build.output_directory, |
| 257 '--no-source-paths'] |
| 258 if build.IsAndroid(): |
| 259 supersize_cmd += ['--apk-file', build.abs_apk_path] |
| 260 self._ArchiveFile(build.abs_apk_path) |
| 261 |
| 262 _RunCmd(supersize_cmd) |
| 263 _WriteMetadata(self.metadata) |
| 264 |
| 265 def Exists(self): |
| 266 return _MetadataExists(self.metadata) |
| 267 |
| 268 def _ArchiveFile(self, filename): |
| 269 if not os.path.exists(filename): |
| 270 _Die('missing expected file: {}', filename) |
| 271 shutil.copy(filename, self.dir) |
| 272 |
| 273 |
| 274 class _DiffArchiveManager(object): |
| 275 """Class for maintaining BuildArchives and their related diff artifacts.""" |
| 276 def __init__(self, revs, archive_dir, diffs, build, subrepo): |
| 277 self.archive_dir = archive_dir |
| 278 self.build = build |
| 279 self.build_archives = [_BuildArchive(rev, archive_dir, build, subrepo) |
| 280 for rev in revs] |
| 281 self.diffs = diffs |
| 282 self.subrepo = subrepo |
| 283 |
| 284 def IterArchives(self): |
| 285 return iter(self.build_archives) |
| 286 |
| 287 def MaybeDiff(self, first_id, second_id): |
| 288 """Perform diffs given two build archives.""" |
| 289 archives = [ |
| 290 self.build_archives[first_id], self.build_archives[second_id]] |
| 291 diff_path = self._DiffFilePath(archives) |
| 292 if not self._CanDiff(archives): |
| 293 _Print('Skipping diff for {} due to missing build archives.', diff_path) |
| 294 return |
| 295 |
| 296 metadata_path = self._DiffMetadataPath(archives) |
| 297 metadata = _GenerateMetadata( |
| 298 archives, self.build, metadata_path, self.subrepo) |
| 299 if _MetadataExists(metadata): |
| 300 _Print('Skipping diff for {} and {}. Matching diff already exists: {}', |
| 301 archives[0].rev, archives[1].rev, diff_path) |
| 302 else: |
| 303 archive_dirs = [archives[0].dir, archives[1].dir] |
| 304 with open(diff_path, 'a') as diff_file: |
| 305 for d in self.diffs: |
| 306 d.RunDiff(diff_file, archive_dirs) |
| 307 _WriteMetadata(metadata) |
| 308 |
| 309 def _CanDiff(self, archives): |
| 310 return all(a.Exists() for a in archives) |
| 311 |
| 312 def _DiffFilePath(self, archives): |
| 313 return os.path.join(self._DiffDir(archives), 'diff_results.txt') |
| 314 |
| 315 def _DiffMetadataPath(self, archives): |
| 316 return os.path.join(self._DiffDir(archives), 'metadata.txt') |
| 317 |
| 318 def _DiffDir(self, archives): |
| 319 diff_path = os.path.join( |
| 320 self.archive_dir, 'diffs', '_'.join(a.rev for a in archives)) |
| 321 _EnsureDirsExist(diff_path) |
| 322 return diff_path |
| 323 |
| 324 |
| 325 def _EnsureDirsExist(path): |
| 326 if not os.path.exists(path): |
| 327 os.makedirs(path) |
| 328 |
| 329 |
| 330 def _GenerateMetadata(archives, build, path, subrepo): |
| 331 return { |
| 332 'revs': [a.rev for a in archives], |
| 333 'archive_dirs': [a.dir for a in archives], |
| 334 'target': build.target, |
| 335 'target_os': build.target_os, |
| 336 'is_cloud': build.IsCloud(), |
| 337 'subrepo': subrepo, |
| 338 'path': path, |
| 339 'gn_args': { |
| 340 'extra_gn_args_str': build.extra_gn_args_str, |
| 341 'enable_chrome_android_internal': build.enable_chrome_android_internal, |
| 342 } |
| 343 } |
| 344 |
| 345 |
| 346 def _WriteMetadata(metadata): |
| 347 with open(metadata['path'], 'w') as f: |
| 348 json.dump(metadata, f) |
| 349 |
| 350 |
| 351 def _MetadataExists(metadata): |
| 352 old_metadata = {} |
| 353 path = metadata['path'] |
| 354 if os.path.exists(path): |
| 355 with open(path, 'r') as f: |
| 356 old_metadata = json.load(f) |
| 357 ret = len(metadata) == len(old_metadata) |
| 358 ret &= all(v == old_metadata[k] |
| 359 for k, v in metadata.items() if k != 'gn_args') |
| 360 # GN args don't matter when artifacts are downloaded. For local builds |
| 361 # they need to be the same so that diffs are accurate (differing GN args |
| 362 # will change the final APK/native library). |
| 363 if not metadata['is_cloud']: |
| 364 ret &= metadata['gn_args'] == old_metadata['gn_args'] |
| 365 return ret |
| 366 return False |
| 367 |
| 368 |
| 369 def _RunCmd(cmd, print_stdout=False, exit_on_failure=True): |
| 222 """Convenience function for running commands. | 370 """Convenience function for running commands. |
| 223 | 371 |
| 224 Args: | 372 Args: |
| 225 cmd: the command to run. | 373 cmd: the command to run. |
| 226 print_stdout: if this is True, then the stdout of the process will be | 374 print_stdout: if this is True, then the stdout of the process will be |
| 227 printed, otherwise stdout will be returned. | 375 printed instead of returned. |
| 376 exit_on_failure: die if an error occurs when this is True. |
| 228 | 377 |
| 229 Returns: | 378 Returns: |
| 230 Command stdout if |print_stdout| is False otherwise ''. | 379 Tuple of (process stdout, process returncode). |
| 231 """ | 380 """ |
| 232 cmd_str = ' '.join(c for c in cmd) | 381 cmd_str = ' '.join(c for c in cmd) |
| 233 _Print('Running: {}', cmd_str) | 382 _Print('Running: {}', cmd_str) |
| 234 if print_stdout: | 383 proc_stdout = sys.stdout if print_stdout else subprocess.PIPE |
| 235 proc_stdout = sys.stdout | |
| 236 else: | |
| 237 proc_stdout = subprocess.PIPE | |
| 238 | 384 |
| 239 proc = subprocess.Popen(cmd, stdout=proc_stdout, stderr=subprocess.PIPE) | 385 proc = subprocess.Popen(cmd, stdout=proc_stdout, stderr=subprocess.PIPE) |
| 240 stdout, stderr = proc.communicate() | 386 stdout, stderr = proc.communicate() |
| 241 | 387 |
| 242 if proc.returncode: | 388 if proc.returncode and exit_on_failure: |
| 243 _Die('command failed: {}\nstderr:\n{}', cmd_str, stderr) | 389 _Die('command failed: {}\nstderr:\n{}', cmd_str, stderr) |
| 244 | 390 |
| 245 return stdout.strip() if stdout else '' | 391 stdout = stdout.strip() if stdout else '' |
| 392 return stdout, proc.returncode |
| 246 | 393 |
| 247 | 394 |
| 248 def _GitCmd(args): | 395 def _GitCmd(args, subrepo): |
| 249 return _RunCmd(['git', '-C', _SRC_ROOT] + args) | 396 return _RunCmd(['git', '-C', subrepo] + args)[0] |
| 250 | 397 |
| 251 | 398 |
| 252 def _GclientSyncCmd(rev): | 399 def _GclientSyncCmd(rev, subrepo): |
| 253 cwd = os.getcwd() | 400 cwd = os.getcwd() |
| 254 os.chdir(_SRC_ROOT) | 401 os.chdir(subrepo) |
| 255 _RunCmd(['gclient', 'sync', '-r', 'src@' + rev], print_stdout=True) | 402 _RunCmd(['gclient', 'sync', '-r', 'src@' + rev], print_stdout=True) |
| 256 os.chdir(cwd) | 403 os.chdir(cwd) |
| 257 | 404 |
| 258 | 405 |
| 259 def _ArchiveBuildResult(archive_dir, build): | |
| 260 """Save build artifacts necessary for diffing. | |
| 261 | |
| 262 Expects |build.output_directory| to be correct. | |
| 263 """ | |
| 264 _Print('Saving build results to: {}', archive_dir) | |
| 265 if not os.path.exists(archive_dir): | |
| 266 os.makedirs(archive_dir) | |
| 267 | |
| 268 def ArchiveFile(filename): | |
| 269 if not os.path.exists(filename): | |
| 270 _Die('missing expected file: {}', filename) | |
| 271 shutil.copy(filename, archive_dir) | |
| 272 | |
| 273 ArchiveFile(build.main_lib_path) | |
| 274 lib_name_noext = os.path.splitext(os.path.basename(build.main_lib_path))[0] | |
| 275 size_path = os.path.join(archive_dir, lib_name_noext + '.size') | |
| 276 supersize_path = os.path.join(_SRC_ROOT, 'tools/binary_size/supersize') | |
| 277 tool_prefix = _FindToolPrefix(build.output_directory) | |
| 278 supersize_cmd = [supersize_path, 'archive', size_path, '--elf-file', | |
| 279 build.main_lib_path, '--tool-prefix', tool_prefix, | |
| 280 '--output-directory', build.output_directory, | |
| 281 '--no-source-paths'] | |
| 282 if build.IsAndroid(): | |
| 283 supersize_cmd += ['--apk-file', build.abs_apk_path] | |
| 284 ArchiveFile(build.abs_apk_path) | |
| 285 | |
| 286 _RunCmd(supersize_cmd) | |
| 287 | |
| 288 | |
| 289 def _FindToolPrefix(output_directory): | 406 def _FindToolPrefix(output_directory): |
| 290 build_vars_path = os.path.join(output_directory, 'build_vars.txt') | 407 build_vars_path = os.path.join(output_directory, 'build_vars.txt') |
| 291 if os.path.exists(build_vars_path): | 408 if os.path.exists(build_vars_path): |
| 292 with open(build_vars_path) as f: | 409 with open(build_vars_path) as f: |
| 293 build_vars = dict(l.rstrip().split('=', 1) for l in f if '=' in l) | 410 build_vars = dict(l.rstrip().split('=', 1) for l in f if '=' in l) |
| 294 # Tool prefix is relative to output dir, rebase to source root. | 411 # Tool prefix is relative to output dir, rebase to source root. |
| 295 tool_prefix = build_vars['android_tool_prefix'] | 412 tool_prefix = build_vars['android_tool_prefix'] |
| 296 while os.path.sep in tool_prefix: | 413 while os.path.sep in tool_prefix: |
| 297 rebased_tool_prefix = os.path.join(_SRC_ROOT, tool_prefix) | 414 rebased_tool_prefix = os.path.join(_SRC_ROOT, tool_prefix) |
| 298 if os.path.exists(rebased_tool_prefix + 'readelf'): | 415 if os.path.exists(rebased_tool_prefix + 'readelf'): |
| 299 return rebased_tool_prefix | 416 return rebased_tool_prefix |
| 300 tool_prefix = tool_prefix[tool_prefix.find(os.path.sep) + 1:] | 417 tool_prefix = tool_prefix[tool_prefix.find(os.path.sep) + 1:] |
| 301 return '' | 418 return '' |
| 302 | 419 |
| 303 | 420 |
| 304 def _SyncAndBuild(revs, archive_dirs, build): | 421 def _SyncAndBuild(archive, build, subrepo): |
| 305 # Move to a detached state since gclient sync doesn't work with local commits | 422 # Simply do a checkout if subrepo is used. |
| 306 # on a branch. | 423 if subrepo != _SRC_ROOT: |
| 307 _GitCmd(['checkout', '--detach']) | 424 _GitCmd(['checkout', archive.rev], subrepo) |
| 308 for rev, archive_dir in itertools.izip(revs, archive_dirs): | 425 else: |
| 309 _GclientSyncCmd(rev) | 426 # Move to a detached state since gclient sync doesn't work with local |
| 310 build.Run() | 427 # commits on a branch. |
| 311 _ArchiveBuildResult(archive_dir, build) | 428 _GitCmd(['checkout', '--detach'], subrepo) |
| 429 _GclientSyncCmd(archive.rev, subrepo) |
| 430 retcode = build.Run() |
| 431 return retcode == 0 |
| 312 | 432 |
| 313 | 433 |
| 314 def _NormalizeRev(rev): | 434 def _GenerateRevList(with_patch, without_patch, all_in_range, subrepo): |
| 315 """Use actual revs instead of HEAD, HEAD^, etc.""" | 435 """Normalize and optionally generate a list of commits in the given range. |
| 316 return _GitCmd(['rev-parse', rev]) | 436 |
| 437 Returns a list of revisions ordered from newest to oldest. |
| 438 """ |
| 439 cmd = ['git', '-C', subrepo, 'merge-base', '--is-ancestor', without_patch, |
| 440 with_patch] |
| 441 _, retcode = _RunCmd(cmd, exit_on_failure=False) |
| 442 assert not retcode and with_patch != without_patch, ( |
| 443 'Invalid revision arguments, rev_without_patch (%s) is newer than ' |
| 444 'rev_with_patch (%s)' % (without_patch, with_patch)) |
| 445 |
| 446 rev_seq = '%s^..%s' % (without_patch, with_patch) |
| 447 stdout = _GitCmd(['rev-list', rev_seq], subrepo) |
| 448 all_revs = stdout.splitlines() |
| 449 if all_in_range: |
| 450 revs = all_revs |
| 451 else: |
| 452 revs = [all_revs[0], all_revs[-1]] |
| 453 _VerifyUserAckCommitCount(len(revs)) |
| 454 return revs |
| 317 | 455 |
| 318 | 456 |
| 319 def _EnsureDirectoryClean(): | 457 def _VerifyUserAckCommitCount(count): |
| 458 if count >= _COMMIT_COUNT_WARN_THRESHOLD: |
| 459 _Print('You\'ve provided a commit range that contains {} commits, do you ' |
| 460 'want to proceed? [y/n]', count) |
| 461 if raw_input('> ').lower() != 'y': |
| 462 _global_restore_checkout_func() |
| 463 sys.exit(1) |
| 464 |
| 465 |
| 466 def _EnsureDirectoryClean(subrepo): |
| 320 _Print('Checking source directory') | 467 _Print('Checking source directory') |
| 321 stdout = _GitCmd(['status', '--porcelain']) | 468 stdout = _GitCmd(['status', '--porcelain'], subrepo) |
| 322 # Ignore untracked files. | 469 # Ignore untracked files. |
| 323 if stdout and stdout[:2] != '??': | 470 if stdout and stdout[:2] != '??': |
| 324 _Die('please ensure working directory is clean.') | 471 _Print('Failure: please ensure working directory is clean.') |
| 325 | 472 sys.exit() |
| 326 | |
| 327 def _SetInitialBranch(): | |
| 328 global _initial_branch | |
| 329 _initial_branch = _GitCmd(['rev-parse', '--abbrev-ref', 'HEAD']) | |
| 330 | |
| 331 | |
| 332 def _RestoreInitialBranch(): | |
| 333 if _initial_branch: | |
| 334 _GitCmd(['checkout', _initial_branch]) | |
| 335 | 473 |
| 336 | 474 |
| 337 def _Die(s, *args, **kwargs): | 475 def _Die(s, *args, **kwargs): |
| 338 _Print('Failure: ' + s, *args, **kwargs) | 476 _Print('Failure: ' + s, *args, **kwargs) |
| 339 _RestoreInitialBranch() | 477 _global_restore_checkout_func() |
| 340 sys.exit(1) | 478 sys.exit(1) |
| 341 | 479 |
| 342 | 480 |
| 343 def _DownloadBuildArtifacts(revs, archive_dirs, build, depot_tools_path=None): | 481 def _DownloadBuildArtifacts(archive, build, depot_tools_path=None): |
| 344 """Download artifacts from arm32 chromium perf builder.""" | 482 """Download artifacts from arm32 chromium perf builder.""" |
| 345 if depot_tools_path: | 483 if depot_tools_path: |
| 346 gsutil_path = os.path.join(depot_tools_path, 'gsutil.py') | 484 gsutil_path = os.path.join(depot_tools_path, 'gsutil.py') |
| 347 else: | 485 else: |
| 348 gsutil_path = distutils.spawn.find_executable('gsutil.py') | 486 gsutil_path = distutils.spawn.find_executable('gsutil.py') |
| 349 | 487 |
| 350 if not gsutil_path: | 488 if not gsutil_path: |
| 351 _Die('gsutil.py not found, please provide path to depot_tools via ' | 489 _Die('gsutil.py not found, please provide path to depot_tools via ' |
| 352 '--depot-tools-path or add it to your PATH') | 490 '--depot-tools-path or add it to your PATH') |
| 353 | 491 |
| 354 download_dir = tempfile.mkdtemp(dir=_SRC_ROOT) | 492 download_dir = tempfile.mkdtemp(dir=_SRC_ROOT) |
| 355 try: | 493 try: |
| 356 for rev, archive_dir in itertools.izip(revs, archive_dirs): | 494 _DownloadAndArchive(gsutil_path, archive, download_dir, build) |
| 357 _DownloadAndArchive(gsutil_path, rev, archive_dir, download_dir, build) | |
| 358 finally: | 495 finally: |
| 359 shutil.rmtree(download_dir) | 496 shutil.rmtree(download_dir) |
| 360 | 497 |
| 361 | 498 |
| 362 def _DownloadAndArchive(gsutil_path, rev, archive_dir, dl_dir, build): | 499 def _DownloadAndArchive(gsutil_path, archive, dl_dir, build): |
| 363 dl_file = 'full-build-linux_%s.zip' % rev | 500 dl_file = 'full-build-linux_%s.zip' % archive.rev |
| 364 dl_url = 'gs://chrome-perf/Android Builder/%s' % dl_file | 501 dl_url = 'gs://chrome-perf/Android Builder/%s' % dl_file |
| 365 dl_dst = os.path.join(dl_dir, dl_file) | 502 dl_dst = os.path.join(dl_dir, dl_file) |
| 366 _Print('Downloading build artifacts for {}', rev) | 503 _Print('Downloading build artifacts for {}', archive.rev) |
| 367 # gsutil writes stdout and stderr to stderr, so pipe stdout and stderr to | 504 # gsutil writes stdout and stderr to stderr, so pipe stdout and stderr to |
| 368 # sys.stdout. | 505 # sys.stdout. |
| 369 retcode = subprocess.call([gsutil_path, 'cp', dl_url, dl_dir], | 506 retcode = subprocess.call([gsutil_path, 'cp', dl_url, dl_dir], |
| 370 stdout=sys.stdout, stderr=subprocess.STDOUT) | 507 stdout=sys.stdout, stderr=subprocess.STDOUT) |
| 371 if retcode: | 508 if retcode: |
| 372 _Die('unexpected error while downloading {}. It may no longer exist on ' | 509 _Die('unexpected error while downloading {}. It may no longer exist on ' |
| 373 'the server or it may not have been uploaded yet (check {}). ' | 510 'the server or it may not have been uploaded yet (check {}). ' |
| 374 'Otherwise, you may not have the correct access permissions.', | 511 'Otherwise, you may not have the correct access permissions.', |
| 375 dl_url, _BUILDER_URL) | 512 dl_url, _BUILDER_URL) |
| 376 | 513 |
| 377 # Files needed for supersize and resource_sizes. Paths relative to out dir. | 514 # Files needed for supersize and resource_sizes. Paths relative to out dir. |
| 378 to_extract = [build.main_lib_name, build.map_file_name, 'args.gn', | 515 to_extract = [build.main_lib_name, build.map_file_name, 'args.gn', |
| 379 'build_vars.txt', build.apk_path] | 516 'build_vars.txt', build.apk_path] |
| 380 extract_dir = os.path.join(os.path.splitext(dl_dst)[0], 'unzipped') | 517 extract_dir = os.path.join(os.path.splitext(dl_dst)[0], 'unzipped') |
| 381 # Storage bucket stores entire output directory including out/Release prefix. | 518 # Storage bucket stores entire output directory including out/Release prefix. |
| 382 _Print('Extracting build artifacts') | 519 _Print('Extracting build artifacts') |
| 383 with zipfile.ZipFile(dl_dst, 'r') as z: | 520 with zipfile.ZipFile(dl_dst, 'r') as z: |
| 384 _ExtractFiles(to_extract, _CLOUD_OUT_DIR, extract_dir, z) | 521 _ExtractFiles(to_extract, _CLOUD_OUT_DIR, extract_dir, z) |
| 385 dl_out = os.path.join(extract_dir, _CLOUD_OUT_DIR) | 522 dl_out = os.path.join(extract_dir, _CLOUD_OUT_DIR) |
| 386 build.output_directory, output_directory = dl_out, build.output_directory | 523 build.output_directory, output_directory = dl_out, build.output_directory |
| 387 _ArchiveBuildResult(archive_dir, build) | 524 archive.ArchiveBuildResults() |
| 388 build.output_directory = output_directory | 525 build.output_directory = output_directory |
| 389 | 526 |
| 390 | 527 |
| 391 def _ExtractFiles(to_extract, prefix, dst, z): | 528 def _ExtractFiles(to_extract, prefix, dst, z): |
| 392 zip_infos = z.infolist() | 529 zip_infos = z.infolist() |
| 393 assert all(info.filename.startswith(prefix) for info in zip_infos), ( | 530 assert all(info.filename.startswith(prefix) for info in zip_infos), ( |
| 394 'Storage bucket folder structure doesn\'t start with %s' % prefix) | 531 'Storage bucket folder structure doesn\'t start with %s' % prefix) |
| 395 to_extract = [os.path.join(prefix, f) for f in to_extract] | 532 to_extract = [os.path.join(prefix, f) for f in to_extract] |
| 396 for f in to_extract: | 533 for f in to_extract: |
| 397 z.extract(f, path=dst) | 534 z.extract(f, path=dst) |
| 398 | 535 |
| 399 | 536 |
| 400 def _Print(s, *args, **kwargs): | 537 def _Print(s, *args, **kwargs): |
| 401 print s.format(*args, **kwargs) | 538 print s.format(*args, **kwargs) |
| 402 | 539 |
| 403 | 540 |
def _PrintAndWriteToFile(logfile, s):
  """Echo |s| to stdout and append it, newline-terminated, to |logfile|."""
  _Print(s)
  logfile.write('{}\n'.format(s))
| 408 | 545 |
| 409 | 546 |
def main():
  """Entry point: parse args, build/download each rev, diff adjacent revs."""
  parser = argparse.ArgumentParser(
      description='Find the cause of APK size bloat.')
  parser.add_argument('--archive-dir',
                      default=_DEFAULT_ARCHIVE_DIR,
                      help='Where results are stored.')
  parser.add_argument('rev',
                      help='Find binary size bloat for this commit.')
  parser.add_argument('--reference-rev',
                      help='Older rev to diff against. If not supplied, '
                           'the previous commit to rev will be used.')
  parser.add_argument('--all',
                      action='store_true',
                      help='Build/download all revs from --reference-rev to '
                           'rev and diff the contiguous revisions.')
  parser.add_argument('--include-slow-options',
                      action='store_true',
                      help='Run some extra steps that take longer to complete. '
                           'This includes apk-patch-size estimation and '
                           'static-initializer counting.')
  parser.add_argument('--cloud',
                      action='store_true',
                      help='Download build artifacts from perf builders '
                           '(Android only, Googlers only).')
  parser.add_argument('--depot-tools-path',
                      help='Custom path to depot tools. Needed for --cloud if '
                           'depot tools isn\'t in your PATH.')
  parser.add_argument('--subrepo',
                      help='Specify a subrepo directory to use. Gclient sync '
                           'will be skipped if this option is used and all git '
                           'commands will be executed from the subrepo directory. '
                           'This option doesn\'t work with --cloud.')

  # ninja/gn-specific flags grouped separately in --help output.
  build_group = parser.add_argument_group('ninja', 'Args to use with ninja/gn')
  build_group.add_argument('-j',
                           dest='max_jobs',
                           help='Run N jobs in parallel.')
  build_group.add_argument('-l',
                           dest='max_load_average',
                           help='Do not start new jobs if the load average is '
                                'greater than N.')
  build_group.add_argument('--no-goma',
                           action='store_false',
                           dest='use_goma',
                           default=True,
                           help='Do not use goma when building with ninja.')
  build_group.add_argument('--target-os',
                           default='android',
                           choices=['android', 'linux'],
                           help='target_os gn arg. Default: android.')
  build_group.add_argument('--output-directory',
                           default=_DEFAULT_OUT_DIR,
                           help='ninja output directory. '
                                'Default: %s.' % _DEFAULT_OUT_DIR)
  build_group.add_argument('--enable-chrome-android-internal',
                           action='store_true',
                           help='Allow downstream targets to be built.')
  build_group.add_argument('--target',
                           default=_DEFAULT_TARGET,
                           help='GN APK target to build. '
                                'Default %s.' % _DEFAULT_TARGET)
  # With no arguments at all, show usage instead of an argparse error.
  if len(sys.argv) == 1:
    parser.print_help()
    sys.exit()
  args = parser.parse_args()
  build = _BuildHelper(args)
  # --cloud restrictions: Android-only, and incompatible with --subrepo.
  if build.IsCloud():
    if build.IsLinux():
      parser.error('--cloud only works for android')
    if args.subrepo:
      parser.error('--subrepo doesn\'t work with --cloud')

  subrepo = args.subrepo or _SRC_ROOT
  _EnsureDirectoryClean(subrepo)
  # Saved here; invoked after the loop to restore the original checkout.
  _global_restore_checkout_func = _RestoreFunc(subrepo)
  # Revs to process, from reference rev (default: rev^) up to |rev|.
  revs = _GenerateRevList(args.rev,
                          args.reference_rev or args.rev + '^',
                          args.all,
                          subrepo)
  diffs = []
  if build.IsAndroid():
    diffs += [
        ResourceSizesDiff(
            build.apk_name, slow_options=args.include_slow_options)
    ]
  diff_mngr = _DiffArchiveManager(revs, args.archive_dir, diffs, build, subrepo)
  consecutive_failures = 0
  for i, archive in enumerate(diff_mngr.IterArchives()):
    if archive.Exists():
      # Results for this rev were archived previously; reuse them.
      _Print('Found matching metadata for {}, skipping build step.',
             archive.rev)
    else:
      if build.IsCloud():
        _DownloadBuildArtifacts(archive, build,
                                depot_tools_path=args.depot_tools_path)
      else:
        build_success = _SyncAndBuild(archive, build, subrepo)
        if not build_success:
          # Tolerate a few broken revs, but give up after the threshold.
          consecutive_failures += 1
          if consecutive_failures > _ALLOWED_CONSECUTIVE_FAILURES:
            _Die('{} builds failed in a row, last failure was {}.',
                 consecutive_failures, archive.rev)
        else:
          archive.ArchiveBuildResults()
          consecutive_failures = 0

    # Diff each archive against the one processed just before it.
    if i != 0:
      diff_mngr.MaybeDiff(i - 1, i)

  _global_restore_checkout_func()
| 495 | 657 |
if __name__ == '__main__':
  # Propagate main()'s return value as the process exit status.
  sys.exit(main())
| 498 | 660 |
| OLD | NEW |