Chromium Code Reviews — side-by-side diff (OLD | NEW)
|---|---|
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright 2017 The Chromium Authors. All rights reserved. | 2 # Copyright 2017 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 """Tool for finding the cause of APK bloat. | 6 """Tool for finding the cause of APK bloat. |
| 7 | 7 |
| 8 Run diagnose_apk_bloat.py -h for detailed usage help. | 8 Run diagnose_apk_bloat.py -h for detailed usage help. |
| 9 """ | 9 """ |
| 10 | 10 |
| 11 import argparse | 11 import argparse |
| 12 import collections | 12 import collections |
| 13 from contextlib import contextmanager | |
| 13 import distutils.spawn | 14 import distutils.spawn |
| 14 import itertools | |
| 15 import json | 15 import json |
| 16 import multiprocessing | 16 import multiprocessing |
| 17 import os | 17 import os |
| 18 import shutil | 18 import shutil |
| 19 import subprocess | 19 import subprocess |
| 20 import sys | 20 import sys |
| 21 import tempfile | 21 import tempfile |
| 22 import zipfile | 22 import zipfile |
| 23 | 23 |
| 24 _ALLOWED_CONSECUTIVE_BUILDS = 15 | 24 _ALLOWED_CONSECUTIVE_BUILDS = 15 |
| (...skipping 209 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 234 class _BuildArchive(object): | 234 class _BuildArchive(object): |
| 235 """Class for managing a directory with build results and build metadata.""" | 235 """Class for managing a directory with build results and build metadata.""" |
| 236 def __init__(self, rev, base_archive_dir, build): | 236 def __init__(self, rev, base_archive_dir, build): |
| 237 self.build = build | 237 self.build = build |
| 238 self.dir = os.path.join(base_archive_dir, rev) | 238 self.dir = os.path.join(base_archive_dir, rev) |
| 239 _EnsureDirsExist(self.dir) | 239 _EnsureDirsExist(self.dir) |
| 240 metadata_path = os.path.join(self.dir, 'metadata.txt') | 240 metadata_path = os.path.join(self.dir, 'metadata.txt') |
| 241 self.rev = rev | 241 self.rev = rev |
| 242 self.metadata = _GenerateMetadata([self], build, metadata_path) | 242 self.metadata = _GenerateMetadata([self], build, metadata_path) |
| 243 | 243 |
| 244 def ArchiveBuildResults(self): | 244 def ArchiveBuildResults(self, bs_dir): |
| 245 """Save build artifacts necessary for diffing.""" | 245 """Save build artifacts necessary for diffing.""" |
| 246 _Print('Saving build results to: {}', self.dir) | 246 _Print('Saving build results to: {}', self.dir) |
| 247 build = self.build | 247 build = self.build |
| 248 self._ArchiveFile(build.main_lib_path) | 248 self._ArchiveFile(build.main_lib_path) |
| 249 lib_name_noext = os.path.splitext(os.path.basename(build.main_lib_path))[0] | 249 lib_name_noext = os.path.splitext(os.path.basename(build.main_lib_path))[0] |
| 250 size_path = os.path.join(self.dir, lib_name_noext + '.size') | 250 size_path = os.path.join(self.dir, lib_name_noext + '.size') |
| 251 supersize_path = os.path.join(_SRC_ROOT, 'tools/binary_size/supersize') | 251 supersize_path = os.path.join(bs_dir, 'supersize') |
| 252 tool_prefix = _FindToolPrefix(build.output_directory) | 252 tool_prefix = _FindToolPrefix(build.output_directory) |
| 253 supersize_cmd = [supersize_path, 'archive', size_path, '--elf-file', | 253 supersize_cmd = [supersize_path, 'archive', size_path, '--elf-file', |
| 254 build.main_lib_path, '--tool-prefix', tool_prefix, | 254 build.main_lib_path, '--tool-prefix', tool_prefix, |
| 255 '--output-directory', build.output_directory, | 255 '--output-directory', build.output_directory, |
| 256 '--no-source-paths'] | 256 '--no-source-paths'] |
| 257 if build.IsAndroid(): | 257 if build.IsAndroid(): |
| 258 supersize_cmd += ['--apk-file', build.abs_apk_path] | 258 supersize_cmd += ['--apk-file', build.abs_apk_path] |
| 259 self._ArchiveFile(build.abs_apk_path) | 259 self._ArchiveFile(build.abs_apk_path) |
| 260 | 260 |
| 261 _RunCmd(supersize_cmd) | 261 _RunCmd(supersize_cmd) |
| (...skipping 209 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 471 global _subrepo | 471 global _subrepo |
| 472 _subrepo = subrepo or _SRC_ROOT | 472 _subrepo = subrepo or _SRC_ROOT |
| 473 | 473 |
| 474 | 474 |
def _Die(s, *args, **kwargs):
  """Report a fatal error, restore the initial git branch, and exit(1)."""
  message = 'Failure: ' + s
  _Print(message, *args, **kwargs)
  _RestoreInitialBranch()
  sys.exit(1)
| 479 | 479 |
| 480 | 480 |
def _DownloadBuildArtifacts(archive, build, bs_dir, depot_tools_path):
  """Download artifacts from arm32 chromium perf builder.

  Args:
    archive: _BuildArchive to store the downloaded results in.
    build: Build-description object.
    bs_dir: Directory containing the binary_size tools.
    depot_tools_path: Optional explicit depot_tools checkout; when falsy,
        gsutil.py is looked up on PATH instead.
  """
  gsutil_path = (os.path.join(depot_tools_path, 'gsutil.py')
                 if depot_tools_path
                 else distutils.spawn.find_executable('gsutil.py'))
  if not gsutil_path:
    _Die('gsutil.py not found, please provide path to depot_tools via '
         '--depot-tools-path or add it to your PATH')

  # Scratch dir for the downloaded zip; always cleaned up.
  download_dir = tempfile.mkdtemp(dir=_SRC_ROOT)
  try:
    _DownloadAndArchive(gsutil_path, archive, download_dir, build, bs_dir)
  finally:
    shutil.rmtree(download_dir)
| 497 | 497 |
| 498 | 498 |
def _DownloadAndArchive(gsutil_path, archive, dl_dir, build, bs_dir):
  """Download a perf-builder zip for |archive.rev|, extract, and archive it.

  Args:
    gsutil_path: Path to gsutil.py.
    archive: _BuildArchive receiving the extracted build results.
    dl_dir: Scratch directory to download/extract into.
    build: Build-description object; its output_directory is temporarily
        redirected at the extracted artifacts.
    bs_dir: Directory containing the binary_size tools.
  """
  dl_file = 'full-build-linux_%s.zip' % archive.rev
  dl_url = 'gs://chrome-perf/Android Builder/%s' % dl_file
  dl_dst = os.path.join(dl_dir, dl_file)
  _Print('Downloading build artifacts for {}', archive.rev)
  # gsutil writes stdout and stderr to stderr, so pipe stdout and stderr to
  # sys.stdout.
  retcode = subprocess.call([gsutil_path, 'cp', dl_url, dl_dir],
                            stdout=sys.stdout, stderr=subprocess.STDOUT)
  if retcode:
    _Die('unexpected error while downloading {}. It may no longer exist on '
         'the server or it may not have been uploaded yet (check {}). '
         'Otherwise, you may not have the correct access permissions.',
         dl_url, _BUILDER_URL)

  # Files needed for supersize and resource_sizes. Paths relative to out dir.
  to_extract = [build.main_lib_name, build.map_file_name, 'args.gn',
                'build_vars.txt', build.apk_path]
  extract_dir = os.path.join(os.path.splitext(dl_dst)[0], 'unzipped')
  # Storage bucket stores entire output directory including out/Release prefix.
  _Print('Extracting build artifacts')
  with zipfile.ZipFile(dl_dst, 'r') as z:
    _ExtractFiles(to_extract, _CLOUD_OUT_DIR, extract_dir, z)
  dl_out = os.path.join(extract_dir, _CLOUD_OUT_DIR)
  # Temporarily point the build at the extracted output directory. Restore in
  # a finally so a failure in ArchiveBuildResults can't leave build pointing
  # at the (soon-to-be-deleted) download dir.
  output_directory = build.output_directory
  build.output_directory = dl_out
  try:
    archive.ArchiveBuildResults(bs_dir)
  finally:
    build.output_directory = output_directory
| 526 | 526 |
| 527 | 527 |
| 528 def _ExtractFiles(to_extract, prefix, dst, z): | 528 def _ExtractFiles(to_extract, prefix, dst, z): |
| 529 zip_infos = z.infolist() | 529 zip_infos = z.infolist() |
| 530 assert all(info.filename.startswith(prefix) for info in zip_infos), ( | 530 assert all(info.filename.startswith(prefix) for info in zip_infos), ( |
| 531 'Storage bucket folder structure doesn\'t start with %s' % prefix) | 531 'Storage bucket folder structure doesn\'t start with %s' % prefix) |
| 532 to_extract = [os.path.join(prefix, f) for f in to_extract] | 532 to_extract = [os.path.join(prefix, f) for f in to_extract] |
| 533 for f in to_extract: | 533 for f in to_extract: |
| 534 z.extract(f, path=dst) | 534 z.extract(f, path=dst) |
| 535 | 535 |
| 536 | 536 |
| 537 def _Print(s, *args, **kwargs): | 537 def _Print(s, *args, **kwargs): |
| 538 print s.format(*args, **kwargs) | 538 print s.format(*args, **kwargs) |
| 539 | 539 |
| 540 | 540 |
def _PrintAndWriteToFile(logfile, s):
  """Print |s| to |logfile| and stdout."""
  # Echo to stdout first, then append to the log with a trailing newline.
  _Print(s)
  logfile.write('%s\n' % s)
| 545 | 545 |
| 546 | 546 |
@contextmanager
def _TmpBinarySizeDir():
  """Recursively copy files to a temp dir and yield the tmp binary_size dir.

  The temporary tree is removed on exit, even if the body raises.
  """
  # NOTE(review): agrieve flagged that running the tools from a copied tree
  # may break helpers.SOURCE_DIR; estevenson acknowledged — confirm supersize
  # still resolves source paths correctly when run from the copy.
  src_dir = os.path.join(_SRC_ROOT, 'tools', 'binary_size')
  tmp_dir = tempfile.mkdtemp()
  try:
    bs_dir = os.path.join(tmp_dir, 'binary_size')
    shutil.copytree(src_dir, bs_dir)
    yield bs_dir
  finally:
    shutil.rmtree(tmp_dir)
| 557 | |
| 558 | |
| 547 def main(): | 559 def main(): |
| 548 parser = argparse.ArgumentParser( | 560 parser = argparse.ArgumentParser( |
| 549 description='Find the cause of APK size bloat.', | 561 description='Find the cause of APK size bloat.', |
| 550 formatter_class=argparse.ArgumentDefaultsHelpFormatter) | 562 formatter_class=argparse.ArgumentDefaultsHelpFormatter) |
| 551 parser.add_argument('--archive-dir', | 563 parser.add_argument('--archive-directory', |
| 552 default=_DEFAULT_ARCHIVE_DIR, | 564 default=_DEFAULT_ARCHIVE_DIR, |
| 553 help='Where results are stored.') | 565 help='Where results are stored.') |
| 554 parser.add_argument('--rev-with-patch', | 566 parser.add_argument('--rev-with-patch', |
| 555 default='HEAD', | 567 default='HEAD', |
| 556 help='Commit with patch.') | 568 help='Commit with patch.') |
| 557 parser.add_argument('--rev-without-patch', | 569 parser.add_argument('--rev-without-patch', |
| 558 help='Older patch to diff against. If not supplied, ' | 570 help='Older patch to diff against. If not supplied, ' |
| 559 'the previous commit to rev_with_patch will be used.') | 571 'the previous commit to rev_with_patch will be used.') |
| 560 parser.add_argument('--all', | 572 parser.add_argument('--all', |
| 561 action='store_true', | 573 action='store_true', |
| (...skipping 57 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 619 _SetInitialBranch() | 631 _SetInitialBranch() |
| 620 revs = _GenerateRevList(args.rev_with_patch, | 632 revs = _GenerateRevList(args.rev_with_patch, |
| 621 args.rev_without_patch or args.rev_with_patch + '^', | 633 args.rev_without_patch or args.rev_with_patch + '^', |
| 622 args.all) | 634 args.all) |
| 623 diffs = [] | 635 diffs = [] |
| 624 if build.IsAndroid(): | 636 if build.IsAndroid(): |
| 625 diffs += [ | 637 diffs += [ |
| 626 ResourceSizesDiff( | 638 ResourceSizesDiff( |
| 627 build.apk_name, slow_options=args.include_slow_options) | 639 build.apk_name, slow_options=args.include_slow_options) |
| 628 ] | 640 ] |
| 629 diff_mngr = _DiffArchiveManager(revs, args.archive_dir, diffs, build) | 641 diff_mngr = _DiffArchiveManager(revs, args.archive_directory, diffs, build) |
| 630 consecutive_failures = 0 | 642 consecutive_failures = 0 |
| 631 for i, archive in enumerate(diff_mngr.IterArchives()): | 643 with _TmpBinarySizeDir() as bs_dir: |
| 632 if archive.Exists(): | 644 for i, archive in enumerate(diff_mngr.IterArchives()): |
| 633 _Print('Found matching metadata for {}, skipping build step.', | 645 if archive.Exists(): |
| 634 archive.rev) | 646 _Print('Found matching metadata for {}, skipping build step.', |
| 635 else: | 647 archive.rev) |
| 636 if build.IsCloud(): | |
| 637 _DownloadBuildArtifacts(archive, build, | |
| 638 depot_tools_path=args.depot_tools_path) | |
| 639 else: | 648 else: |
| 640 build_success = _SyncAndBuild(archive, build, use_subrepo) | 649 if build.IsCloud(): |
| 641 if not build_success: | 650 _DownloadBuildArtifacts(archive, build, bs_dir, args.depot_tools_path) |
| 642 consecutive_failures += 1 | |
| 643 if consecutive_failures > _ALLOWED_CONSECUTIVE_FAILURES: | |
| 644 _Die('{} builds failed in a row, last failure was {}.', | |
| 645 consecutive_failures, archive.rev) | |
| 646 else: | 651 else: |
| 647 archive.ArchiveBuildResults() | 652 build_success = _SyncAndBuild(archive, build, use_subrepo) |
| 648 consecutive_failures = 0 | 653 if not build_success: |
| 654 consecutive_failures += 1 | |
| 655 if consecutive_failures > _ALLOWED_CONSECUTIVE_FAILURES: | |
| 656 _Die('{} builds failed in a row, last failure was {}.', | |
| 657 consecutive_failures, archive.rev) | |
| 658 else: | |
| 659 archive.ArchiveBuildResults(bs_dir) | |
| 660 consecutive_failures = 0 | |
| 649 | 661 |
| 650 if i != 0: | 662 if i != 0: |
| 651 diff_mngr.MaybeDiff(i - 1, i) | 663 diff_mngr.MaybeDiff(i - 1, i) |
| 652 | 664 |
| 653 _RestoreInitialBranch() | 665 _RestoreInitialBranch() |
| 654 | 666 |
if __name__ == '__main__':
  # Use main()'s return value as the process exit status (None -> 0).
  sys.exit(main())
| 657 | 669 |
| OLD | NEW |