| OLD | NEW |
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright 2017 The Chromium Authors. All rights reserved. | 2 # Copyright 2017 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 """Tool for finding the cause of APK bloat. | 6 """Tool for finding the cause of APK bloat. |
| 7 | 7 |
| 8 Run diagnose_apk_bloat.py -h for detailed usage help. | 8 Run diagnose_apk_bloat.py -h for detailed usage help. |
| 9 """ | 9 """ |
| 10 | 10 |
| 11 import argparse | 11 import argparse |
| 12 import collections | 12 import collections |
| 13 from contextlib import contextmanager | 13 from contextlib import contextmanager |
| 14 import distutils.spawn | 14 import distutils.spawn |
| 15 import json | 15 import json |
| 16 import multiprocessing | 16 import multiprocessing |
| 17 import os | 17 import os |
| 18 import re | 18 import re |
| 19 import shutil | 19 import shutil |
| 20 import subprocess | 20 import subprocess |
| 21 import sys | 21 import sys |
| 22 import tempfile | 22 import tempfile |
| 23 import zipfile | 23 import zipfile |
| 24 | 24 |
| 25 _COMMIT_COUNT_WARN_THRESHOLD = 15 | 25 _COMMIT_COUNT_WARN_THRESHOLD = 15 |
| 26 _ALLOWED_CONSECUTIVE_FAILURES = 2 | 26 _ALLOWED_CONSECUTIVE_FAILURES = 2 |
| 27 _DIFF_DETAILS_LINES_THRESHOLD = 100 | 27 _DIFF_DETAILS_LINES_THRESHOLD = 100 |
| 28 _BUILDER_URL = \ | 28 _BUILDER_URL = \ |
| 29 'https://build.chromium.org/p/chromium.perf/builders/Android%20Builder' | 29 'https://build.chromium.org/p/chromium.perf/builders/Android%20Builder' |
| 30 _CLOUD_OUT_DIR = os.path.join('out', 'Release') | |
| 31 _SRC_ROOT = os.path.abspath( | 30 _SRC_ROOT = os.path.abspath( |
| 32 os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) | 31 os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) |
| 33 _DEFAULT_ARCHIVE_DIR = os.path.join(_SRC_ROOT, 'binary-size-bloat') | 32 _DEFAULT_ARCHIVE_DIR = os.path.join(_SRC_ROOT, 'binary-size-bloat') |
| 34 _DEFAULT_OUT_DIR = os.path.join(_SRC_ROOT, 'out', 'diagnose-apk-bloat') | 33 _DEFAULT_OUT_DIR = os.path.join(_SRC_ROOT, 'out', 'diagnose-apk-bloat') |
| 35 _DEFAULT_TARGET = 'monochrome_public_apk' | 34 _DEFAULT_ANDROID_TARGET = 'monochrome_public_apk' |
| 36 | |
| 37 | 35 |
| 38 _global_restore_checkout_func = None | 36 _global_restore_checkout_func = None |
| 39 | 37 |
| 40 | 38 |
| 41 def _SetRestoreFunc(subrepo): | 39 def _SetRestoreFunc(subrepo): |
| 42 branch = _GitCmd(['rev-parse', '--abbrev-ref', 'HEAD'], subrepo) | 40 branch = _GitCmd(['rev-parse', '--abbrev-ref', 'HEAD'], subrepo) |
| 43 global _global_restore_checkout_func | 41 global _global_restore_checkout_func |
| 44 _global_restore_checkout_func = lambda: _GitCmd(['checkout', branch], subrepo) | 42 _global_restore_checkout_func = lambda: _GitCmd(['checkout', branch], subrepo) |
| 45 | 43 |
| 46 | 44 |
| 47 _DiffResult = collections.namedtuple( | 45 _DiffResult = collections.namedtuple('DiffResult', ['name', 'value', 'units']) |
| 48 'DiffResult', ['name', 'value', 'units']) | |
| 49 | 46 |
| 50 | 47 |
| 51 class BaseDiff(object): | 48 class BaseDiff(object): |
| 52 """Base class capturing binary size diffs.""" | 49 """Base class capturing binary size diffs.""" |
| 53 def __init__(self, name): | 50 def __init__(self, name): |
| 54 self.name = name | 51 self.name = name |
| 55 self.banner = '\n' + '*' * 30 + name + '*' * 30 | 52 self.banner = '\n' + '*' * 30 + name + '*' * 30 |
| 56 | 53 |
| 57 def AppendResults(self, logfile): | 54 def AppendResults(self, logfile): |
| 58 """Print and write diff results to an open |logfile|.""" | 55 """Print and write diff results to an open |logfile|.""" |
| 59 _PrintAndWriteToFile(logfile, self.banner) | 56 _PrintAndWriteToFile(logfile, self.banner) |
| 60 _PrintAndWriteToFile(logfile, 'Summary:') | 57 _PrintAndWriteToFile(logfile, 'Summary:') |
| 61 _PrintAndWriteToFile(logfile, self.Summary()) | 58 _PrintAndWriteToFile(logfile, self.Summary()) |
| 62 _PrintAndWriteToFile(logfile, '\nDetails:') | 59 _PrintAndWriteToFile(logfile, '\nDetails:') |
| 63 _PrintAndWriteToFile(logfile, self.DetailedResults()) | 60 _PrintAndWriteToFile(logfile, self.DetailedResults()) |
| 64 | 61 |
| 65 @property | 62 @property |
| 66 def summary_stat(self): | 63 def summary_stat(self): |
| 67 return None | 64 return None |
| 68 | 65 |
| 69 def Summary(self): | 66 def Summary(self): |
| 70 """A short description that summarizes the source of binary size bloat.""" | 67 """A short description that summarizes the source of binary size bloat.""" |
| 71 raise NotImplementedError() | 68 raise NotImplementedError() |
| 72 | 69 |
| 73 def DetailedResults(self): | 70 def DetailedResults(self): |
| 74 """An iterable description of the cause of binary size bloat.""" | 71 """An iterable description of the cause of binary size bloat.""" |
| 75 raise NotImplementedError() | 72 raise NotImplementedError() |
| 76 | 73 |
| 77 def ProduceDiff(self, archive_dirs): | 74 def ProduceDiff(self, before_dir, after_dir): |
| 78 """Prepare a binary size diff with ready to print results.""" | 75 """Prepare a binary size diff with ready to print results.""" |
| 79 raise NotImplementedError() | 76 raise NotImplementedError() |
| 80 | 77 |
| 81 def RunDiff(self, logfile, archive_dirs): | 78 def RunDiff(self, logfile, before_dir, after_dir): |
| 82 self.ProduceDiff(archive_dirs) | 79 self.ProduceDiff(before_dir, after_dir) |
| 83 self.AppendResults(logfile) | 80 self.AppendResults(logfile) |
| 84 | 81 |
| 85 | 82 |
| 86 class NativeDiff(BaseDiff): | 83 class NativeDiff(BaseDiff): |
| 87 _RE_SUMMARY = re.compile( | 84 _RE_SUMMARY = re.compile( |
| 88 r'.*(Section Sizes .*? object files added, \d+ removed).*', | 85 r'.*(Section Sizes .*? object files added, \d+ removed).*', |
| 89 flags=re.DOTALL) | 86 flags=re.DOTALL) |
| 90 _RE_SUMMARY_STAT = re.compile( | 87 _RE_SUMMARY_STAT = re.compile( |
| 91 r'Section Sizes \(Total=(?P<value>\d+) (?P<units>\w+)\)') | 88 r'Section Sizes \(Total=(?P<value>\d+) (?P<units>\w+)\)') |
| 92 _SUMMARY_STAT_NAME = 'Native Library Delta' | 89 _SUMMARY_STAT_NAME = 'Native Library Delta' |
| (...skipping 11 matching lines...) Expand all Loading... |
| 104 return _DiffResult( | 101 return _DiffResult( |
| 105 NativeDiff._SUMMARY_STAT_NAME, m.group('value'), m.group('units')) | 102 NativeDiff._SUMMARY_STAT_NAME, m.group('value'), m.group('units')) |
| 106 return None | 103 return None |
| 107 | 104 |
| 108 def DetailedResults(self): | 105 def DetailedResults(self): |
| 109 return self._diff.splitlines() | 106 return self._diff.splitlines() |
| 110 | 107 |
| 111 def Summary(self): | 108 def Summary(self): |
| 112 return NativeDiff._RE_SUMMARY.match(self._diff).group(1) | 109 return NativeDiff._RE_SUMMARY.match(self._diff).group(1) |
| 113 | 110 |
| 114 def ProduceDiff(self, archive_dirs): | 111 def ProduceDiff(self, before_dir, after_dir): |
| 115 size_files = [os.path.join(a, self._size_name) | 112 before_size = os.path.join(before_dir, self._size_name) |
| 116 for a in reversed(archive_dirs)] | 113 after_size = os.path.join(after_dir, self._size_name) |
| 117 cmd = [self._supersize_path, 'diff'] + size_files | 114 cmd = [self._supersize_path, 'diff', before_size, after_size] |
| 118 self._diff = _RunCmd(cmd)[0].replace('{', '{{').replace('}', '}}') | 115 self._diff = _RunCmd(cmd)[0].replace('{', '{{').replace('}', '}}') |
| 119 | 116 |
| 120 | 117 |
| 121 class ResourceSizesDiff(BaseDiff): | 118 class ResourceSizesDiff(BaseDiff): |
| 122 _RESOURCE_SIZES_PATH = os.path.join( | 119 _RESOURCE_SIZES_PATH = os.path.join( |
| 123 _SRC_ROOT, 'build', 'android', 'resource_sizes.py') | 120 _SRC_ROOT, 'build', 'android', 'resource_sizes.py') |
| 124 | 121 |
| 125 def __init__(self, apk_name, slow_options=False): | 122 def __init__(self, apk_name, slow_options=False): |
| 126 self._apk_name = apk_name | 123 self._apk_name = apk_name |
| 127 self._slow_options = slow_options | 124 self._slow_options = slow_options |
| 128 self._diff = None # Set by |ProduceDiff()| | 125 self._diff = None # Set by |ProduceDiff()| |
| 129 super(ResourceSizesDiff, self).__init__('Resource Sizes Diff') | 126 super(ResourceSizesDiff, self).__init__('Resource Sizes Diff') |
| 130 | 127 |
| 131 @property | 128 @property |
| 132 def summary_stat(self): | 129 def summary_stat(self): |
| 133 for s in self._diff: | 130 for s in self._diff: |
| 134 if 'normalized' in s.name: | 131 if 'normalized' in s.name: |
| 135 return s | 132 return s |
| 136 return None | 133 return None |
| 137 | 134 |
| 138 def DetailedResults(self): | 135 def DetailedResults(self): |
| 139 return ['{:>+10,} {} {}'.format(value, units, name) | 136 return ['{:>+10,} {} {}'.format(value, units, name) |
| 140 for name, value, units in self._diff] | 137 for name, value, units in self._diff] |
| 141 | 138 |
| 142 def Summary(self): | 139 def Summary(self): |
| 143 return 'Normalized APK size: {:+,} {}'.format( | 140 return 'Normalized APK size: {:+,} {}'.format( |
| 144 self.summary_stat.value, self.summary_stat.units) | 141 self.summary_stat.value, self.summary_stat.units) |
| 145 | 142 |
| 146 def ProduceDiff(self, archive_dirs): | 143 def ProduceDiff(self, before_dir, after_dir): |
| 147 chartjsons = self._RunResourceSizes(archive_dirs) | 144 before = self._RunResourceSizes(before_dir) |
| 145 after = self._RunResourceSizes(after_dir) |
| 148 diff = [] | 146 diff = [] |
| 149 with_patch = chartjsons[0]['charts'] | 147 for section, section_dict in after.iteritems(): |
| 150 without_patch = chartjsons[1]['charts'] | |
| 151 for section, section_dict in with_patch.iteritems(): | |
| 152 for subsection, v in section_dict.iteritems(): | 148 for subsection, v in section_dict.iteritems(): |
| 153 # Ignore entries when resource_sizes.py chartjson format has changed. | 149 # Ignore entries when resource_sizes.py chartjson format has changed. |
| 154 if (section not in without_patch or | 150 if (section not in before or |
| 155 subsection not in without_patch[section] or | 151 subsection not in before[section] or |
| 156 v['units'] != without_patch[section][subsection]['units']): | 152 v['units'] != before[section][subsection]['units']): |
| 157 _Print('Found differing dict structures for resource_sizes.py, ' | 153 _Print('Found differing dict structures for resource_sizes.py, ' |
| 158 'skipping {} {}', section, subsection) | 154 'skipping {} {}', section, subsection) |
| 159 else: | 155 else: |
| 160 diff.append( | 156 diff.append( |
| 161 _DiffResult( | 157 _DiffResult( |
| 162 '%s %s' % (section, subsection), | 158 '%s %s' % (section, subsection), |
| 163 v['value'] - without_patch[section][subsection]['value'], | 159 v['value'] - before[section][subsection]['value'], |
| 164 v['units'])) | 160 v['units'])) |
| 165 self._diff = sorted(diff, key=lambda x: abs(x.value), reverse=True) | 161 self._diff = sorted(diff, key=lambda x: abs(x.value), reverse=True) |
| 166 | 162 |
| 167 def _RunResourceSizes(self, archive_dirs): | 163 def _RunResourceSizes(self, archive_dir): |
| 168 chartjsons = [] | 164 apk_path = os.path.join(archive_dir, self._apk_name) |
| 169 for archive_dir in archive_dirs: | 165 chartjson_file = os.path.join(archive_dir, 'results-chart.json') |
| 170 apk_path = os.path.join(archive_dir, self._apk_name) | 166 cmd = [self._RESOURCE_SIZES_PATH, apk_path, '--output-dir', archive_dir, |
| 171 chartjson_file = os.path.join(archive_dir, 'results-chart.json') | 167 '--no-output-dir', '--chartjson'] |
| 172 cmd = [self._RESOURCE_SIZES_PATH, apk_path,'--output-dir', archive_dir, | 168 if self._slow_options: |
| 173 '--no-output-dir', '--chartjson'] | 169 cmd += ['--estimate-patch-size'] |
| 174 if self._slow_options: | 170 else: |
| 175 cmd += ['--estimate-patch-size'] | 171 cmd += ['--no-static-initializer-check'] |
| 176 else: | 172 _RunCmd(cmd) |
| 177 cmd += ['--no-static-initializer-check'] | 173 with open(chartjson_file) as f: |
| 178 _RunCmd(cmd) | 174 chartjson = json.load(f) |
| 179 with open(chartjson_file) as f: | 175 return chartjson['charts'] |
| 180 chartjsons.append(json.load(f)) | |
| 181 return chartjsons | |
| 182 | 176 |
| 183 | 177 |
| 184 class _BuildHelper(object): | 178 class _BuildHelper(object): |
| 185 """Helper class for generating and building targets.""" | 179 """Helper class for generating and building targets.""" |
| 186 def __init__(self, args): | 180 def __init__(self, args): |
| 187 self.cloud = args.cloud | 181 self.cloud = args.cloud |
| 188 self.enable_chrome_android_internal = args.enable_chrome_android_internal | 182 self.enable_chrome_android_internal = args.enable_chrome_android_internal |
| 189 self.extra_gn_args_str = '' | 183 self.extra_gn_args_str = '' |
| 190 self.max_jobs = args.max_jobs | 184 self.max_jobs = args.max_jobs |
| 191 self.max_load_average = args.max_load_average | 185 self.max_load_average = args.max_load_average |
| (...skipping 25 matching lines...) Expand all Loading... |
| 217 elif 'monochrome' in self.target: | 211 elif 'monochrome' in self.target: |
| 218 return 'lib.unstripped/libmonochrome.so' | 212 return 'lib.unstripped/libmonochrome.so' |
| 219 else: | 213 else: |
| 220 return 'lib.unstripped/libchrome.so' | 214 return 'lib.unstripped/libchrome.so' |
| 221 | 215 |
| 222 @property | 216 @property |
| 223 def abs_main_lib_path(self): | 217 def abs_main_lib_path(self): |
| 224 return os.path.join(self.output_directory, self.main_lib_path) | 218 return os.path.join(self.output_directory, self.main_lib_path) |
| 225 | 219 |
| 226 @property | 220 @property |
| 221 def download_bucket(self): |
| 222 return 'gs://chrome-perf/%s Builder/' % self.target_os.title() |
| 223 |
| 224 @property |
| 225 def download_output_dir(self): |
| 226 return 'out/Release' if self.IsAndroid() else 'full-build-linux' |
| 227 |
| 228 @property |
| 227 def map_file_path(self): | 229 def map_file_path(self): |
| 228 return self.main_lib_path + '.map.gz' | 230 return self.main_lib_path + '.map.gz' |
| 229 | 231 |
| 230 @property | 232 @property |
| 231 def size_name(self): | 233 def size_name(self): |
| 232 return os.path.splitext(os.path.basename(self.main_lib_path))[0] + '.size' | 234 return os.path.splitext(os.path.basename(self.main_lib_path))[0] + '.size' |
| 233 | 235 |
| 234 def _SetDefaults(self): | 236 def _SetDefaults(self): |
| 235 has_goma_dir = os.path.exists(os.path.join(os.path.expanduser('~'), 'goma')) | 237 has_goma_dir = os.path.exists(os.path.join(os.path.expanduser('~'), 'goma')) |
| 236 self.use_goma = self.use_goma or has_goma_dir | 238 self.use_goma = self.use_goma or has_goma_dir |
| 237 self.max_load_average = (self.max_load_average or | 239 self.max_load_average = (self.max_load_average or |
| 238 str(multiprocessing.cpu_count())) | 240 str(multiprocessing.cpu_count())) |
| 239 if not self.max_jobs: | 241 if not self.max_jobs: |
| 240 self.max_jobs = '10000' if self.use_goma else '500' | 242 self.max_jobs = '10000' if self.use_goma else '500' |
| 241 | 243 |
| 242 if os.path.exists(os.path.join(os.path.dirname(_SRC_ROOT), 'src-internal')): | 244 if os.path.exists(os.path.join(os.path.dirname(_SRC_ROOT), 'src-internal')): |
| 243 self.extra_gn_args_str = ' is_chrome_branded=true' | 245 self.extra_gn_args_str = ' is_chrome_branded=true' |
| 244 else: | 246 else: |
| 245 self.extra_gn_args_str = (' exclude_unwind_tables=true ' | 247 self.extra_gn_args_str = (' exclude_unwind_tables=true ' |
| 246 'ffmpeg_branding="Chrome" proprietary_codecs=true') | 248 'ffmpeg_branding="Chrome" proprietary_codecs=true') |
| 249 self.target = self.target if self.IsAndroid() else 'chrome' |
| 247 | 250 |
| 248 def _GenGnCmd(self): | 251 def _GenGnCmd(self): |
| 249 gn_args = 'is_official_build=true symbol_level=1' | 252 gn_args = 'is_official_build=true symbol_level=1' |
| 250 gn_args += ' use_goma=%s' % str(self.use_goma).lower() | 253 gn_args += ' use_goma=%s' % str(self.use_goma).lower() |
| 251 gn_args += ' target_os="%s"' % self.target_os | 254 gn_args += ' target_os="%s"' % self.target_os |
| 252 gn_args += (' enable_chrome_android_internal=%s' % | 255 gn_args += (' enable_chrome_android_internal=%s' % |
| 253 str(self.enable_chrome_android_internal).lower()) | 256 str(self.enable_chrome_android_internal).lower()) |
| 254 gn_args += self.extra_gn_args_str | 257 gn_args += self.extra_gn_args_str |
| 255 return ['gn', 'gen', self.output_directory, '--args=%s' % gn_args] | 258 return ['gn', 'gen', self.output_directory, '--args=%s' % gn_args] |
| 256 | 259 |
| 257 def _GenNinjaCmd(self): | 260 def _GenNinjaCmd(self): |
| 258 cmd = ['ninja', '-C', self.output_directory] | 261 cmd = ['ninja', '-C', self.output_directory] |
| 259 cmd += ['-j', self.max_jobs] if self.max_jobs else [] | 262 cmd += ['-j', self.max_jobs] if self.max_jobs else [] |
| 260 cmd += ['-l', self.max_load_average] if self.max_load_average else [] | 263 cmd += ['-l', self.max_load_average] if self.max_load_average else [] |
| 261 cmd += [self.target] | 264 cmd += [self.target] |
| 262 return cmd | 265 return cmd |
| 263 | 266 |
| 264 def Run(self): | 267 def Run(self): |
| 265 """Run GN gen/ninja build and return the process returncode.""" | 268 """Run GN gen/ninja build and return the process returncode.""" |
| 266 _Print('Building: {}.', self.target) | 269 _Print('Building: {}.', self.target) |
| 267 retcode = _RunCmd( | 270 retcode = _RunCmd( |
| 268 self._GenGnCmd(), print_stdout=True, exit_on_failure=False)[1] | 271 self._GenGnCmd(), print_stdout=True, exit_on_failure=False)[1] |
| 269 if retcode: | 272 if retcode: |
| 270 return retcode | 273 return retcode |
| 271 return _RunCmd( | 274 return _RunCmd( |
| 272 self._GenNinjaCmd(), print_stdout=True, exit_on_failure=False)[1] | 275 self._GenNinjaCmd(), print_stdout=True, exit_on_failure=False)[1] |
| 273 | 276 |
| 277 def DownloadUrl(self, rev): |
| 278 return self.download_bucket + 'full-build-linux_%s.zip' % rev |
| 279 |
| 274 def IsAndroid(self): | 280 def IsAndroid(self): |
| 275 return self.target_os == 'android' | 281 return self.target_os == 'android' |
| 276 | 282 |
| 277 def IsLinux(self): | 283 def IsLinux(self): |
| 278 return self.target_os == 'linux' | 284 return self.target_os == 'linux' |
| 279 | 285 |
| 280 def IsCloud(self): | 286 def IsCloud(self): |
| 281 return self.cloud | 287 return self.cloud |
| 282 | 288 |
| 283 | 289 |
| (...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 325 self.build = build | 331 self.build = build |
| 326 self.build_archives = [_BuildArchive(rev, archive_dir, build, subrepo) | 332 self.build_archives = [_BuildArchive(rev, archive_dir, build, subrepo) |
| 327 for rev in revs] | 333 for rev in revs] |
| 328 self.diffs = diffs | 334 self.diffs = diffs |
| 329 self.subrepo = subrepo | 335 self.subrepo = subrepo |
| 330 self._summary_stats = [] | 336 self._summary_stats = [] |
| 331 | 337 |
| 332 def IterArchives(self): | 338 def IterArchives(self): |
| 333 return iter(self.build_archives) | 339 return iter(self.build_archives) |
| 334 | 340 |
| 335 def MaybeDiff(self, first_id, second_id): | 341 def MaybeDiff(self, before_id, after_id): |
| 336 """Perform diffs given two build archives.""" | 342 """Perform diffs given two build archives.""" |
| 337 archives = [ | 343 before = self.build_archives[before_id] |
| 338 self.build_archives[first_id], self.build_archives[second_id]] | 344 after = self.build_archives[after_id] |
| 339 diff_path = self._DiffFilePath(archives) | 345 diff_path = self._DiffFilePath(before, after) |
| 340 if not self._CanDiff(archives): | 346 if not self._CanDiff(before, after): |
| 341 _Print('Skipping diff for {} due to missing build archives.', diff_path) | 347 _Print('Skipping diff for {} due to missing build archives.', diff_path) |
| 342 return | 348 return |
| 343 | 349 |
| 344 metadata_path = self._DiffMetadataPath(archives) | 350 metadata_path = self._DiffMetadataPath(before, after) |
| 345 metadata = _GenerateMetadata( | 351 metadata = _GenerateMetadata( |
| 346 archives, self.build, metadata_path, self.subrepo) | 352 [before, after], self.build, metadata_path, self.subrepo) |
| 347 if _MetadataExists(metadata): | 353 if _MetadataExists(metadata): |
| 348 _Print('Skipping diff for {} and {}. Matching diff already exists: {}', | 354 _Print('Skipping diff for {} and {}. Matching diff already exists: {}', |
| 349 archives[0].rev, archives[1].rev, diff_path) | 355 before.rev, after.rev, diff_path) |
| 350 else: | 356 else: |
| 351 if os.path.exists(diff_path): | 357 if os.path.exists(diff_path): |
| 352 os.remove(diff_path) | 358 os.remove(diff_path) |
| 353 archive_dirs = [archives[0].dir, archives[1].dir] | |
| 354 with open(diff_path, 'a') as diff_file: | 359 with open(diff_path, 'a') as diff_file: |
| 355 for d in self.diffs: | 360 for d in self.diffs: |
| 356 d.RunDiff(diff_file, archive_dirs) | 361 d.RunDiff(diff_file, before.dir, after.dir) |
| 357 _Print('\nSee detailed diff results here: {}.', diff_path) | 362 _Print('\nSee detailed diff results here: {}.', diff_path) |
| 358 _WriteMetadata(metadata) | 363 _WriteMetadata(metadata) |
| 359 self._AddDiffSummaryStat(archives) | 364 self._AddDiffSummaryStat(before, after) |
| 360 | 365 |
| 361 def Summarize(self): | 366 def Summarize(self): |
| 362 if self._summary_stats: | 367 if self._summary_stats: |
| 363 path = os.path.join(self.archive_dir, 'last_diff_summary.txt') | 368 path = os.path.join(self.archive_dir, 'last_diff_summary.txt') |
| 364 with open(path, 'w') as f: | 369 with open(path, 'w') as f: |
| 365 stats = sorted( | 370 stats = sorted( |
| 366 self._summary_stats, key=lambda x: x[0].value, reverse=True) | 371 self._summary_stats, key=lambda x: x[0].value, reverse=True) |
| 367 _PrintAndWriteToFile(f, '\nDiff Summary') | 372 _PrintAndWriteToFile(f, '\nDiff Summary') |
| 368 for s, before, after in stats: | 373 for s, before, after in stats: |
| 369 _PrintAndWriteToFile(f, '{:>+10} {} {} for range: {}..{}', | 374 _PrintAndWriteToFile(f, '{:>+10} {} {} for range: {}..{}', |
| 370 s.value, s.units, s.name, before, after) | 375 s.value, s.units, s.name, before, after) |
| 371 | 376 |
| 372 def _AddDiffSummaryStat(self, archives): | 377 def _AddDiffSummaryStat(self, before, after): |
| 373 stat = None | 378 stat = None |
| 374 if self.build.IsAndroid(): | 379 if self.build.IsAndroid(): |
| 375 summary_diff_type = ResourceSizesDiff | 380 summary_diff_type = ResourceSizesDiff |
| 376 else: | 381 else: |
| 377 summary_diff_type = NativeDiff | 382 summary_diff_type = NativeDiff |
| 378 for d in self.diffs: | 383 for d in self.diffs: |
| 379 if isinstance(d, summary_diff_type): | 384 if isinstance(d, summary_diff_type): |
| 380 stat = d.summary_stat | 385 stat = d.summary_stat |
| 381 if stat: | 386 if stat: |
| 382 self._summary_stats.append((stat, archives[1].rev, archives[0].rev)) | 387 self._summary_stats.append((stat, before.rev, after.rev)) |
| 383 | 388 |
| 384 def _CanDiff(self, archives): | 389 def _CanDiff(self, before, after): |
| 385 return all(a.Exists() for a in archives) | 390 return before.Exists() and after.Exists() |
| 386 | 391 |
| 387 def _DiffFilePath(self, archives): | 392 def _DiffFilePath(self, before, after): |
| 388 return os.path.join(self._DiffDir(archives), 'diff_results.txt') | 393 return os.path.join(self._DiffDir(before, after), 'diff_results.txt') |
| 389 | 394 |
| 390 def _DiffMetadataPath(self, archives): | 395 def _DiffMetadataPath(self, before, after): |
| 391 return os.path.join(self._DiffDir(archives), 'metadata.txt') | 396 return os.path.join(self._DiffDir(before, after), 'metadata.txt') |
| 392 | 397 |
| 393 def _DiffDir(self, archives): | 398 def _DiffDir(self, before, after): |
| 394 archive_range = '%s..%s' % (archives[1].rev, archives[0].rev) | 399 archive_range = '%s..%s' % (before.rev, after.rev) |
| 395 diff_path = os.path.join(self.archive_dir, 'diffs', archive_range) | 400 diff_path = os.path.join(self.archive_dir, 'diffs', archive_range) |
| 396 _EnsureDirsExist(diff_path) | 401 _EnsureDirsExist(diff_path) |
| 397 return diff_path | 402 return diff_path |
| 398 | 403 |
| 399 | 404 |
| 400 def _EnsureDirsExist(path): | 405 def _EnsureDirsExist(path): |
| 401 if not os.path.exists(path): | 406 if not os.path.exists(path): |
| 402 os.makedirs(path) | 407 os.makedirs(path) |
| 403 | 408 |
| 404 | 409 |
| (...skipping 94 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 499 _GitCmd(['checkout', archive.rev], subrepo) | 504 _GitCmd(['checkout', archive.rev], subrepo) |
| 500 else: | 505 else: |
| 501 # Move to a detached state since gclient sync doesn't work with local | 506 # Move to a detached state since gclient sync doesn't work with local |
| 502 # commits on a branch. | 507 # commits on a branch. |
| 503 _GitCmd(['checkout', '--detach'], subrepo) | 508 _GitCmd(['checkout', '--detach'], subrepo) |
| 504 _GclientSyncCmd(archive.rev, subrepo) | 509 _GclientSyncCmd(archive.rev, subrepo) |
| 505 retcode = build.Run() | 510 retcode = build.Run() |
| 506 return retcode == 0 | 511 return retcode == 0 |
| 507 | 512 |
| 508 | 513 |
| 509 def _GenerateRevList(with_patch, without_patch, all_in_range, subrepo): | 514 def _GenerateRevList(rev, reference_rev, all_in_range, subrepo): |
| 510 """Normalize and optionally generate a list of commits in the given range. | 515 """Normalize and optionally generate a list of commits in the given range. |
| 511 | 516 |
| 512 Returns a list of revisions ordered from newest to oldest. | 517 Returns: |
| 518 A list of revisions ordered from oldest to newest. |
| 513 """ | 519 """ |
| 514 cmd = ['git', '-C', subrepo, 'merge-base', '--is-ancestor', without_patch, | 520 rev_seq = '%s^..%s' % (reference_rev, rev) |
| 515 with_patch] | |
| 516 _, retcode = _RunCmd(cmd, exit_on_failure=False) | |
| 517 assert not retcode and with_patch != without_patch, ( | |
| 518 'Invalid revision arguments, rev_without_patch (%s) is newer than ' | |
| 519 'rev_with_patch (%s)' % (without_patch, with_patch)) | |
| 520 | |
| 521 rev_seq = '%s^..%s' % (without_patch, with_patch) | |
| 522 stdout = _GitCmd(['rev-list', rev_seq], subrepo) | 521 stdout = _GitCmd(['rev-list', rev_seq], subrepo) |
| 523 all_revs = stdout.splitlines() | 522 all_revs = stdout.splitlines()[::-1] |
| 524 if all_in_range: | 523 if all_in_range: |
| 525 revs = all_revs | 524 revs = all_revs |
| 526 else: | 525 else: |
| 527 revs = [all_revs[0], all_revs[-1]] | 526 revs = [all_revs[0], all_revs[-1]] |
| 528 _VerifyUserAckCommitCount(len(revs)) | 527 if len(revs) >= _COMMIT_COUNT_WARN_THRESHOLD: |
| 528 _VerifyUserAccepts( |
| 529 'You\'ve provided a commit range that contains %d commits' % len(revs)) |
| 529 return revs | 530 return revs |
| 530 | 531 |
| 531 | 532 |
| 532 def _VerifyUserAckCommitCount(count): | 533 def _ValidateRevs(rev, reference_rev, subrepo): |
| 533 if count >= _COMMIT_COUNT_WARN_THRESHOLD: | 534 def git_fatal(args, message): |
| 534 _Print('You\'ve provided a commit range that contains {} commits, do you ' | 535 devnull = open(os.devnull, 'wb') |
| 535 'want to proceed? [y/n]', count) | 536 retcode = subprocess.call( |
| 536 if raw_input('> ').lower() != 'y': | 537 ['git', '-C', subrepo] + args, stdout=devnull, stderr=subprocess.STDOUT) |
| 537 _global_restore_checkout_func() | 538 if retcode: |
| 538 sys.exit(1) | 539 _Die(message) |
| 540 |
| 541 if rev == reference_rev: |
| 542 _Die('rev and reference-rev cannot be equal') |
| 543 no_obj_message = ('%s either doesn\'t exist or your local repo is out of ' |
| 544 'date, try "git fetch origin master"') |
| 545 git_fatal(['cat-file', '-e', rev], no_obj_message % rev) |
| 546 git_fatal(['cat-file', '-e', reference_rev], no_obj_message % reference_rev) |
| 547 git_fatal(['merge-base', '--is-ancestor', reference_rev, rev], |
| 548 'reference-rev is newer than rev') |
| 549 return rev, reference_rev |
| 550 |
| 551 |
| 552 def _VerifyUserAccepts(message): |
| 553 _Print(message + '. Do you want to proceed? [y/n]') |
| 554 if raw_input('> ').lower() != 'y': |
| 555 _global_restore_checkout_func() |
| 556 sys.exit() |
| 539 | 557 |
| 540 | 558 |
| 541 def _EnsureDirectoryClean(subrepo): | 559 def _EnsureDirectoryClean(subrepo): |
| 542 _Print('Checking source directory') | 560 _Print('Checking source directory') |
| 543 stdout = _GitCmd(['status', '--porcelain'], subrepo) | 561 stdout = _GitCmd(['status', '--porcelain'], subrepo) |
| 544 # Ignore untracked files. | 562 # Ignore untracked files. |
| 545 if stdout and stdout[:2] != '??': | 563 if stdout and stdout[:2] != '??': |
| 546 _Print('Failure: please ensure working directory is clean.') | 564 _Print('Failure: please ensure working directory is clean.') |
| 547 sys.exit() | 565 sys.exit() |
| 548 | 566 |
| (...skipping 17 matching lines...) Expand all Loading... |
| 566 | 584 |
| 567 download_dir = tempfile.mkdtemp(dir=_SRC_ROOT) | 585 download_dir = tempfile.mkdtemp(dir=_SRC_ROOT) |
| 568 try: | 586 try: |
| 569 _DownloadAndArchive( | 587 _DownloadAndArchive( |
| 570 gsutil_path, archive, download_dir, build, supersize_path) | 588 gsutil_path, archive, download_dir, build, supersize_path) |
| 571 finally: | 589 finally: |
| 572 shutil.rmtree(download_dir) | 590 shutil.rmtree(download_dir) |
| 573 | 591 |
| 574 | 592 |
| 575 def _DownloadAndArchive(gsutil_path, archive, dl_dir, build, supersize_path): | 593 def _DownloadAndArchive(gsutil_path, archive, dl_dir, build, supersize_path): |
| 576 dl_file = 'full-build-linux_%s.zip' % archive.rev | 594 dl_dst = os.path.join(dl_dir, archive.rev) |
| 577 dl_url = 'gs://chrome-perf/Android Builder/%s' % dl_file | |
| 578 dl_dst = os.path.join(dl_dir, dl_file) | |
| 579 _Print('Downloading build artifacts for {}', archive.rev) | 595 _Print('Downloading build artifacts for {}', archive.rev) |
| 580 # gsutil writes stdout and stderr to stderr, so pipe stdout and stderr to | 596 # gsutil writes stdout and stderr to stderr, so pipe stdout and stderr to |
| 581 # sys.stdout. | 597 # sys.stdout. |
| 582 retcode = subprocess.call([gsutil_path, 'cp', dl_url, dl_dir], | 598 retcode = subprocess.call( |
| 583 stdout=sys.stdout, stderr=subprocess.STDOUT) | 599 [gsutil_path, 'cp', build.DownloadUrl(archive.rev), dl_dst], |
| 600 stdout=sys.stdout, stderr=subprocess.STDOUT) |
| 584 if retcode: | 601 if retcode: |
| 585 _Die('unexpected error while downloading {}. It may no longer exist on ' | 602 _Die('unexpected error while downloading {}. It may no longer exist on ' |
| 586 'the server or it may not have been uploaded yet (check {}). ' | 603 'the server or it may not have been uploaded yet (check {}). ' |
| 587 'Otherwise, you may not have the correct access permissions.', | 604 'Otherwise, you may not have the correct access permissions.', |
| 588 dl_url, _BUILDER_URL) | 605 build.DownloadUrl(archive.rev), _BUILDER_URL) |
| 589 | 606 |
| 590 # Files needed for supersize and resource_sizes. Paths relative to out dir. | 607 # Files needed for supersize and resource_sizes. Paths relative to out dir. |
| 591 to_extract = [build.main_lib_path, build.map_file_path, 'args.gn', | 608 to_extract = [build.main_lib_path, build.map_file_path, 'args.gn'] |
| 592 'build_vars.txt', build.apk_path] | 609 if build.IsAndroid(): |
| 593 extract_dir = os.path.join(os.path.splitext(dl_dst)[0], 'unzipped') | 610 to_extract += ['build_vars.txt', build.apk_path] |
| 611 extract_dir = dl_dst + '_' + 'unzipped' |
| 594 # Storage bucket stores entire output directory including out/Release prefix. | 612 # Storage bucket stores entire output directory including out/Release prefix. |
| 595 _Print('Extracting build artifacts') | 613 _Print('Extracting build artifacts') |
| 596 with zipfile.ZipFile(dl_dst, 'r') as z: | 614 with zipfile.ZipFile(dl_dst, 'r') as z: |
| 597 _ExtractFiles(to_extract, _CLOUD_OUT_DIR, extract_dir, z) | 615 _ExtractFiles(to_extract, build.download_output_dir, extract_dir, z) |
| 598 dl_out = os.path.join(extract_dir, _CLOUD_OUT_DIR) | 616 dl_out = os.path.join(extract_dir, build.download_output_dir) |
| 599 build.output_directory, output_directory = dl_out, build.output_directory | 617 build.output_directory, output_directory = dl_out, build.output_directory |
| 600 archive.ArchiveBuildResults(supersize_path) | 618 archive.ArchiveBuildResults(supersize_path) |
| 601 build.output_directory = output_directory | 619 build.output_directory = output_directory |
| 602 | 620 |
| 603 | 621 |
| 604 def _ExtractFiles(to_extract, prefix, dst, z): | 622 def _ExtractFiles(to_extract, prefix, dst, z): |
| 605 zip_infos = z.infolist() | 623 zip_infos = z.infolist() |
| 606 assert all(info.filename.startswith(prefix) for info in zip_infos), ( | 624 assert all(info.filename.startswith(prefix) for info in zip_infos), ( |
| 607 'Storage bucket folder structure doesn\'t start with %s' % prefix) | 625 'Storage bucket folder structure doesn\'t start with %s' % prefix) |
| 608 to_extract = [os.path.join(prefix, f) for f in to_extract] | 626 to_extract = [os.path.join(prefix, f) for f in to_extract] |
| (...skipping 84 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 693 choices=['android', 'linux'], | 711 choices=['android', 'linux'], |
| 694 help='target_os gn arg. Default: android.') | 712 help='target_os gn arg. Default: android.') |
| 695 build_group.add_argument('--output-directory', | 713 build_group.add_argument('--output-directory', |
| 696 default=_DEFAULT_OUT_DIR, | 714 default=_DEFAULT_OUT_DIR, |
| 697 help='ninja output directory. ' | 715 help='ninja output directory. ' |
| 698 'Default: %s.' % _DEFAULT_OUT_DIR) | 716 'Default: %s.' % _DEFAULT_OUT_DIR) |
| 699 build_group.add_argument('--enable-chrome-android-internal', | 717 build_group.add_argument('--enable-chrome-android-internal', |
| 700 action='store_true', | 718 action='store_true', |
| 701 help='Allow downstream targets to be built.') | 719 help='Allow downstream targets to be built.') |
| 702 build_group.add_argument('--target', | 720 build_group.add_argument('--target', |
| 703 default=_DEFAULT_TARGET, | 721 default=_DEFAULT_ANDROID_TARGET, |
| 704 help='GN APK target to build. ' | 722 help='GN APK target to build. Ignored for Linux. ' |
| 705 'Default %s.' % _DEFAULT_TARGET) | 723 'Default %s.' % _DEFAULT_ANDROID_TARGET) |
| 706 if len(sys.argv) == 1: | 724 if len(sys.argv) == 1: |
| 707 parser.print_help() | 725 parser.print_help() |
| 708 sys.exit() | 726 sys.exit() |
| 709 args = parser.parse_args() | 727 args = parser.parse_args() |
| 710 build = _BuildHelper(args) | 728 build = _BuildHelper(args) |
| 711 if build.IsCloud(): | 729 if build.IsCloud() and args.subrepo: |
| 712 if build.IsLinux(): | |
| 713 parser.error('--cloud only works for android') | |
| 714 if args.subrepo: | |
| 715 parser.error('--subrepo doesn\'t work with --cloud') | 730 parser.error('--subrepo doesn\'t work with --cloud') |
| 716 | 731 |
| 717 subrepo = args.subrepo or _SRC_ROOT | 732 subrepo = args.subrepo or _SRC_ROOT |
| 718 _EnsureDirectoryClean(subrepo) | 733 _EnsureDirectoryClean(subrepo) |
| 719 _SetRestoreFunc(subrepo) | 734 _SetRestoreFunc(subrepo) |
| 720 revs = _GenerateRevList(args.rev, | 735 if build.IsLinux(): |
| 721 args.reference_rev or args.rev + '^', | 736 _VerifyUserAccepts('Linux diffs have known deficiencies (crbug/717550).') |
| 722 args.all, | 737 |
| 723 subrepo) | 738 rev, reference_rev = _ValidateRevs( |
| 739 args.rev, args.reference_rev or args.rev + '^', subrepo) |
| 740 revs = _GenerateRevList(rev, reference_rev, args.all, subrepo) |
| 724 with _TmpCopyBinarySizeDir() as supersize_path: | 741 with _TmpCopyBinarySizeDir() as supersize_path: |
| 725 diffs = [NativeDiff(build.size_name, supersize_path)] | 742 diffs = [NativeDiff(build.size_name, supersize_path)] |
| 726 if build.IsAndroid(): | 743 if build.IsAndroid(): |
| 727 diffs += [ | 744 diffs += [ |
| 728 ResourceSizesDiff( | 745 ResourceSizesDiff( |
| 729 build.apk_name, slow_options=args.include_slow_options) | 746 build.apk_name, slow_options=args.include_slow_options) |
| 730 ] | 747 ] |
| 731 diff_mngr = _DiffArchiveManager( | 748 diff_mngr = _DiffArchiveManager( |
| 732 revs, args.archive_directory, diffs, build, subrepo) | 749 revs, args.archive_directory, diffs, build, subrepo) |
| 733 consecutive_failures = 0 | 750 consecutive_failures = 0 |
| (...skipping 19 matching lines...) Expand all Loading... |
| 753 if i != 0: | 770 if i != 0: |
| 754 diff_mngr.MaybeDiff(i - 1, i) | 771 diff_mngr.MaybeDiff(i - 1, i) |
| 755 | 772 |
| 756 diff_mngr.Summarize() | 773 diff_mngr.Summarize() |
| 757 | 774 |
| 758 _global_restore_checkout_func() | 775 _global_restore_checkout_func() |
| 759 | 776 |
| 760 if __name__ == '__main__': | 777 if __name__ == '__main__': |
| 761 sys.exit(main()) | 778 sys.exit(main()) |
| 762 | 779 |
| OLD | NEW |