Chromium Code Reviews

Side by Side Diff: tools/binary_size/diagnose_apk_bloat.py

Issue 2847243005: diagnose_apk_bloat.py: fix error messages and simplify rev order. (Closed)
Patch Set: + README (created 3 years, 7 months ago)
OLD | NEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # Copyright 2017 The Chromium Authors. All rights reserved. 2 # Copyright 2017 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be 3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file. 4 # found in the LICENSE file.
5 5
6 """Tool for finding the cause of APK bloat. 6 """Tool for finding the cause of APK bloat.
7 7
8 Run diagnose_apk_bloat.py -h for detailed usage help. 8 Run diagnose_apk_bloat.py -h for detailed usage help.
9 """ 9 """
10 10
11 import argparse 11 import argparse
12 import collections 12 import collections
13 from contextlib import contextmanager 13 from contextlib import contextmanager
14 import distutils.spawn 14 import distutils.spawn
15 import json 15 import json
16 import multiprocessing 16 import multiprocessing
17 import os 17 import os
18 import re 18 import re
19 import shutil 19 import shutil
20 import subprocess 20 import subprocess
21 import sys 21 import sys
22 import tempfile 22 import tempfile
23 import zipfile 23 import zipfile
24 24
25 _COMMIT_COUNT_WARN_THRESHOLD = 15 25 _COMMIT_COUNT_WARN_THRESHOLD = 15
26 _ALLOWED_CONSECUTIVE_FAILURES = 2 26 _ALLOWED_CONSECUTIVE_FAILURES = 2
27 _DIFF_DETAILS_LINES_THRESHOLD = 100 27 _DIFF_DETAILS_LINES_THRESHOLD = 100
28 _BUILDER_URL = \ 28 _BUILDER_URL = \
29 'https://build.chromium.org/p/chromium.perf/builders/Android%20Builder' 29 'https://build.chromium.org/p/chromium.perf/builders/Android%20Builder'
30 _CLOUD_OUT_DIR = os.path.join('out', 'Release')
31 _SRC_ROOT = os.path.abspath( 30 _SRC_ROOT = os.path.abspath(
32 os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) 31 os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
33 _DEFAULT_ARCHIVE_DIR = os.path.join(_SRC_ROOT, 'binary-size-bloat') 32 _DEFAULT_ARCHIVE_DIR = os.path.join(_SRC_ROOT, 'binary-size-bloat')
34 _DEFAULT_OUT_DIR = os.path.join(_SRC_ROOT, 'out', 'diagnose-apk-bloat') 33 _DEFAULT_OUT_DIR = os.path.join(_SRC_ROOT, 'out', 'diagnose-apk-bloat')
35 _DEFAULT_TARGET = 'monochrome_public_apk' 34 _DEFAULT_ANDROID_TARGET = 'monochrome_public_apk'
35 _ANDROID_BUCKET = 'gs://chrome-perf/Android Builder/'
36 _LINUX_BUCKET = 'gs://chrome-perf/Linux Builder/'
36 37
37 38
38 _global_restore_checkout_func = None 39 _global_restore_checkout_func = None
39 40
40 41
41 def _SetRestoreFunc(subrepo): 42 def _SetRestoreFunc(subrepo):
42 branch = _GitCmd(['rev-parse', '--abbrev-ref', 'HEAD'], subrepo) 43 branch = _GitCmd(['rev-parse', '--abbrev-ref', 'HEAD'], subrepo)
43 global _global_restore_checkout_func 44 global _global_restore_checkout_func
44 _global_restore_checkout_func = lambda: _GitCmd(['checkout', branch], subrepo) 45 _global_restore_checkout_func = lambda: _GitCmd(['checkout', branch], subrepo)
45 46
46 47
47 _DiffResult = collections.namedtuple( 48 _DiffResult = collections.namedtuple('DiffResult', ['name', 'value', 'units'])
48 'DiffResult', ['name', 'value', 'units'])
49 49
50 50
51 class BaseDiff(object): 51 class BaseDiff(object):
52 """Base class capturing binary size diffs.""" 52 """Base class capturing binary size diffs."""
53 def __init__(self, name): 53 def __init__(self, name):
54 self.name = name 54 self.name = name
55 self.banner = '\n' + '*' * 30 + name + '*' * 30 55 self.banner = '\n' + '*' * 30 + name + '*' * 30
56 56
57 def AppendResults(self, logfile): 57 def AppendResults(self, logfile):
58 """Print and write diff results to an open |logfile|.""" 58 """Print and write diff results to an open |logfile|."""
59 _PrintAndWriteToFile(logfile, self.banner) 59 _PrintAndWriteToFile(logfile, self.banner)
60 _PrintAndWriteToFile(logfile, 'Summary:') 60 _PrintAndWriteToFile(logfile, 'Summary:')
61 _PrintAndWriteToFile(logfile, self.Summary()) 61 _PrintAndWriteToFile(logfile, self.Summary())
62 _PrintAndWriteToFile(logfile, '\nDetails:') 62 _PrintAndWriteToFile(logfile, '\nDetails:')
63 _PrintAndWriteToFile(logfile, self.DetailedResults()) 63 _PrintAndWriteToFile(logfile, self.DetailedResults())
64 64
65 @property 65 @property
66 def summary_stat(self): 66 def summary_stat(self):
67 return None 67 return None
68 68
69 def Summary(self): 69 def Summary(self):
70 """A short description that summarizes the source of binary size bloat.""" 70 """A short description that summarizes the source of binary size bloat."""
71 raise NotImplementedError() 71 raise NotImplementedError()
72 72
73 def DetailedResults(self): 73 def DetailedResults(self):
74 """An iterable description of the cause of binary size bloat.""" 74 """An iterable description of the cause of binary size bloat."""
75 raise NotImplementedError() 75 raise NotImplementedError()
76 76
77 def ProduceDiff(self, archive_dirs): 77 def ProduceDiff(self, before_dir, after_dir):
78 """Prepare a binary size diff with ready to print results.""" 78 """Prepare a binary size diff with ready to print results."""
79 raise NotImplementedError() 79 raise NotImplementedError()
80 80
81 def RunDiff(self, logfile, archive_dirs): 81 def RunDiff(self, logfile, before_dir, after_dir):
82 self.ProduceDiff(archive_dirs) 82 self.ProduceDiff(before_dir, after_dir)
83 self.AppendResults(logfile) 83 self.AppendResults(logfile)
84 84
85 85
86 class NativeDiff(BaseDiff): 86 class NativeDiff(BaseDiff):
87 _RE_SUMMARY = re.compile( 87 _RE_SUMMARY = re.compile(
88 r'.*(Section Sizes .*? object files added, \d+ removed).*', 88 r'.*(Section Sizes .*? object files added, \d+ removed).*',
89 flags=re.DOTALL) 89 flags=re.DOTALL)
90 _RE_SUMMARY_STAT = re.compile( 90 _RE_SUMMARY_STAT = re.compile(
91 r'Section Sizes \(Total=(?P<value>\d+) (?P<units>\w+)\)') 91 r'Section Sizes \(Total=(?P<value>\d+) (?P<units>\w+)\)')
92 _SUMMARY_STAT_NAME = 'Native Library Delta' 92 _SUMMARY_STAT_NAME = 'Native Library Delta'
(...skipping 11 matching lines...)
104 return _DiffResult( 104 return _DiffResult(
105 NativeDiff._SUMMARY_STAT_NAME, m.group('value'), m.group('units')) 105 NativeDiff._SUMMARY_STAT_NAME, m.group('value'), m.group('units'))
106 return None 106 return None
107 107
108 def DetailedResults(self): 108 def DetailedResults(self):
109 return self._diff.splitlines() 109 return self._diff.splitlines()
110 110
111 def Summary(self): 111 def Summary(self):
112 return NativeDiff._RE_SUMMARY.match(self._diff).group(1) 112 return NativeDiff._RE_SUMMARY.match(self._diff).group(1)
113 113
114 def ProduceDiff(self, archive_dirs): 114 def ProduceDiff(self, before_dir, after_dir):
115 size_files = [os.path.join(a, self._size_name) 115 before_size = os.path.join(before_dir, self._size_name)
116 for a in reversed(archive_dirs)] 116 after_size = os.path.join(after_dir, self._size_name)
117 cmd = [self._supersize_path, 'diff'] + size_files 117 cmd = [self._supersize_path, 'diff', before_size, after_size]
118 self._diff = _RunCmd(cmd)[0].replace('{', '{{').replace('}', '}}') 118 self._diff = _RunCmd(cmd)[0].replace('{', '{{').replace('}', '}}')
119 119
120 120
121 class ResourceSizesDiff(BaseDiff): 121 class ResourceSizesDiff(BaseDiff):
122 _RESOURCE_SIZES_PATH = os.path.join( 122 _RESOURCE_SIZES_PATH = os.path.join(
123 _SRC_ROOT, 'build', 'android', 'resource_sizes.py') 123 _SRC_ROOT, 'build', 'android', 'resource_sizes.py')
124 124
125 def __init__(self, apk_name, slow_options=False): 125 def __init__(self, apk_name, slow_options=False):
126 self._apk_name = apk_name 126 self._apk_name = apk_name
127 self._slow_options = slow_options 127 self._slow_options = slow_options
128 self._diff = None # Set by |ProduceDiff()| 128 self._diff = None # Set by |ProduceDiff()|
129 super(ResourceSizesDiff, self).__init__('Resource Sizes Diff') 129 super(ResourceSizesDiff, self).__init__('Resource Sizes Diff')
130 130
131 @property 131 @property
132 def summary_stat(self): 132 def summary_stat(self):
133 for s in self._diff: 133 for s in self._diff:
134 if 'normalized' in s.name: 134 if 'normalized' in s.name:
135 return s 135 return s
136 return None 136 return None
137 137
138 def DetailedResults(self): 138 def DetailedResults(self):
139 return ['{:>+10,} {} {}'.format(value, units, name) 139 return ['{:>+10,} {} {}'.format(value, units, name)
140 for name, value, units in self._diff] 140 for name, value, units in self._diff]
141 141
142 def Summary(self): 142 def Summary(self):
143 return 'Normalized APK size: {:+,} {}'.format( 143 return 'Normalized APK size: {:+,} {}'.format(
144 self.summary_stat.value, self.summary_stat.units) 144 self.summary_stat.value, self.summary_stat.units)
145 145
146 def ProduceDiff(self, archive_dirs): 146 def ProduceDiff(self, before_dir, after_dir):
147 chartjsons = self._RunResourceSizes(archive_dirs) 147 before = self._RunResourceSizes(before_dir)
148 after = self._RunResourceSizes(after_dir)
148 diff = [] 149 diff = []
149 with_patch = chartjsons[0]['charts'] 150 for section, section_dict in after.iteritems():
150 without_patch = chartjsons[1]['charts']
151 for section, section_dict in with_patch.iteritems():
152 for subsection, v in section_dict.iteritems(): 151 for subsection, v in section_dict.iteritems():
153 # Ignore entries when resource_sizes.py chartjson format has changed. 152 # Ignore entries when resource_sizes.py chartjson format has changed.
154 if (section not in without_patch or 153 if (section not in before or
155 subsection not in without_patch[section] or 154 subsection not in before[section] or
156 v['units'] != without_patch[section][subsection]['units']): 155 v['units'] != before[section][subsection]['units']):
157 _Print('Found differing dict structures for resource_sizes.py, ' 156 _Print('Found differing dict structures for resource_sizes.py, '
158 'skipping {} {}', section, subsection) 157 'skipping {} {}', section, subsection)
159 else: 158 else:
160 diff.append( 159 diff.append(
161 _DiffResult( 160 _DiffResult(
162 '%s %s' % (section, subsection), 161 '%s %s' % (section, subsection),
163 v['value'] - without_patch[section][subsection]['value'], 162 v['value'] - before[section][subsection]['value'],
164 v['units'])) 163 v['units']))
165 self._diff = sorted(diff, key=lambda x: abs(x.value), reverse=True) 164 self._diff = sorted(diff, key=lambda x: abs(x.value), reverse=True)
166 165
167 def _RunResourceSizes(self, archive_dirs): 166 def _RunResourceSizes(self, archive_dir):
168 chartjsons = [] 167 apk_path = os.path.join(archive_dir, self._apk_name)
169 for archive_dir in archive_dirs: 168 chartjson_file = os.path.join(archive_dir, 'results-chart.json')
170 apk_path = os.path.join(archive_dir, self._apk_name) 169 cmd = [self._RESOURCE_SIZES_PATH, apk_path,'--output-dir', archive_dir,
171 chartjson_file = os.path.join(archive_dir, 'results-chart.json') 170 '--no-output-dir', '--chartjson']
172 cmd = [self._RESOURCE_SIZES_PATH, apk_path,'--output-dir', archive_dir, 171 if self._slow_options:
173 '--no-output-dir', '--chartjson'] 172 cmd += ['--estimate-patch-size']
174 if self._slow_options: 173 else:
175 cmd += ['--estimate-patch-size'] 174 cmd += ['--no-static-initializer-check']
176 else: 175 _RunCmd(cmd)
177 cmd += ['--no-static-initializer-check'] 176 with open(chartjson_file) as f:
178 _RunCmd(cmd) 177 chartjson = json.load(f)
179 with open(chartjson_file) as f: 178 return chartjson['charts']
180 chartjsons.append(json.load(f))
181 return chartjsons
182 179
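
For reference, a hedged sketch of the chartjson shape ProduceDiff() assumes (the actual resource_sizes.py output may differ): each _RunResourceSizes() call returns a dict of sections mapping subsections to {'value', 'units'}, and matching subsections are subtracted before-from-after.

# Toy data in the assumed {section: {subsection: {'value': ..., 'units': ...}}} shape.
before = {'InstallSize': {'APK size': {'value': 100, 'units': 'bytes'}}}
after = {'InstallSize': {'APK size': {'value': 130, 'units': 'bytes'}}}

section, subsection = 'InstallSize', 'APK size'
delta = after[section][subsection]['value'] - before[section][subsection]['value']
print('%+d %s %s %s' % (delta, after[section][subsection]['units'], section, subsection))
# +30 bytes InstallSize APK size
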
183 180
184 class _BuildHelper(object): 181 class _BuildHelper(object):
185 """Helper class for generating and building targets.""" 182 """Helper class for generating and building targets."""
186 def __init__(self, args): 183 def __init__(self, args):
187 self.cloud = args.cloud 184 self.cloud = args.cloud
188 self.enable_chrome_android_internal = args.enable_chrome_android_internal 185 self.enable_chrome_android_internal = args.enable_chrome_android_internal
189 self.extra_gn_args_str = '' 186 self.extra_gn_args_str = ''
190 self.max_jobs = args.max_jobs 187 self.max_jobs = args.max_jobs
191 self.max_load_average = args.max_load_average 188 self.max_load_average = args.max_load_average
(...skipping 25 matching lines...)
217 elif 'monochrome' in self.target: 214 elif 'monochrome' in self.target:
218 return 'lib.unstripped/libmonochrome.so' 215 return 'lib.unstripped/libmonochrome.so'
219 else: 216 else:
220 return 'lib.unstripped/libchrome.so' 217 return 'lib.unstripped/libchrome.so'
221 218
222 @property 219 @property
223 def abs_main_lib_path(self): 220 def abs_main_lib_path(self):
224 return os.path.join(self.output_directory, self.main_lib_path) 221 return os.path.join(self.output_directory, self.main_lib_path)
225 222
226 @property 223 @property
224 def download_bucket(self):
225 return _ANDROID_BUCKET if self.IsAndroid() else _LINUX_BUCKET
226
227 @property
228 def download_output_dir(self):
229 return 'out/Release' if self.IsAndroid() else 'full-build-linux'
230
231 @property
227 def map_file_path(self): 232 def map_file_path(self):
228 return self.main_lib_path + '.map.gz' 233 return self.main_lib_path + '.map.gz'
229 234
230 @property 235 @property
231 def size_name(self): 236 def size_name(self):
232 return os.path.splitext(os.path.basename(self.main_lib_path))[0] + '.size' 237 return os.path.splitext(os.path.basename(self.main_lib_path))[0] + '.size'
233 238
234 def _SetDefaults(self): 239 def _SetDefaults(self):
235 has_goma_dir = os.path.exists(os.path.join(os.path.expanduser('~'), 'goma')) 240 has_goma_dir = os.path.exists(os.path.join(os.path.expanduser('~'), 'goma'))
236 self.use_goma = self.use_goma or has_goma_dir 241 self.use_goma = self.use_goma or has_goma_dir
237 self.max_load_average = (self.max_load_average or 242 self.max_load_average = (self.max_load_average or
238 str(multiprocessing.cpu_count())) 243 str(multiprocessing.cpu_count()))
239 if not self.max_jobs: 244 if not self.max_jobs:
240 self.max_jobs = '10000' if self.use_goma else '500' 245 self.max_jobs = '10000' if self.use_goma else '500'
241 246
242 if os.path.exists(os.path.join(os.path.dirname(_SRC_ROOT), 'src-internal')): 247 if os.path.exists(os.path.join(os.path.dirname(_SRC_ROOT), 'src-internal')):
243 self.extra_gn_args_str = ' is_chrome_branded=true' 248 self.extra_gn_args_str = ' is_chrome_branded=true'
244 else: 249 else:
245 self.extra_gn_args_str = (' exclude_unwind_tables=true ' 250 self.extra_gn_args_str = (' exclude_unwind_tables=true '
246 'ffmpeg_branding="Chrome" proprietary_codecs=true') 251 'ffmpeg_branding="Chrome" proprietary_codecs=true')
252 self.target = self.target if self.IsAndroid() else 'chrome'
247 253
248 def _GenGnCmd(self): 254 def _GenGnCmd(self):
249 gn_args = 'is_official_build=true symbol_level=1' 255 gn_args = 'is_official_build=true symbol_level=1'
250 gn_args += ' use_goma=%s' % str(self.use_goma).lower() 256 gn_args += ' use_goma=%s' % str(self.use_goma).lower()
251 gn_args += ' target_os="%s"' % self.target_os 257 gn_args += ' target_os="%s"' % self.target_os
252 gn_args += (' enable_chrome_android_internal=%s' % 258 gn_args += (' enable_chrome_android_internal=%s' %
253 str(self.enable_chrome_android_internal).lower()) 259 str(self.enable_chrome_android_internal).lower())
254 gn_args += self.extra_gn_args_str 260 gn_args += self.extra_gn_args_str
255 return ['gn', 'gen', self.output_directory, '--args=%s' % gn_args] 261 return ['gn', 'gen', self.output_directory, '--args=%s' % gn_args]
256 262
257 def _GenNinjaCmd(self): 263 def _GenNinjaCmd(self):
258 cmd = ['ninja', '-C', self.output_directory] 264 cmd = ['ninja', '-C', self.output_directory]
259 cmd += ['-j', self.max_jobs] if self.max_jobs else [] 265 cmd += ['-j', self.max_jobs] if self.max_jobs else []
260 cmd += ['-l', self.max_load_average] if self.max_load_average else [] 266 cmd += ['-l', self.max_load_average] if self.max_load_average else []
261 cmd += [self.target] 267 cmd += [self.target]
262 return cmd 268 return cmd
263 269
264 def Run(self): 270 def Run(self):
265 """Run GN gen/ninja build and return the process returncode.""" 271 """Run GN gen/ninja build and return the process returncode."""
266 _Print('Building: {}.', self.target) 272 _Print('Building: {}.', self.target)
267 retcode = _RunCmd( 273 retcode = _RunCmd(
268 self._GenGnCmd(), print_stdout=True, exit_on_failure=False)[1] 274 self._GenGnCmd(), print_stdout=True, exit_on_failure=False)[1]
269 if retcode: 275 if retcode:
270 return retcode 276 return retcode
271 return _RunCmd( 277 return _RunCmd(
272 self._GenNinjaCmd(), print_stdout=True, exit_on_failure=False)[1] 278 self._GenNinjaCmd(), print_stdout=True, exit_on_failure=False)[1]
273 279
280 def DownloadUrl(self, rev):
281 return self.download_bucket + 'full-build-linux_%s.zip' % rev
282
274 def IsAndroid(self): 283 def IsAndroid(self):
275 return self.target_os == 'android' 284 return self.target_os == 'android'
276 285
277 def IsLinux(self): 286 def IsLinux(self):
278 return self.target_os == 'linux' 287 return self.target_os == 'linux'
279 288
280 def IsCloud(self): 289 def IsCloud(self):
281 return self.cloud 290 return self.cloud
282 291
283 292
(...skipping 41 matching lines...)
325 self.build = build 334 self.build = build
326 self.build_archives = [_BuildArchive(rev, archive_dir, build, subrepo) 335 self.build_archives = [_BuildArchive(rev, archive_dir, build, subrepo)
327 for rev in revs] 336 for rev in revs]
328 self.diffs = diffs 337 self.diffs = diffs
329 self.subrepo = subrepo 338 self.subrepo = subrepo
330 self._summary_stats = [] 339 self._summary_stats = []
331 340
332 def IterArchives(self): 341 def IterArchives(self):
333 return iter(self.build_archives) 342 return iter(self.build_archives)
334 343
335 def MaybeDiff(self, first_id, second_id): 344 def MaybeDiff(self, before_id, after_id):
336 """Perform diffs given two build archives.""" 345 """Perform diffs given two build archives."""
337 archives = [ 346 before = self.build_archives[before_id]
338 self.build_archives[first_id], self.build_archives[second_id]] 347 after = self.build_archives[after_id]
339 diff_path = self._DiffFilePath(archives) 348 diff_path = self._DiffFilePath(before, after)
340 if not self._CanDiff(archives): 349 if not self._CanDiff(before, after):
341 _Print('Skipping diff for {} due to missing build archives.', diff_path) 350 _Print('Skipping diff for {} due to missing build archives.', diff_path)
342 return 351 return
343 352
344 metadata_path = self._DiffMetadataPath(archives) 353 metadata_path = self._DiffMetadataPath(before, after)
345 metadata = _GenerateMetadata( 354 metadata = _GenerateMetadata(
346 archives, self.build, metadata_path, self.subrepo) 355 [before, after], self.build, metadata_path, self.subrepo)
347 if _MetadataExists(metadata): 356 if _MetadataExists(metadata):
348 _Print('Skipping diff for {} and {}. Matching diff already exists: {}', 357 _Print('Skipping diff for {} and {}. Matching diff already exists: {}',
349 archives[0].rev, archives[1].rev, diff_path) 358 before.rev, after.rev, diff_path)
350 else: 359 else:
351 if os.path.exists(diff_path): 360 if os.path.exists(diff_path):
352 os.remove(diff_path) 361 os.remove(diff_path)
353 archive_dirs = [archives[0].dir, archives[1].dir]
354 with open(diff_path, 'a') as diff_file: 362 with open(diff_path, 'a') as diff_file:
355 for d in self.diffs: 363 for d in self.diffs:
356 d.RunDiff(diff_file, archive_dirs) 364 d.RunDiff(diff_file, before.dir, after.dir)
357 _Print('\nSee detailed diff results here: {}.', diff_path) 365 _Print('\nSee detailed diff results here: {}.', diff_path)
358 _WriteMetadata(metadata) 366 _WriteMetadata(metadata)
359 self._AddDiffSummaryStat(archives) 367 self._AddDiffSummaryStat(before, after)
360 368
361 def Summarize(self): 369 def Summarize(self):
362 if self._summary_stats: 370 if self._summary_stats:
363 path = os.path.join(self.archive_dir, 'last_diff_summary.txt') 371 path = os.path.join(self.archive_dir, 'last_diff_summary.txt')
364 with open(path, 'w') as f: 372 with open(path, 'w') as f:
365 stats = sorted( 373 stats = sorted(
366 self._summary_stats, key=lambda x: x[0].value, reverse=True) 374 self._summary_stats, key=lambda x: x[0].value, reverse=True)
367 _PrintAndWriteToFile(f, '\nDiff Summary') 375 _PrintAndWriteToFile(f, '\nDiff Summary')
368 for s, before, after in stats: 376 for s, before, after in stats:
369 _PrintAndWriteToFile(f, '{:>+10} {} {} for range: {}..{}', 377 _PrintAndWriteToFile(f, '{:>+10} {} {} for range: {}..{}',
370 s.value, s.units, s.name, before, after) 378 s.value, s.units, s.name, before, after)
371 379
372 def _AddDiffSummaryStat(self, archives): 380 def _AddDiffSummaryStat(self, before, after):
373 stat = None 381 stat = None
374 if self.build.IsAndroid(): 382 if self.build.IsAndroid():
375 summary_diff_type = ResourceSizesDiff 383 summary_diff_type = ResourceSizesDiff
376 else: 384 else:
377 summary_diff_type = NativeDiff 385 summary_diff_type = NativeDiff
378 for d in self.diffs: 386 for d in self.diffs:
379 if isinstance(d, summary_diff_type): 387 if isinstance(d, summary_diff_type):
380 stat = d.summary_stat 388 stat = d.summary_stat
381 if stat: 389 if stat:
382 self._summary_stats.append((stat, archives[1].rev, archives[0].rev)) 390 self._summary_stats.append((stat, before.rev, after.rev))
383 391
384 def _CanDiff(self, archives): 392 def _CanDiff(self, before, after):
385 return all(a.Exists() for a in archives) 393 return before.Exists() and after.Exists()
386 394
387 def _DiffFilePath(self, archives): 395 def _DiffFilePath(self, before, after):
388 return os.path.join(self._DiffDir(archives), 'diff_results.txt') 396 return os.path.join(self._DiffDir(before, after), 'diff_results.txt')
389 397
390 def _DiffMetadataPath(self, archives): 398 def _DiffMetadataPath(self, before, after):
391 return os.path.join(self._DiffDir(archives), 'metadata.txt') 399 return os.path.join(self._DiffDir(before, after), 'metadata.txt')
392 400
393 def _DiffDir(self, archives): 401 def _DiffDir(self, before, after):
394 archive_range = '%s..%s' % (archives[1].rev, archives[0].rev) 402 archive_range = '%s..%s' % (before.rev, after.rev)
395 diff_path = os.path.join(self.archive_dir, 'diffs', archive_range) 403 diff_path = os.path.join(self.archive_dir, 'diffs', archive_range)
396 _EnsureDirsExist(diff_path) 404 _EnsureDirsExist(diff_path)
397 return diff_path 405 return diff_path
398 406
399 407
400 def _EnsureDirsExist(path): 408 def _EnsureDirsExist(path):
401 if not os.path.exists(path): 409 if not os.path.exists(path):
402 os.makedirs(path) 410 os.makedirs(path)
403 411
404 412
(...skipping 94 matching lines...)
499 _GitCmd(['checkout', archive.rev], subrepo) 507 _GitCmd(['checkout', archive.rev], subrepo)
500 else: 508 else:
501 # Move to a detached state since gclient sync doesn't work with local 509 # Move to a detached state since gclient sync doesn't work with local
502 # commits on a branch. 510 # commits on a branch.
503 _GitCmd(['checkout', '--detach'], subrepo) 511 _GitCmd(['checkout', '--detach'], subrepo)
504 _GclientSyncCmd(archive.rev, subrepo) 512 _GclientSyncCmd(archive.rev, subrepo)
505 retcode = build.Run() 513 retcode = build.Run()
506 return retcode == 0 514 return retcode == 0
507 515
508 516
509 def _GenerateRevList(with_patch, without_patch, all_in_range, subrepo): 517 def _GenerateRevList(rev, reference_rev, all_in_range, subrepo):
510 """Normalize and optionally generate a list of commits in the given range. 518 """Normalize and optionally generate a list of commits in the given range.
511 519
512 Returns a list of revisions ordered from newest to oldest. 520 Returns:
521 A list of revisions ordered from oldest to newest.
513 """ 522 """
514 cmd = ['git', '-C', subrepo, 'merge-base', '--is-ancestor', without_patch, 523 rev_seq = '%s^..%s' % (reference_rev, rev)
515 with_patch]
516 _, retcode = _RunCmd(cmd, exit_on_failure=False)
517 assert not retcode and with_patch != without_patch, (
518 'Invalid revision arguments, rev_without_patch (%s) is newer than '
519 'rev_with_patch (%s)' % (without_patch, with_patch))
520
521 rev_seq = '%s^..%s' % (without_patch, with_patch)
522 stdout = _GitCmd(['rev-list', rev_seq], subrepo) 524 stdout = _GitCmd(['rev-list', rev_seq], subrepo)
523 all_revs = stdout.splitlines() 525 all_revs = stdout.splitlines()[::-1]
524 if all_in_range: 526 if all_in_range:
525 revs = all_revs 527 revs = all_revs
526 else: 528 else:
527 revs = [all_revs[0], all_revs[-1]] 529 revs = [all_revs[0], all_revs[-1]]
528 _VerifyUserAckCommitCount(len(revs)) 530 if len(revs) >= _COMMIT_COUNT_WARN_THRESHOLD:
531 _VerifyUserAccepts(
532 'You\'ve provided a commit range that contains %d commits' % len(revs))
529 return revs 533 return revs
530 534
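
A minimal sketch of the ordering convention used above: git rev-list prints commits newest first, so the [::-1] reversal yields an oldest-to-newest list, and when --all is not passed only the endpoints are kept. The hashes are made up for illustration.

rev_list_stdout = 'c3\nc2\nc1\n'    # git rev-list output: newest commit first
all_revs = rev_list_stdout.splitlines()[::-1]
print(all_revs)                     # ['c1', 'c2', 'c3'] -- oldest to newest
print([all_revs[0], all_revs[-1]])  # ['c1', 'c3'] -- endpoints when --all is not passed
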
531 535
532 def _VerifyUserAckCommitCount(count): 536 def _ValidateRevs(rev, reference_rev, subrepo):
533 if count >= _COMMIT_COUNT_WARN_THRESHOLD: 537 def git_fatal(args, message):
534 _Print('You\'ve provided a commit range that contains {} commits, do you ' 538 devnull = open(os.devnull, 'wb')
535 'want to proceed? [y/n]', count) 539 retcode = subprocess.call(
536 if raw_input('> ').lower() != 'y': 540 ['git', '-C', subrepo] + args, stdout=devnull, stderr=subprocess.STDOUT)
537 _global_restore_checkout_func() 541 if retcode:
538 sys.exit(1) 542 _Die(message)
543
544 if rev == reference_rev:
545 _Die('rev and reference-rev cannot be equal')
546 no_obj_message = ('%s either doesn\'t exist or your local repo is out of '
547 'date, try "git fetch origin master"')
548 git_fatal(['cat-file', '-e', rev], no_obj_message % rev)
549 git_fatal(['cat-file', '-e', reference_rev], no_obj_message % reference_rev)
550 git_fatal(['merge-base', '--is-ancestor', reference_rev, rev],
551 'reference-rev is newer than rev')
552 return rev, reference_rev
553
554
555 def _VerifyUserAccepts(message):
556 _Print(message + 'Do you want to proceed? [y/n]')
557 if raw_input('> ').lower() != 'y':
558 _global_restore_checkout_func()
559 sys.exit()
539 560
540 561
541 def _EnsureDirectoryClean(subrepo): 562 def _EnsureDirectoryClean(subrepo):
542 _Print('Checking source directory') 563 _Print('Checking source directory')
543 stdout = _GitCmd(['status', '--porcelain'], subrepo) 564 stdout = _GitCmd(['status', '--porcelain'], subrepo)
544 # Ignore untracked files. 565 # Ignore untracked files.
545 if stdout and stdout[:2] != '??': 566 if stdout and stdout[:2] != '??':
546 _Print('Failure: please ensure working directory is clean.') 567 _Print('Failure: please ensure working directory is clean.')
547 sys.exit() 568 sys.exit()
548 569
(...skipping 17 matching lines...)
566 587
567 download_dir = tempfile.mkdtemp(dir=_SRC_ROOT) 588 download_dir = tempfile.mkdtemp(dir=_SRC_ROOT)
568 try: 589 try:
569 _DownloadAndArchive( 590 _DownloadAndArchive(
570 gsutil_path, archive, download_dir, build, supersize_path) 591 gsutil_path, archive, download_dir, build, supersize_path)
571 finally: 592 finally:
572 shutil.rmtree(download_dir) 593 shutil.rmtree(download_dir)
573 594
574 595
575 def _DownloadAndArchive(gsutil_path, archive, dl_dir, build, supersize_path): 596 def _DownloadAndArchive(gsutil_path, archive, dl_dir, build, supersize_path):
576 dl_file = 'full-build-linux_%s.zip' % archive.rev 597 dl_dst = os.path.join(dl_dir, archive.rev)
577 dl_url = 'gs://chrome-perf/Android Builder/%s' % dl_file
578 dl_dst = os.path.join(dl_dir, dl_file)
579 _Print('Downloading build artifacts for {}', archive.rev) 598 _Print('Downloading build artifacts for {}', archive.rev)
580 # gsutil writes stdout and stderr to stderr, so pipe stdout and stderr to 599 # gsutil writes stdout and stderr to stderr, so pipe stdout and stderr to
581 # sys.stdout. 600 # sys.stdout.
582 retcode = subprocess.call([gsutil_path, 'cp', dl_url, dl_dir], 601 retcode = subprocess.call(
583 stdout=sys.stdout, stderr=subprocess.STDOUT) 602 [gsutil_path, 'cp', build.DownloadUrl(archive.rev), dl_dst],
603 stdout=sys.stdout, stderr=subprocess.STDOUT)
584 if retcode: 604 if retcode:
585 _Die('unexpected error while downloading {}. It may no longer exist on ' 605 _Die('unexpected error while downloading {}. It may no longer exist on '
586 'the server or it may not have been uploaded yet (check {}). ' 606 'the server or it may not have been uploaded yet (check {}). '
587 'Otherwise, you may not have the correct access permissions.', 607 'Otherwise, you may not have the correct access permissions.',
588 dl_url, _BUILDER_URL) 608 build.DownloadUrl(archive.rev), _BUILDER_URL)
589 609
590 # Files needed for supersize and resource_sizes. Paths relative to out dir. 610 # Files needed for supersize and resource_sizes. Paths relative to out dir.
591 to_extract = [build.main_lib_path, build.map_file_path, 'args.gn', 611 to_extract = [build.main_lib_path, build.map_file_path, 'args.gn']
592 'build_vars.txt', build.apk_path] 612 if build.IsAndroid():
593 extract_dir = os.path.join(os.path.splitext(dl_dst)[0], 'unzipped') 613 to_extract += ['build_vars.txt', build.apk_path]
614 extract_dir = dl_dst + '_' + 'unzipped'
594 # Storage bucket stores entire output directory including out/Release prefix. 615 # Storage bucket stores entire output directory including out/Release prefix.
595 _Print('Extracting build artifacts') 616 _Print('Extracting build artifacts')
596 with zipfile.ZipFile(dl_dst, 'r') as z: 617 with zipfile.ZipFile(dl_dst, 'r') as z:
597 _ExtractFiles(to_extract, _CLOUD_OUT_DIR, extract_dir, z) 618 _ExtractFiles(to_extract, build.download_output_dir, extract_dir, z)
598 dl_out = os.path.join(extract_dir, _CLOUD_OUT_DIR) 619 dl_out = os.path.join(extract_dir, build.download_output_dir)
599 build.output_directory, output_directory = dl_out, build.output_directory 620 build.output_directory, output_directory = dl_out, build.output_directory
600 archive.ArchiveBuildResults(supersize_path) 621 archive.ArchiveBuildResults(supersize_path)
601 build.output_directory = output_directory 622 build.output_directory = output_directory
602 623
603 624
604 def _ExtractFiles(to_extract, prefix, dst, z): 625 def _ExtractFiles(to_extract, prefix, dst, z):
605 zip_infos = z.infolist() 626 zip_infos = z.infolist()
606 assert all(info.filename.startswith(prefix) for info in zip_infos), ( 627 assert all(info.filename.startswith(prefix) for info in zip_infos), (
607 'Storage bucket folder structure doesn\'t start with %s' % prefix) 628 'Storage bucket folder structure doesn\'t start with %s' % prefix)
608 to_extract = [os.path.join(prefix, f) for f in to_extract] 629 to_extract = [os.path.join(prefix, f) for f in to_extract]
(...skipping 84 matching lines...)
693 choices=['android', 'linux'], 714 choices=['android', 'linux'],
694 help='target_os gn arg. Default: android.') 715 help='target_os gn arg. Default: android.')
695 build_group.add_argument('--output-directory', 716 build_group.add_argument('--output-directory',
696 default=_DEFAULT_OUT_DIR, 717 default=_DEFAULT_OUT_DIR,
697 help='ninja output directory. ' 718 help='ninja output directory. '
698 'Default: %s.' % _DEFAULT_OUT_DIR) 719 'Default: %s.' % _DEFAULT_OUT_DIR)
699 build_group.add_argument('--enable-chrome-android-internal', 720 build_group.add_argument('--enable-chrome-android-internal',
700 action='store_true', 721 action='store_true',
701 help='Allow downstream targets to be built.') 722 help='Allow downstream targets to be built.')
702 build_group.add_argument('--target', 723 build_group.add_argument('--target',
703 default=_DEFAULT_TARGET, 724 default=_DEFAULT_ANDROID_TARGET,
704 help='GN APK target to build. ' 725 help='GN APK target to build. Ignored for Linux. '
705 'Default %s.' % _DEFAULT_TARGET) 726 'Default %s.' % _DEFAULT_ANDROID_TARGET)
706 if len(sys.argv) == 1: 727 if len(sys.argv) == 1:
707 parser.print_help() 728 parser.print_help()
708 sys.exit() 729 sys.exit()
709 args = parser.parse_args() 730 args = parser.parse_args()
710 build = _BuildHelper(args) 731 build = _BuildHelper(args)
711 if build.IsCloud(): 732 if build.IsCloud() and args.subrepo:
712 if build.IsLinux():
713 parser.error('--cloud only works for android')
714 if args.subrepo:
715 parser.error('--subrepo doesn\'t work with --cloud') 733 parser.error('--subrepo doesn\'t work with --cloud')
716 734
717 subrepo = args.subrepo or _SRC_ROOT 735 subrepo = args.subrepo or _SRC_ROOT
718 _EnsureDirectoryClean(subrepo) 736 _EnsureDirectoryClean(subrepo)
719 _SetRestoreFunc(subrepo) 737 _SetRestoreFunc(subrepo)
720 revs = _GenerateRevList(args.rev, 738 if build.IsLinux():
721 args.reference_rev or args.rev + '^', 739 _VerifyUserAccepts('Linux diffs are less useful than Android diffs, '
agrieve 2017/05/02 00:16:52 nit: Worth being more specific here I think. Maybe
estevenson 2017/05/02 16:00:10 Done.
722 args.all, 740 'you should probably be using android.')
723 subrepo) 741
742 rev, reference_rev = _ValidateRevs(
743 args.rev, args.reference_rev or args.rev + '^', subrepo)
744 revs = _GenerateRevList(rev, reference_rev, args.all, subrepo)
724 with _TmpCopyBinarySizeDir() as supersize_path: 745 with _TmpCopyBinarySizeDir() as supersize_path:
725 diffs = [NativeDiff(build.size_name, supersize_path)] 746 diffs = [NativeDiff(build.size_name, supersize_path)]
726 if build.IsAndroid(): 747 if build.IsAndroid():
727 diffs += [ 748 diffs += [
728 ResourceSizesDiff( 749 ResourceSizesDiff(
729 build.apk_name, slow_options=args.include_slow_options) 750 build.apk_name, slow_options=args.include_slow_options)
730 ] 751 ]
731 diff_mngr = _DiffArchiveManager( 752 diff_mngr = _DiffArchiveManager(
732 revs, args.archive_directory, diffs, build, subrepo) 753 revs, args.archive_directory, diffs, build, subrepo)
733 consecutive_failures = 0 754 consecutive_failures = 0
(...skipping 19 matching lines...)
753 if i != 0: 774 if i != 0:
754 diff_mngr.MaybeDiff(i - 1, i) 775 diff_mngr.MaybeDiff(i - 1, i)
755 776
756 diff_mngr.Summarize() 777 diff_mngr.Summarize()
757 778
758 _global_restore_checkout_func() 779 _global_restore_checkout_func()
759 780
760 if __name__ == '__main__': 781 if __name__ == '__main__':
761 sys.exit(main()) 782 sys.exit(main())
762 783