Chromium Code Reviews

Unified Diff: tools/binary_size/diagnose_bloat.py

Issue 2857073002: diagnose_bloat.py: update logging and general clean up. (Closed)
Patch Set: --silent help, metadata and step string. Created 3 years, 7 months ago.
 #!/usr/bin/env python
 # Copyright 2017 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 """Tool for finding the cause of APK bloat.
 
 Run diagnose_apk_bloat.py -h for detailed usage help.
 """
 
+import atexit
 import argparse
 import collections
 from contextlib import contextmanager
 import distutils.spawn
 import json
+import logging
 import multiprocessing
 import os
 import re
 import shutil
 import subprocess
 import sys
 import tempfile
 import zipfile
 
 _COMMIT_COUNT_WARN_THRESHOLD = 15
 _ALLOWED_CONSECUTIVE_FAILURES = 2
 _DIFF_DETAILS_LINES_THRESHOLD = 100
 _BUILDER_URL = \
     'https://build.chromium.org/p/chromium.perf/builders/Android%20Builder'
 _SRC_ROOT = os.path.abspath(
     os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
 _DEFAULT_ARCHIVE_DIR = os.path.join(_SRC_ROOT, 'binary-size-bloat')
 _DEFAULT_OUT_DIR = os.path.join(_SRC_ROOT, 'out', 'diagnose-apk-bloat')
 _DEFAULT_ANDROID_TARGET = 'monochrome_public_apk'
 
-_global_restore_checkout_func = None
-
-
-def _SetRestoreFunc(subrepo):
-  branch = _GitCmd(['rev-parse', '--abbrev-ref', 'HEAD'], subrepo)
-  global _global_restore_checkout_func
-  _global_restore_checkout_func = lambda: _GitCmd(['checkout', branch], subrepo)
-
 
 _DiffResult = collections.namedtuple('DiffResult', ['name', 'value', 'units'])
 
 
 class BaseDiff(object):
   """Base class capturing binary size diffs."""
   def __init__(self, name):
     self.name = name
     self.banner = '\n' + '*' * 30 + name + '*' * 30
 
(...skipping 15 matching lines...)
 
   def DetailedResults(self):
     """An iterable description of the cause of binary size bloat."""
     raise NotImplementedError()
 
   def ProduceDiff(self, before_dir, after_dir):
     """Prepare a binary size diff with ready to print results."""
     raise NotImplementedError()
 
   def RunDiff(self, logfile, before_dir, after_dir):
+    logging.info('Creating: %s', self.name)
     self.ProduceDiff(before_dir, after_dir)
     self.AppendResults(logfile)
 
 
 class NativeDiff(BaseDiff):
   _RE_SUMMARY = re.compile(
       r'.*(Section Sizes .*? object files added, \d+ removed).*',
       flags=re.DOTALL)
   _RE_SUMMARY_STAT = re.compile(
       r'Section Sizes \(Total=(?P<value>\d+) (?P<units>\w+)\)')
(...skipping 54 matching lines...)
   def ProduceDiff(self, before_dir, after_dir):
     before = self._RunResourceSizes(before_dir)
     after = self._RunResourceSizes(after_dir)
     diff = []
     for section, section_dict in after.iteritems():
       for subsection, v in section_dict.iteritems():
         # Ignore entries when resource_sizes.py chartjson format has changed.
         if (section not in before or
             subsection not in before[section] or
             v['units'] != before[section][subsection]['units']):
-          _Print('Found differing dict structures for resource_sizes.py, '
-                 'skipping {} {}', section, subsection)
+          logging.warning(
+              'Found differing dict structures for resource_sizes.py, '
+              'skipping %s %s', section, subsection)
         else:
           diff.append(
               _DiffResult(
                   '%s %s' % (section, subsection),
                   v['value'] - before[section][subsection]['value'],
                   v['units']))
     self._diff = sorted(diff, key=lambda x: abs(x.value), reverse=True)
 
   def _RunResourceSizes(self, archive_dir):
     apk_path = os.path.join(archive_dir, self._apk_name)
(...skipping 94 matching lines...)
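
The chartjson walk in ProduceDiff() is easy to exercise in isolation. A minimal sketch with hypothetical section/subsection names (not real resource_sizes.py output), showing the structure-mismatch guard and the sort by absolute delta:

import collections

_DiffResult = collections.namedtuple('DiffResult', ['name', 'value', 'units'])

# Hypothetical before/after dicts in the same {section: {subsection:
# {'value': ..., 'units': ...}}} shape that ProduceDiff() iterates over.
before = {'Specifics': {'apk size': {'value': 100, 'units': 'bytes'}}}
after = {'Specifics': {'apk size': {'value': 150, 'units': 'bytes'},
                       'new entry': {'value': 5, 'units': 'bytes'}}}

diff = []
for section, section_dict in after.iteritems():
  for subsection, v in section_dict.iteritems():
    if (section not in before or
        subsection not in before[section] or
        v['units'] != before[section][subsection]['units']):
      continue  # The real code logs a warning here and skips the entry.
    diff.append(_DiffResult('%s %s' % (section, subsection),
                            v['value'] - before[section][subsection]['value'],
                            v['units']))

# Largest absolute change first, as in the sorted() call above.
diff.sort(key=lambda x: abs(x.value), reverse=True)
assert diff == [_DiffResult('Specifics apk size', 50, 'bytes')]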
 
   def _GenNinjaCmd(self):
     cmd = ['ninja', '-C', self.output_directory]
     cmd += ['-j', self.max_jobs] if self.max_jobs else []
     cmd += ['-l', self.max_load_average] if self.max_load_average else []
     cmd += [self.target]
     return cmd
 
   def Run(self):
     """Run GN gen/ninja build and return the process returncode."""
-    _Print('Building: {}.', self.target)
+    logging.info('Building: %s.', self.target)
     retcode = _RunCmd(
-        self._GenGnCmd(), print_stdout=True, exit_on_failure=False)[1]
+        self._GenGnCmd(), verbose=True, exit_on_failure=False)[1]
     if retcode:
       return retcode
     return _RunCmd(
-        self._GenNinjaCmd(), print_stdout=True, exit_on_failure=False)[1]
+        self._GenNinjaCmd(), verbose=True, exit_on_failure=False)[1]
 
   def DownloadUrl(self, rev):
     return self.download_bucket + 'full-build-linux_%s.zip' % rev
 
   def IsAndroid(self):
     return self.target_os == 'android'
 
   def IsLinux(self):
     return self.target_os == 'linux'
 
   def IsCloud(self):
     return self.cloud
 
 
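For reference, the conditional list concatenation in _GenNinjaCmd() composes commands like the following; the values here are hypothetical stand-ins for the parsed arguments:

# Hypothetical argument values; the real ones come from _BuildHelper.
output_directory = 'out/diagnose-apk-bloat'
target = 'monochrome_public_apk'
max_jobs = '10'
max_load_average = None

cmd = ['ninja', '-C', output_directory]
cmd += ['-j', max_jobs] if max_jobs else []
cmd += ['-l', max_load_average] if max_load_average else []  # skipped: None
cmd += [target]
assert cmd == ['ninja', '-C', 'out/diagnose-apk-bloat', '-j', '10',
               'monochrome_public_apk']
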
 class _BuildArchive(object):
   """Class for managing a directory with build results and build metadata."""
   def __init__(self, rev, base_archive_dir, build, subrepo):
     self.build = build
     self.dir = os.path.join(base_archive_dir, rev)
     metadata_path = os.path.join(self.dir, 'metadata.txt')
     self.rev = rev
-    self.metadata = _GenerateMetadata([self], build, metadata_path, subrepo)
+    self.metadata = _Metadata([self], build, metadata_path, subrepo)
 
   def ArchiveBuildResults(self, supersize_path):
     """Save build artifacts necessary for diffing."""
-    _Print('Saving build results to: {}', self.dir)
+    logging.info('Saving build results to: %s', self.dir)
     _EnsureDirsExist(self.dir)
     build = self.build
     self._ArchiveFile(build.abs_main_lib_path)
     tool_prefix = _FindToolPrefix(build.output_directory)
     size_path = os.path.join(self.dir, build.size_name)
     supersize_cmd = [supersize_path, 'archive', size_path, '--elf-file',
                      build.abs_main_lib_path, '--tool-prefix', tool_prefix,
                      '--output-directory', build.output_directory,
                      '--no-source-paths']
     if build.IsAndroid():
       supersize_cmd += ['--apk-file', build.abs_apk_path]
       self._ArchiveFile(build.abs_apk_path)
 
+    logging.info('Creating .size file')
     _RunCmd(supersize_cmd)
-    _WriteMetadata(self.metadata)
+    self.metadata.Write()
 
   def Exists(self):
-    return _MetadataExists(self.metadata)
+    return self.metadata.Exists()
 
   def _ArchiveFile(self, filename):
     if not os.path.exists(filename):
-      _Die('missing expected file: {}', filename)
+      _Die('missing expected file: %s', filename)
     shutil.copy(filename, self.dir)
 
 
 class _DiffArchiveManager(object):
   """Class for maintaining BuildArchives and their related diff artifacts."""
   def __init__(self, revs, archive_dir, diffs, build, subrepo):
     self.archive_dir = archive_dir
     self.build = build
     self.build_archives = [_BuildArchive(rev, archive_dir, build, subrepo)
                            for rev in revs]
     self.diffs = diffs
     self.subrepo = subrepo
     self._summary_stats = []
 
   def IterArchives(self):
     return iter(self.build_archives)
 
   def MaybeDiff(self, before_id, after_id):
     """Perform diffs given two build archives."""
     before = self.build_archives[before_id]
     after = self.build_archives[after_id]
     diff_path = self._DiffFilePath(before, after)
     if not self._CanDiff(before, after):
-      _Print('Skipping diff for {} due to missing build archives.', diff_path)
+      logging.info(
+          'Skipping diff for %s due to missing build archives.', diff_path)
       return
 
     metadata_path = self._DiffMetadataPath(before, after)
-    metadata = _GenerateMetadata(
+    metadata = _Metadata(
         [before, after], self.build, metadata_path, self.subrepo)
-    if _MetadataExists(metadata):
-      _Print('Skipping diff for {} and {}. Matching diff already exists: {}',
-             before.rev, after.rev, diff_path)
+    if metadata.Exists():
+      logging.info(
+          'Skipping diff for %s and %s. Matching diff already exists: %s',
+          before.rev, after.rev, diff_path)
     else:
       if os.path.exists(diff_path):
         os.remove(diff_path)
       with open(diff_path, 'a') as diff_file:
         for d in self.diffs:
           d.RunDiff(diff_file, before.dir, after.dir)
-      _Print('\nSee detailed diff results here: {}.', diff_path)
-      _WriteMetadata(metadata)
+      logging.info('See detailed diff results here: %s.', diff_path)
+      metadata.Write()
       self._AddDiffSummaryStat(before, after)
 
   def Summarize(self):
     if self._summary_stats:
       path = os.path.join(self.archive_dir, 'last_diff_summary.txt')
       with open(path, 'w') as f:
         stats = sorted(
             self._summary_stats, key=lambda x: x[0].value, reverse=True)
         _PrintAndWriteToFile(f, '\nDiff Summary')
         for s, before, after in stats:
(...skipping 21 matching lines...)
   def _DiffMetadataPath(self, before, after):
     return os.path.join(self._DiffDir(before, after), 'metadata.txt')
 
   def _DiffDir(self, before, after):
     archive_range = '%s..%s' % (before.rev, after.rev)
     diff_path = os.path.join(self.archive_dir, 'diffs', archive_range)
     _EnsureDirsExist(diff_path)
     return diff_path
 
 
+class _Metadata(object):
+
+  def __init__(self, archives, build, path, subrepo):
+    self.is_cloud = build.IsCloud()
+    self.data = {
+      'revs': [a.rev for a in archives],
+      'archive_dirs': [a.dir for a in archives],
+      'target': build.target,
+      'target_os': build.target_os,
+      'is_cloud': build.IsCloud(),
+      'subrepo': subrepo,
+      'path': path,
+      'gn_args': {
+        'extra_gn_args_str': build.extra_gn_args_str,
+        'enable_chrome_android_internal': build.enable_chrome_android_internal,
+      }
+    }
+
+  def Exists(self):
+    old_metadata = {}
+    path = self.data['path']
+    if os.path.exists(path):
+      with open(path, 'r') as f:
+        old_metadata = json.load(f)
+      # For local builds, all keys need to be the same. Differing GN args will
+      # make diffs noisy and inaccurate. GN args do not matter for --cloud
+      # since we download prebuilt build artifacts.
+      keys = self.data.keys()
+      if self.is_cloud:
+        keys.remove('gn_args')
+      return all(v == old_metadata.get(k)
+                 for k, v in self.data.iteritems() if k in keys)
+
+  def Write(self):
+    with open(self.data['path'], 'w') as f:
+      json.dump(self.data, f)
+
+
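A minimal sketch of the comparison Exists() performs, using hypothetical metadata values. For a --cloud archive, a stored file that differs only in gn_args still counts as a match, since the artifacts were downloaded rather than built locally:

import json
import os
import tempfile

data = {'revs': ['abc123'], 'target': 'monochrome_public_apk',
        'gn_args': {'extra_gn_args_str': ''}}
is_cloud = True

fd, path = tempfile.mkstemp()
with os.fdopen(fd, 'w') as f:
  # Stored metadata with different GN args than the current run.
  json.dump(dict(data, gn_args={'extra_gn_args_str': 'is_debug=false'}), f)

with open(path, 'r') as f:
  old_metadata = json.load(f)
keys = data.keys()
if is_cloud:
  keys.remove('gn_args')  # Prebuilt artifacts: local GN args are irrelevant.
assert all(v == old_metadata.get(k)
           for k, v in data.iteritems() if k in keys)
os.remove(path)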
 def _EnsureDirsExist(path):
   if not os.path.exists(path):
     os.makedirs(path)
 
 
-def _GenerateMetadata(archives, build, path, subrepo):
-  return {
-    'revs': [a.rev for a in archives],
-    'archive_dirs': [a.dir for a in archives],
-    'target': build.target,
-    'target_os': build.target_os,
-    'is_cloud': build.IsCloud(),
-    'subrepo': subrepo,
-    'path': path,
-    'gn_args': {
-      'extra_gn_args_str': build.extra_gn_args_str,
-      'enable_chrome_android_internal': build.enable_chrome_android_internal,
-    }
-  }
-
-
-def _WriteMetadata(metadata):
-  with open(metadata['path'], 'w') as f:
-    json.dump(metadata, f)
-
-
-def _MetadataExists(metadata):
-  old_metadata = {}
-  path = metadata['path']
-  if os.path.exists(path):
-    with open(path, 'r') as f:
-      old_metadata = json.load(f)
-    ret = len(metadata) == len(old_metadata)
-    ret &= all(v == old_metadata[k]
-               for k, v in metadata.items() if k != 'gn_args')
-    # GN args don't matter when artifacts are downloaded. For local builds
-    # they need to be the same so that diffs are accurate (differing GN args
-    # will change the final APK/native library).
-    if not metadata['is_cloud']:
-      ret &= metadata['gn_args'] == old_metadata['gn_args']
-    return ret
-  return False
-
-
-def _RunCmd(cmd, print_stdout=False, exit_on_failure=True):
+def _RunCmd(cmd, verbose=False, exit_on_failure=True):
   """Convenience function for running commands.
 
   Args:
     cmd: the command to run.
-    print_stdout: if this is True, then the stdout of the process will be
-      printed instead of returned.
+    verbose: if this is True, then the stdout and stderr of the process will
+      be printed. If False, stdout will be returned.
     exit_on_failure: die if an error occurs when this is True.
 
   Returns:
     Tuple of (process stdout, process returncode).
   """
+  assert not (verbose and exit_on_failure)
   cmd_str = ' '.join(c for c in cmd)
-  _Print('Running: {}', cmd_str)
-  proc_stdout = sys.stdout if print_stdout else subprocess.PIPE
+  logging.debug('Running: %s', cmd_str)
+  proc_stdout = proc_stderr = subprocess.PIPE
+  if verbose and logging.getLogger().getEffectiveLevel() < logging.INFO:
+    proc_stdout, proc_stderr = sys.stdout, subprocess.STDOUT
 
-  proc = subprocess.Popen(cmd, stdout=proc_stdout, stderr=subprocess.PIPE)
+  proc = subprocess.Popen(cmd, stdout=proc_stdout, stderr=proc_stderr)
   stdout, stderr = proc.communicate()
 
   if proc.returncode and exit_on_failure:
-    _Die('command failed: {}\nstderr:\n{}', cmd_str, stderr)
+    _Die('command failed: %s\nstderr:\n%s', cmd_str, stderr)
 
   stdout = stdout.strip() if stdout else ''
   return stdout, proc.returncode
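
The stream routing above can be seen in a condensed, standalone form below. This is a toy re-implementation (the _Die() handling is omitted), not the helper itself, and it assumes a POSIX `echo`. With verbose=True and the root logger at DEBUG, output goes straight to the console and nothing is captured:

import logging
import subprocess
import sys

def run_cmd(cmd, verbose=False):
  # Same routing as _RunCmd(): pipe by default, stream when verbose and
  # the effective log level is below INFO (i.e. --silent was not passed).
  proc_stdout = proc_stderr = subprocess.PIPE
  if verbose and logging.getLogger().getEffectiveLevel() < logging.INFO:
    proc_stdout, proc_stderr = sys.stdout, subprocess.STDOUT
  proc = subprocess.Popen(cmd, stdout=proc_stdout, stderr=proc_stderr)
  stdout, _ = proc.communicate()
  return (stdout or '').strip(), proc.returncode

logging.getLogger().setLevel(logging.DEBUG)
out, rc = run_cmd(['echo', 'captured'])                # piped and returned
assert (out, rc) == ('captured', 0)
out, rc = run_cmd(['echo', 'streamed'], verbose=True)  # printed, not captured
assert (out, rc) == ('', 0)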
 
 
 def _GitCmd(args, subrepo):
   return _RunCmd(['git', '-C', subrepo] + args)[0]
 
 
 def _GclientSyncCmd(rev, subrepo):
   cwd = os.getcwd()
   os.chdir(subrepo)
-  _RunCmd(['gclient', 'sync', '-r', 'src@' + rev], print_stdout=True)
+  _, retcode = _RunCmd(['gclient', 'sync', '-r', 'src@' + rev],
+                       verbose=True, exit_on_failure=False)
   os.chdir(cwd)
+  return retcode
 
 
 def _FindToolPrefix(output_directory):
   build_vars_path = os.path.join(output_directory, 'build_vars.txt')
   if os.path.exists(build_vars_path):
     with open(build_vars_path) as f:
       build_vars = dict(l.rstrip().split('=', 1) for l in f if '=' in l)
     # Tool prefix is relative to output dir, rebase to source root.
     tool_prefix = build_vars['android_tool_prefix']
     while os.path.sep in tool_prefix:
       rebased_tool_prefix = os.path.join(_SRC_ROOT, tool_prefix)
       if os.path.exists(rebased_tool_prefix + 'readelf'):
         return rebased_tool_prefix
       tool_prefix = tool_prefix[tool_prefix.find(os.path.sep) + 1:]
   return ''
 
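
The while loop strips one leading path component per iteration, re-rooting the recorded prefix at _SRC_ROOT until '<prefix>readelf' exists. A sketch of the candidates it tries, with a made-up prefix and source root (POSIX paths assumed):

import os

tool_prefix = 'a/b/toolchain/arm-linux-androideabi-'  # hypothetical value
src_root = '/src'

candidates = []
while os.path.sep in tool_prefix:
  candidates.append(os.path.join(src_root, tool_prefix))
  tool_prefix = tool_prefix[tool_prefix.find(os.path.sep) + 1:]

# Each pass drops one leading component and retries relative to src_root.
assert candidates == ['/src/a/b/toolchain/arm-linux-androideabi-',
                      '/src/b/toolchain/arm-linux-androideabi-',
                      '/src/toolchain/arm-linux-androideabi-']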
 
 def _SyncAndBuild(archive, build, subrepo):
+  """Sync, build and return non 0 if any commands failed."""
   # Simply do a checkout if subrepo is used.
   if subrepo != _SRC_ROOT:
     _GitCmd(['checkout', archive.rev], subrepo)
+    return build.Run()
   else:
     # Move to a detached state since gclient sync doesn't work with local
     # commits on a branch.
     _GitCmd(['checkout', '--detach'], subrepo)
-    _GclientSyncCmd(archive.rev, subrepo)
-  retcode = build.Run()
-  return retcode == 0
+    logging.info('Syncing to %s', archive.rev)
+    return _GclientSyncCmd(archive.rev, subrepo) or build.Run()
 
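The single return works because Python's `or` short-circuits on the first nonzero returncode: a failed sync is reported immediately and the build never starts. A toy illustration:

def fake_sync():
  return 2    # nonzero: gclient sync failed

def fake_build():
  raise AssertionError('should not run when sync fails')

assert (fake_sync() or fake_build()) == 2  # build short-circuited away
assert (0 or 1) == 1                       # sync ok, build failed
assert (0 or 0) == 0                       # both succeeded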
 
 def _GenerateRevList(rev, reference_rev, all_in_range, subrepo):
   """Normalize and optionally generate a list of commits in the given range.
 
   Returns:
     A list of revisions ordered from oldest to newest.
   """
   rev_seq = '%s^..%s' % (reference_rev, rev)
   stdout = _GitCmd(['rev-list', rev_seq], subrepo)
(...skipping 21 matching lines...)
   no_obj_message = ('%s either doesn\'t exist or your local repo is out of '
                     'date, try "git fetch origin master"')
   git_fatal(['cat-file', '-e', rev], no_obj_message % rev)
   git_fatal(['cat-file', '-e', reference_rev], no_obj_message % reference_rev)
   git_fatal(['merge-base', '--is-ancestor', reference_rev, rev],
             'reference-rev is newer than rev')
   return rev, reference_rev
 
 
 def _VerifyUserAccepts(message):
-  _Print(message + 'Do you want to proceed? [y/n]')
+  print message + ' Do you want to proceed? [y/n]'
   if raw_input('> ').lower() != 'y':
-    _global_restore_checkout_func()
     sys.exit()
 
 
 def _EnsureDirectoryClean(subrepo):
-  _Print('Checking source directory')
+  logging.info('Checking source directory')
   stdout = _GitCmd(['status', '--porcelain'], subrepo)
   # Ignore untracked files.
   if stdout and stdout[:2] != '??':
-    _Print('Failure: please ensure working directory is clean.')
+    logging.error('Failure: please ensure working directory is clean.')
     sys.exit()
 
 
-def _Die(s, *args, **kwargs):
-  _Print('Failure: ' + s, *args, **kwargs)
-  _global_restore_checkout_func()
+def _Die(s, *args):
+  logging.error('Failure: ' + s, *args)
   sys.exit(1)
 
 
 def _DownloadBuildArtifacts(archive, build, supersize_path, depot_tools_path):
   """Download artifacts from arm32 chromium perf builder."""
   if depot_tools_path:
     gsutil_path = os.path.join(depot_tools_path, 'gsutil.py')
   else:
     gsutil_path = distutils.spawn.find_executable('gsutil.py')
 
   if not gsutil_path:
     _Die('gsutil.py not found, please provide path to depot_tools via '
          '--depot-tools-path or add it to your PATH')
 
   download_dir = tempfile.mkdtemp(dir=_SRC_ROOT)
   try:
     _DownloadAndArchive(
         gsutil_path, archive, download_dir, build, supersize_path)
   finally:
     shutil.rmtree(download_dir)
 
 
 def _DownloadAndArchive(gsutil_path, archive, dl_dir, build, supersize_path):
   dl_dst = os.path.join(dl_dir, archive.rev)
-  _Print('Downloading build artifacts for {}', archive.rev)
+  logging.info('Downloading build artifacts for %s', archive.rev)
   # gsutil writes stdout and stderr to stderr, so pipe stdout and stderr to
   # sys.stdout.
   retcode = subprocess.call(
       [gsutil_path, 'cp', build.DownloadUrl(archive.rev), dl_dst],
       stdout=sys.stdout, stderr=subprocess.STDOUT)
   if retcode:
-    _Die('unexpected error while downloading {}. It may no longer exist on '
-         'the server or it may not have been uploaded yet (check {}). '
+    _Die('unexpected error while downloading %s. It may no longer exist on '
+         'the server or it may not have been uploaded yet (check %s). '
          'Otherwise, you may not have the correct access permissions.',
          build.DownloadUrl(archive.rev), _BUILDER_URL)
 
   # Files needed for supersize and resource_sizes. Paths relative to out dir.
   to_extract = [build.main_lib_path, build.map_file_path, 'args.gn']
   if build.IsAndroid():
     to_extract += ['build_vars.txt', build.apk_path]
   extract_dir = dl_dst + '_' + 'unzipped'
   # Storage bucket stores entire output directory including out/Release prefix.
-  _Print('Extracting build artifacts')
+  logging.info('Extracting build artifacts')
   with zipfile.ZipFile(dl_dst, 'r') as z:
     _ExtractFiles(to_extract, build.download_output_dir, extract_dir, z)
   dl_out = os.path.join(extract_dir, build.download_output_dir)
   build.output_directory, output_directory = dl_out, build.output_directory
   archive.ArchiveBuildResults(supersize_path)
   build.output_directory = output_directory
 
 
 def _ExtractFiles(to_extract, prefix, dst, z):
   zip_infos = z.infolist()
   assert all(info.filename.startswith(prefix) for info in zip_infos), (
       'Storage bucket folder structure doesn\'t start with %s' % prefix)
   to_extract = [os.path.join(prefix, f) for f in to_extract]
   for f in to_extract:
     z.extract(f, path=dst)
 
 
-def _Print(s, *args, **kwargs):
-  print s.format(*args, **kwargs)
-
-
 def _PrintAndWriteToFile(logfile, s, *args, **kwargs):
   """Write and print |s| throttling output if |s| is a large list."""
   if isinstance(s, basestring):
     s = s.format(*args, **kwargs)
-    _Print(s)
+    print s
     logfile.write('%s\n' % s)
   else:
     for l in s[:_DIFF_DETAILS_LINES_THRESHOLD]:
-      _Print(l)
+      print l
     if len(s) > _DIFF_DETAILS_LINES_THRESHOLD:
-      _Print('\nOutput truncated, see {} for more.', logfile.name)
+      print '\nOutput truncated, see %s for more.' % logfile.name
     logfile.write('\n'.join(s))
 
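
A condensed model of the list branch above: the console gets at most _DIFF_DETAILS_LINES_THRESHOLD lines while the logfile always receives everything (threshold shrunk to 2 here for brevity):

import StringIO

threshold = 2  # stands in for _DIFF_DETAILS_LINES_THRESHOLD
lines = ['line1', 'line2', 'line3', 'line4']
logfile = StringIO.StringIO()

printed = lines[:threshold]            # only these reach the console
if len(lines) > threshold:
  printed.append('Output truncated, see %s for more.' % 'diff.txt')
logfile.write('\n'.join(lines))        # the file still gets all lines

assert len(printed) == 3
assert logfile.getvalue().count('\n') == 3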
 
 @contextmanager
 def _TmpCopyBinarySizeDir():
   """Recursively copy files to a temp dir and yield supersize path."""
   # Needs to be at same level of nesting as the real //tools/binary_size
   # since supersize uses this to find d3 in //third_party.
   tmp_dir = tempfile.mkdtemp(dir=_SRC_ROOT)
   try:
     bs_dir = os.path.join(tmp_dir, 'binary_size')
     shutil.copytree(os.path.join(_SRC_ROOT, 'tools', 'binary_size'), bs_dir)
     yield os.path.join(bs_dir, 'supersize')
   finally:
     shutil.rmtree(tmp_dir)
 
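
Usage mirrors main(): the copy lives for the duration of the with block, so supersize keeps working even while `git checkout` rewinds //tools/binary_size to an older rev. A toy version of the same pattern for an arbitrary directory:

import os
import shutil
import tempfile
from contextlib import contextmanager

@contextmanager
def tmp_copy(src_dir, name):
  tmp_dir = tempfile.mkdtemp()
  try:
    dst = os.path.join(tmp_dir, name)
    shutil.copytree(src_dir, dst)
    yield dst                # callers use the copy; the original may change
  finally:
    shutil.rmtree(tmp_dir)   # cleanup runs even if the body raises

# Quick self-check with a throwaway source directory.
src = tempfile.mkdtemp()
open(os.path.join(src, 'supersize'), 'w').close()
with tmp_copy(src, 'binary_size') as copied:
  assert os.path.exists(os.path.join(copied, 'supersize'))
shutil.rmtree(src)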
 
+def _SetRestoreFunc(subrepo):
+  branch = _GitCmd(['rev-parse', '--abbrev-ref', 'HEAD'], subrepo)
+  atexit.register(lambda: _GitCmd(['checkout', branch], subrepo))
+
+
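atexit handlers fire on any normal interpreter exit, including the sys.exit(1) in _Die(), which is what lets this patch drop the explicit _global_restore_checkout_func() calls from every error path. A minimal demonstration (handlers do not run on os._exit() or an unhandled signal):

import atexit
import sys

def _restore():
  print 'restoring original branch'  # stand-in for: git checkout <branch>

atexit.register(_restore)
sys.exit(1)  # the handler above still runs on the way out
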
 def main():
   parser = argparse.ArgumentParser(
       description='Find the cause of APK size bloat.')
   parser.add_argument('--archive-directory',
                       default=_DEFAULT_ARCHIVE_DIR,
                       help='Where results are stored.')
   parser.add_argument('rev',
                       help='Find binary size bloat for this commit.')
   parser.add_argument('--reference-rev',
                       help='Older rev to diff against. If not supplied, '
(...skipping 12 matching lines...)
                       help='Download build artifacts from perf builders '
                       '(Android only, Googlers only).')
   parser.add_argument('--depot-tools-path',
                       help='Custom path to depot tools. Needed for --cloud if '
                       'depot tools isn\'t in your PATH.')
   parser.add_argument('--subrepo',
                       help='Specify a subrepo directory to use. Gclient sync '
                       'will be skipped if this option is used and all git '
                       'commands will be executed from the subrepo directory. '
                       'This option doesn\'t work with --cloud.')
+  parser.add_argument('--silent',
+                      action='store_true',
+                      help='Less logging, no Ninja/GN output.')
 
   build_group = parser.add_argument_group('ninja', 'Args to use with ninja/gn')
   build_group.add_argument('-j',
                            dest='max_jobs',
                            help='Run N jobs in parallel.')
   build_group.add_argument('-l',
                            dest='max_load_average',
                            help='Do not start new jobs if the load average is '
                            'greater than N.')
   build_group.add_argument('--no-goma',
(...skipping 13 matching lines...)
                            action='store_true',
                            help='Allow downstream targets to be built.')
   build_group.add_argument('--target',
                            default=_DEFAULT_ANDROID_TARGET,
                            help='GN APK target to build. Ignored for Linux. '
                            'Default %s.' % _DEFAULT_ANDROID_TARGET)
   if len(sys.argv) == 1:
     parser.print_help()
     sys.exit()
   args = parser.parse_args()
+  log_level = logging.INFO if args.silent else logging.DEBUG
+  logging.basicConfig(level=log_level,
+                      format='%(levelname).1s %(relativeCreated)6d %(message)s')
   build = _BuildHelper(args)
   if build.IsCloud() and args.subrepo:
     parser.error('--subrepo doesn\'t work with --cloud')
 
   subrepo = args.subrepo or _SRC_ROOT
   _EnsureDirectoryClean(subrepo)
   _SetRestoreFunc(subrepo)
   if build.IsLinux():
     _VerifyUserAccepts('Linux diffs have known deficiencies (crbug/717550).')
 
   rev, reference_rev = _ValidateRevs(
       args.rev, args.reference_rev or args.rev + '^', subrepo)
   revs = _GenerateRevList(rev, reference_rev, args.all, subrepo)
   with _TmpCopyBinarySizeDir() as supersize_path:
     diffs = [NativeDiff(build.size_name, supersize_path)]
     if build.IsAndroid():
       diffs += [
           ResourceSizesDiff(
               build.apk_name, slow_options=args.include_slow_options)
       ]
     diff_mngr = _DiffArchiveManager(
         revs, args.archive_directory, diffs, build, subrepo)
     consecutive_failures = 0
     for i, archive in enumerate(diff_mngr.IterArchives()):
       if archive.Exists():
-        _Print('Found matching metadata for {}, skipping build step.',
-               archive.rev)
+        step = 'download' if build.IsCloud() else 'build'
+        logging.info('Found matching metadata for %s, skipping %s step.',
+                     archive.rev, step)
       else:
         if build.IsCloud():
           _DownloadBuildArtifacts(
               archive, build, supersize_path, args.depot_tools_path)
         else:
-          build_success = _SyncAndBuild(archive, build, subrepo)
-          if not build_success:
+          build_failure = _SyncAndBuild(archive, build, subrepo)
+          if build_failure:
+            logging.info(
+                'Build failed for %s, diffs using this rev will be skipped.',
+                archive.rev)
             consecutive_failures += 1
             if consecutive_failures > _ALLOWED_CONSECUTIVE_FAILURES:
-              _Die('{} builds failed in a row, last failure was {}.',
+              _Die('%d builds failed in a row, last failure was %s.',
                    consecutive_failures, archive.rev)
           else:
             archive.ArchiveBuildResults(supersize_path)
             consecutive_failures = 0
 
       if i != 0:
         diff_mngr.MaybeDiff(i - 1, i)
 
     diff_mngr.Summarize()
 
-  _global_restore_checkout_func()
 
 if __name__ == '__main__':
   sys.exit(main())
 
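Finally, the format string passed to logging.basicConfig() in main() renders a one-letter level and the milliseconds since logging started before each message. A standalone reproduction (the timing values will differ from run to run):

import logging

logging.basicConfig(level=logging.DEBUG,
                    format='%(levelname).1s %(relativeCreated)6d %(message)s')
logging.info('Building: %s.', 'monochrome_public_apk')
logging.debug('Running: %s', 'gn gen out/diagnose-apk-bloat')
# Typical output:
#   I     12 Building: monochrome_public_apk.
#   D     13 Running: gn gen out/diagnose-apk-bloat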