| OLD | NEW |
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 """Snapshot Build Bisect Tool | 6 """Snapshot Build Bisect Tool |
| 7 | 7 |
| 8 This script bisects a snapshot archive using binary search. It starts at | 8 This script bisects a snapshot archive using binary search. It starts at |
| 9 a bad revision (it will try to guess HEAD) and asks for a last known-good | 9 a bad revision (it will try to guess HEAD) and asks for a last known-good |
| 10 revision. It will then binary search across this revision range by downloading, | 10 revision. It will then binary search across this revision range by downloading, |
| (...skipping 75 matching lines...) |
| 86 import urllib | 86 import urllib |
| 87 from distutils.version import LooseVersion | 87 from distutils.version import LooseVersion |
| 88 from xml.etree import ElementTree | 88 from xml.etree import ElementTree |
| 89 import zipfile | 89 import zipfile |
| 90 | 90 |
| 91 | 91 |
| 92 class PathContext(object): | 92 class PathContext(object): |
| 93 """A PathContext is used to carry the information used to construct URLs and | 93 """A PathContext is used to carry the information used to construct URLs and |
| 94 paths when dealing with the storage server and archives.""" | 94 paths when dealing with the storage server and archives.""" |
| 95 def __init__(self, base_url, platform, good_revision, bad_revision, | 95 def __init__(self, base_url, platform, good_revision, bad_revision, |
| 96 is_official, is_aura, is_asan, use_local_repo, flash_path = None, | 96 is_official, is_asan, use_local_repo, flash_path = None, |
| 97 pdf_path = None): | 97 pdf_path = None): |
| 98 super(PathContext, self).__init__() | 98 super(PathContext, self).__init__() |
| 99 # Store off the input parameters. | 99 # Store off the input parameters. |
| 100 self.base_url = base_url | 100 self.base_url = base_url |
| 101 self.platform = platform # What's passed in to the '-a/--archive' option. | 101 self.platform = platform # What's passed in to the '-a/--archive' option. |
| 102 self.good_revision = good_revision | 102 self.good_revision = good_revision |
| 103 self.bad_revision = bad_revision | 103 self.bad_revision = bad_revision |
| 104 self.is_official = is_official | 104 self.is_official = is_official |
| 105 self.is_aura = is_aura | |
| 106 self.is_asan = is_asan | 105 self.is_asan = is_asan |
| 107 self.build_type = 'release' | 106 self.build_type = 'release' |
| 108 self.flash_path = flash_path | 107 self.flash_path = flash_path |
| 109 # Dictionary which stores svn revision number as key and its | 108 # Dictionary which stores svn revision number as key and its |
| 110 # corresponding git hash as value. This data is populated in | 109 # corresponding git hash as value. This data is populated in |
| 111 # _FetchAndParse and used later in GetDownloadURL while downloading | 110 # _FetchAndParse and used later in GetDownloadURL while downloading |
| 112 # the build. | 111 # the build. |
| 113 self.githash_svn_dict = {} | 112 self.githash_svn_dict = {} |
| 114 self.pdf_path = pdf_path | 113 self.pdf_path = pdf_path |
| 115 | 114 |
| (...skipping 28 matching lines...) |
| 144 self.archive_name = 'chrome-precise32bit.zip' | 143 self.archive_name = 'chrome-precise32bit.zip' |
| 145 self._archive_extract_dir = 'chrome-precise32bit' | 144 self._archive_extract_dir = 'chrome-precise32bit' |
| 146 elif self.platform == 'linux64': | 145 elif self.platform == 'linux64': |
| 147 self._listing_platform_dir = 'precise64bit/' | 146 self._listing_platform_dir = 'precise64bit/' |
| 148 self.archive_name = 'chrome-precise64bit.zip' | 147 self.archive_name = 'chrome-precise64bit.zip' |
| 149 self._archive_extract_dir = 'chrome-precise64bit' | 148 self._archive_extract_dir = 'chrome-precise64bit' |
| 150 elif self.platform == 'mac': | 149 elif self.platform == 'mac': |
| 151 self._listing_platform_dir = 'mac/' | 150 self._listing_platform_dir = 'mac/' |
| 152 self._binary_name = 'Google Chrome.app/Contents/MacOS/Google Chrome' | 151 self._binary_name = 'Google Chrome.app/Contents/MacOS/Google Chrome' |
| 153 elif self.platform == 'win': | 152 elif self.platform == 'win': |
| 154 if self.is_aura: | 153 self._listing_platform_dir = 'win/' |
| 155 self._listing_platform_dir = 'win-aura/' | |
| 156 else: | |
| 157 self._listing_platform_dir = 'win/' | |
| 158 else: | 154 else: |
| 159 if self.platform in ('linux', 'linux64', 'linux-arm'): | 155 if self.platform in ('linux', 'linux64', 'linux-arm'): |
| 160 self.archive_name = 'chrome-linux.zip' | 156 self.archive_name = 'chrome-linux.zip' |
| 161 self._archive_extract_dir = 'chrome-linux' | 157 self._archive_extract_dir = 'chrome-linux' |
| 162 if self.platform == 'linux': | 158 if self.platform == 'linux': |
| 163 self._listing_platform_dir = 'Linux/' | 159 self._listing_platform_dir = 'Linux/' |
| 164 elif self.platform == 'linux64': | 160 elif self.platform == 'linux64': |
| 165 self._listing_platform_dir = 'Linux_x64/' | 161 self._listing_platform_dir = 'Linux_x64/' |
| 166 elif self.platform == 'linux-arm': | 162 elif self.platform == 'linux-arm': |
| 167 self._listing_platform_dir = 'Linux_ARM_Cross-Compile/' | 163 self._listing_platform_dir = 'Linux_ARM_Cross-Compile/' |
| (...skipping 54 matching lines...) |
| 222 | 218 |
| 223 def GetLaunchPath(self, revision): | 219 def GetLaunchPath(self, revision): |
| 224 """Returns a relative path (presumably from the archive extraction location) | 220 """Returns a relative path (presumably from the archive extraction location) |
| 225 that is used to run the executable.""" | 221 that is used to run the executable.""" |
| 226 if self.is_asan: | 222 if self.is_asan: |
| 227 extract_dir = '%s-%d' % (self.GetASANBaseName(), revision) | 223 extract_dir = '%s-%d' % (self.GetASANBaseName(), revision) |
| 228 else: | 224 else: |
| 229 extract_dir = self._archive_extract_dir | 225 extract_dir = self._archive_extract_dir |
| 230 return os.path.join(extract_dir, self._binary_name) | 226 return os.path.join(extract_dir, self._binary_name) |
| 231 | 227 |
| 232 @staticmethod | |
| 233 def IsAuraBuild(build): | |
| 234 """Checks whether the given build is an Aura build.""" | |
| 235 return build.split('.')[3] == '1' | |
| 236 | |
| 237 @staticmethod | |
| 238 def IsOfficialASANBuild(build): | |
| 239 """Checks whether the given build is an ASAN build.""" | |
| 240 return build.split('.')[3] == '2' | |
| 241 | |
| 242 def ParseDirectoryIndex(self): | 228 def ParseDirectoryIndex(self): |
| 243 """Parses the Google Storage directory listing into a list of revision | 229 """Parses the Google Storage directory listing into a list of revision |
| 244 numbers.""" | 230 numbers.""" |
| 245 | 231 |
| 246 def _FetchAndParse(url): | 232 def _FetchAndParse(url): |
| 247 """Fetches a URL and returns a 2-Tuple of ([revisions], next-marker). If | 233 """Fetches a URL and returns a 2-Tuple of ([revisions], next-marker). If |
| 248 next-marker is not None, then the listing is a partial listing and another | 234 next-marker is not None, then the listing is a partial listing and another |
| 249 fetch should be performed with next-marker being the marker= GET | 235 fetch should be performed with next-marker being the marker= GET |
| 250 parameter.""" | 236 parameter.""" |
| 251 handle = urllib.urlopen(url) | 237 handle = urllib.urlopen(url) |
| (...skipping 168 matching lines...) |
| 420 for build_number in sorted(parsed_build_numbers): | 406 for build_number in sorted(parsed_build_numbers): |
| 421 path = (OFFICIAL_BASE_URL + '/' + str(build_number) + '/' + | 407 path = (OFFICIAL_BASE_URL + '/' + str(build_number) + '/' + |
| 422 self._listing_platform_dir + self.archive_name) | 408 self._listing_platform_dir + self.archive_name) |
| 423 i = i + 1 | 409 i = i + 1 |
| 424 try: | 410 try: |
| 425 connection = urllib.urlopen(path) | 411 connection = urllib.urlopen(path) |
| 426 connection.close() | 412 connection.close() |
| 427 if build_number > maxrev: | 413 if build_number > maxrev: |
| 428 break | 414 break |
| 429 if build_number >= minrev: | 415 if build_number >= minrev: |
| 430 # If we are bisecting Aura, we want to include only builds which | 416 final_list.append(str(build_number)) |
| 431 # ends with ".1". | |
| 432 if self.is_aura: | |
| 433 if self.IsAuraBuild(str(build_number)): | |
| 434 final_list.append(str(build_number)) | |
| 435 # If we are bisecting only official builds (without --aura), | |
| 436 # we can not include builds which ends with '.1' or '.2' since | |
| 437 # they have different folder hierarchy inside. | |
| 438 elif (not self.IsAuraBuild(str(build_number)) and | |
| 439 not self.IsOfficialASANBuild(str(build_number))): | |
| 440 final_list.append(str(build_number)) | |
| 441 except urllib.HTTPError: | 417 except urllib.HTTPError: |
| 442 pass | 418 pass |
| 443 return final_list | 419 return final_list |
| 444 | 420 |
| 445 def UnzipFilenameToDir(filename, directory): | 421 def UnzipFilenameToDir(filename, directory): |
| 446 """Unzip |filename| to |directory|.""" | 422 """Unzip |filename| to |directory|.""" |
| 447 cwd = os.getcwd() | 423 cwd = os.getcwd() |
| 448 if not os.path.isabs(filename): | 424 if not os.path.isabs(filename): |
| 449 filename = os.path.join(cwd, filename) | 425 filename = os.path.join(cwd, filename) |
| 450 zf = zipfile.ZipFile(filename) | 426 zf = zipfile.ZipFile(filename) |
| (...skipping 533 matching lines...) |
| 984 'respectively. Use %s to specify all extra arguments ' | 960 'respectively. Use %s to specify all extra arguments ' |
| 985 'as one string. Defaults to "%p %a". Note that any ' | 961 'as one string. Defaults to "%p %a". Note that any ' |
| 986 'extra paths specified should be absolute.') | 962 'extra paths specified should be absolute.') |
| 987 parser.add_option('-l', '--blink', | 963 parser.add_option('-l', '--blink', |
| 988 action='store_true', | 964 action='store_true', |
| 989 help='Use Blink bisect instead of Chromium. ') | 965 help='Use Blink bisect instead of Chromium. ') |
| 990 parser.add_option('', '--not-interactive', | 966 parser.add_option('', '--not-interactive', |
| 991 action='store_true', | 967 action='store_true', |
| 992 default=False, | 968 default=False, |
| 993 help='Use command exit code to tell good/bad revision.') | 969 help='Use command exit code to tell good/bad revision.') |
| 994 parser.add_option('--aura', | |
| 995 dest='aura', | |
| 996 action='store_true', | |
| 997 default=False, | |
| 998 help='Allow the script to bisect aura builds') | |
| 999 parser.add_option('--asan', | 970 parser.add_option('--asan', |
| 1000 dest='asan', | 971 dest='asan', |
| 1001 action='store_true', | 972 action='store_true', |
| 1002 default=False, | 973 default=False, |
| 1003 help='Allow the script to bisect ASAN builds') | 974 help='Allow the script to bisect ASAN builds') |
| 1004 parser.add_option('--use-local-repo', | 975 parser.add_option('--use-local-repo', |
| 1005 dest='use_local_repo', | 976 dest='use_local_repo', |
| 1006 action='store_true', | 977 action='store_true', |
| 1007 default=False, | 978 default=False, |
| 1008 help='Allow the script to convert git SHA1 to SVN ' | 979 help='Allow the script to convert git SHA1 to SVN ' |
| 1009 'revision using "git svn find-rev <SHA1>" ' | 980 'revision using "git svn find-rev <SHA1>" ' |
| 1010 'command from a Chromium checkout.') | 981 'command from a Chromium checkout.') |
| 1011 | 982 |
| 1012 (opts, args) = parser.parse_args() | 983 (opts, args) = parser.parse_args() |
| 1013 | 984 |
| 1014 if opts.archive is None: | 985 if opts.archive is None: |
| 1015 print 'Error: missing required parameter: --archive' | 986 print 'Error: missing required parameter: --archive' |
| 1016 print | 987 print |
| 1017 parser.print_help() | 988 parser.print_help() |
| 1018 return 1 | 989 return 1 |
| 1019 | 990 |
| 1020 if opts.aura: | |
| 1021 if opts.archive != 'win' or not opts.official_builds: | |
| 1022 print ('Error: Aura is supported only on Windows platform ' | |
| 1023 'and official builds.') | |
| 1024 return 1 | |
| 1025 | |
| 1026 if opts.asan: | 991 if opts.asan: |
| 1027 supported_platforms = ['linux', 'mac', 'win'] | 992 supported_platforms = ['linux', 'mac', 'win'] |
| 1028 if opts.archive not in supported_platforms: | 993 if opts.archive not in supported_platforms: |
| 1029 print 'Error: ASAN bisecting only supported on these platforms: [%s].' % ( | 994 print 'Error: ASAN bisecting only supported on these platforms: [%s].' % ( |
| 1030 '|'.join(supported_platforms)) | 995 '|'.join(supported_platforms)) |
| 1031 return 1 | 996 return 1 |
| 1032 if opts.official_builds: | 997 if opts.official_builds: |
| 1033 print 'Error: Do not yet support bisecting official ASAN builds.' | 998 print 'Error: Do not yet support bisecting official ASAN builds.' |
| 1034 return 1 | 999 return 1 |
| 1035 | 1000 |
| 1036 if opts.asan: | 1001 if opts.asan: |
| 1037 base_url = ASAN_BASE_URL | 1002 base_url = ASAN_BASE_URL |
| 1038 elif opts.blink: | 1003 elif opts.blink: |
| 1039 base_url = WEBKIT_BASE_URL | 1004 base_url = WEBKIT_BASE_URL |
| 1040 else: | 1005 else: |
| 1041 base_url = CHROMIUM_BASE_URL | 1006 base_url = CHROMIUM_BASE_URL |
| 1042 | 1007 |
| 1043 # Create the context. Initialize 0 for the revisions as they are set below. | 1008 # Create the context. Initialize 0 for the revisions as they are set below. |
| 1044 context = PathContext(base_url, opts.archive, opts.good, opts.bad, | 1009 context = PathContext(base_url, opts.archive, opts.good, opts.bad, |
| 1045 opts.official_builds, opts.aura, opts.asan, | 1010 opts.official_builds, opts.asan, opts.use_local_repo, |
| 1046 opts.use_local_repo, opts.flash_path, opts.pdf_path) | 1011 opts.flash_path, opts.pdf_path) |
| 1047 # Pick a starting point, try to get HEAD for this. | 1012 # Pick a starting point, try to get HEAD for this. |
| 1048 if not opts.bad: | 1013 if not opts.bad: |
| 1049 context.bad_revision = '999.0.0.0' | 1014 context.bad_revision = '999.0.0.0' |
| 1050 context.bad_revision = GetChromiumRevision( | 1015 context.bad_revision = GetChromiumRevision( |
| 1051 context, context.GetLastChangeURL()) | 1016 context, context.GetLastChangeURL()) |
| 1052 | 1017 |
| 1053 # Find out when we were good. | 1018 # Find out when we were good. |
| 1054 if not opts.good: | 1019 if not opts.good: |
| 1055 context.good_revision = '0.0.0.0' if opts.official_builds else 0 | 1020 context.good_revision = '0.0.0.0' if opts.official_builds else 0 |
| 1056 | 1021 |
| (...skipping 66 matching lines...) |
| 1123 | 1088 |
| 1124 print 'CHANGELOG URL:' | 1089 print 'CHANGELOG URL:' |
| 1125 if opts.official_builds: | 1090 if opts.official_builds: |
| 1126 print OFFICIAL_CHANGELOG_URL % (min_chromium_rev, max_chromium_rev) | 1091 print OFFICIAL_CHANGELOG_URL % (min_chromium_rev, max_chromium_rev) |
| 1127 else: | 1092 else: |
| 1128 print ' ' + CHANGELOG_URL % (min_chromium_rev, max_chromium_rev) | 1093 print ' ' + CHANGELOG_URL % (min_chromium_rev, max_chromium_rev) |
| 1129 | 1094 |
| 1130 | 1095 |
| 1131 if __name__ == '__main__': | 1096 if __name__ == '__main__': |
| 1132 sys.exit(main()) | 1097 sys.exit(main()) |
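
The module docstring at the top of this file describes the core strategy: hold a known-good and a known-bad revision and binary search the range between them, downloading the midpoint build each time and asking the user (or a test command) for a verdict. A minimal sketch of that loop, assuming a sorted `revisions` list whose endpoints already have known verdicts and a hypothetical `is_good(rev)` callback that downloads, runs, and judges one build (neither name is part of the script itself):

```python
def bisect(revisions, is_good):
    """Narrow a sorted revision list down to the (last good, first bad) pair.

    Assumes revisions[0] is known good and revisions[-1] is known bad.
    """
    lo, hi = 0, len(revisions) - 1
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if is_good(revisions[mid]):
            lo = mid   # regression landed after mid; discard the lower half
        else:
            hi = mid   # regression is at or before mid; discard the upper half
    return revisions[lo], revisions[hi]
```

Each probe costs one download-and-run cycle, so the number of builds fetched grows with the logarithm of the revision range rather than with the size of the range itself.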
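
_FetchAndParse's docstring in the hunk above spells out a simple pagination contract for the Google Storage listing: each fetch returns a batch of revisions plus a next-marker, and a non-None marker means the listing was partial and should be re-fetched with that value passed as the marker= GET parameter. A sketch of the driver loop that contract implies, with `fetch_page` standing in for _FetchAndParse and `base_url` assumed to already carry its query string (both are assumptions, not the script's actual call signatures):

```python
def fetch_all_revisions(base_url, fetch_page):
    """Accumulate revisions across partial listings until no marker remains."""
    revisions = []
    marker = None
    while True:
        # Follow-up fetches append the previous page's marker as the marker=
        # GET parameter; base_url is assumed to already end in a query string.
        url = base_url + ('&marker=' + marker if marker else '')
        page, marker = fetch_page(url)
        revisions.extend(page)
        if marker is None:
            return revisions
```

This is the loop shape ParseDirectoryIndex needs in order to see every build, since the storage server returns the listing in pages rather than all at once.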