Chromium Code Reviews

Side by Side Diff: tools/bisect-builds.py

Issue 7493016: Added pre-fetching of builds. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Created 9 years, 5 months ago
OLD | NEW
1 #!/usr/bin/python 1 #!/usr/bin/python
2 # Copyright (c) 2011 The Chromium Authors. All rights reserved. 2 # Copyright (c) 2011 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be 3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file. 4 # found in the LICENSE file.
5 5
6 """Snapshot Build Bisect Tool 6 """Snapshot Build Bisect Tool
7 7
8 This script bisects a snapshot archive using binary search. It starts at 8 This script bisects a snapshot archive using binary search. It starts at
9 a bad revision (it will try to guess HEAD) and asks for a last known-good 9 a bad revision (it will try to guess HEAD) and asks for a last known-good
10 revision. It will then binary search across this revision range by downloading, 10 revision. It will then binary search across this revision range by downloading,
11 unzipping, and opening Chromium for you. After testing the specific revision, 11 unzipping, and opening Chromium for you. After testing the specific revision,
12 it will ask you whether it is good or bad before continuing the search. 12 it will ask you whether it is good or bad before continuing the search.
13 """ 13 """
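For orientation, a typical interactive session might look roughly like this; the revision numbers are made up, and -a/--archive plus the "-- --no-first-run" tip are the only options shown (both are referenced later in this file):

    % python tools/bisect-builds.py -a linux64 -- --no-first-run
    Bad revision [HEAD:92976]: 
    Last known good [0]: 92000
    Trying revision 92488...
    Revision 92488 is [(g)ood/(b)ad/(q)uit]: b
    Downloading revision 92244...
    Trying revision 92244...
    Revision 92244 is [(g)ood/(b)ad/(q)uit]: g
    ...
    You are probably looking for build 92310.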
14 14
15 # The root URL for storage. 15 # The root URL for storage.
16 BASE_URL = 'http://commondatastorage.googleapis.com/chromium-browser-snapshots' 16 BASE_URL = 'http://commondatastorage.googleapis.com/chromium-browser-snapshots'
17 BASE_URL_RECENT = 'http://build.chromium.org/f/chromium/snapshots'
18 17
19 # URL to the ViewVC commit page. 18 # URL to the ViewVC commit page.
20 BUILD_VIEWVC_URL = 'http://src.chromium.org/viewvc/chrome?view=rev&revision=%d' 19 BUILD_VIEWVC_URL = 'http://src.chromium.org/viewvc/chrome?view=rev&revision=%d'
21 20
22 # Changelogs URL. 21 # Changelogs URL.
23 CHANGELOG_URL = 'http://build.chromium.org/f/chromium/' \ 22 CHANGELOG_URL = 'http://build.chromium.org/f/chromium/' \
24 'perf/dashboard/ui/changelog.html?url=/trunk/src&range=%d:%d' 23 'perf/dashboard/ui/changelog.html?url=/trunk/src&range=%d:%d'
25 24
26 ############################################################################### 25 ###############################################################################
27 26
28 import math 27 import math
29 import optparse 28 import optparse
30 import os 29 import os
31 import pipes 30 import pipes
32 import re 31 import re
33 import shutil 32 import shutil
33 import subprocess
34 import sys 34 import sys
35 import tempfile 35 import tempfile
36 import threading
36 import urllib 37 import urllib
37 from xml.etree import ElementTree 38 from xml.etree import ElementTree
38 import zipfile 39 import zipfile
39 40
40 class PathContext(object): 41 class PathContext(object):
41 """A PathContext is used to carry the information used to construct URLs and 42 """A PathContext is used to carry the information used to construct URLs and
42 paths when dealing with the storage server and archives.""" 43 paths when dealing with the storage server and archives."""
43 def __init__(self, platform, good_revision, bad_revision, use_recent): 44 def __init__(self, platform, good_revision, bad_revision):
44 super(PathContext, self).__init__() 45 super(PathContext, self).__init__()
45 # Store off the input parameters. 46 # Store off the input parameters.
46 self.platform = platform # What's passed in to the '-a/--archive' option. 47 self.platform = platform # What's passed in to the '-a/--archive' option.
47 self.good_revision = good_revision 48 self.good_revision = good_revision
48 self.bad_revision = bad_revision 49 self.bad_revision = bad_revision
49 self.use_recent = use_recent
50 50
51 # The name of the ZIP file in a revision directory on the server. 51 # The name of the ZIP file in a revision directory on the server.
52 self.archive_name = None 52 self.archive_name = None
53 53
54 # Set some internal members: 54 # Set some internal members:
55 # _listing_platform_dir = Directory that holds revisions. Ends with a '/'. 55 # _listing_platform_dir = Directory that holds revisions. Ends with a '/'.
56 # _archive_extract_dir = Uncompressed directory in the archive_name file. 56 # _archive_extract_dir = Uncompressed directory in the archive_name file.
57 # _binary_name = The name of the executable to run. 57 # _binary_name = The name of the executable to run.
58 if self.platform == 'linux' or self.platform == 'linux64': 58 if self.platform == 'linux' or self.platform == 'linux64':
59 self._listing_platform_dir = 'Linux/' 59 self._listing_platform_dir = 'Linux/'
60 self.archive_name = 'chrome-linux.zip' 60 self.archive_name = 'chrome-linux.zip'
61 self._archive_extract_dir = 'chrome-linux' 61 self._archive_extract_dir = 'chrome-linux'
62 self._binary_name = 'chrome' 62 self._binary_name = 'chrome'
63 # Linux and x64 share all the same path data except for the archive dir. 63 # Linux and x64 share all the same path data except for the archive dir.
64 if self.platform == 'linux64': 64 if self.platform == 'linux64':
65 self._listing_platform_dir = 'Linux_x64/' 65 self._listing_platform_dir = 'Linux_x64/'
66 elif self.platform == 'mac': 66 elif self.platform == 'mac':
67 self._listing_platform_dir = 'Mac/' 67 self._listing_platform_dir = 'Mac/'
68 self.archive_name = 'chrome-mac.zip' 68 self.archive_name = 'chrome-mac.zip'
69 self._archive_extract_dir = 'chrome-mac' 69 self._archive_extract_dir = 'chrome-mac'
70 self._binary_name = 'Chromium.app/Contents/MacOS/Chromium' 70 self._binary_name = 'Chromium.app/Contents/MacOS/Chromium'
71 elif self.platform == 'win': 71 elif self.platform == 'win':
72 self._listing_platform_dir = 'Win/' 72 self._listing_platform_dir = 'Win/'
73 self.archive_name = 'chrome-win32.zip' 73 self.archive_name = 'chrome-win32.zip'
74 self._archive_extract_dir = 'chrome-win32' 74 self._archive_extract_dir = 'chrome-win32'
75 self._binary_name = 'chrome.exe' 75 self._binary_name = 'chrome.exe'
76 else: 76 else:
77 raise Exception("Invalid platform") 77 raise Exception('Invalid platform: %s' % self.platform)
78 78
79 def GetListingURL(self, marker=None): 79 def GetListingURL(self, marker=None):
80 """Returns the URL for a directory listing, with an optional marker.""" 80 """Returns the URL for a directory listing, with an optional marker."""
81 marker_param = '' 81 marker_param = ''
82 if marker: 82 if marker:
83 marker_param = '&marker=' + str(marker) 83 marker_param = '&marker=' + str(marker)
84 return BASE_URL + '/?delimiter=/&prefix=' + self._listing_platform_dir + \ 84 return BASE_URL + '/?delimiter=/&prefix=' + self._listing_platform_dir + \
85 marker_param 85 marker_param
86 86
87 def GetListingURLRecent(self):
88 """Returns the URL for a directory listing of recent builds."""
89 return BASE_URL_RECENT + '/' + self._listing_platform_dir
90
91 def GetDownloadURL(self, revision): 87 def GetDownloadURL(self, revision):
92 """Gets the download URL for a build archive of a specific revision.""" 88 """Gets the download URL for a build archive of a specific revision."""
93 if self.use_recent: 89 return "%s/%s%d/%s" % (
94 return "%s/%s%d/%s" % ( 90 BASE_URL, self._listing_platform_dir, revision, self.archive_name)
95 BASE_URL_RECENT, self._listing_platform_dir, revision,
96 self.archive_name)
97 else:
98 return "%s/%s%d/%s" % (
99 BASE_URL, self._listing_platform_dir, revision, self.archive_name)
100 91
101 def GetLastChangeURL(self): 92 def GetLastChangeURL(self):
102 """Returns a URL to the LAST_CHANGE file.""" 93 """Returns a URL to the LAST_CHANGE file."""
103 return BASE_URL + '/' + self._listing_platform_dir + 'LAST_CHANGE' 94 return BASE_URL + '/' + self._listing_platform_dir + 'LAST_CHANGE'
104 95
105 def GetLaunchPath(self): 96 def GetLaunchPath(self):
106 """Returns a relative path (presumably from the archive extraction location) 97 """Returns a relative path (presumably from the archive extraction location)
107 that is used to run the executable.""" 98 that is used to run the executable."""
108 return os.path.join(self._archive_extract_dir, self._binary_name) 99 return os.path.join(self._archive_extract_dir, self._binary_name)
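For concreteness, with platform 'linux64' and an illustrative revision number, the helpers above resolve to values like these (derived from BASE_URL and the platform table in __init__):

    GetDownloadURL(92976)
      http://commondatastorage.googleapis.com/chromium-browser-snapshots/Linux_x64/92976/chrome-linux.zip
    GetLastChangeURL()
      http://commondatastorage.googleapis.com/chromium-browser-snapshots/Linux_x64/LAST_CHANGE
    GetLaunchPath()
      chrome-linux/chrome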
109 100
101 def ParseDirectoryIndex(self):
102 """Parses the Google Storage directory listing into a list of revision
103 numbers. The range starts with self.good_revision and goes until
104 self.bad_revision."""
105
106 def _FetchAndParse(url):
107 """Fetches a URL and returns a 2-Tuple of ([revisions], next-marker). If
108 next-marker is not None, then the listing is a partial listing and another
109 fetch should be performed with next-marker being the marker= GET
110 parameter."""
111 handle = urllib.urlopen(url)
112 document = ElementTree.parse(handle)
113
114 # All nodes in the tree are namespaced. Get the root's tag name to extract
115 # the namespace. Etree does namespaces as |{namespace}tag|.
116 root_tag = document.getroot().tag
117 end_ns_pos = root_tag.find('}')
118 if end_ns_pos == -1:
119 raise Exception("Could not locate end namespace for directory index")
120 namespace = root_tag[:end_ns_pos + 1]
121
122 # Find the prefix (_listing_platform_dir) and whether or not the list is
123 # truncated.
124 prefix_len = len(document.find(namespace + 'Prefix').text)
125 next_marker = None
126 is_truncated = document.find(namespace + 'IsTruncated')
127 if is_truncated is not None and is_truncated.text.lower() == 'true':
128 next_marker = document.find(namespace + 'NextMarker').text
129
130 # Get a list of all the revisions.
131 all_prefixes = document.findall(namespace + 'CommonPrefixes/' +
132 namespace + 'Prefix')
133 # The <Prefix> nodes have content of the form of
134 # |_listing_platform_dir/revision/|. Strip off the platform dir and the
135 # trailing slash to just have a number.
136 revisions = []
137 for prefix in all_prefixes:
138 revnum = prefix.text[prefix_len:-1]
139 try:
140 revnum = int(revnum)
141 revisions.append(revnum)
142 except ValueError:
143 pass
144 return (revisions, next_marker)
145
146 # Fetch the first list of revisions.
147 (revisions, next_marker) = _FetchAndParse(self.GetListingURL())
148
149 # If the result list was truncated, refetch with the next marker. Do this
150 # until an entire directory listing is done.
151 while next_marker:
152 next_url = self.GetListingURL(next_marker)
153 (new_revisions, next_marker) = _FetchAndParse(next_url)
154 revisions.extend(new_revisions)
155
156 return revisions
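The listing that _FetchAndParse consumes is the storage bucket's XML index; an abbreviated, hypothetical response for the Linux_x64/ prefix would look roughly like this (only the elements the parser reads are shown, revision numbers are illustrative):

    <ListBucketResult xmlns="http://doc.s3.amazonaws.com/2006-03-01">
      <Prefix>Linux_x64/</Prefix>
      <IsTruncated>true</IsTruncated>
      <NextMarker>Linux_x64/92976/</NextMarker>
      <CommonPrefixes><Prefix>Linux_x64/92975/</Prefix></CommonPrefixes>
      <CommonPrefixes><Prefix>Linux_x64/92976/</Prefix></CommonPrefixes>
    </ListBucketResult>

Each CommonPrefixes/Prefix entry yields one revision number once the platform directory and the trailing slash are stripped off.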
157
158 def GetRevList(self):
159 """Gets the list of revision numbers between self.good_revision and
160 self.bad_revision."""
161 # Download the revlist and filter for just the range between good and bad.
162 minrev = self.good_revision
163 maxrev = self.bad_revision
164 revlist = [int(x) for x in self.ParseDirectoryIndex()]
Robert Sesek 2011/07/25 16:13:07 map(int, self.ParseDirectoryIndex())?
szager 2011/07/25 17:39:04 Hmm... I am personally a fan of 'map', but my unde
165 revlist = [x for x in revlist if x >= minrev and x <= maxrev]
166 revlist.sort()
167 return revlist
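Along the lines of the map() suggestion in the thread above, the conversion and range filter could equivalently be written in one pass; an illustrative alternative body for GetRevList, not what the patch uses:

    def GetRevList(self):
      revisions = map(int, self.ParseDirectoryIndex())
      return sorted(r for r in revisions
                    if self.good_revision <= r <= self.bad_revision)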
168
110 169
111 def UnzipFilenameToDir(filename, dir): 170 def UnzipFilenameToDir(filename, dir):
112 """Unzip |filename| to directory |dir|.""" 171 """Unzip |filename| to directory |dir|."""
172 pushd = os.getcwd()
Robert Sesek 2011/07/25 16:13:07 I know it's not yours, but could you rename this?
szager 2011/07/25 17:39:04 Done.
173 if not os.path.isabs(filename):
174 filename = os.path.join(pushd, filename)
113 zf = zipfile.ZipFile(filename) 175 zf = zipfile.ZipFile(filename)
114 # Make base. 176 # Make base.
115 pushd = os.getcwd()
116 try: 177 try:
117 if not os.path.isdir(dir): 178 if not os.path.isdir(dir):
118 os.mkdir(dir) 179 os.mkdir(dir)
119 os.chdir(dir) 180 os.chdir(dir)
120 # Extract files. 181 # Extract files.
121 for info in zf.infolist(): 182 for info in zf.infolist():
122 name = info.filename 183 name = info.filename
123 if name.endswith('/'): # dir 184 if name.endswith('/'): # dir
124 if not os.path.isdir(name): 185 if not os.path.isdir(name):
125 os.makedirs(name) 186 os.makedirs(name)
126 else: # file 187 else: # file
127 dir = os.path.dirname(name) 188 dir = os.path.dirname(name)
128 if not os.path.isdir(dir): 189 if not os.path.isdir(dir):
129 os.makedirs(dir) 190 os.makedirs(dir)
130 out = open(name, 'wb') 191 out = open(name, 'wb')
131 out.write(zf.read(name)) 192 out.write(zf.read(name))
132 out.close() 193 out.close()
133 # Set permissions. Permission info in external_attr is shifted 16 bits. 194 # Set permissions. Permission info in external_attr is shifted 16 bits.
134 os.chmod(name, info.external_attr >> 16L) 195 os.chmod(name, info.external_attr >> 16L)
135 os.chdir(pushd) 196 os.chdir(pushd)
136 except Exception, e: 197 except Exception, e:
137 print >>sys.stderr, e 198 print >>sys.stderr, e
138 sys.exit(1) 199 sys.exit(1)
139 200
140 201
141 def ParseDirectoryIndex(context): 202 def FetchRevision(context, rev, filename, quit_event=None):
142 """Parses the Google Storage directory listing into a list of revision 203 """Downloads and unzips revision |rev|"""
Robert Sesek 2011/07/25 16:13:07 Document |quit_event|.
szager 2011/07/25 17:39:04 Done.
143 numbers. The range starts with context.good_revision and goes until the latest 204 def ReportHook(blocknum, blocksize, totalsize):
144 revision.""" 205 if quit_event and quit_event.is_set():
145 def _FetchAndParse(url): 206 raise RuntimeError("Aborting download of revision %d" % rev)
146 """Fetches a URL and returns a 2-Tuple of ([revisions], next-marker). If
147 next-marker is not None, then the listing is a partial listing and another
148 fetch should be performed with next-marker being the marker= GET
149 parameter."""
150 handle = urllib.urlopen(url)
151 document = ElementTree.parse(handle)
152 207
153 # All nodes in the tree are namespaced. Get the root's tag name to extract 208 download_url = context.GetDownloadURL(rev)
154 # the namespace. Etree does namespaces as |{namespace}tag|. 209 try:
155 root_tag = document.getroot().tag 210 urllib.urlretrieve(download_url, filename, ReportHook)
156 end_ns_pos = root_tag.find('}') 211 except RuntimeError, e:
157 if end_ns_pos == -1: 212 pass
Robert Sesek 2011/07/25 16:13:07 Log?
szager 2011/07/25 17:39:04 This is not an actual error; this clause will be h
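The RuntimeError branch above is the cancellation path: urllib.urlretrieve() calls its reporthook periodically during the transfer, so raising from the hook is how an in-flight download gets stopped. A minimal standalone sketch of the same pattern (URL and filename are placeholders):

    import threading
    import urllib

    def fetch(url, filename, quit_event):
      def report_hook(blocknum, blocksize, totalsize):
        if quit_event.is_set():
          raise RuntimeError('aborting download')  # unwinds out of urlretrieve
      try:
        urllib.urlretrieve(url, filename, report_hook)
      except RuntimeError:
        pass  # expected when the download is cancelled

    quit_event = threading.Event()
    worker = threading.Thread(target=fetch,
                              args=('http://example.com/chrome-linux.zip',
                                    'chrome-linux.zip', quit_event))
    worker.start()
    # ... later, if this revision turns out not to be needed:
    quit_event.set()
    worker.join()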
158 raise Exception("Could not locate end namespace for directory index")
159 namespace = root_tag[:end_ns_pos + 1]
160
161 # Find the prefix (_listing_platform_dir) and whether or not the list is
162 # truncated.
163 prefix_len = len(document.find(namespace + 'Prefix').text)
164 next_marker = None
165 is_truncated = document.find(namespace + 'IsTruncated')
166 if is_truncated is not None and is_truncated.text.lower() == 'true':
167 next_marker = document.find(namespace + 'NextMarker').text
168
169 # Get a list of all the revisions.
170 all_prefixes = document.findall(namespace + 'CommonPrefixes/' +
171 namespace + 'Prefix')
172 # The <Prefix> nodes have content of the form of
173 # |_listing_platform_dir/revision/|. Strip off the platform dir and the
174 # trailing slash to just have a number.
175 revisions = []
176 for prefix in all_prefixes:
177 revnum = prefix.text[prefix_len:-1]
178 try:
179 revnum = int(revnum)
180 revisions.append(revnum)
181 except ValueError:
182 pass
183 return (revisions, next_marker)
184
185 # Fetch the first list of revisions.
186 (revisions, next_marker) = _FetchAndParse(context.GetListingURL())
187 # If the result list was truncated, refetch with the next marker. Do this
188 # until an entire directory listing is done.
189 while next_marker:
190 (new_revisions, next_marker) = _FetchAndParse(
191 context.GetListingURL(next_marker))
192 revisions.extend(new_revisions)
193
194 return revisions
195 213
196 214
197 def ParseDirectoryIndexRecent(context): 215 def RunRevision(context, revision, zipfile, profile, args) :
198 """Parses the recent builds directory listing into a list of revision 216 """Given a zipped revision, unzip it and run the test"""
Robert Sesek 2011/07/25 16:13:07 Comments require proper punctuation; here and else
szager 2011/07/25 17:39:04 Done.
199 numbers.""" 217 print "Trying revision %d..." % revision
200 handle = urllib.urlopen(context.GetListingURLRecent())
201 document = handle.read()
202 218
203 # Looking for: <a href="92976/">92976/</a> 219 # Create a temp directory and unzip the revision into it
204 return re.findall(r"<a href=\"(\d+)/\">\1/</a>", document)
205
206
207 def FilterRevList(context, revlist):
208 """Filter revlist to the revisions between |good_revision| and
209 |bad_revision| of the |context|."""
210 # Download the revlist and filter for just the range between good and bad.
211 rev_range = range(context.good_revision, context.bad_revision)
212 revlist = filter(lambda r: r in rev_range, revlist)
213 revlist.sort()
214 return revlist
215
216
217 def TryRevision(context, rev, profile, args):
218 """Downloads revision |rev|, unzips it, and opens it for the user to test.
219 |profile| is the profile to use."""
220 # Do this in a temp dir so we don't collide with user files.
221 cwd = os.getcwd() 220 cwd = os.getcwd()
222 tempdir = tempfile.mkdtemp(prefix='bisect_tmp') 221 tempdir = tempfile.mkdtemp(prefix='bisect_tmp')
222 UnzipFilenameToDir(zipfile, tempdir)
223 os.chdir(tempdir) 223 os.chdir(tempdir)
224 224
225 # Download the file. 225 # Run the test
226 download_url = context.GetDownloadURL(rev) 226 testargs = [context.GetLaunchPath(), '--user-data-dir=%s' % profile] + args
227 def _ReportHook(blocknum, blocksize, totalsize): 227 subproc = subprocess.Popen(testargs,
228 size = blocknum * blocksize 228 bufsize=-1,
229 if totalsize == -1: # Total size not known. 229 stdout=subprocess.PIPE,
230 progress = "Received %d bytes" % size 230 stderr=subprocess.PIPE)
231 else: 231 (stdout, stderr) = subproc.communicate()
232 size = min(totalsize, size)
233 progress = "Received %d of %d bytes, %.2f%%" % (
234 size, totalsize, 100.0 * size / totalsize)
235 # Send a \r to let all progress messages use just one line of output.
236 sys.stdout.write("\r" + progress)
237 sys.stdout.flush()
238 try:
239 print 'Fetching ' + download_url
240 urllib.urlretrieve(download_url, context.archive_name, _ReportHook)
241 print
242 # Throw an exception if the download was less than 1000 bytes.
243 if os.path.getsize(context.archive_name) < 1000: raise Exception()
244 except Exception, e:
245 print('Could not retrieve the download. Sorry.')
246 sys.exit(-1)
247
248 # Unzip the file.
249 print 'Unzipping ...'
250 UnzipFilenameToDir(context.archive_name, os.curdir)
251
252 # Tell the system to open the app.
253 args = ['--user-data-dir=%s' % profile] + args
254 flags = ' '.join(map(pipes.quote, args))
255 cmd = '%s %s' % (context.GetLaunchPath(), flags)
256 print 'Running %s' % cmd
257 os.system(cmd)
258 232
259 os.chdir(cwd) 233 os.chdir(cwd)
260 print 'Cleaning temp dir ...'
261 try: 234 try:
262 shutil.rmtree(tempdir, True) 235 shutil.rmtree(tempdir, True)
263 except Exception, e: 236 except Exception, e:
264 pass 237 pass
265 238
239 return (subproc.returncode, stdout, stderr)
266 240
267 def AskIsGoodBuild(rev): 241 def AskIsGoodBuild(rev, status, stdout, stderr):
268 """Ask the user whether build |rev| is good or bad.""" 242 """Ask the user whether build |rev| is good or bad."""
269 # Loop until we get a response that we can parse. 243 # Loop until we get a response that we can parse.
270 while True: 244 while True:
271 response = raw_input('\nBuild %d is [(g)ood/(b)ad]: ' % int(rev)) 245 response = raw_input('\nRevision %d is [(g)ood/(b)ad/(q)uit]: ' % int(rev))
272 if response and response in ('g', 'b'): 246 if response and response in ('g', 'b'):
273 return response == 'g' 247 return response == 'g'
248 if response and response == 'q':
249 raise SystemExit()
274 250
251 def Bisect(platform,
252 good_rev=0,
253 bad_rev=0,
254 try_args=(),
255 profile=None,
256 predicate=AskIsGoodBuild):
257 """Given known good and known bad revisions, run a binary search on all
258 archived revisions to determine the last known good revision.
275 259
276 def Bisect(revlist, 260 @param platform Which build to download/run ('mac', 'win', 'linux64', etc.).
277 context, 261 @param good_rev Number/tag of the last known good revision.
278 try_args=(), 262 @param bad_rev Number/tag of the first known bad revision.
279 profile='profile', 263 @param try_args A tuple of arguments to pass to the test application.
280 predicate=AskIsGoodBuild): 264 @param profile The name of the user profile to run with.
281 """Tries to find the exact commit where a regression was introduced by
282 running a binary search on all archived builds in a given revision range.
283
284 @param revlist A list of chromium revision numbers to check.
285 @param context A PathContext object.
286 @param try_args A tuple of arguments to pass to the predicate function.
287 @param profile The user profile with which to run chromium.
288 @param predicate A predicate function which returns True iff the argument 265 @param predicate A predicate function which returns True iff the argument
289 chromium revision is good. 266 chromium revision is good.
290 """ 267 """
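In outline, the loop below keeps bisecting the index range [good, bad] and, while the user is testing the current pivot, already downloads the two builds that could become the next pivot. A tiny self-contained sketch of the pivot arithmetic it relies on (hypothetical helper, illustrative indices):

    def next_pivots(good, pivot, bad):
      # Candidate pivots for whichever half of the range survives the verdict.
      down_pivot = (pivot - good) // 2 + good   # midpoint of (good, pivot)
      up_pivot = (bad - pivot) // 2 + pivot     # midpoint of (pivot, bad)
      return down_pivot, up_pivot

    # With indices good=0, pivot=4, bad=8 the loop pre-fetches revlist[2] and
    # revlist[6]; one download is cancelled once revision revlist[4] is judged.
    print next_pivots(0, 4, 8)   # prints (2, 6)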
291 268
269 if profile is None:
Robert Sesek 2011/07/25 16:13:07 |if not profile:| is more idiomatic
szager 2011/07/25 17:39:04 Done.
270 profile = 'profile'
271
272 context = PathContext(platform, good_rev, bad_rev)
273 cwd = os.getcwd()
274
275 revlist = context.GetRevList()
276
277 # Get a list of revisions to bisect across.
278 if len(revlist) < 2: # Don't have enough builds to bisect
279 msg = 'We don\'t have enough builds to bisect. revlist: %s' % revlist
280 raise RuntimeError(msg)
281
282 # Figure out our bookends and first pivot point; fetch the pivot revision
292 good = 0 283 good = 0
293 bad = len(revlist) - 1 284 bad = len(revlist) - 1
294 last_known_good_rev = revlist[good] 285 pivot = bad/2
Robert Sesek 2011/07/25 16:13:07 nit: Spaces around operators
szager 2011/07/25 17:39:04 Done.
295 first_known_bad_rev = revlist[bad] 286 rev = revlist[pivot]
287 zipfile = os.path.join(cwd, '%d-%s' % (rev, context.archive_name))
Robert Sesek 2011/07/25 16:13:07 You do this format enough places that this could b
szager 2011/07/25 17:39:04 I created a local function for this; I prefer not
Robert Sesek 2011/07/26 21:00:26 Not really true because it manipulates paths for t
288 FetchRevision(context, rev, zipfile)
296 289
297 # Binary search time! 290 # Binary search time!
298 while good < bad: 291 while zipfile and bad - good > 1:
299 candidates = revlist[good:bad] 292 print "iterating with good=%d bad=%d pivot=%d" % (
Robert Sesek 2011/07/25 16:13:07 Use proper capitalization, but do you think this l
szager 2011/07/25 17:39:04 Yeah, this is a bit TMI; I have removed it.
300 num_poss = len(candidates) 293 revlist[good], revlist[bad], revlist[pivot])
301 if num_poss > 10:
302 print('%d candidates. %d tries left.' %
303 (num_poss, round(math.log(num_poss, 2))))
304 else:
305 print('Candidates: %s' % revlist[good:bad])
306 294
307 # Cut the problem in half... 295 # Pre-fetch next two possible pivots
Robert Sesek 2011/07/25 16:13:07 This block of code is hard to follow. It should ha
szager 2011/07/25 17:39:04 Added this to the comments for the Bisect function
308 test = int((bad - good) / 2) + good 296 down_pivot = int((pivot - good) / 2) + good
Robert Sesek 2011/07/25 16:13:07 I don't really understand the naming rationale of
szager 2011/07/25 17:39:04 Hopefully the high-level comments will clarify thi
309 test_rev = revlist[test] 297 down_thread = None
298 if down_pivot != pivot and down_pivot != good :
Robert Sesek 2011/07/25 16:13:07 nit: no space before ':'. Here and elsewhere.
szager 2011/07/25 17:39:04 nit scratched.
299 down_rev = revlist[down_pivot]
300 zipfile_base = '%d-%s' % (down_rev, context.archive_name)
301 down_zipfile = os.path.join(cwd, zipfile_base)
Robert Sesek 2011/07/25 16:13:07 This should really be a method on PathContext
szager 2011/07/25 17:39:04 See above comment
302 down_event = threading.Event()
303 fetchargs = (context, down_rev, down_zipfile, down_event)
304 down_thread = threading.Thread(target=FetchRevision,
305 name='down_fetch',
306 args=fetchargs)
307 down_thread.start()
310 308
311 # Let the user give this rev a spin (in her own profile, if she wants). 309 up_pivot = int((bad - pivot) / 2) + pivot
312 TryRevision(context, test_rev, profile, try_args) 310 up_thread = None
313 if predicate(test_rev): 311 if up_pivot != pivot and up_pivot != bad :
314 last_known_good_rev = test_rev 312 up_rev = revlist[up_pivot]
315 good = test + 1 313 zipfile_base = '%d-%s' % (up_rev, context.archive_name)
316 else: 314 up_zipfile = os.path.join(cwd, zipfile_base)
317 bad = test 315 up_event = threading.Event()
316 fetchargs = (context, up_rev, up_zipfile, up_event)
317 up_thread = threading.Thread(target=FetchRevision,
318 name='up_fetch',
319 args=fetchargs)
320 up_thread.start()
318 321
319 return (last_known_good_rev, first_known_bad_rev) 322 # Run test on the pivot revision
323 (status, stdout, stderr) = RunRevision(context,
324 rev,
325 zipfile,
326 profile,
327 try_args)
328 os.unlink(zipfile)
329 zipfile = None
330 try:
331 if predicate(rev, status, stdout, stderr) :
332 good = pivot
333 if down_thread :
334 down_event.set() # Kill the download of older revision
Robert Sesek 2011/07/25 16:13:07 nit: two spaces before comments
szager 2011/07/25 17:39:04 nit scratched.
335 down_thread.join()
336 os.unlink(down_zipfile)
337 if up_thread :
338 print "Downloading revision %d..." % up_rev
339 up_thread.join() # Wait for newer revision to finish downloading
340 pivot = up_pivot
341 zipfile = up_zipfile
342 else :
343 bad = pivot
344 if up_thread :
345 up_event.set() # Kill download of newer revision
346 up_thread.join()
347 os.unlink(up_zipfile)
348 if down_thread :
349 print "Downloading revision %d..." % down_rev
350 down_thread.join() # Wait for older revision to finish downloading
351 pivot = down_pivot
352 zipfile = down_zipfile
353 except SystemExit:
354 for f in [down_zipfile, up_zipfile]:
355 try:
356 os.unlink(f)
357 except OSError:
358 pass
359 sys.exit(0)
360
361 rev = revlist[pivot]
362
363 return (revlist[good], revlist[bad])
320 364
321 365
322 def main(): 366 def main():
323 usage = ('%prog [options] [-- chromium-options]\n' 367 usage = ('%prog [options] [-- chromium-options]\n'
324 'Perform binary search on the snapshot builds.\n' 368 'Perform binary search on the snapshot builds.\n'
325 '\n' 369 '\n'
326 'Tip: add "-- --no-first-run" to bypass the first run prompts.') 370 'Tip: add "-- --no-first-run" to bypass the first run prompts.')
327 parser = optparse.OptionParser(usage=usage) 371 parser = optparse.OptionParser(usage=usage)
328 # Strangely, the default help output doesn't include the choice list. 372 # Strangely, the default help output doesn't include the choice list.
329 choices = ['mac', 'win', 'linux', 'linux64'] 373 choices = ['mac', 'win', 'linux', 'linux64']
(...skipping 17 matching lines...)
347 parser.print_help() 391 parser.print_help()
348 return 1 392 return 1
349 393
350 if opts.bad and opts.good and (opts.good > opts.bad): 394 if opts.bad and opts.good and (opts.good > opts.bad):
351 print ('The good revision (%d) must precede the bad revision (%d).\n' % 395 print ('The good revision (%d) must precede the bad revision (%d).\n' %
352 (opts.good, opts.bad)) 396 (opts.good, opts.bad))
353 parser.print_help() 397 parser.print_help()
354 return 1 398 return 1
355 399
356 # Create the context. Initialize 0 for the revisions as they are set below. 400 # Create the context. Initialize 0 for the revisions as they are set below.
357 context = PathContext(opts.archive, 0, 0, use_recent=False) 401 context = PathContext(opts.archive, 0, 0)
358 402
359 # Pick a starting point, try to get HEAD for this. 403 # Pick a starting point, try to get HEAD for this.
360 if opts.bad: 404 if opts.bad:
361 bad_rev = opts.bad 405 bad_rev = opts.bad
362 else: 406 else:
363 bad_rev = 0 407 bad_rev = 0
364 try: 408 try:
365 # Location of the latest build revision number 409 # Location of the latest build revision number
366 nh = urllib.urlopen(context.GetLastChangeURL()) 410 nh = urllib.urlopen(context.GetLastChangeURL())
367 latest = int(nh.read()) 411 latest = int(nh.read())
368 nh.close() 412 nh.close()
369 bad_rev = raw_input('Bad revision [HEAD:%d]: ' % latest) 413 bad_rev = raw_input('Bad revision [HEAD:%d]: ' % latest)
370 if (bad_rev == ''): 414 if (bad_rev == ''):
371 bad_rev = latest 415 bad_rev = latest
372 bad_rev = int(bad_rev) 416 bad_rev = int(bad_rev)
373 except Exception, e: 417 except Exception, e:
374 print('Could not determine latest revision. This could be bad...') 418 print('Could not determine latest revision. This could be bad...')
375 bad_rev = int(raw_input('Bad revision: ')) 419 bad_rev = int(raw_input('Bad revision: '))
376 420
377 # Find out when we were good. 421 # Find out when we were good.
378 if opts.good: 422 if opts.good:
379 good_rev = opts.good 423 good_rev = opts.good
380 else: 424 else:
381 good_rev = 0 425 good_rev = 0
382 try: 426 try:
383 good_rev = int(raw_input('Last known good [0]: ')) 427 good_rev = int(raw_input('Last known good [0]: '))
384 except Exception, e: 428 except Exception, e:
385 pass 429 pass
386 430
387 # Set the input parameters now that they've been validated.
388 context.good_revision = good_rev
389 context.bad_revision = bad_rev
390
391 # Get recent revision list and check whether it's sufficient.
392 all_revs_recent = map(int, ParseDirectoryIndexRecent(context))
393 all_revs_recent.sort()
394 # Skipping 0 since it might be deleted off the server soon:
395 all_revs_recent = all_revs_recent[1:]
396 oldest_recent_rev = all_revs_recent[0]
397 if good_rev >= oldest_recent_rev:
398 # The range is within recent builds, so switch on use_recent.
399 context.use_recent = True
400 elif bad_rev >= oldest_recent_rev:
401 # The range spans both old and recent builds.
402 # If oldest_recent_rev is good, we bisect the recent builds.
403 context.use_recent = True
404 TryRevision(context, oldest_recent_rev, opts.profile, args)
405 if AskIsGoodBuild(oldest_recent_rev):
406 # context.use_recent is True
407 context.good_revision = oldest_recent_rev
408 else:
409 context.use_recent = False
410 context.bad_revision = oldest_recent_rev
411
412 all_revs = []
413 if context.use_recent:
414 all_revs = all_revs_recent
415 else:
416 all_revs = map(int, ParseDirectoryIndex(context))
417
418 # Filter list of revisions to bisect across.
419 revlist = FilterRevList(context, all_revs)
420 if len(revlist) < 2: # Don't have enough builds to bisect
421 print 'We don\'t have enough builds to bisect. revlist: %s' % revlist
422 sys.exit(1)
423
424 (last_known_good_rev, first_known_bad_rev) = Bisect( 431 (last_known_good_rev, first_known_bad_rev) = Bisect(
425 revlist, context, args, opts.profile) 432 opts.archive, good_rev, bad_rev, args, opts.profile)
426 433
427 # We're done. Let the user know the results in an official manner. 434 # We're done. Let the user know the results in an official manner.
428 print('You are probably looking for build %d.' % first_known_bad_rev) 435 print('You are probably looking for build %d.' % first_known_bad_rev)
429 print('CHANGELOG URL:') 436 print('CHANGELOG URL:')
430 print(CHANGELOG_URL % (last_known_good_rev, first_known_bad_rev)) 437 print(CHANGELOG_URL % (last_known_good_rev, first_known_bad_rev))
431 print('Built at revision:') 438 print('Built at revision:')
432 print(BUILD_VIEWVC_URL % first_known_bad_rev) 439 print(BUILD_VIEWVC_URL % first_known_bad_rev)
433 440
434 if __name__ == '__main__': 441 if __name__ == '__main__':
435 sys.exit(main()) 442 sys.exit(main())
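Because Bisect() now takes a predicate that receives each pivot's exit status and output, it can also be driven without a human in the loop. A minimal sketch, assuming the file were importable as a module named bisect_builds (the hyphenated filename would need renaming or an imp.load_source() shim) and using made-up revision numbers:

    import bisect_builds

    def exits_cleanly(rev, status, stdout, stderr):
      # Hypothetical automated check: treat a clean exit as a good revision.
      return status == 0

    last_good, first_bad = bisect_builds.Bisect('linux64',
                                                good_rev=92000,
                                                bad_rev=92976,
                                                try_args=['--no-first-run'],
                                                predicate=exits_cleanly)
    print 'Regression range: %d:%d' % (last_good, first_bad)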