Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(171)

Side by Side Diff: third_party/WebKit/Tools/Scripts/webkitpy/common/net/buildbot.py

Issue 2112133002: Greatly simplify buildbot module and remove usage of master name. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Rebased Created 4 years, 5 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 # Copyright (c) 2009, Google Inc. All rights reserved. 1 # Copyright (c) 2009, Google Inc. All rights reserved.
2 # 2 #
3 # Redistribution and use in source and binary forms, with or without 3 # Redistribution and use in source and binary forms, with or without
4 # modification, are permitted provided that the following conditions are 4 # modification, are permitted provided that the following conditions are
5 # met: 5 # met:
6 # 6 #
7 # * Redistributions of source code must retain the above copyright 7 # * Redistributions of source code must retain the above copyright
8 # notice, this list of conditions and the following disclaimer. 8 # notice, this list of conditions and the following disclaimer.
9 # * Redistributions in binary form must reproduce the above 9 # * Redistributions in binary form must reproduce the above
10 # copyright notice, this list of conditions and the following disclaimer 10 # copyright notice, this list of conditions and the following disclaimer
11 # in the documentation and/or other materials provided with the 11 # in the documentation and/or other materials provided with the
12 # distribution. 12 # distribution.
13 # * Neither the name of Google Inc. nor the names of its 13 # * Neither the name of Google Inc. nor the names of its
14 # contributors may be used to endorse or promote products derived from 14 # contributors may be used to endorse or promote products derived from
15 # this software without specific prior written permission. 15 # this software without specific prior written permission.
16 # 16 #
17 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 17 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
18 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 18 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
19 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 19 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
20 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 20 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
21 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 21 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
22 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 22 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
23 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 23 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
24 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 24 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
25 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 26 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 28
29 import json
30 import re
31 import urllib
32 import urllib2 29 import urllib2
33 30
34 import webkitpy.common.config.urls as config_urls 31 import webkitpy.common.config.urls as config_urls
35 from webkitpy.common.memoized import memoized 32 from webkitpy.common.memoized import memoized
36 from webkitpy.common.net.layouttestresults import LayoutTestResults 33 from webkitpy.common.net.layouttestresults import LayoutTestResults
37 from webkitpy.common.net.networktransaction import NetworkTransaction 34 from webkitpy.common.net.networktransaction import NetworkTransaction
38 from webkitpy.common.system.logutils import get_logger 35 from webkitpy.common.system.logutils import get_logger
39 from webkitpy.thirdparty.BeautifulSoup import BeautifulSoup
40 36
41 37
# Module-level logger for this file (webkitpy logging helper).
_log = get_logger(__file__)
43 39
44 40
class Builder(object):
    """A single builder (bot) whose archived layout test results can be fetched.

    Instances are normally created via BuildBot.builder_with_name().
    """

    def __init__(self, builder_name, buildbot):
        self._name = builder_name
        self._buildbot = buildbot

    def name(self):
        """Returns the builder's display name."""
        return self._name

    def results_url(self):
        """Returns the base URL under which this builder archives its results."""
        return config_urls.chromium_results_url_base_for_builder(self._name)

    def latest_layout_test_results_url(self):
        """Returns the URL of the builder's accumulated (most recent) results."""
        return config_urls.chromium_accumulated_results_url_base_for_builder(self._name)

    @memoized
    def latest_layout_test_results(self):
        """Fetches (and caches, via @memoized) the latest LayoutTestResults."""
        return self.fetch_layout_test_results(self.latest_layout_test_results_url())

    def _fetch_file_from_results(self, results_url, file_name):
        """Returns the body of results_url/file_name, or None.

        It seems urlopen can return None if the url redirects and then
        returns 404.
        """
        response = urllib2.urlopen("%s/%s" % (results_url, file_name))
        if not response:
            return None
        # urlopen returns a file-like object which sometimes works fine with
        # str() but sometimes is a addinfourl object. In either case calling
        # read() is correct.
        return response.read()

    def fetch_layout_test_results(self, results_url):
        """Builds a LayoutTestResults object from the files under results_url.

        404s are converted to None by NetworkTransaction; when LAST_CHANGE is
        missing the results file is discarded as well.
        """
        # FIXME: This should cache that the result was a 404 and stop hitting
        # the network.
        def fetch(file_name):
            return NetworkTransaction(convert_404_to_None=True).run(
                lambda: self._fetch_file_from_results(results_url, file_name))

        results_file = fetch("failing_results.json")
        revision = fetch("LAST_CHANGE")
        if not revision:
            results_file = None
        return LayoutTestResults.results_from_string(results_file, revision)

    def build(self, build_number):
        """Returns a Build object for the given build number on this builder."""
        return Build(self, build_number=build_number)
188 82
class Build(object):
    """One numbered build of a particular Builder."""

    def __init__(self, builder, build_number):
        self._builder = builder
        self._number = build_number

    def results_url(self):
        """Returns the URL of this build's layout-test-results directory."""
        base = self._builder.results_url()
        return "%s/%s/layout-test-results" % (base, self._number)

    def builder(self):
        """Returns the Builder this build belongs to."""
        return self._builder
class BuildBot(object):
    """Factory for Builder objects."""

    def builder_with_name(self, builder_name):
        """Returns a Builder for builder_name, bound to this BuildBot."""
        return Builder(builder_name, self)
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698