Chromium Code Reviews

File: tools/perf/dashboard/buildbot.py

Issue 976823003: [telemetry] DO NOT SUBMIT: WIP: Helper modules for querying buildbot perf data. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 5 years, 5 months ago
import collections
import cPickle
import json
import logging
import os
import re
import socket
import time
import urllib
import urllib2


PENDING = None
SUCCESS = 0
WARNING = 1
FAILURE = 2
EXCEPTION = 4
SLAVE_LOST = 5


BASE_URL = 'http://build.chromium.org/p'
CACHE_FILE_NAME = 'cache.dat'


StackTraceLine = collections.namedtuple(
    'StackTraceLine', ('file', 'function', 'line', 'source'))


def _FetchData(master, url):
  url = '%s/%s/json/%s' % (BASE_URL, master, url)
  try:
    logging.info('Retrieving ' + url)
    return json.load(urllib2.urlopen(url))
  except (urllib2.HTTPError, socket.error):
    # Could be intermittent; try again.
    try:
      return json.load(urllib2.urlopen(url))
    except (urllib2.HTTPError, socket.error):
      logging.error('Error retrieving URL ' + url)
      raise
  except:
    logging.error('Error retrieving URL ' + url)
    raise

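# Example (illustration only; 'chromium.perf' is just a sample master name):
# _FetchData('chromium.perf', 'builders') would fetch and decode
# http://build.chromium.org/p/chromium.perf/json/builders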

def Builders(master):
  builders = {}

  # Load builders from cache file.
  if os.path.exists(master):
    start_time = time.time()
    for builder_name in os.listdir(master):
      cache_file_path = os.path.join(master, builder_name, CACHE_FILE_NAME)
      if os.path.exists(cache_file_path):
        with open(cache_file_path, 'rb') as cache_file:
          try:
            builders[builder_name] = cPickle.load(cache_file)
          except EOFError:
            logging.error('File is corrupted: %s', cache_file_path)
            raise
    logging.info('Loaded builder caches in %0.2f seconds.',
                 time.time() - start_time)

  return builders


def Update(master, builders):
  # Update builders with latest information.
  builder_data = _FetchData(master, 'builders')
  for builder_name, builder_info in builder_data.iteritems():
    if builder_name in builders:
      builders[builder_name].Update(builder_info)
    else:
      builders[builder_name] = Builder(master, builder_name, builder_info)

  return builders


class Builder(object):
  def __init__(self, master, name, data):
    self._master = master
    self._name = name

    self.Update(data)

    self._builds = {}

  def __setstate__(self, state):
    self.__dict__ = state  # pylint: disable=attribute-defined-outside-init
    if not hasattr(self, '_builds'):
      self._builds = {}

  def __lt__(self, other):
    return self.name < other.name

  def __str__(self):
    return self.name

  def __getitem__(self, key):
    if not isinstance(key, int):
      raise TypeError('build numbers must be integers, not %s' %
                      type(key).__name__)

    self._FetchBuilds(key)
    return self._builds[key]

  def _FetchBuilds(self, *build_numbers):
    """Download build details, if not already cached.

    Returns:
      A tuple of downloaded build numbers.
    """
    build_numbers = tuple(build_number for build_number in build_numbers
                          if not (build_number in self._builds and
                                  self._builds[build_number].complete))
    if not build_numbers:
      return ()

    for build_number in build_numbers:
      if build_number < 0:
        raise ValueError('Invalid build number: %d' % build_number)

    build_query = urllib.urlencode(
        [('select', build) for build in build_numbers])
    url = 'builders/%s/builds/?%s' % (urllib.quote(self.name), build_query)
    builds = _FetchData(self.master, url)
    for build_info in builds.itervalues():
      self._builds[build_info['number']] = Build(self.master, build_info)

    self._Cache()

    return build_numbers

  def FetchRecentBuilds(self, number_of_builds):
    min_build = max(self.last_build - number_of_builds, -1)
    return self._FetchBuilds(*xrange(self.last_build, min_build, -1))

  def Update(self, data=None):
    if not data:
      data = _FetchData(self.master, 'builders/%s' % urllib.quote(self.name))
    self._state = data['state']
    self._pending_build_count = data['pendingBuilds']
    self._current_builds = tuple(data['currentBuilds'])
    self._cached_builds = tuple(data['cachedBuilds'])
    self._slaves = tuple(data['slaves'])

    self._Cache()

  def _Cache(self):
    cache_dir_path = os.path.join(self.master, self.name)
    if not os.path.exists(cache_dir_path):
      os.makedirs(cache_dir_path)
    cache_file_path = os.path.join(cache_dir_path, CACHE_FILE_NAME)
    with open(cache_file_path, 'wb') as cache_file:
      cPickle.dump(self, cache_file, -1)

  def LastBuilds(self, count):
    min_build = max(self.last_build - count, -1)
    for build_number in xrange(self.last_build, min_build, -1):
      yield self._builds[build_number]

  @property
  def master(self):
    return self._master

  @property
  def name(self):
    return self._name

  @property
  def state(self):
    return self._state

  @property
  def pending_build_count(self):
    return self._pending_build_count

  @property
  def current_builds(self):
    """List of build numbers currently building.

    There may be multiple entries if there are multiple build slaves."""
    return self._current_builds

  @property
  def cached_builds(self):
187 """Builds whose data are visible on the master in increasing order.
188
189 More builds may be available than this."""
    return self._cached_builds

  @property
  def last_build(self):
    """Last completed build."""
    for build_number in reversed(self.cached_builds):
      if build_number not in self.current_builds:
        return build_number
    return None

  @property
  def slaves(self):
    return self._slaves


class Build(object):
  def __init__(self, master, data):
    self._master = master
    self._builder_name = data['builderName']
    self._number = data['number']
    self._complete = not ('currentStep' in data and data['currentStep'])
    self._start_time, self._end_time = data['times']

    self._steps = {step_info['name']:
        Step(self._master, self._builder_name, self._number, step_info)
        for step_info in data['steps']}

  def __str__(self):
    return str(self.number)

  def __lt__(self, other):
    return self.number < other.number

  @property
  def builder_name(self):
    return self._builder_name

  @property
  def number(self):
    return self._number

  @property
  def complete(self):
    return self._complete

  @property
  def start_time(self):
    return self._start_time

  @property
  def end_time(self):
    return self._end_time

  @property
  def steps(self):
    return self._steps


def _ParseTraceFromLog(log):
  """Search the log for a stack trace and return a structured representation.

  This function supports both default Python-style stacks and Telemetry-style
  stacks. It returns the first stack trace found in the log; sometimes a bug
  leads to a cascade of failures, so the first one is usually the root cause.
  """
  log_iterator = iter(log.splitlines())
  for line in log_iterator:
    if line == 'Traceback (most recent call last):':
      break
  else:
    return (None, None)

  stack_trace = []
  while True:
    line = log_iterator.next()
    match1 = re.match(r'\s*File "(?P<file>.+)", line (?P<line>[0-9]+), '
                      'in (?P<function>.+)', line)
    match2 = re.match(r'\s*(?P<function>.+) at '
                      '(?P<file>.+):(?P<line>[0-9]+)', line)
    match = match1 or match2
    if not match:
      exception = line
      break
    trace_line = match.groupdict()
    # Use the base name, because the path will be different
    # across platforms and configurations.
    file_base_name = trace_line['file'].split('/')[-1].split('\\')[-1]
    source = log_iterator.next().strip()
    stack_trace.append(StackTraceLine(
        file_base_name, trace_line['function'], trace_line['line'], source))

  return tuple(stack_trace), exception
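# Illustrative example with a made-up log fragment (not from a real build):
#
# Traceback (most recent call last):
#   File "/b/build/telemetry/page_runner.py", line 42, in RunPage
#     RaisePageFailure()
# PageFailure: timed out
#
# For that input, _ParseTraceFromLog returns
#   ((StackTraceLine(file='page_runner.py', function='RunPage', line='42',
#                    source='RaisePageFailure()'),),
#    'PageFailure: timed out')
# Only the base file name is kept, and the line number stays a string.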

class Step(object):
  def __init__(self, master, builder_name, build_number, data):
    self._master = master
    self._builder_name = builder_name
    self._build_number = build_number
    self._name = data['name']
    self._result = data['results'][0]
    self._start_time, self._end_time = data['times']

    self._log_link = None
    self._results_link = None
    for link_name, link_url in data['logs']:
      if link_name == 'stdio':
        self._log_link = link_url + '/text'
      elif link_name == 'json.output':
        self._results_link = link_url + '/text'

    self._log = None
    self._results = None
    self._stack_trace = None

  def __getstate__(self):
    return {
        '_master': self._master,
        '_builder_name': self._builder_name,
        '_build_number': self._build_number,
        '_name': self._name,
        '_result': self._result,
        '_start_time': self._start_time,
        '_end_time': self._end_time,
        '_log_link': self._log_link,
        '_results_link': self._results_link,
    }

  def __setstate__(self, state):
    self.__dict__ = state  # pylint: disable=attribute-defined-outside-init
    self._log = None
    self._results = None
    self._stack_trace = None

  def __str__(self):
    return self.name

  @property
  def name(self):
    return self._name

  @property
  def result(self):
    return self._result

  @property
  def start_time(self):
    return self._start_time

  @property
  def end_time(self):
    return self._end_time

  @property
  def log_link(self):
    return self._log_link

  @property
  def results_link(self):
    return self._results_link

  @property
  def log(self):
    if self._log is None:
      if not self.log_link:
        return None
      cache_file_path = os.path.join(
          self._master, self._builder_name,
          str(self._build_number), self._name, 'log')
      if os.path.exists(cache_file_path):
        # Load cache file, if it exists.
        with open(cache_file_path, 'rb') as cache_file:
          self._log = cache_file.read()
      else:
        # Otherwise, download it.
        logging.info('Retrieving ' + self.log_link)
        try:
          data = urllib2.urlopen(self.log_link).read()
        except (urllib2.HTTPError, socket.error):
          # Could be intermittent; try again.
          try:
            data = urllib2.urlopen(self.log_link).read()
          except (urllib2.HTTPError, socket.error):
            logging.error('Error retrieving URL ' + self.log_link)
            raise
        except:
          logging.error('Error retrieving URL ' + self.log_link)
          raise
        # And cache the newly downloaded data.
        cache_dir_path = os.path.dirname(cache_file_path)
        if not os.path.exists(cache_dir_path):
          os.makedirs(cache_dir_path)
        with open(cache_file_path, 'wb') as cache_file:
          cache_file.write(data)
        self._log = data
    return self._log

  @property
  def results(self):
    if self._results is None:
      if not self.results_link:
        return None
      cache_file_path = os.path.join(
          self._master, self._builder_name,
          str(self._build_number), self._name, 'results')
      if os.path.exists(cache_file_path):
        # Load cache file, if it exists.
        try:
          with open(cache_file_path, 'rb') as cache_file:
            self._results = cPickle.load(cache_file)
        except EOFError:
          os.remove(cache_file_path)
          return self.results
      else:
        # Otherwise, download it.
        logging.info('Retrieving ' + self.results_link)
        try:
          data = json.load(urllib2.urlopen(self.results_link))
        except (urllib2.HTTPError, socket.error):
          # Could be intermittent; try again.
          try:
            data = json.load(urllib2.urlopen(self.results_link))
          except (urllib2.HTTPError, socket.error):
            logging.error('Error retrieving URL ' + self.results_link)
            raise
        except ValueError:
          # If the build had an exception, the results might not be valid.
          data = None
        except:
          logging.error('Error retrieving URL ' + self.results_link)
          raise
        # And cache the newly downloaded data.
        cache_dir_path = os.path.dirname(cache_file_path)
        if not os.path.exists(cache_dir_path):
          os.makedirs(cache_dir_path)
        with open(cache_file_path, 'wb') as cache_file:
          cPickle.dump(data, cache_file, -1)
        self._results = data
    return self._results

  @property
  def stack_trace(self):
    if self._stack_trace is None:
      self._stack_trace = _ParseTraceFromLog(self.log)
    return self._stack_trace

  @property
  def chrome_stack_trace(self):
    raise NotImplementedError()
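For reviewers, a rough usage sketch of the API in this patch set follows. It is not part of the change; the master and builder names below are placeholders, and it assumes the module is importable as buildbot from the dashboard directory.

# Usage sketch only, not part of this change.
import buildbot

# Placeholder names for illustration; substitute a real master/builder pair.
MASTER = 'chromium.perf'
BUILDER = 'Linux Perf (1)'

# Load any locally cached builders, then refresh them from the master.
builders = buildbot.Update(MASTER, buildbot.Builders(MASTER))
builder = builders[BUILDER]

# Download details for the most recent builds and look for failing steps.
builder.FetchRecentBuilds(5)
for build in builder.LastBuilds(5):
  for step in build.steps.itervalues():
    if step.result not in (buildbot.SUCCESS, buildbot.WARNING):
      print('Build %s, step %s: result %s' % (build, step, step.result))
      stack_trace, exception = step.stack_trace
      if stack_trace:
        for frame in stack_trace:
          print('  %s:%s in %s' % (frame.file, frame.line, frame.function))
        print('  %s' % exception)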