OLD | NEW |
1 # coding=utf8 | 1 # coding=utf8 |
2 # Copyright (c) 2011 The Chromium Authors. All rights reserved. | 2 # Copyright (c) 2011 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 """Sends patches to the Try server and reads back results. | 5 """Sends patches to the Try server and reads back results. |
6 """ | 6 """ |
7 | 7 |
8 import datetime | 8 import datetime |
9 import logging | 9 import logging |
10 import urllib2 | 10 import urllib2 |
(...skipping 83 matching lines...)
94 self.bad_revisions = None | 94 self.bad_revisions = None |
95 | 95 |
96 def update_all(self): | 96 def update_all(self): |
97 """Grabs the status of all try jobs for all builders on try server's | 97 """Grabs the status of all try jobs for all builders on try server's |
98 cache. | 98 cache. |
99 | 99 |
100 Does one request per builder since it's quite heavyweight. | 100 Does one request per builder since it's quite heavyweight. |
101 """ | 101 """ |
102 # TODO(maruel): Use listening StatusPush instead of hammering the try | 102 # TODO(maruel): Use listening StatusPush instead of hammering the try |
103 # server. | 103 # server. |
104 logging.info('Fetching all try jobs status') | |
105 for builder in self.builders: | 104 for builder in self.builders: |
106 try: | 105 try: |
107 new_jobs = self._urlreadasjson('builders/%s/builds/_all' % builder) | 106 new_jobs = self._urlreadasjson('builders/%s/builds/_all' % builder) |
108 logging.debug('Received %d builds from %s' % (len(new_jobs), builder)) | 107 logging.debug('Received %d builds from %s' % (len(new_jobs), builder)) |
109 self.jobs[builder].update(new_jobs) | 108 self.jobs[builder].update(new_jobs) |
110 # Only keep the last 1000. | 109 # Only keep the last 1000. |
111 if len(self.jobs[builder]) > 1000: | 110 if len(self.jobs[builder]) > 1000: |
112 keys = sorted(self.jobs[builder].keys()) | 111 keys = sorted(self.jobs[builder].keys()) |
113 for key in keys[:-1000]: | 112 for key in keys[:-1000]: |
114 del self.jobs[builder][key] | 113 del self.jobs[builder][key] |
115 except urllib2.HTTPError, e: | 114 except urllib2.HTTPError, e: |
116 if e.code != 503: | 115 if e.code != 503: |
117 raise | 116 raise |
118 # The try server is rebooting. We'll get more data once the try server | 117 # The try server is rebooting. We'll get more data once the try server |
119 # is back. | 118 # is back. |
120 | 119 |
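For reference, a minimal standalone sketch of the cache-pruning step in update_all() above. prune_builds and its arguments are illustrative names, not part of this change, and the sketch sorts the keys numerically (the code above sorts the unicode keys directly):

    def prune_builds(builds, limit=1000):
        # Keep only the 'limit' newest builds; keys are unicode build numbers.
        if len(builds) > limit:
            for key in sorted(builds, key=int)[:-limit]:
                del builds[key]

    builds = {u'1': {}, u'2': {}, u'3': {}}
    prune_builds(builds, limit=2)
    # builds now only holds builds u'2' and u'3'.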
121 def get_job(self, job, incremental): | 120 def get_job(self, job, incremental): |
122 """Gets the build status for a try job.""" | 121 """Gets the build status for a try job.""" |
123 if incremental: | 122 if incremental or unicode(job.build) not in self.jobs[job.builder]: |
124 url = 'builders/%s/builds/%s' % (job.builder, job.build) | 123 url = 'builders/%s/builds/%s' % (job.builder, job.build) |
125 self.jobs[job.builder][unicode(job.build)] = self._urlreadasjson(str(url)) | 124 try: |
| 125 self.jobs[job.builder][unicode(job.build)] = self._urlreadasjson( |
| 126 str(url)) |
| 127 except urllib2.HTTPError: |
| 128 # The try job is way too old. |
| 129 return None |
126 return self.jobs[job.builder][unicode(job.build)] | 130 return self.jobs[job.builder][unicode(job.build)] |
127 | 131 |
128 def find_job(self, job, previous_jobs, reason): | 132 def find_job(self, job, previous_jobs, reason): |
129 """Finds a build status dict corresponding to a job.""" | 133 """Finds a build status dict corresponding to a job.""" |
130 for build_number, build in self.jobs[job.builder].iteritems(): | 134 for build_number, build in self.jobs[job.builder].iteritems(): |
131 def get_rev(): | 135 def get_rev(): |
132 return build.get(u'sourceStamp', {}).get(u'changes', [{}])[0].get( | 136 return build.get(u'sourceStamp', {}).get(u'changes', [{}])[0].get( |
133 u'revision', None) | 137 u'revision', None) |
134 | 138 |
135 if (build.get(u'reason', None) == reason and | 139 if (build.get(u'reason', None) == reason and |
136 int(build_number) not in previous_jobs and | 140 int(build_number) not in previous_jobs and |
137 unicode(get_rev()) == unicode(job.revision)): | 141 unicode(get_rev()) == unicode(job.revision)): |
138 if build: | 142 if build: |
139 # Note the build number to remember it started. | 143 # Note the build number to remember it started. |
140 job.build = build_number | 144 job.build = build_number |
141 return build | 145 return build |
142 | 146 |
143 def update_known_revisions(self): | 147 def update_known_revisions(self): |
144 # Look for any revision that passed all jobs, patch or not. That is not | 148 # Look for any revision that passed all jobs, patch or not. That is not |
145 # exactly a valid check but much more useful in practice. | 149 # exactly a valid check but much more useful in practice. |
146 if self.good_revisions is None: | 150 if self.good_revisions is None: |
147 self.good_revisions = set() | 151 self.good_revisions = set() |
148 self.bad_revisions = set() | 152 self.bad_revisions = set() |
149 successes = [ | 153 successes = [] |
150 set(job[u'sourceStamp'][u'changes'][0][u'revision'] | 154 for jobs in self.jobs.itervalues(): |
151 for job in jobs.itervalues() | 155 successes.append(set()) |
152 if job.get(u'results') in (SUCCESS, WARNINGS, SKIPPED)) | 156 for job in jobs.itervalues(): |
153 for jobs in self.jobs.itervalues()] | 157 if (job.get(u'results', None) in (SUCCESS, WARNINGS, SKIPPED) and |
| 158 len(job.get(u'sourceStamp', {}).get(u'changes', [])) >= 1): |
| 159 successes[-1].add(job[u'sourceStamp'][u'changes'][0][u'revision']) |
154 good_revisions = reduce(lambda x, y: x & y, successes) | 160 good_revisions = reduce(lambda x, y: x & y, successes) |
155 new_good_revisions = good_revisions - self.good_revisions | 161 new_good_revisions = good_revisions - self.good_revisions |
156 if new_good_revisions: | 162 if new_good_revisions: |
157 logging.info( | 163 logging.info( |
158 'New good revisions: %s' % ', '.join(map(str, new_good_revisions))) | 164 'New good revisions: %s' % ', '.join(map(str, new_good_revisions))) |
159 self.good_revisions |= new_good_revisions | 165 self.good_revisions |= new_good_revisions |
160 | 166 |
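To illustrate the reduce() above: a revision is considered good only if it is in the success set of every builder, i.e. the intersection of the per-builder sets. A small worked example with made-up revision numbers:

    successes = [
        set([u'1000', u'1001', u'1002']),  # builder A
        set([u'1001', u'1002']),           # builder B
    ]
    good_revisions = reduce(lambda x, y: x & y, successes)
    # good_revisions == set([u'1001', u'1002'])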
161 # Look for any revision that failed on a try job without patch. Mark it as | 167 # Look for any revision that failed on a try job without patch. Mark it as |
162 # bad. That has bad side effects with regard to flaky tests. | 168 # bad. That has bad side effects with regard to flaky tests. |
163 failures = [ | 169 failures = [ |
(...skipping 100 matching lines...)
264 '--bot', ','.join(self.builders), | 270 '--bot', ','.join(self.builders), |
265 '--revision', str(revision), | 271 '--revision', str(revision), |
266 '--name', job_name, | 272 '--name', job_name, |
267 '--user', self.commit_user.split('@', 1)[0]] | 273 '--user', self.commit_user.split('@', 1)[0]] |
268 cmd.extend(extra_args or []) | 274 cmd.extend(extra_args or []) |
269 cmd.extend(self.extra_flags) | 275 cmd.extend(self.extra_flags) |
270 if self.tests: | 276 if self.tests: |
271 cmd.extend(('--testfilter', ','.join(self.tests))) | 277 cmd.extend(('--testfilter', ','.join(self.tests))) |
272 if isinstance(emails, (list, tuple)): | 278 if isinstance(emails, (list, tuple)): |
273 emails = ','.join(emails) | 279 emails = ','.join(emails) |
| 280 if isinstance(emails, unicode): |
| 281 emails = str(emails) |
274 if isinstance(emails, str): | 282 if isinstance(emails, str): |
275 cmd.extend(('--email', emails)) | 283 cmd.extend(('--email', emails)) |
276 else: | 284 else: |
277 assert emails is None | 285 assert emails is None, repr(emails) |
278 trychange.TryChange( | 286 trychange.TryChange( |
279 cmd, | 287 cmd, |
280 file_list=[], | 288 file_list=[], |
281 swallow_exception=True) | 289 swallow_exception=True) |
282 | 290 |
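For reference, a small worked example of the email normalization above; the addresses are made up. A list or tuple is collapsed into a single comma-separated value, and a unicode value is converted to a plain str before being passed to --email:

    emails = [u'joe@example.com', u'jane@example.com']
    if isinstance(emails, (list, tuple)):
        emails = ','.join(emails)  # -> u'joe@example.com,jane@example.com'
    if isinstance(emails, unicode):
        emails = str(emails)       # plain str; safe because addresses are ASCII
    # emails == 'joe@example.com,jane@example.com'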
283 def _can_update_incrementally(self, queue): | 291 def _can_update_incrementally(self, queue): |
284 """Determines if the slow code path must be used. | 292 """Determines if the slow code path must be used. |
285 | 293 |
286 It's slow on the try server, not here. | 294 It's slow on the try server, not here. |
287 """ | 295 """ |
288 if self.status.good_revisions is None: | 296 if self.status.good_revisions is None: |
| 297 logging.info('Fetching all try jobs status because of good_revisions') |
289 return False | 298 return False |
290 # Fetching the build status for all try jobs is expensive, so we try to | 299 # Fetching the build status for all try jobs is expensive, so we try to |
291 # fetch the status just for the jobs we care about. We need the full set | 300 # fetch the status just for the jobs we care about. We need the full set |
292 # only when we have newly pending commits and we don't know their try job | 301 # only when we have newly pending commits and we don't know their try job |
293 # build numbers. TODO(maruel): revisit when rietveld gives us better APIs | 302 # build numbers. TODO(maruel): revisit when rietveld gives us better APIs |
294 # to get the build numbers from rietveld instead of from the try server. | 303 # to get the build numbers from rietveld instead of from the try server. |
295 for _, jobs in self.loop(queue, TryJobs): | 304 for pending, jobs in self.loop(queue, TryJobs): |
296 if not jobs.try_jobs or jobs.get_state() != base.PROCESSING: | 305 if not jobs.try_jobs or jobs.get_state() != base.PROCESSING: |
297 continue | 306 continue |
298 if any(True for job in jobs.try_jobs if job.build is None): | 307 if any(True for job in jobs.try_jobs if job.build is None): |
299 # We need to regenerate the whole data anyway. | 308 # We need to regenerate the whole data anyway. |
| 309 logging.info( |
| 310 'Fetching all try jobs status because of %s' % |
| 311 pending.pending_name()) |
300 return False | 312 return False |
301 return True | 313 return True |
302 | 314 |
303 def _find_job(self, pending, jobs, job): | 315 def _find_job(self, pending, jobs, job): |
304 """Searches on the try server if the try job for |job| has started.""" | 316 """Searches on the try server if the try job for |job| has started.""" |
305 reason = u'\'%s: %s\' try job' % ( | 317 reason = u'\'%s: %s\' try job' % ( |
306 self.commit_user.split('@', 1)[0], pending.pending_name()) | 318 self.commit_user.split('@', 1)[0], pending.pending_name()) |
307 # Search for try jobs already sent for this issue. This happens if | 319 # Search for try jobs already sent for this issue. This happens if |
308 # the commit bit was set, the try job fails because of a sick slave | 320 # the commit bit was set, the try job fails because of a sick slave |
309 # and the user checks it again. | 321 # and the user checks it again. |
(...skipping 20 matching lines...)
330 job.builder, | 342 job.builder, |
331 self.status.build_status_url(job))) | 343 self.status.build_status_url(job))) |
332 jobs.error_message = msg | 344 jobs.error_message = msg |
333 else: | 345 else: |
334 msg = (u'Try job status for %s on %s: %s\n%s' % ( | 346 msg = (u'Try job status for %s on %s: %s\n%s' % ( |
335 pending.pending_name(), | 347 pending.pending_name(), |
336 job.builder, | 348 job.builder, |
337 job.result, | 349 job.result, |
338 self.status.build_status_url(job))) | 350 self.status.build_status_url(job))) |
339 logging.info(msg) | 351 logging.info(msg) |