OLD | NEW |
---|---|
1 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
4 | 4 |
5 import datetime | 5 import datetime |
6 import hashlib | 6 import hashlib |
7 import json | 7 import json |
8 import os | 8 import os |
9 import pytz | 9 import pytz |
10 import time | 10 import time |
(...skipping 212 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
223 | 223 |
224 Returns: | 224 Returns: |
225 A deferred object to be called once the operation completes. | 225 A deferred object to be called once the operation completes. |
226 """ | 226 """ |
227 | 227 |
228 log.msg('[RPWC] Poll started') | 228 log.msg('[RPWC] Poll started') |
229 log.msg('[RPWC] Downloading %s...' % self._pending_jobs_url) | 229 log.msg('[RPWC] Downloading %s...' % self._pending_jobs_url) |
230 pollDeferred = client.getPage(self._pending_jobs_url, agent='buildbot', | 230 pollDeferred = client.getPage(self._pending_jobs_url, agent='buildbot', |
231 timeout=2*60) | 231 timeout=2*60) |
232 pollDeferred.addCallback(self._ProcessResults) | 232 pollDeferred.addCallback(self._ProcessResults) |
233 pollDeferred.addErrback(log.err, 'error in RietveldPollerWithCache') | 233 pollDeferred.addErrback(log.err, '[RPWC] error') |
234 return pollDeferred | 234 return pollDeferred |
235 | 235 |
236 def setServiceParent(self, parent): | 236 def setServiceParent(self, parent): |
237 base.PollingChangeSource.setServiceParent(self, parent) | 237 base.PollingChangeSource.setServiceParent(self, parent) |
238 self._try_job_rietveld = parent | 238 self._try_job_rietveld = parent |
239 | 239 |
240 @defer.inlineCallbacks | 240 @defer.inlineCallbacks |
241 def _InitProcessedKeysCache(self): | 241 def _InitProcessedKeysCache(self): |
242 log.msg('[RPWC] Initializing processed keys cache...') | 242 log.msg('[RPWC] Initializing processed keys cache...') |
243 | 243 |
244 # Get all BuildBot build requests. | 244 # Get all BuildBot build requests. |
245 brdicts = yield self.master.db.buildrequests.getBuildRequests() | 245 brdicts = yield self.master.db.buildrequests.getBuildRequests() |
246 | 246 |
247 log.msg('[RPWC] Received build request dicts') | |
248 | |
247 def asNaiveUTC(dt): | 249 def asNaiveUTC(dt): |
248 if dt is None: | 250 if dt is None: |
249 return datetime.datetime.now() | 251 return datetime.datetime.now() |
250 if dt.tzinfo is None: | 252 if dt.tzinfo is None: |
251 return dt | 253 return dt |
252 utc_datetime = dt.astimezone(pytz.utc) | 254 utc_datetime = dt.astimezone(pytz.utc) |
253 return utc_datetime.replace(tzinfo=None) | 255 return utc_datetime.replace(tzinfo=None) |
254 | 256 |
255 # Compose a map of buildset ids to the submission timestamp. | 257 # Compose a map of buildset ids to the submission timestamp. |
256 buildsets = {} | 258 buildsets = {} |
257 for brdict in brdicts: | 259 for brdict in brdicts: |
258 bsid = brdict.get('buildsetid') | 260 bsid = brdict.get('buildsetid') |
259 if bsid is not None: | 261 if bsid is not None: |
260 buildsets[bsid] = asNaiveUTC(brdict.get('submitted_at')) | 262 buildsets[bsid] = asNaiveUTC(brdict.get('submitted_at')) |
261 | 263 |
262 # Find jobs for each buildset and add them to the processed keys cache. | 264 # Find jobs for each buildset and add them to the processed keys cache. |
263 self._processed_keys = {} | 265 self._processed_keys = {} |
264 for bsid in buildsets.keys(): | 266 for bsid in buildsets.keys(): |
267 log.msg('[RPWC] Loading properties of the buildset %d' % bsid) | |
265 bsprops = yield self.master.db.buildsets.getBuildsetProperties(bsid) | 268 bsprops = yield self.master.db.buildsets.getBuildsetProperties(bsid) |
266 if 'try_job_key' in bsprops: | 269 if 'try_job_key' in bsprops: |
267 key = bsprops['try_job_key'][0] | 270 key = bsprops['try_job_key'][0] |
268 self._processed_keys[key] = buildsets[bsid] | 271 self._processed_keys[key] = buildsets[bsid] |
269 | 272 |
270 log.msg('[RPWC] Initialized processed keys cache from master with %d ' | 273 log.msg('[RPWC] Initialized processed keys cache from master with %d ' |
271 'jobs.' % len(self._processed_keys)) | 274 'jobs.' % len(self._processed_keys)) |
272 | 275 |
273 @defer.inlineCallbacks | 276 @defer.inlineCallbacks |
274 def _ProcessResults(self, first_page_json): | 277 def _ProcessResults(self, first_page_json): |
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
316 yield self._try_job_rietveld.SubmitJobs(new_jobs) | 319 yield self._try_job_rietveld.SubmitJobs(new_jobs) |
317 else: | 320 else: |
318 log.msg('[RPWC] No new jobs.') | 321 log.msg('[RPWC] No new jobs.') |
319 | 322 |
320 # Update processed keys cache. | 323 # Update processed keys cache. |
321 new_processed_keys = {} | 324 new_processed_keys = {} |
322 for job in new_jobs: | 325 for job in new_jobs: |
323 parsed_timestamp = datetime.datetime.strptime(job['timestamp'], | 326 parsed_timestamp = datetime.datetime.strptime(job['timestamp'], |
324 '%Y-%m-%d %H:%M:%S.%f') | 327 '%Y-%m-%d %H:%M:%S.%f') |
325 new_processed_keys[job['key']] = parsed_timestamp | 328 new_processed_keys[job['key']] = parsed_timestamp |
329 if len(new_processed_keys): | |
Paweł Hajdan Jr.
2014/06/02 13:45:47
nit: No need to use len in the condition.
Actually
Sergiy Byelozyorov
2014/06/02 13:52:27
Done.
| |
330 log.msg('[RPWC] Added %d new jobs to the cache.' % | |
331 len(new_processed_keys)) | |
332 num_removed = 0 | |
326 for processed_key, timestamp in self._processed_keys.iteritems(): | 333 for processed_key, timestamp in self._processed_keys.iteritems(): |
327 if timestamp > cutoff_timestamp: | 334 if timestamp > cutoff_timestamp: |
328 new_processed_keys[processed_key] = timestamp | 335 new_processed_keys[processed_key] = timestamp |
336 else: | |
337 num_removed += 1 | |
338 if num_removed: | |
Paweł Hajdan Jr.
2014/06/02 13:45:47
Same here, why not make the log statement unconditional?
Sergiy Byelozyorov
2014/06/02 13:52:27
Done.
| |
339 log.msg('[RPWC] Removed %d old jobs from the cache.' % num_removed) | |
329 self._processed_keys = new_processed_keys | 340 self._processed_keys = new_processed_keys |
330 | 341 |
331 | 342 |
332 class TryJobRietveld(TryJobBase): | 343 class TryJobRietveld(TryJobBase): |
333 """A try job source that gets jobs from pending Rietveld patch sets.""" | 344 """A try job source that gets jobs from pending Rietveld patch sets.""" |
334 | 345 |
335 def __init__(self, name, pools, properties=None, last_good_urls=None, | 346 def __init__(self, name, pools, properties=None, last_good_urls=None, |
336 code_review_sites=None, project=None, filter_master=False, | 347 code_review_sites=None, project=None, filter_master=False, |
337 cachepath=None, cache_processed_jobs=False): | 348 cachepath=None, cache_processed_jobs=False): |
338 """Creates a try job source for Rietveld patch sets. | 349 """Creates a try job source for Rietveld patch sets. |
(...skipping 143 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
482 else: | 493 else: |
483 self.processed_keys.add(job['key']) | 494 self.processed_keys.add(job['key']) |
484 log.err('Rietveld not updated: no corresponding service found.') | 495 log.err('Rietveld not updated: no corresponding service found.') |
485 | 496 |
486 # TryJobBase overrides: | 497 # TryJobBase overrides: |
487 def setServiceParent(self, parent): | 498 def setServiceParent(self, parent): |
488 TryJobBase.setServiceParent(self, parent) | 499 TryJobBase.setServiceParent(self, parent) |
489 self._poller.setServiceParent(self) | 500 self._poller.setServiceParent(self) |
490 self._poller.master = self.master | 501 self._poller.master = self.master |
491 self._valid_users.setServiceParent(self) | 502 self._valid_users.setServiceParent(self) |
OLD | NEW |