Chromium Code Reviews

Diff: commit-queue/pending_manager.py

Issue 83173008: CQ: don't treat the commit box as unchecked on network errors (Closed)
Base URL: svn://svn.chromium.org/chrome/trunk/tools/
Patch Set: Created 7 years ago
 # coding=utf8
 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 """Commit queue manager class.

 Security implications:

 The following hypothesis are made:
 - Commit queue:
(...skipping 157 matching lines...)
     names = [x.name for x in pre_patch_verifiers + verifiers]
     assert len(names) == len(set(names))
     for verifier in self.pre_patch_verifiers:
       assert not isinstance(verifier, base.VerifierCheckout)

   def look_for_new_pending_commit(self):
     """Looks for new reviews on self.context.rietveld with c+ set.

     Calls _new_pending_commit() on all new review found.
     """
-    new_issues = self._fetch_pending_issues()
+    try:
+      new_issues = self.context.rietveld.get_pending_issues()
+    except urllib2.URLError as e:
+      if 'timed out' in e.reason:
+        # Handle timeouts gracefully. Log them and pretend there are no
+        # pending issues. We'll retry on the next iteration.
+        logging.warn('request to fetch pending issues timed out: %s' % e)
+        return
+
+      raise

     # If there is an issue in processed_issues that is not in new_issues,
     # discard it.
     for pending in self.queue.iterate():
       # Note that pending.issue is a int but self.queue.pending_commits keys
       # are str due to json support.
       if pending.issue not in new_issues:
         logging.info('Flushing issue %d' % pending.issue)
Paweł Hajdan Jr. 2013/11/22 18:01:10 Note that now when we catch an error we won't get
         self.context.status.send(
             pending,
             { 'verification': 'abort',
               'payload': {
                 'output': 'CQ bit was unchecked on CL. Ignoring.' }})
         pending.get_state = lambda: base.IGNORED
         self._discard_pending(pending, None)

     # Find new issues.
     for issue_id in new_issues:
(...skipping 39 matching lines...)
     self.queue.add(
         PendingCommit(
             issue=issue_id,
             owner=issue_data['owner_email'],
             reviewers=issue_data['reviewers'],
             patchset=issue_data['patchsets'][-1],
             base_url=issue_data['base_url'],
             description=issue_data['description'].replace('\r', ''),
             messages=issue_data['messages']))

-  def _fetch_pending_issues(self):
-    """Returns the list of issue number for reviews on Rietveld with their last
-    patchset with commit+ flag set.
-    """
-    try:
-      return self.context.rietveld.get_pending_issues()
-    except urllib2.URLError as e:
-      if 'timed out' in e.reason:
-        # Handle timeouts gracefully. Log them and pretend there are no
-        # pending issues. We'll retry on the next iteration.
-        logging.warn('request to fetch pending issues timed out: %s' % e)
-        return []
-
-      raise
-
   def process_new_pending_commit(self):
     """Starts verification on newly found pending commits."""
     expected = set(i.name for i in self.all_verifiers)
     for pending in self.queue.iterate():
       try:
         # Take in account the case where a verifier was removed.
         done = set(pending.verifications.keys())
         missing = expected - done
         if (not missing or pending.get_state() != base.PROCESSING):
           continue
(...skipping 290 matching lines...)
     """Loads the commit queue state from a JSON file."""
     self.queue = model.load_from_json_file(filename)

   def save(self, filename):
     """Save the commit queue state in a simple JSON file."""
     model.save_to_json_file(filename, self.queue)

   def close(self):
     """Close all the active pending manager items."""
     self.context.status.close()
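For readers skimming the diff, a minimal standalone sketch of the behavioural change may help. It is not part of the patch: TimingOutRietveld and poll() below are hypothetical stand-ins for context.rietveld and for the error handling that look_for_new_pending_commit() now does inline. Before this change, a timed-out poll returned an empty issue list, so the flush loop treated every queued issue as if its CQ bit had been unchecked; with the change, the poll returns early and the queue is left untouched until the next polling iteration.

# Minimal sketch, not part of the CL; names other than urllib2/logging are
# hypothetical.
import logging
import urllib2


class TimingOutRietveld(object):
  """Fake Rietveld client whose poll always times out."""

  def get_pending_issues(self):
    raise urllib2.URLError('timed out')


def poll(rietveld, queued_issues):
  """Mimics the patched error handling in look_for_new_pending_commit()."""
  try:
    new_issues = rietveld.get_pending_issues()
  except urllib2.URLError as e:
    if 'timed out' in e.reason:
      # As in the patch: log and keep the queue so the next iteration retries.
      logging.warn('request to fetch pending issues timed out: %s' % e)
      return queued_issues
    raise
  # Only reached on a successful fetch: issues Rietveld no longer returns are
  # treated as having their CQ bit unchecked and are flushed.
  return [i for i in queued_issues if i in new_issues]


print poll(TimingOutRietveld(), [83173008])  # -> [83173008]; nothing flushed

Any URLError whose reason does not mention a timeout still propagates out of poll(), matching the bare raise in the patch.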