Chromium Code Reviews

Unified Diff: pending_manager.py

Issue 145293006: Sort issues by their age in the commit queue. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/tools/commit-queue
Patch Set: Addressing the comments (created 6 years, 10 months ago)
 # coding=utf8
 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 """Commit queue manager class.

 Security implications:

 The following hypothesis are made:
 - Commit queue:
   - Impersonate the same svn credentials that the patchset owner.
   - Can't impersonate a non committer.
   - SVN will check the committer write access.
 """

+import datetime
 import errno
 import logging
 import os
 import socket
 import ssl
 import time
 import traceback
 import urllib2

 import find_depot_tools # pylint: disable=W0611
(...skipping 14 matching lines...)
   issue = int
   patchset = int
   description = unicode
   files = list
   # Only a cache, these values can be regenerated.
   owner = unicode
   reviewers = list
   base_url = unicode
   messages = list
   relpath = unicode
+  sort_key = unicode
   # Only used after a patch was committed. Keeping here for try job retries.
   revision = (None, int, unicode)

   def __init__(self, **kwargs):
     super(PendingCommit, self).__init__(**kwargs)
     for message in self.messages:
       # Save storage, no verifier really need 'text', just 'approval'.
       if 'text' in message:
         del message['text']

(...skipping 50 matching lines...)
   """
   pending_commits = dict

   def add(self, item):
     self.pending_commits[str(item.issue)] = item

   def get(self, key):
     return self.pending_commits[str(key)]

   def iterate(self):
-    """Returns the items sorted by issue id to ease testability."""
-    return sorted(self.pending_commits.itervalues(), key=lambda x: x.issue)
+    """Returns the items sorted by .sort_key to ease testability."""
+    return sorted(self.pending_commits.itervalues(), key=lambda x: x.sort_key)

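The new key is compared as a plain string, so iterate() only stays chronological if the timestamps are zero-padded, fixed-width strings of the form str(datetime.datetime.utcnow()) produces (and, presumably, the same format Rietveld uses for message dates). A minimal sketch of that assumption, with made-up values:

import datetime

# Zero-padded 'YYYY-MM-DD HH:MM:SS[.ffffff]' strings compare the same way the
# underlying datetimes do, so sorted(..., key=lambda x: x.sort_key) yields the
# oldest CQ attempt first.
older = str(datetime.datetime(2014, 1, 9, 2, 38, 24))    # '2014-01-09 02:38:24'
newer = str(datetime.datetime(2014, 1, 29, 17, 41, 43))  # '2014-01-29 17:41:43'
assert older < newer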
   def remove(self, key):
     self.pending_commits.pop(str(key), None)


 class PendingManager(object):
   """Fetch new issues from rietveld, pass the issues through all of verifiers
   and then commit the patches with checkout.
   """
   FAILED_NO_MESSAGE = (
(...skipping 24 matching lines...)
         patch.
       verifiers: Verifiers object run after applying the patch.
     """
     if not(len(pre_patch_verifiers) or len(verifiers)):
       raise ValueError('at least one verifier should be defined (in project %s)'
                        % project_name)

     self.context = context_obj
     self.pre_patch_verifiers = pre_patch_verifiers or []
     self.verifiers = verifiers or []
+    self.user = self.context.user
     self.all_verifiers = pre_patch_verifiers + verifiers
     self.queue = PendingQueue()
     # Keep the timestamps of the last few commits so that we can control the
     # pace (burstiness) of commits.
     self.recent_commit_timestamps = []
     # Assert names are unique.
     names = [x.name for x in pre_patch_verifiers + verifiers]
     assert len(names) == len(set(names))
     for verifier in self.pre_patch_verifiers:
       assert not isinstance(verifier, base.VerifierCheckout)

+  def _set_sort_key(self, issue_data):
+    """Compute the sorting key. Must add .sort_key to issue_data.
+
+    Previously we used issue id, but a better key is the last
+    timestamp when Commit box was checked.
+    """
+    for m in issue_data['messages']:
+      if (m['sender'] == self.user
+          and 'CQ is trying da patch.' in m['text']):
Paweł Hajdan Jr. 2014/01/29 02:38:24 nit: Put the "and" at the end of the previous line
Sergey Berezin 2014/01/29 17:41:43 Done.
+        issue_data['sort_key'] = m['date']
+    if 'sort_key' not in issue_data:
+      # Set a default value: the current time.
+      issue_data['sort_key'] = str(datetime.datetime.utcnow())
+
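For illustration, here is what _set_sort_key ends up doing on hypothetical Rietveld data; the senders, dates, and dict shape below are invented for the example and only the fields the loop reads are shown:

issue_data = {
    'messages': [
        # Older CQ attempt, sent by the commit queue account (self.user).
        {'sender': 'commit-bot@chromium.org',
         'date': '2014-01-28 10:00:00.000000',
         'text': 'CQ is trying da patch.'},
        # Unrelated reviewer message: wrong sender, no trigger text.
        {'sender': 'reviewer@chromium.org',
         'date': '2014-01-28 11:00:00.000000',
         'text': 'lgtm'},
        # Latest CQ attempt: the loop keeps overwriting sort_key, so this wins.
        {'sender': 'commit-bot@chromium.org',
         'date': '2014-01-29 09:30:00.000000',
         'text': 'CQ is trying da patch.'},
    ],
}
# After _set_sort_key(issue_data):
#   issue_data['sort_key'] == '2014-01-29 09:30:00.000000'
# With no matching message at all, it falls back to str(datetime.datetime.utcnow()).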
   def look_for_new_pending_commit(self):
     """Looks for new reviews on self.context.rietveld with c+ set.

     Calls _new_pending_commit() on all new review found.
     """
     try:
       new_issues = self.context.rietveld.get_pending_issues()
     except urllib2.URLError as e:
       if 'timed out' in e.reason:
         # Handle timeouts gracefully. Log them and pretend there are no
(...skipping 12 matching lines...)
         logging.info('Flushing issue %d' % pending.issue)
         pending.get_state = lambda: base.IGNORED
         self._discard_pending(pending, 'CQ bit was unchecked on CL. Ignoring.')

     # Find new issues.
     for issue_id in new_issues:
       if str(issue_id) not in self.queue.pending_commits:
         try:
           issue_data = self.context.rietveld.get_issue_properties(
               issue_id, True)
+          self._set_sort_key(issue_data)
         except urllib2.HTTPError as e:
           if e.code in (500, 502, 503):
             # Temporary AppEngine hiccup. Just log it and continue.
             logging.warning('%s while accessing %s. Ignoring error.' % (
                 str(e), e.url))
             continue
           raise
         except urllib2.URLError as e:
           # Temporary AppEngine hiccup. Just log it and continue.
           if 'timed out' in e.reason:
(...skipping 23 matching lines...)
         if issue_data['patchsets'] and issue_data['commit']:
           logging.info('Found new issue %d' % issue_id)
           self.queue.add(
               PendingCommit(
                   issue=issue_id,
                   owner=issue_data['owner_email'],
                   reviewers=issue_data['reviewers'],
                   patchset=issue_data['patchsets'][-1],
                   base_url=issue_data['base_url'],
                   description=issue_data['description'].replace('\r', ''),
-                  messages=issue_data['messages']))
+                  messages=issue_data['messages'],
+                  sort_key=issue_data['sort_key']))

   def process_new_pending_commit(self):
     """Starts verification on newly found pending commits."""
     expected = set(i.name for i in self.all_verifiers)
     for pending in self.queue.iterate():
       try:
         # Take in account the case where a verifier was removed.
         done = set(pending.verifications.keys())
         missing = expected - done
         if (not missing or pending.get_state() != base.PROCESSING):
           continue
         logging.info(
-            'Processing issue %s (%s, %d)' % (
-                pending.issue, missing, pending.get_state()))
+            'Processing issue %s @ %s (%s, %d)' % (
+                pending.issue, pending.sort_key, missing, pending.get_state()))
         self._verify_pending(pending)
       except base.DiscardPending as e:
         message = e.status
         if not message:
           message = 'process_new_pending_commit: ' + self.FAILED_NO_MESSAGE
         self._discard_pending(e.pending, message)

   def update_status(self):
     """Updates the status for each pending commit verifier."""
     why_nots = dict((p.issue, p.why_not()) for p in self.queue.iterate())
(...skipping 293 matching lines...)
     """Loads the commit queue state from a JSON file."""
     self.queue = model.load_from_json_file(filename)

   def save(self, filename):
     """Save the commit queue state in a simple JSON file."""
     model.save_to_json_file(filename, self.queue)

   def close(self):
     """Close all the active pending manager items."""
     self.context.status.close()
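Taken together, the queue now drains in order of the latest CQ attempt rather than by lowest issue id. A toy sketch of the resulting iterate() ordering; PendingCommit itself needs the full base.Verified machinery, so a namedtuple stands in for it here:

import collections

FakePending = collections.namedtuple('FakePending', 'issue sort_key')

pending_commits = {
    '1002': FakePending(1002, '2014-01-28 10:00:00.000000'),  # older CQ attempt
    '1001': FakePending(1001, '2014-01-29 09:30:00.000000'),  # newer CQ attempt
}
# Same expression as PendingQueue.iterate(), modulo itervalues() vs. values().
ordered = sorted(pending_commits.values(), key=lambda x: x.sort_key)
assert [p.issue for p in ordered] == [1002, 1001]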