OLD | NEW |
1 # coding=utf8 | 1 # coding=utf8 |
2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 """Commit queue manager class. | 5 """Commit queue manager class. |
6 | 6 |
7 Security implications: | 7 Security implications: |
8 | 8 |
9 The following hypothesis are made: | 9 The following hypothesis are made: |
10 - Commit queue: | 10 - Commit queue: |
11 - Impersonate the same svn credentials that the patchset owner. | 11 - Impersonate the same svn credentials that the patchset owner. |
12 - Can't impersonate a non committer. | 12 - Can't impersonate a non committer. |
13 - SVN will check the committer write access. | 13 - SVN will check the committer write access. |
14 """ | 14 """ |
15 | 15 |
| 16 import datetime |
16 import errno | 17 import errno |
17 import logging | 18 import logging |
18 import os | 19 import os |
19 import socket | 20 import socket |
20 import ssl | 21 import ssl |
21 import time | 22 import time |
22 import traceback | 23 import traceback |
23 import urllib2 | 24 import urllib2 |
24 | 25 |
25 import find_depot_tools # pylint: disable=W0611 | 26 import find_depot_tools # pylint: disable=W0611 |
(...skipping 14 matching lines...) Expand all Loading... |
  # Persisted schema: each class attribute names a field and its expected type
  # (model-style declaration, not a value assignment).
  issue = int
  patchset = int
  description = unicode
  files = list
  # Only a cache, these values can be regenerated.
  owner = unicode
  reviewers = list
  base_url = unicode
  messages = list
  relpath = unicode
  # Queue ordering key; a timestamp string (see PendingManager._set_sort_key).
  sort_key = unicode
  # Only used after a patch was committed. Keeping here for try job retries.
  revision = (None, int, unicode)
52 | 54 |
53 def __init__(self, **kwargs): | 55 def __init__(self, **kwargs): |
54 super(PendingCommit, self).__init__(**kwargs) | 56 super(PendingCommit, self).__init__(**kwargs) |
55 for message in self.messages: | 57 for message in self.messages: |
56 # Save storage, no verifier really need 'text', just 'approval'. | 58 # Save storage, no verifier really need 'text', just 'approval'. |
57 if 'text' in message: | 59 if 'text' in message: |
58 del message['text'] | 60 del message['text'] |
59 | 61 |
(...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
110 """ | 112 """ |
111 pending_commits = dict | 113 pending_commits = dict |
112 | 114 |
113 def add(self, item): | 115 def add(self, item): |
114 self.pending_commits[str(item.issue)] = item | 116 self.pending_commits[str(item.issue)] = item |
115 | 117 |
116 def get(self, key): | 118 def get(self, key): |
117 return self.pending_commits[str(key)] | 119 return self.pending_commits[str(key)] |
118 | 120 |
119 def iterate(self): | 121 def iterate(self): |
120 """Returns the items sorted by issue id to ease testability.""" | 122 """Returns the items sorted by .sort_key to ease testability.""" |
121 return sorted(self.pending_commits.itervalues(), key=lambda x: x.issue) | 123 return sorted(self.pending_commits.itervalues(), key=lambda x: x.sort_key) |
122 | 124 |
123 def remove(self, key): | 125 def remove(self, key): |
124 self.pending_commits.pop(str(key), None) | 126 self.pending_commits.pop(str(key), None) |
125 | 127 |
126 | 128 |
127 class PendingManager(object): | 129 class PendingManager(object): |
128 """Fetch new issues from rietveld, pass the issues through all of verifiers | 130 """Fetch new issues from rietveld, pass the issues through all of verifiers |
129 and then commit the patches with checkout. | 131 and then commit the patches with checkout. |
130 """ | 132 """ |
131 FAILED_NO_MESSAGE = ( | 133 FAILED_NO_MESSAGE = ( |
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
167 self.queue = PendingQueue() | 169 self.queue = PendingQueue() |
168 # Keep the timestamps of the last few commits so that we can control the | 170 # Keep the timestamps of the last few commits so that we can control the |
169 # pace (burstiness) of commits. | 171 # pace (burstiness) of commits. |
170 self.recent_commit_timestamps = [] | 172 self.recent_commit_timestamps = [] |
171 # Assert names are unique. | 173 # Assert names are unique. |
172 names = [x.name for x in pre_patch_verifiers + verifiers] | 174 names = [x.name for x in pre_patch_verifiers + verifiers] |
173 assert len(names) == len(set(names)) | 175 assert len(names) == len(set(names)) |
174 for verifier in self.pre_patch_verifiers: | 176 for verifier in self.pre_patch_verifiers: |
175 assert not isinstance(verifier, base.VerifierCheckout) | 177 assert not isinstance(verifier, base.VerifierCheckout) |
176 | 178 |
| 179 def _get_user(self): |
| 180 """Get the CQ's rietveld user name. |
| 181 |
| 182 We need it to look for messages posted by the CQ, to figure out |
| 183 when the commit box was checked. TODO(sergeyberezin): when |
| 184 Rietveld can tell this info natively, this function will be |
| 185 obsolete. |
| 186 """ |
| 187 # Rietveld object or its email may be missing in some unittests. |
| 188 if self.context.rietveld and self.context.rietveld.email: |
| 189 return self.context.rietveld.email |
| 190 else: |
| 191 return 'commit-bot@chromium.org' |
| 192 |
| 193 def _set_sort_key(self, issue_data): |
| 194 """Compute the sorting key. Must add .sort_key to issue_data. |
| 195 |
| 196 Previously we used issue id, but a better key is the last |
| 197 timestamp when Commit box was checked. |
| 198 """ |
| 199 for m in issue_data['messages']: |
| 200 if (m['sender'] == self._get_user() and |
| 201 'CQ is trying da patch.' in m['text']): |
| 202 issue_data['sort_key'] = m['date'] |
| 203 if 'sort_key' not in issue_data: |
| 204 # Set a default value: the current time. |
| 205 issue_data['sort_key'] = str(datetime.datetime.utcnow()) |
| 206 |
177 def look_for_new_pending_commit(self): | 207 def look_for_new_pending_commit(self): |
178 """Looks for new reviews on self.context.rietveld with c+ set. | 208 """Looks for new reviews on self.context.rietveld with c+ set. |
179 | 209 |
180 Calls _new_pending_commit() on all new review found. | 210 Calls _new_pending_commit() on all new review found. |
181 """ | 211 """ |
182 try: | 212 try: |
183 new_issues = self.context.rietveld.get_pending_issues() | 213 new_issues = self.context.rietveld.get_pending_issues() |
184 except urllib2.URLError as e: | 214 except urllib2.URLError as e: |
185 if 'timed out' in e.reason: | 215 if 'timed out' in e.reason: |
186 # Handle timeouts gracefully. Log them and pretend there are no | 216 # Handle timeouts gracefully. Log them and pretend there are no |
(...skipping 12 matching lines...) Expand all Loading... |
199 logging.info('Flushing issue %d' % pending.issue) | 229 logging.info('Flushing issue %d' % pending.issue) |
200 pending.get_state = lambda: base.IGNORED | 230 pending.get_state = lambda: base.IGNORED |
201 self._discard_pending(pending, 'CQ bit was unchecked on CL. Ignoring.') | 231 self._discard_pending(pending, 'CQ bit was unchecked on CL. Ignoring.') |
202 | 232 |
203 # Find new issues. | 233 # Find new issues. |
204 for issue_id in new_issues: | 234 for issue_id in new_issues: |
205 if str(issue_id) not in self.queue.pending_commits: | 235 if str(issue_id) not in self.queue.pending_commits: |
206 try: | 236 try: |
207 issue_data = self.context.rietveld.get_issue_properties( | 237 issue_data = self.context.rietveld.get_issue_properties( |
208 issue_id, True) | 238 issue_id, True) |
| 239 self._set_sort_key(issue_data) |
209 except urllib2.HTTPError as e: | 240 except urllib2.HTTPError as e: |
210 if e.code in (500, 502, 503): | 241 if e.code in (500, 502, 503): |
211 # Temporary AppEngine hiccup. Just log it and continue. | 242 # Temporary AppEngine hiccup. Just log it and continue. |
212 logging.warning('%s while accessing %s. Ignoring error.' % ( | 243 logging.warning('%s while accessing %s. Ignoring error.' % ( |
213 str(e), e.url)) | 244 str(e), e.url)) |
214 continue | 245 continue |
215 raise | 246 raise |
216 except urllib2.URLError as e: | 247 except urllib2.URLError as e: |
217 # Temporary AppEngine hiccup. Just log it and continue. | 248 # Temporary AppEngine hiccup. Just log it and continue. |
218 if 'timed out' in e.reason: | 249 if 'timed out' in e.reason: |
(...skipping 23 matching lines...) Expand all Loading... |
242 if issue_data['patchsets'] and issue_data['commit']: | 273 if issue_data['patchsets'] and issue_data['commit']: |
243 logging.info('Found new issue %d' % issue_id) | 274 logging.info('Found new issue %d' % issue_id) |
244 self.queue.add( | 275 self.queue.add( |
245 PendingCommit( | 276 PendingCommit( |
246 issue=issue_id, | 277 issue=issue_id, |
247 owner=issue_data['owner_email'], | 278 owner=issue_data['owner_email'], |
248 reviewers=issue_data['reviewers'], | 279 reviewers=issue_data['reviewers'], |
249 patchset=issue_data['patchsets'][-1], | 280 patchset=issue_data['patchsets'][-1], |
250 base_url=issue_data['base_url'], | 281 base_url=issue_data['base_url'], |
251 description=issue_data['description'].replace('\r', ''), | 282 description=issue_data['description'].replace('\r', ''), |
252 messages=issue_data['messages'])) | 283 messages=issue_data['messages'], |
| 284 sort_key=issue_data['sort_key'])) |
253 | 285 |
254 def process_new_pending_commit(self): | 286 def process_new_pending_commit(self): |
255 """Starts verification on newly found pending commits.""" | 287 """Starts verification on newly found pending commits.""" |
256 expected = set(i.name for i in self.all_verifiers) | 288 expected = set(i.name for i in self.all_verifiers) |
257 for pending in self.queue.iterate(): | 289 for pending in self.queue.iterate(): |
258 try: | 290 try: |
259 # Take in account the case where a verifier was removed. | 291 # Take in account the case where a verifier was removed. |
260 done = set(pending.verifications.keys()) | 292 done = set(pending.verifications.keys()) |
261 missing = expected - done | 293 missing = expected - done |
262 if (not missing or pending.get_state() != base.PROCESSING): | 294 if (not missing or pending.get_state() != base.PROCESSING): |
263 continue | 295 continue |
264 logging.info( | 296 logging.info( |
265 'Processing issue %s (%s, %d)' % ( | 297 'Processing issue %s @ %s (%s, %d)' % ( |
266 pending.issue, missing, pending.get_state())) | 298 pending.issue, pending.sort_key, missing, pending.get_state())) |
267 self._verify_pending(pending) | 299 self._verify_pending(pending) |
268 except base.DiscardPending as e: | 300 except base.DiscardPending as e: |
269 message = e.status | 301 message = e.status |
270 if not message: | 302 if not message: |
271 message = 'process_new_pending_commit: ' + self.FAILED_NO_MESSAGE | 303 message = 'process_new_pending_commit: ' + self.FAILED_NO_MESSAGE |
272 self._discard_pending(e.pending, message) | 304 self._discard_pending(e.pending, message) |
273 | 305 |
274 def update_status(self): | 306 def update_status(self): |
275 """Updates the status for each pending commit verifier.""" | 307 """Updates the status for each pending commit verifier.""" |
276 why_nots = dict((p.issue, p.why_not()) for p in self.queue.iterate()) | 308 why_nots = dict((p.issue, p.why_not()) for p in self.queue.iterate()) |
(...skipping 293 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
570 """Loads the commit queue state from a JSON file.""" | 602 """Loads the commit queue state from a JSON file.""" |
571 self.queue = model.load_from_json_file(filename) | 603 self.queue = model.load_from_json_file(filename) |
572 | 604 |
573 def save(self, filename): | 605 def save(self, filename): |
574 """Save the commit queue state in a simple JSON file.""" | 606 """Save the commit queue state in a simple JSON file.""" |
575 model.save_to_json_file(filename, self.queue) | 607 model.save_to_json_file(filename, self.queue) |
576 | 608 |
577 def close(self): | 609 def close(self): |
578 """Close all the active pending manager items.""" | 610 """Close all the active pending manager items.""" |
579 self.context.status.close() | 611 self.context.status.close() |
OLD | NEW |