OLD | NEW |
| (Empty) |
1 # coding=utf8 | |
2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | |
3 # Use of this source code is governed by a BSD-style license that can be | |
4 # found in the LICENSE file. | |
5 """Commit queue manager class. | |
6 | |
7 Security implications: | |
8 | |
9 The following hypotheses are made: | |
10 - Commit queue: | |
11 - Impersonates the same svn credentials as the patchset owner. | |
12 - Can't impersonate a non committer. | |
13 - SVN will check the committer write access. | |
14 """ | |
15 | |
16 import datetime | |
17 import errno | |
18 import logging | |
19 import os | |
20 import socket | |
21 import ssl | |
22 import time | |
23 import traceback | |
24 import urllib2 | |
25 | |
26 import find_depot_tools # pylint: disable=W0611 | |
27 import checkout | |
28 import git_cl | |
29 import patch | |
30 import subprocess2 | |
31 | |
32 import errors | |
33 import model | |
34 from verification import base | |
35 | |
36 | |
class PendingCommit(base.Verified):
  """A single CL that is queued for verification and eventual commit."""
  # These values matter: if any of them changes, the change must be
  # re-validated and try jobs must be sent again.
  issue = int
  patchset = int
  description = unicode
  files = list
  # Cache only; all of these can be regenerated from Rietveld.
  owner = unicode
  reviewers = list
  base_url = unicode
  messages = list
  relpath = unicode
  sort_key = unicode
  # Set once the patch landed; kept around for try job retries.
  revision = (None, int, unicode)

  def __init__(self, **kwargs):
    super(PendingCommit, self).__init__(**kwargs)
    # Drop the message bodies: verifiers only need 'approval', and keeping
    # 'text' around wastes storage.
    for message in self.messages:
      message.pop('text', None)

  def pending_name(self):
    """Returns the name to use for try jobs.

    This makes it possible to regenerate the try_jobs array if ever needed."""
    return '%d-%d' % (self.issue, self.patchset)

  def prepare_for_patch(self, context_obj):
    """Syncs the checkout and records the revision it ended up at."""
    self.revision = context_obj.checkout.prepare(self.revision)
    if not self.revision:
      # The checkout could not be synced to a consistent revision; discard
      # with a retryable error.
      raise base.DiscardPending(
          self, 'Internal error: failed to checkout. Please try again.')

  def apply_patch(self, context_obj, prepare):
    """Applies the pending patch to the checkout and throws if it fails."""
    try:
      if prepare:
        self.prepare_for_patch(context_obj)
      patches = context_obj.rietveld.get_patch(self.issue, self.patchset)
      if not patches:
        raise base.DiscardPending(
            self, 'No diff was found for this patchset.')
      if self.relpath:
        patches.set_relpath(self.relpath)
      self.files = [p.filename for p in patches]
      if not self.files:
        raise base.DiscardPending(
            self, 'No file was found in this patchset.')
      context_obj.checkout.apply_patch(patches)
    except (checkout.PatchApplicationFailed, patch.UnsupportedPatchFormat) as e:
      raise base.DiscardPending(self, str(e))
    except subprocess2.CalledProcessError as e:
      message = 'Failed to apply the patch.'
      if e.stdout:
        message = '%s\n%s' % (message, e.stdout)
      raise base.DiscardPending(self, message)
    except (ssl.SSLError, urllib2.HTTPError, urllib2.URLError) as e:
      raise base.DiscardPending(
          self,
          ('Failed to request the patch to try. Please note that binary files '
           'are still unsupported at the moment, this is being worked on.\n\n'
           'Thanks for your patience.\n\n%s') % e)
104 | |
105 | |
class PendingQueue(model.PersistentMixIn):
  """The set of pending commits currently being processed.

  Entries are keyed by str(issue) to stay json-compatible. At most one
  pending commit exists per issue, and entries may be processed in any
  order.
  """
  pending_commits = dict

  def add(self, item):
    """Registers (or replaces) the pending commit for its issue."""
    key = str(item.issue)
    self.pending_commits[key] = item

  def get(self, key):
    """Returns the pending commit for an issue; raises KeyError if absent."""
    return self.pending_commits[str(key)]

  def iterate(self):
    """Returns the pending commits ordered by .sort_key to ease testability."""
    return sorted(
        self.pending_commits.itervalues(), key=lambda item: item.sort_key)

  def remove(self, key):
    """Drops the pending commit for an issue; a missing entry is a no-op."""
    self.pending_commits.pop(str(key), None)
127 | |
128 | |
class PendingManager(object):
  """Fetch new issues from rietveld, pass the issues through all of verifiers
  and then commit the patches with checkout.

  The main loop is expected to call, in order: look_for_new_pending_commit(),
  process_new_pending_commit(), update_status() and scan_results().
  """
  # Message posted on the CL when a verifier failed without a reason.
  FAILED_NO_MESSAGE = (
      'Commit queue patch verification failed without an error message.\n'
      'Something went wrong, probably a crash, a hickup or simply\n'
      'the monkeys went out for dinner.\n'
      'Please email commit-bot@chromium.org with the CL url.')
  # Message posted on the CL when the commit queue itself raised.
  INTERNAL_EXCEPTION = (
      'Commit queue had an internal error.\n'
      'Something went really wrong, probably a crash, a hickup or\n'
      'simply the monkeys went out for dinner.\n'
      'Please email commit-bot@chromium.org with the CL url.')
  # Message posted when the CL description changed while in the queue.
  DESCRIPTION_UPDATED = (
      'Commit queue rejected this change because the description was changed\n'
      'between the time the change entered the commit queue and the time it\n'
      'was ready to commit. You can safely check the commit box again.')
  # Prefix of the comment posted when verification starts. Also matched by
  # _set_sort_key() to find when the commit box was last checked, so the
  # wording must stay in sync with that method.
  TRYING_PATCH = 'CQ is trying da patch. Follow status at\n'
  # Maximum number of commits done in a burst.
  MAX_COMMIT_BURST = 4
  # Delay (secs) between commit bursts.
  COMMIT_BURST_DELAY = 8*60

  def __init__(self, context_obj, pre_patch_verifiers, verifiers,
               project_name=''):
    """
    Args:
      context_obj: object holding the rietveld, checkout and status handles.
      pre_patch_verifiers: Verifiers objects that are run before applying the
        patch.
      verifiers: Verifiers object run after applying the patch.
      project_name: used only to make error messages clearer.

    Raises:
      ValueError: if no verifier at all was provided.
    """
    if not(len(pre_patch_verifiers) or len(verifiers)):
      raise ValueError('at least one verifier should be defined (in project %s)'
                       % project_name)

    self.context = context_obj
    self.pre_patch_verifiers = pre_patch_verifiers or []
    self.verifiers = verifiers or []
    self.all_verifiers = pre_patch_verifiers + verifiers
    self.queue = PendingQueue()
    # Keep the timestamps of the last few commits so that we can control the
    # pace (burstiness) of commits.
    self.recent_commit_timestamps = []
    # Assert names are unique.
    names = [x.name for x in pre_patch_verifiers + verifiers]
    assert len(names) == len(set(names))
    # Pre-patch verifiers run before a checkout exists, so none of them may
    # require one.
    for verifier in self.pre_patch_verifiers:
      assert not isinstance(verifier, base.VerifierCheckout)

  def _get_user(self):
    """Get the CQ's rietveld user name.

    We need it to look for messages posted by the CQ, to figure out
    when the commit box was checked. TODO(sergeyberezin): when
    Rietveld can tell this info natively, this function will be
    obsolete.
    """
    # Rietveld object or its email may be missing in some unittests.
    if self.context.rietveld and self.context.rietveld.email:
      return self.context.rietveld.email
    else:
      return 'commit-bot@chromium.org'

  def _set_sort_key(self, issue_data):
    """Compute the sorting key. Must add .sort_key to issue_data.

    Previously we used issue id, but a better key is the last
    timestamp when Commit box was checked.
    """
    # Later matches overwrite earlier ones, so the key ends up being the
    # date of the most recent CQ attempt message.
    for m in issue_data['messages']:
      if (m['sender'] == self._get_user() and
          'CQ is trying da patch.' in m['text']):
        issue_data['sort_key'] = m['date']
    if 'sort_key' not in issue_data:
      # Set a default value: the current time.
      issue_data['sort_key'] = str(datetime.datetime.utcnow())

  def look_for_new_pending_commit(self):
    """Looks for new reviews on self.context.rietveld with c+ set.

    Flushes queued issues whose commit box was unchecked, then enqueues a
    PendingCommit for every newly found issue. Transient AppEngine/network
    errors are logged and skipped so a single flaky fetch does not kill the
    whole scan.
    """
    try:
      new_issues = self.context.rietveld.get_pending_issues()
    except urllib2.URLError as e:
      if 'timed out' in e.reason:
        # Handle timeouts gracefully. Log them and pretend there are no
        # pending issues. We'll retry on the next iteration.
        logging.warn('request to fetch pending issues timed out: %s' % e)
        return

      raise

    # If there is an issue in processed_issues that is not in new_issues,
    # discard it.
    for pending in self.queue.iterate():
      # Note that pending.issue is a int but self.queue.pending_commits keys
      # are str due to json support.
      if pending.issue not in new_issues:
        logging.info('Flushing issue %d' % pending.issue)
        self.context.status.send(
            pending,
            { 'verification': 'abort',
              'payload': {
                'output': 'CQ bit was unchecked on CL. Ignoring.' }})
        # Force the state to IGNORED so _discard_pending() does not clear the
        # commit flag that the user just unchecked themselves.
        pending.get_state = lambda: base.IGNORED
        self._discard_pending(pending, None)

    # Find new issues.
    for issue_id in new_issues:
      if str(issue_id) not in self.queue.pending_commits:
        try:
          issue_data = self.context.rietveld.get_issue_properties(
              issue_id, True)
          self._set_sort_key(issue_data)
        except urllib2.HTTPError as e:
          if e.code in (500, 502, 503):
            # Temporary AppEngine hiccup. Just log it and continue.
            logging.warning('%s while accessing %s. Ignoring error.' % (
                str(e), e.url))
            continue
          raise
        except urllib2.URLError as e:
          # Temporary AppEngine hiccup. Just log it and continue.
          if 'timed out' in e.reason:
            logging.warning(
                '%s while accessing rietveld issue %s. Ignoring error.' % (
                    str(e), str(issue_id)))
            continue
          raise
        except socket.error as e:
          # Temporary AppEngine hiccup. Just log it and continue.
          if e.errno == errno.ECONNRESET:
            logging.warning(
                '%s while accessing rietveld issue %s. Ignoring error.' % (
                    str(e), str(issue_id)))
            continue
          raise
        except IOError as e:
          # Temporary AppEngine hiccup. Just log it and continue.
          if e.errno == 'socket error':
            logging.warning(
                '%s while accessing rietveld issue %s. Ignoring error.' % (
                    str(e), str(issue_id)))
            continue
          raise
        # This assumption needs to hold.
        assert issue_id == issue_data['issue']
        if issue_data['patchsets'] and issue_data['commit']:
          logging.info('Found new issue %d' % issue_id)
          self.queue.add(
              PendingCommit(
                  issue=issue_id,
                  owner=issue_data['owner_email'],
                  reviewers=issue_data['reviewers'],
                  patchset=issue_data['patchsets'][-1],
                  base_url=issue_data['base_url'],
                  description=issue_data['description'].replace('\r', ''),
                  messages=issue_data['messages'],
                  sort_key=issue_data['sort_key']))

  def process_new_pending_commit(self):
    """Starts verification on newly found pending commits."""
    expected = set(i.name for i in self.all_verifiers)
    for pending in self.queue.iterate():
      try:
        # Take in account the case where a verifier was removed.
        done = set(pending.verifications.keys())
        missing = expected - done
        # Only kick off verification for items that still have verifiers to
        # run and are not already failed/succeeded/ignored.
        if (not missing or pending.get_state() != base.PROCESSING):
          continue
        logging.info(
            'Processing issue %s @ %s (%s, %d)' % (
              pending.issue, pending.sort_key, missing, pending.get_state()))
        self._verify_pending(pending)
      except base.DiscardPending as e:
        self._discard_pending(e.pending, e.status)

  def update_status(self):
    """Updates the status for each pending commit verifier."""
    # Snapshot the 'why not' messages so we only notify the dashboard about
    # the ones that actually changed during this pass.
    why_nots = dict((p.issue, p.why_not()) for p in self.queue.iterate())

    for verifier in self.all_verifiers:
      try:
        verifier.update_status(self.queue.iterate())
      except base.DiscardPending as e:
        # It's not efficient since it takes a full loop for each pending
        # commit to discard.
        self._discard_pending(e.pending, e.status)

    for pending in self.queue.iterate():
      why_not = pending.why_not()
      if why_nots[pending.issue] != why_not:
        self.context.status.send(
            pending,
            {'verification': 'why not',
             'payload': {'message': why_not}})


  def scan_results(self):
    """Scans pending commits that can be committed or discarded."""
    for pending in self.queue.iterate():
      state = pending.get_state()
      if state == base.FAILED:
        self._discard_pending(
            pending, pending.error_message() or self.FAILED_NO_MESSAGE)
      elif state == base.SUCCEEDED:
        if self._throttle(pending):
          continue
        try:
          # Runs checks. It's be nice to run the test before the postpone,
          # especially if the tree is closed for a long moment but at the same
          # time it would keep fetching the rietveld status constantly.
          self._last_minute_checks(pending)
          # Clear the 'why not' message: the patch is about to be committed.
          self.context.status.send(
              pending,
              {'verification': 'why not',
               'payload': {'message': ''}})

          self._commit_patch(pending)
        except base.DiscardPending as e:
          self._discard_pending(e.pending, e.status)
        except Exception as e:
          # Tell the user first, then re-raise so the crash is loud.
          self._discard_pending(pending, self.INTERNAL_EXCEPTION)
          raise
      else:
        # When state is IGNORED, we need to keep this issue so it's not fetched
        # another time but we can't discard it since we don't want to remove the
        # commit bit for another project hosted on the same code review
        # instance.
        assert state in (base.PROCESSING, base.IGNORED)

  def _verify_pending(self, pending):
    """Initiates all the verifiers on a pending change.

    Runs pre-patch verifiers first, then (when checkout verifiers exist)
    syncs, announces the attempt on the CL, applies the patch and runs the
    remaining verifiers from inside the checkout directory.
    """
    # Do not apply the patch if not necessary. It will be applied at commit
    # time anyway so if the patch doesn't apply, it'll be catch later.
    if not self._pending_run_verifiers(pending, self.pre_patch_verifiers):
      return

    if self.verifiers:
      pending.prepare_for_patch(self.context)

    # This CL is real business, alert the user that we're going to try his
    # patch. Note that this is done *after* syncing but *before* applying the
    # patch.
    self.context.status.send(
        pending,
        { 'verification': 'initial',
          'payload': {'revision': pending.revision}})
    self.context.rietveld.add_comment(
        pending.issue,
        self.TRYING_PATCH + '%s/%s/%d/%d\n' % (
          self.context.status.url, pending.owner,
          pending.issue, pending.patchset))

    if self.verifiers:
      pending.apply_patch(self.context, False)
      # Some verifiers assume the cwd is the checkout; restore it afterwards.
      previous_cwd = os.getcwd()
      try:
        os.chdir(self.context.checkout.project_path)
        self._pending_run_verifiers(pending, self.verifiers)
      finally:
        os.chdir(previous_cwd)

    # Send the initial 'why not' message.
    if pending.why_not():
      self.context.status.send(
          pending,
          {'verification': 'why not',
           'payload': {'message': pending.why_not()}})

  @classmethod
  def _pending_run_verifiers(cls, pending, verifiers):
    """Runs verifiers on a pending change.

    Returns True if all Verifiers were run.

    Raises:
      base.DiscardPending: as soon as one verifier marks the change FAILED,
        so the error message is not lost.
    """
    for verifier in verifiers:
      assert verifier.name not in pending.verifications
      verifier.verify(pending)
      assert verifier.name in pending.verifications
      if pending.get_state() == base.IGNORED:
        assert pending.verifications[verifier.name].get_state() == base.IGNORED
        # Remove all the other verifiers since we need to keep it in the
        # 'datastore' to not retry this issue constantly.
        for key in pending.verifications.keys():
          if key != verifier.name:
            del pending.verifications[key]
        return False
      if pending.get_state() == base.FAILED:
        # Throw if it didn't pass, so the error message is not lost.
        raise base.DiscardPending(
            pending, pending.error_message() or cls.FAILED_NO_MESSAGE)
    return True

  def _last_minute_checks(self, pending):
    """Does last minute checks on Rietveld before committing a pending patch.

    Raises base.DiscardPending when the CL is no longer in a committable
    state (box unchecked, issue closed, description or patchset changed, or
    an un-LGTM'ed drive-by reviewer appeared).
    """
    pending_data = self.context.rietveld.get_issue_properties(
        pending.issue, True)
    if pending_data['commit'] != True:
      raise base.DiscardPending(pending, None)
    if pending_data['closed'] != False:
      raise base.DiscardPending(pending, None)
    if pending.description != pending_data['description'].replace('\r', ''):
      raise base.DiscardPending(pending, self.DESCRIPTION_UPDATED)
    # Ignore the CQ's own account when diffing the reviewer lists.
    commit_user = set([self.context.rietveld.email])
    expected = set(pending.reviewers) - commit_user
    actual = set(pending_data['reviewers']) - commit_user
    # Try to be nice, if there was a drive-by review and the new reviewer left
    # a lgtm, don't abort.
    def is_approver(r):
      return any(
          m.get('approval') for m in pending_data['messages']
          if m['sender'] == r)
    drivers_by = [r for r in (actual - expected) if not is_approver(r)]
    if drivers_by:
      # That annoying driver-by.
      raise base.DiscardPending(
          pending,
          'List of reviewers changed. %s did a drive-by without LGTM\'ing!' %
          ','.join(drivers_by))
    if pending.patchset != pending_data['patchsets'][-1]:
      raise base.DiscardPending(pending,
          'Commit queue failed due to new patchset.')

  def _discard_pending(self, pending, message):
    """Discards a pending commit. Attach an optional message to the review.

    Clears the commit box (unless the change is IGNORED), posts the message
    on the CL and notifies the dashboard; Rietveld failures are logged and
    reported but never prevent the removal from the queue.
    """
    logging.debug('_discard_pending(%s, %s)', pending.issue, message)
    try:
      try:
        if pending.get_state() != base.IGNORED:
          self.context.rietveld.set_flag(
              pending.issue, pending.patchset, 'commit', False)
      except urllib2.HTTPError as e:
        logging.error(
            'Failed to set the flag to False for %s with message %s' % (
              pending.pending_name(), message))
        traceback.print_stack()
        logging.error(str(e))
        errors.send_stack(e)
      if message:
        try:
          self.context.rietveld.add_comment(pending.issue, message)
        except urllib2.HTTPError as e:
          logging.error(
              'Failed to add comment for %s with message %s' % (
                pending.pending_name(), message))
          traceback.print_stack()
          errors.send_stack(e)
      self.context.status.send(
          pending,
          { 'verification': 'abort',
            'payload': {
              'output': message }})
    finally:
      # Most importantly, remove the PendingCommit from the queue.
      self.queue.remove(pending.issue)

  def _commit_patch(self, pending):
    """Commits the pending patch to the repository.

    Do the checkout and applies the patch.
    """
    try:
      try:
        # Make sure to apply on HEAD.
        pending.revision = None
        pending.apply_patch(self.context, True)
        # Commit it.
        commit_desc = git_cl.ChangeDescription(pending.description)
        if (self.context.server_hooks_missing and
            self.context.rietveld.email != pending.owner):
          # The server cannot stamp authorship, so record it in the
          # description instead.
          commit_desc.update_reviewers(pending.reviewers)
          commit_desc.append_footer('Author: ' + pending.owner)
        commit_desc.append_footer('Review URL: %s/%s' % (
            self.context.rietveld.url,
            pending.issue))
        pending.revision = self.context.checkout.commit(
            commit_desc.description, pending.owner)
        if not pending.revision:
          raise base.DiscardPending(pending, 'Failed to commit patch.')

        # Note that the commit succeeded for commit throttling.
        self.recent_commit_timestamps.append(time.time())
        self.recent_commit_timestamps = (
            self.recent_commit_timestamps[-(self.MAX_COMMIT_BURST + 1):])

        viewvc_url = self.context.checkout.get_settings('VIEW_VC')
        issue_desc = git_cl.ChangeDescription(pending.description)
        msg = 'Committed: %s' % pending.revision
        if viewvc_url:
          # NOTE(review): the revision is appended with no separator — this
          # assumes the VIEW_VC setting ends with one (e.g. '...revision=');
          # confirm against the projects' settings.
          viewvc_url = '%s%s' % (viewvc_url.rstrip('/'), pending.revision)
          msg = 'Committed: %s' % viewvc_url
        issue_desc.append_footer(msg)

        # Update the CQ dashboard.
        self.context.status.send(
            pending,
            { 'verification': 'commit',
              'payload': {
                'revision': pending.revision,
                'output': msg,
                'url': viewvc_url}})

        # Closes the issue on Rietveld.
        # TODO(csharp): Retry if exceptions are encountered.
        try:
          self.context.rietveld.close_issue(pending.issue)
          self.context.rietveld.update_description(
              pending.issue, issue_desc.description)
          self.context.rietveld.add_comment(
              pending.issue, 'Change committed as %s' % pending.revision)
        except (urllib2.HTTPError, urllib2.URLError) as e:
          # Ignore AppEngine flakiness.
          logging.warning('Unable to fully close the issue')
        # And finally remove the issue. If the close_issue() call above failed,
        # it is possible the dashboard will be confused but it is harmless.
        try:
          self.queue.get(pending.issue)
        except KeyError:
          logging.error('Internal inconsistency for %d', pending.issue)
        self.queue.remove(pending.issue)
      except (
          checkout.PatchApplicationFailed, patch.UnsupportedPatchFormat) as e:
        raise base.DiscardPending(pending, str(e))
      except subprocess2.CalledProcessError as e:
        stdout = getattr(e, 'stdout', None)
        out = 'Failed to apply the patch.'
        if stdout:
          out += '\n%s' % stdout
        raise base.DiscardPending(pending, out)
    except base.DiscardPending as e:
      self._discard_pending(e.pending, e.status)

  def _throttle(self, pending):
    """Returns True if a commit should be delayed."""
    if pending.postpone():
      self.context.status.send(
          pending,
          {'verification': 'why not',
           'payload': {
             'message': pending.why_not()}})
      return True
    if not self.recent_commit_timestamps:
      return False
    cutoff = time.time() - self.COMMIT_BURST_DELAY
    # Count the commits done within the burst window.
    bursted = len([True for i in self.recent_commit_timestamps if i > cutoff])

    if bursted >= self.MAX_COMMIT_BURST:
      self.context.status.send(
          pending,
          {'verification': 'why not',
           'payload': {
             'message': ('Patch is ready to commit, but the CQ is delaying '
                         'it because CQ has already submitted %d patches in '
                         'the last %d seconds' %
                         (self.MAX_COMMIT_BURST, self.COMMIT_BURST_DELAY))}})
      return True

    return False

  def load(self, filename):
    """Loads the commit queue state from a JSON file."""
    self.queue = model.load_from_json_file(filename)

  def save(self, filename):
    """Save the commit queue state in a simple JSON file."""
    model.save_to_json_file(filename, self.queue)

  def close(self):
    """Close all the active pending manager items."""
    self.context.status.close()
OLD | NEW |