OLD | NEW |
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 # Copyright 2014 The Chromium Authors. All rights reserved. | 2 # Copyright 2014 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 """A git command for managing a local cache of git repositories.""" | 6 """A git command for managing a local cache of git repositories.""" |
7 | 7 |
8 from __future__ import print_function | 8 from __future__ import print_function |
9 import errno | 9 import errno |
10 import logging | 10 import logging |
(...skipping 26 matching lines...) |
37 | 37 |
38 class LockError(Exception): | 38 class LockError(Exception): |
39 pass | 39 pass |
40 | 40 |
41 class RefsHeadsFailedToFetch(Exception): | 41 class RefsHeadsFailedToFetch(Exception): |
42 pass | 42 pass |
43 | 43 |
44 class Lockfile(object): | 44 class Lockfile(object): |
45 """Class to represent a cross-platform process-specific lockfile.""" | 45 """Class to represent a cross-platform process-specific lockfile.""" |
46 | 46 |
47 def __init__(self, path): | 47 def __init__(self, path, timeout=0): |
48 self.path = os.path.abspath(path) | 48 self.path = os.path.abspath(path) |
| 49 self.timeout = timeout |
49 self.lockfile = self.path + ".lock" | 50 self.lockfile = self.path + ".lock" |
50 self.pid = os.getpid() | 51 self.pid = os.getpid() |
51 | 52 |
52 def _read_pid(self): | 53 def _read_pid(self): |
53 """Read the pid stored in the lockfile. | 54 """Read the pid stored in the lockfile. |
54 | 55 |
55 Note: This method is potentially racy. By the time it returns the lockfile | 56 Note: This method is potentially racy. By the time it returns the lockfile |
56 may have been unlocked, removed, or stolen by some other process. | 57 may have been unlocked, removed, or stolen by some other process. |
57 """ | 58 """ |
58 try: | 59 try: |
(...skipping 25 matching lines...) |
84 if exitcode == 0: | 85 if exitcode == 0: |
85 return | 86 return |
86 time.sleep(3) | 87 time.sleep(3) |
87 raise LockError('Failed to remove lock: %s' % lockfile) | 88 raise LockError('Failed to remove lock: %s' % lockfile) |
88 else: | 89 else: |
89 os.remove(self.lockfile) | 90 os.remove(self.lockfile) |
90 | 91 |
91 def lock(self): | 92 def lock(self): |
92 """Acquire the lock. | 93 """Acquire the lock. |
93 | 94 |
94 Note: This is a NON-BLOCKING FAIL-FAST operation. | 95 This will block with a deadline of self.timeout seconds. |
95 Do. Or do not. There is no try. | 96 If self.timeout is zero, this is a NON-BLOCKING FAIL-FAST operation. |
96 """ | 97 """ |
97 try: | 98 elapsed = 0 |
98 self._make_lockfile() | 99 while True: |
99 except OSError as e: | 100 try: |
100 if e.errno == errno.EEXIST: | 101 self._make_lockfile() |
101 raise LockError("%s is already locked" % self.path) | 102 return |
102 else: | 103 except OSError as e: |
103 raise LockError("Failed to create %s (err %s)" % (self.path, e.errno)) | 104 if elapsed < self.timeout: |
| 105 sleep_time = min(3, self.timeout - elapsed) |
| 106 logging.info('Could not create git cache lockfile; ' |
| 107 'will retry after sleep(%d).', sleep_time); |
| 108 elapsed += sleep_time |
| 109 time.sleep(sleep_time) |
| 110 continue |
| 111 if e.errno == errno.EEXIST: |
| 112 raise LockError("%s is already locked" % self.path) |
| 113 else: |
| 114 raise LockError("Failed to create %s (err %s)" % (self.path, e.errno)) |
104 | 115 |
105 def unlock(self): | 116 def unlock(self): |
106 """Release the lock.""" | 117 """Release the lock.""" |
107 try: | 118 try: |
108 if not self.is_locked(): | 119 if not self.is_locked(): |
109 raise LockError("%s is not locked" % self.path) | 120 raise LockError("%s is not locked" % self.path) |
110 if not self.i_am_locking(): | 121 if not self.i_am_locking(): |
111 raise LockError("%s is locked, but not by me" % self.path) | 122 raise LockError("%s is locked, but not by me" % self.path) |
112 self._remove_lockfile() | 123 self._remove_lockfile() |
113 except WinErr: | 124 except WinErr: |
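A standalone sketch of the retry-with-deadline pattern that the new lock() above follows; the O_EXCL-based file creation here is a simplified stand-in for _make_lockfile(), not the actual implementation:

import errno
import logging
import os
import time


class LockError(Exception):
  pass


def acquire_lock_with_timeout(lockfile_path, timeout=0):
  """Create lockfile_path exclusively, retrying for up to `timeout` seconds.

  With timeout=0 this is the old non-blocking fail-fast behaviour; otherwise
  it sleeps in slices of at most 3 seconds until the deadline has passed.
  """
  elapsed = 0
  while True:
    try:
      # O_EXCL makes creation fail with EEXIST if the lock is already held.
      fd = os.open(lockfile_path, os.O_WRONLY | os.O_CREAT | os.O_EXCL)
      with os.fdopen(fd, 'w') as f:
        f.write(str(os.getpid()))
      return
    except OSError as e:
      if elapsed < timeout:
        sleep_time = min(3, timeout - elapsed)
        logging.info('Lockfile busy; retrying after sleep(%d).', sleep_time)
        elapsed += sleep_time
        time.sleep(sleep_time)
        continue
      if e.errno == errno.EEXIST:
        raise LockError('%s is already locked' % lockfile_path)
      raise LockError('Failed to create %s (err %s)' %
                      (lockfile_path, e.errno))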
(...skipping 280 matching lines...) |
394 for spec in fetch_specs: | 405 for spec in fetch_specs: |
395 try: | 406 try: |
396 self.print('Fetching %s' % spec) | 407 self.print('Fetching %s' % spec) |
397 self.RunGit(fetch_cmd + [spec], cwd=rundir, retry=True) | 408 self.RunGit(fetch_cmd + [spec], cwd=rundir, retry=True) |
398 except subprocess.CalledProcessError: | 409 except subprocess.CalledProcessError: |
399 if spec == '+refs/heads/*:refs/heads/*': | 410 if spec == '+refs/heads/*:refs/heads/*': |
400 raise RefsHeadsFailedToFetch | 411 raise RefsHeadsFailedToFetch |
401 logging.warn('Fetch of %s failed' % spec) | 412 logging.warn('Fetch of %s failed' % spec) |
402 | 413 |
403 def populate(self, depth=None, shallow=False, bootstrap=False, | 414 def populate(self, depth=None, shallow=False, bootstrap=False, |
404 verbose=False, ignore_lock=False): | 415 verbose=False, ignore_lock=False, lock_timeout=0): |
405 assert self.GetCachePath() | 416 assert self.GetCachePath() |
406 if shallow and not depth: | 417 if shallow and not depth: |
407 depth = 10000 | 418 depth = 10000 |
408 gclient_utils.safe_makedirs(self.GetCachePath()) | 419 gclient_utils.safe_makedirs(self.GetCachePath()) |
409 | 420 |
410 lockfile = Lockfile(self.mirror_path) | 421 lockfile = Lockfile(self.mirror_path, lock_timeout) |
411 if not ignore_lock: | 422 if not ignore_lock: |
412 lockfile.lock() | 423 lockfile.lock() |
413 | 424 |
414 tempdir = None | 425 tempdir = None |
415 try: | 426 try: |
416 tempdir = self._ensure_bootstrapped(depth, bootstrap) | 427 tempdir = self._ensure_bootstrapped(depth, bootstrap) |
417 rundir = tempdir or self.mirror_path | 428 rundir = tempdir or self.mirror_path |
418 self._fetch(rundir, verbose, depth) | 429 self._fetch(rundir, verbose, depth) |
419 except RefsHeadsFailedToFetch: | 430 except RefsHeadsFailedToFetch: |
420 # This is a major failure, we need to clean and force a bootstrap. | 431 # This is a major failure, we need to clean and force a bootstrap. |
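For context, populate() now threads its new lock_timeout argument straight into the Lockfile constructor, so the default of 0 preserves the old fail-fast behaviour while a positive value lets concurrent populates of the same mirror wait. A hypothetical, cut-down sketch of that flow (the try/finally around unlock is assumed for illustration and is not part of this diff):

from git_cache import Lockfile  # assumes depot_tools' git_cache module


class MirrorSketch(object):
  """Hypothetical stand-in for Mirror, showing only the locking plumbing."""

  def __init__(self, mirror_path):
    self.mirror_path = mirror_path

  def populate(self, ignore_lock=False, lock_timeout=0):
    # lock_timeout=0 fails fast if another process holds the cache lock;
    # lock_timeout=N blocks for up to N seconds before raising LockError.
    lockfile = Lockfile(self.mirror_path, lock_timeout)
    if not ignore_lock:
      lockfile.lock()
    try:
      pass  # fetch/bootstrap work elided
    finally:
      if not ignore_lock:
        lockfile.unlock()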
(...skipping 154 matching lines...) |
575 if not len(args) == 1: | 586 if not len(args) == 1: |
576 parser.error('git cache populate only takes exactly one repo url.') | 587 parser.error('git cache populate only takes exactly one repo url.') |
577 url = args[0] | 588 url = args[0] |
578 | 589 |
579 mirror = Mirror(url, refs=options.ref) | 590 mirror = Mirror(url, refs=options.ref) |
580 kwargs = { | 591 kwargs = { |
581 'verbose': options.verbose, | 592 'verbose': options.verbose, |
582 'shallow': options.shallow, | 593 'shallow': options.shallow, |
583 'bootstrap': not options.no_bootstrap, | 594 'bootstrap': not options.no_bootstrap, |
584 'ignore_lock': options.ignore_locks, | 595 'ignore_lock': options.ignore_locks, |
| 596 'lock_timeout': options.timeout, |
585 } | 597 } |
586 if options.depth: | 598 if options.depth: |
587 kwargs['depth'] = options.depth | 599 kwargs['depth'] = options.depth |
588 mirror.populate(**kwargs) | 600 mirror.populate(**kwargs) |
589 | 601 |
590 | 602 |
591 @subcommand.usage('Fetch new commits into cache and current checkout') | 603 @subcommand.usage('Fetch new commits into cache and current checkout') |
592 def CMDfetch(parser, args): | 604 def CMDfetch(parser, args): |
593 """Update mirror, and fetch in cwd.""" | 605 """Update mirror, and fetch in cwd.""" |
594 parser.add_option('--all', action='store_true', help='Fetch all remotes') | 606 parser.add_option('--all', action='store_true', help='Fetch all remotes') |
(...skipping 23 matching lines...) |
618 remotes = [upstream] | 630 remotes = [upstream] |
619 if not remotes: | 631 if not remotes: |
620 remotes = ['origin'] | 632 remotes = ['origin'] |
621 | 633 |
622 cachepath = Mirror.GetCachePath() | 634 cachepath = Mirror.GetCachePath() |
623 git_dir = os.path.abspath(subprocess.check_output( | 635 git_dir = os.path.abspath(subprocess.check_output( |
624 [Mirror.git_exe, 'rev-parse', '--git-dir'])) | 636 [Mirror.git_exe, 'rev-parse', '--git-dir'])) |
625 git_dir = os.path.abspath(git_dir) | 637 git_dir = os.path.abspath(git_dir) |
626 if git_dir.startswith(cachepath): | 638 if git_dir.startswith(cachepath): |
627 mirror = Mirror.FromPath(git_dir) | 639 mirror = Mirror.FromPath(git_dir) |
628 mirror.populate(bootstrap=not options.no_bootstrap) | 640 mirror.populate( |
| 641 bootstrap=not options.no_bootstrap, lock_timeout=options.timeout) |
629 return 0 | 642 return 0 |
630 for remote in remotes: | 643 for remote in remotes: |
631 remote_url = subprocess.check_output( | 644 remote_url = subprocess.check_output( |
632 [Mirror.git_exe, 'config', 'remote.%s.url' % remote]).strip() | 645 [Mirror.git_exe, 'config', 'remote.%s.url' % remote]).strip() |
633 if remote_url.startswith(cachepath): | 646 if remote_url.startswith(cachepath): |
634 mirror = Mirror.FromPath(remote_url) | 647 mirror = Mirror.FromPath(remote_url) |
635 mirror.print = lambda *args: None | 648 mirror.print = lambda *args: None |
636 print('Updating git cache...') | 649 print('Updating git cache...') |
637 mirror.populate(bootstrap=not options.no_bootstrap) | 650 mirror.populate( |
| 651 bootstrap=not options.no_bootstrap, lock_timeout=options.timeout) |
638 subprocess.check_call([Mirror.git_exe, 'fetch', remote]) | 652 subprocess.check_call([Mirror.git_exe, 'fetch', remote]) |
639 return 0 | 653 return 0 |
640 | 654 |
641 | 655 |
642 @subcommand.usage('[url of repo to unlock, or -a|--all]') | 656 @subcommand.usage('[url of repo to unlock, or -a|--all]') |
643 def CMDunlock(parser, args): | 657 def CMDunlock(parser, args): |
644 """Unlock one or all repos if their lock files are still around.""" | 658 """Unlock one or all repos if their lock files are still around.""" |
645 parser.add_option('--force', '-f', action='store_true', | 659 parser.add_option('--force', '-f', action='store_true', |
646 help='Actually perform the action') | 660 help='Actually perform the action') |
647 parser.add_option('--all', '-a', action='store_true', | 661 parser.add_option('--all', '-a', action='store_true', |
(...skipping 28 matching lines...) |
676 """Wrapper class for OptionParser to handle global options.""" | 690 """Wrapper class for OptionParser to handle global options.""" |
677 | 691 |
678 def __init__(self, *args, **kwargs): | 692 def __init__(self, *args, **kwargs): |
679 optparse.OptionParser.__init__(self, *args, prog='git cache', **kwargs) | 693 optparse.OptionParser.__init__(self, *args, prog='git cache', **kwargs) |
680 self.add_option('-c', '--cache-dir', | 694 self.add_option('-c', '--cache-dir', |
681 help='Path to the directory containing the cache') | 695 help='Path to the directory containing the cache') |
682 self.add_option('-v', '--verbose', action='count', default=1, | 696 self.add_option('-v', '--verbose', action='count', default=1, |
683 help='Increase verbosity (can be passed multiple times)') | 697 help='Increase verbosity (can be passed multiple times)') |
684 self.add_option('-q', '--quiet', action='store_true', | 698 self.add_option('-q', '--quiet', action='store_true', |
685 help='Suppress all extraneous output') | 699 help='Suppress all extraneous output') |
| 700 self.add_option('--timeout', type='int', default=0, |
| 701 help='Timeout for acquiring cache lock, in seconds') |
686 | 702 |
687 def parse_args(self, args=None, values=None): | 703 def parse_args(self, args=None, values=None): |
688 options, args = optparse.OptionParser.parse_args(self, args, values) | 704 options, args = optparse.OptionParser.parse_args(self, args, values) |
689 if options.quiet: | 705 if options.quiet: |
690 options.verbose = 0 | 706 options.verbose = 0 |
691 | 707 |
692 levels = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG] | 708 levels = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG] |
693 logging.basicConfig(level=levels[min(options.verbose, len(levels) - 1)]) | 709 logging.basicConfig(level=levels[min(options.verbose, len(levels) - 1)]) |
694 | 710 |
695 try: | 711 try: |
(...skipping 14 matching lines...) |
710 dispatcher = subcommand.CommandDispatcher(__name__) | 726 dispatcher = subcommand.CommandDispatcher(__name__) |
711 return dispatcher.execute(OptionParser(), argv) | 727 return dispatcher.execute(OptionParser(), argv) |
712 | 728 |
713 | 729 |
714 if __name__ == '__main__': | 730 if __name__ == '__main__': |
715 try: | 731 try: |
716 sys.exit(main(sys.argv[1:])) | 732 sys.exit(main(sys.argv[1:])) |
717 except KeyboardInterrupt: | 733 except KeyboardInterrupt: |
718 sys.stderr.write('interrupted\n') | 734 sys.stderr.write('interrupted\n') |
719 sys.exit(1) | 735 sys.exit(1) |
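Taken together, the change lets callers opt into waiting for the cache lock via the new global --timeout option. A minimal usage sketch, assuming the depot_tools git-cache wrapper is on PATH and using a placeholder URL:

import subprocess

# Placeholder URL for illustration.
url = 'https://chromium.googlesource.com/chromium/src.git'

# Wait up to 300 seconds for another process's cache lock before giving up,
# instead of failing immediately (the old behaviour, equivalent to --timeout 0).
subprocess.check_call(['git', 'cache', 'populate', '--timeout', '300', url])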