Chromium Code Reviews

| Index: git_cache.py |
| diff --git a/git_cache.py b/git_cache.py |
| index 52e42c59274781f580491428b67afa4d6bf66280..a7c81178e83683356d4f27719c2150655600435a 100755 |
| --- a/git_cache.py |
| +++ b/git_cache.py |
| @@ -349,9 +349,40 @@ class Mirror(object): |
| def unlock(self): |
| lf = Lockfile(self.mirror_path) |
| config_lock = os.path.join(self.mirror_path, 'config.lock') |
| + did_unlock = False |
| if os.path.exists(config_lock): |
| os.remove(config_lock) |
| - lf.break_lock() |
| + did_unlock = True |
| + if lf.break_lock(): |
| + did_unlock = True |
| + return did_unlock |
| + |
| + @classmethod |
| + def UnlockAll(cls): |
| + cachepath = cls.GetCachePath() |
| + dirlist = os.listdir(cachepath) |
| + repo_dirs = set([os.path.join(cachepath, path) for path in dirlist |
| + if os.path.isdir(os.path.join(cachepath, path))]) |
| + for dirent in dirlist: |
| + if (not dirent.endswith('.lock') or |
|
Ryan Tseng
2014/05/12 18:45:18
opinion: I think the positive version looks more readable.
szager1
2014/05/13 20:17:21
Done.
|
| + not os.path.isfile(os.path.join(cachepath, dirent))): |
| + continue |
| + repo_dirs.add(os.path.join(cachepath, dirent[:-5])) |
| + |
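Ryan's readability note above refers to the negated '.lock' filter in this loop. A positive-form equivalent might look like the sketch below (only a sketch covering the '.lock'-file half of the repo_dirs collection; the helper name is hypothetical and the patch set that actually landed may differ):

    import os

    def lock_repo_dirs(cachepath):
      # Positive form of the filter: every plain file named <name>.lock under
      # the cache path contributes <cachepath>/<name> (strip the 5-character
      # '.lock' suffix) to the set of repo directories to unlock.
      repo_dirs = set()
      for dirent in os.listdir(cachepath):
        if (dirent.endswith('.lock') and
            os.path.isfile(os.path.join(cachepath, dirent))):
          repo_dirs.add(os.path.join(cachepath, dirent[:-5]))
      return repo_dirs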
| + unlocked_repos = [] |
| + for repo_dir in repo_dirs: |
| + lf = Lockfile(repo_dir) |
| + config_lock = os.path.join(repo_dir, 'config.lock') |
|
Ryan Tseng
2014/05/12 18:45:18
There's repeated code here. Let's consolidate them.
szager1
2014/05/13 20:17:21
Done.
|
| + unlocked = False |
| + if os.path.exists(config_lock): |
| + os.remove(config_lock) |
| + unlocked = True |
| + if lf.break_lock(): |
| + unlocked = True |
| + if unlocked: |
| + unlocked_repos.append(repo_dir) |
| + |
| + return unlocked_repos |
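The repeated code Ryan points out is the config.lock removal plus break_lock() sequence, which appears both in unlock() and in the UnlockAll() loop above. One way to consolidate it (a sketch only; _unlock_repo is a hypothetical helper, not necessarily what landed) is a shared function that assumes the Lockfile class already defined in git_cache.py:

    import os

    def _unlock_repo(repo_dir):
      # Shared unlock logic: drop a stale config.lock if present and break the
      # Lockfile guarding the mirror; report whether anything was unlocked.
      lf = Lockfile(repo_dir)
      config_lock = os.path.join(repo_dir, 'config.lock')
      did_unlock = False
      if os.path.exists(config_lock):
        os.remove(config_lock)
        did_unlock = True
      if lf.break_lock():
        did_unlock = True
      return did_unlock

With that helper, Mirror.unlock() reduces to 'return _unlock_repo(self.mirror_path)' and the loop above to 'if _unlock_repo(repo_dir): unlocked_repos.append(repo_dir)'.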
| @subcommand.usage('[url of repo to check for caching]') |
| def CMDexists(parser, args): |
| @@ -427,54 +458,26 @@ def CMDunlock(parser, args): |
| if len(args) > 1 or (len(args) == 0 and not options.all): |
| parser.error('git cache unlock takes exactly one repo url, or --all') |
| - repo_dirs = [] |
| - if not options.all: |
| - url = args[0] |
| - repo_dirs.append(Mirror(url).mirror_path) |
| - else: |
| + if not options.force: |
| cachepath = Mirror.GetCachePath() |
| - repo_dirs = [os.path.join(cachepath, path) |
| + lockfiles = [os.path.join(cachepath, path) |
| for path in os.listdir(cachepath) |
| - if os.path.isdir(os.path.join(cachepath, path))] |
| - repo_dirs.extend([os.path.join(cachepath, |
| - lockfile.replace('.lock', '')) |
| - for lockfile in os.listdir(cachepath) |
| - if os.path.isfile(os.path.join(cachepath, |
| - lockfile)) |
| - and lockfile.endswith('.lock') |
| - and os.path.join(cachepath, lockfile) |
| - not in repo_dirs]) |
| - lockfiles = [repo_dir + '.lock' for repo_dir in repo_dirs |
| - if os.path.exists(repo_dir + '.lock')] |
| - |
| - if not options.force: |
| + if path.endswith('.lock') and os.path.isfile(path)] |
| parser.error('git cache unlock requires -f|--force to do anything. ' |
| 'Refusing to unlock the following repo caches: ' |
| ', '.join(lockfiles)) |
| unlocked_repos = [] |
| - untouched_repos = [] |
| - for repo_dir in repo_dirs: |
| - lf = Lockfile(repo_dir) |
| - config_lock = os.path.join(repo_dir, 'config.lock') |
| - unlocked = False |
| - if os.path.exists(config_lock): |
| - os.remove(config_lock) |
| - unlocked = True |
| - if lf.break_lock(): |
| - unlocked = True |
| - |
| - if unlocked: |
| - unlocked_repos.append(repo_dir) |
| - else: |
| - untouched_repos.append(repo_dir) |
| + if options.all: |
| + unlocked_repos.extend(Mirror.UnlockAll()) |
| + else: |
| + m = Mirror(args[0]) |
| + if m.unlock(): |
| + unlocked_repos.append(m.mirror_path) |
| if unlocked_repos: |
| logging.info('Broke locks on these caches:\n %s' % '\n '.join( |
| unlocked_repos)) |
| - if untouched_repos: |
| - logging.debug('Did not touch these caches:\n %s' % '\n '.join( |
| - untouched_repos)) |
| class OptionParser(optparse.OptionParser): |
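As the rewritten CMDunlock reads, 'git cache unlock --force --all' now breaks the locks on every repo under the cache path via Mirror.UnlockAll(), 'git cache unlock --force <url>' unlocks a single mirror via Mirror(url).unlock(), and without -f|--force the command refuses and lists the lockfiles it found.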