#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""A git command for managing a local cache of git repositories."""
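# Typical invocations (illustrative only; assumes depot_tools exposes this
# script as the 'git cache' subcommand; paths and URLs below are placeholders):
#   git cache populate --cache-dir /path/to/cache https://host/repo.git
#   git cache exists --cache-dir /path/to/cache https://host/repo.git
#   git cache unlock --all --force --cache-dir /path/to/cache
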
import errno
import logging
import optparse
import os
import subprocess
import sys
import urlparse

import gclient_utils
import subcommand


def UrlToCacheDir(url):
  """Converts a git url to a normalized form for the cache dir path."""
  parsed = urlparse.urlparse(url)
  norm_url = parsed.netloc + parsed.path
  if not norm_url.endswith('.git'):
    norm_url += '.git'
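  # Escape any pre-existing '-' as '--' before turning '/' into '-', so the
  # flattened directory name maps back to the original URL unambiguously,
  # e.g. 'https://chromium.googlesource.com/chromium/src.git' ->
  # 'chromium.googlesource.com-chromium-src.git'.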
  return norm_url.replace('-', '--').replace('/', '-')


def RunGit(cmd, **kwargs):
  """Runs git in a subprocess."""
  kwargs.setdefault('cwd', os.getcwd())
  if kwargs.get('filter_fn'):
    kwargs['filter_fn'] = gclient_utils.GitFilter(kwargs.get('filter_fn'))
    kwargs.setdefault('print_stdout', False)
    env = kwargs.get('env') or kwargs.setdefault('env', os.environ.copy())
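    # Pointing the askpass helpers at 'true' (the no-op utility) makes git
    # receive an empty answer, so unattended runs fail fast on authentication
    # instead of hanging at a credentials prompt.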
    env.setdefault('GIT_ASKPASS', 'true')
    env.setdefault('SSH_ASKPASS', 'true')
  else:
    kwargs.setdefault('print_stdout', True)
  stdout = kwargs.get('stdout', sys.stdout)
  print >>stdout, 'running "git %s" in "%s"' % (' '.join(cmd), kwargs['cwd'])
  gclient_utils.CheckCallAndFilter(['git'] + cmd, **kwargs)


class LockError(Exception):
  pass


class Lockfile(object):
  """Class to represent a cross-platform process-specific lockfile."""
  def __init__(self, path):
    self.path = os.path.abspath(path)
    self.lockfile = self.path + ".lock"
    self.pid = os.getpid()

  def _read_pid(self):
    """Reads the pid stored in the lockfile.

    Note: This method is potentially racy. By the time it returns the lockfile
    may have been unlocked, removed, or stolen by some other process.
    """
    try:
      with open(self.lockfile, 'r') as f:
        pid = int(f.readline().strip())
    except (IOError, ValueError):
      pid = None
    return pid

  def _make_lockfile(self):
    """Safely creates a lockfile containing the current pid."""
    open_flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY)
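    # O_EXCL makes creation atomic: the open fails with EEXIST if another
    # process already created the lockfile.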
    fd = os.open(self.lockfile, open_flags, 0o644)
    f = os.fdopen(fd, 'w')
    print >>f, self.pid
    f.close()

  def _remove_lockfile(self):
    """Deletes the lockfile. Complains (implicitly) if it doesn't exist."""
    os.remove(self.lockfile)

  def lock(self):
    """Acquire the lock.

    Note: This is a NON-BLOCKING FAIL-FAST operation.
    Do. Or do not. There is no try.
    """
    try:
      self._make_lockfile()
    except OSError as e:
      if e.errno == errno.EEXIST:
        raise LockError("%s is already locked" % self.path)
      else:
        raise LockError("Failed to create %s (err %s)" % (self.path, e.errno))

  def unlock(self):
    """Release the lock."""
    if not self.is_locked():
      raise LockError("%s is not locked" % self.path)
    if not self.i_am_locking():
      raise LockError("%s is locked, but not by me" % self.path)
    self._remove_lockfile()

  def break_lock(self):
    """Remove the lock, even if it was created by someone else."""
    try:
      self._remove_lockfile()
      return True
    except OSError as exc:
      if exc.errno == errno.ENOENT:
        return False
      else:
        raise

  def is_locked(self):
    """Test if the file is locked by anyone.

    Note: This method is potentially racy. By the time it returns the lockfile
    may have been unlocked, removed, or stolen by some other process.
    """
    return os.path.exists(self.lockfile)

  def i_am_locking(self):
    """Test if the file is locked by this process."""
    return self.is_locked() and self.pid == self._read_pid()

  def __enter__(self):
    self.lock()
    return self

  def __exit__(self, *_exc):
    self.unlock()
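
# For illustration only (not part of the original file): Lockfile is used as a
# context manager, e.g.
#   with Lockfile(os.path.join(cache_dir, 'foo.git')):
#     ...  # exclusive access to this cache entry
# lock() is fail-fast: it raises LockError at once if 'foo.git.lock' exists.
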
@subcommand.usage('[url of repo to check for caching]')
def CMDexists(parser, args):
  """Checks to see if there already is a cache of the given repo."""
  options, args = parser.parse_args(args)
  if not len(args) == 1:
    parser.error('git cache exists only takes exactly one repo url.')
  url = args[0]
  repo_dir = os.path.join(options.cache_dir, UrlToCacheDir(url))
  flag_file = os.path.join(repo_dir, 'config')
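  # The bare repo's 'config' file doubles as the marker that a cache entry
  # exists and is usable (CMDpopulate re-clones when it is missing).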
  if os.path.isdir(repo_dir) and os.path.isfile(flag_file):
    print repo_dir
    return 0
  return 1


@subcommand.usage('[url of repo to add to or update in cache]')
def CMDpopulate(parser, args):
  """Bare clones or updates a repository in the cache."""
  parser.add_option('--local',
                    help='local repository to initialize from')
  options, args = parser.parse_args(args)
  if not len(args) == 1:
    parser.error('git cache populate only takes exactly one repo url.')
  url = args[0]

  gclient_utils.safe_makedirs(options.cache_dir)
  repo_dir = os.path.join(options.cache_dir, UrlToCacheDir(url))

  # If we've been supplied with a local repository to help out,
  # make sure that it is a full direct clone before relying on it.
  local_objects = local_altfile = ''
  if options.local:
    local_objects = os.path.join(
        os.path.abspath(options.local), '.git', 'objects')
    local_altfile = os.path.join(local_objects, 'info', 'alternates')
  use_reference = (
      os.path.exists(local_objects) and not os.path.exists(local_altfile))
  altfile = os.path.join(repo_dir, 'objects', 'info', 'alternates')

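  # Unless --verbose was passed, filter out noisy '[up to date]' lines from
  # git's clone/fetch output.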
  v = []
  filter_fn = lambda l: '[up to date]' not in l
  if options.verbose:
    v = ['-v', '--progress']
    filter_fn = None

  with Lockfile(repo_dir):
    # Do a full clone if the repo is new or is in a bad state.
    if not os.path.exists(os.path.join(repo_dir, 'config')):
      gclient_utils.rmtree(repo_dir)
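      # Raising core.deltaBaseCacheLimit in the new clone is intended to help
      # git cope with very large repositories.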
      cmd = ['clone'] + v + ['-c', 'core.deltaBaseCacheLimit=2g',
                             '--bare']
      # Review note (Ryan Tseng, 2014/02/19 22:56:15): We talked about this
      # yesterday, but we want to mak

      if use_reference:
        cmd += ['--reference', os.path.abspath(options.local)]

      RunGit(cmd + [url, repo_dir],
             filter_fn=filter_fn, cwd=options.cache_dir, retry=True)

    else:
      if use_reference:
        with open(altfile, 'w') as f:
          f.write(os.path.abspath(local_objects))

      RunGit(['fetch'] + v + ['--multiple', '--all'],
             filter_fn=filter_fn, cwd=repo_dir, retry=True)

      # If the clone has an object dependency on the local repo, break it
      # with repack and remove the linkage.
      if os.path.exists(altfile):
        RunGit(['repack', '-a'], cwd=repo_dir)
        os.remove(altfile)

@subcommand.usage('[url of repo to unlock, or -a|--all]')
def CMDunlock(parser, args):
  """Unlocks one or all repos if their lock files are still around."""
  parser.add_option('--force', '-f', action='store_true',
                    help='actually perform the action')
  parser.add_option('--all', '-a', action='store_true',
                    help='unlock all repository caches')
  options, args = parser.parse_args(args)
  if len(args) > 1 or (len(args) == 0 and not options.all):
    parser.error('git cache unlock takes exactly one repo url, or --all')

  if not options.all:
    url = args[0]
    repo_dirs = [os.path.join(options.cache_dir, UrlToCacheDir(url))]
  else:
    repo_dirs = [os.path.join(options.cache_dir, path)
                 for path in os.listdir(options.cache_dir)
                 if os.path.isdir(os.path.join(options.cache_dir, path))]
  lockfiles = [repo_dir + '.lock' for repo_dir in repo_dirs
               if os.path.exists(repo_dir + '.lock')]

  if not options.force:
    logging.warn('Not performing any actions. '
                 'Pass -f|--force to remove the following lockfiles: '
                 '%s' % lockfiles)
    return

  unlocked = []
  untouched = []
  for repo_dir in repo_dirs:
    lf = Lockfile(repo_dir)
    if lf.break_lock():
      unlocked.append(repo_dir)
    else:
      untouched.append(repo_dir)

  if unlocked:
    logging.info('Broke locks on these caches: %s' % unlocked)
  if untouched:
    logging.debug('Did not touch these caches: %s' % untouched)


class OptionParser(optparse.OptionParser):
  """Wrapper class for OptionParser to handle global options."""
  def __init__(self, *args, **kwargs):
    optparse.OptionParser.__init__(self, *args, prog='git cache', **kwargs)
    self.add_option('-c', '--cache-dir',
                    help='Path to the directory containing the cache.')
    self.add_option('-v', '--verbose', action='count', default=0,
                    help='Increase verbosity (can be passed multiple times).')

  def parse_args(self, args=None, values=None):
    options, args = optparse.OptionParser.parse_args(self, args, values)

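    # The default cache directory comes from the user's global git config
    # (e.g. set once with: git config --global cache.cachepath /path/to/cache);
    # an explicit --cache-dir on the command line takes precedence.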
    try:
      global_cache_dir = subprocess.check_output(
          ['git', 'config', '--global', 'cache.cachepath']).strip()
      if options.cache_dir:
        logging.warn('Overriding globally-configured cache directory.')
      else:
        options.cache_dir = global_cache_dir
    except subprocess.CalledProcessError:
      if not options.cache_dir:
        self.error('No cache directory specified on command line '
                   'or in cache.cachepath.')
    options.cache_dir = os.path.abspath(options.cache_dir)

    levels = [logging.WARNING, logging.INFO, logging.DEBUG]
    logging.basicConfig(level=levels[min(options.verbose, len(levels) - 1)])

    return options, args


def main(argv):
  dispatcher = subcommand.CommandDispatcher(__name__)
  return dispatcher.execute(OptionParser(), argv)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))