#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""A git command for managing a local cache of git repositories."""

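# Typical invocations (illustrative examples only; the paths are made up):
#   git cache populate --cache-dir /b/git-cache https://host/repo.git
#   git cache exists --cache-dir /b/git-cache https://host/repo.git
#   git cache unlock --force --all --cache-dir /b/git-cache
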
import errno
import logging
import optparse
import os
import tempfile
import subprocess
import sys
import urlparse

import gclient_utils
import subcommand

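# depot_tools ships a git.bat wrapper on Windows; everywhere else plain 'git'
# is expected to be on PATH.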
GIT_EXECUTABLE = 'git.bat' if sys.platform.startswith('win') else 'git'


def NormalizeUrl(url):
  """Convert a git url to a normalized form."""
  parsed = urlparse.urlparse(url)
  norm_url = 'https://' + parsed.netloc + parsed.path
  if not norm_url.endswith('.git'):
    norm_url += '.git'
  return norm_url
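# For example (illustrative URL), 'git://chromium.googlesource.com/chromium/src'
# normalizes to 'https://chromium.googlesource.com/chromium/src.git': the
# scheme is forced to https and a '.git' suffix is appended if missing.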


def UrlToCacheDir(url):
  """Convert a git url to a normalized form for the cache dir path."""
  parsed = urlparse.urlparse(url)
  norm_url = parsed.netloc + parsed.path
  if norm_url.endswith('.git'):
    norm_url = norm_url[:-len('.git')]
  return norm_url.replace('-', '--').replace('/', '-').lower()
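# For example (illustrative URL), 'https://chromium.googlesource.com/chromium/src.git'
# maps to the cache dir name 'chromium.googlesource.com-chromium-src'; any '-'
# already in the URL is doubled first so the '/' -> '-' substitution cannot
# collide with a literal hyphen.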


def RunGit(cmd, **kwargs):
  """Run git in a subprocess."""
  kwargs.setdefault('cwd', os.getcwd())
  if kwargs.get('filter_fn'):
    kwargs['filter_fn'] = gclient_utils.GitFilter(kwargs.get('filter_fn'))
    kwargs.setdefault('print_stdout', False)
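    # 'true' exits successfully without printing anything, so pointing
    # GIT_ASKPASS/SSH_ASKPASS at it means unattended fetches fail fast instead
    # of waiting on a credentials prompt.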
    env = kwargs.get('env') or kwargs.setdefault('env', os.environ.copy())
    env.setdefault('GIT_ASKPASS', 'true')
    env.setdefault('SSH_ASKPASS', 'true')
  else:
    kwargs.setdefault('print_stdout', True)
  stdout = kwargs.get('stdout', sys.stdout)
  print >> stdout, 'running "git %s" in "%s"' % (' '.join(cmd), kwargs['cwd'])
  gclient_utils.CheckCallAndFilter([GIT_EXECUTABLE] + cmd, **kwargs)


class LockError(Exception):
  pass


class Lockfile(object):
  """Class to represent a cross-platform process-specific lockfile."""

  def __init__(self, path):
    self.path = os.path.abspath(path)
    self.lockfile = self.path + ".lock"
    self.pid = os.getpid()

  def _read_pid(self):
    """Read the pid stored in the lockfile.

    Note: This method is potentially racy. By the time it returns the lockfile
    may have been unlocked, removed, or stolen by some other process.
    """
    try:
      with open(self.lockfile, 'r') as f:
        pid = int(f.readline().strip())
    except (IOError, ValueError):
      pid = None
    return pid

  def _make_lockfile(self):
    """Safely creates a lockfile containing the current pid."""
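    # O_CREAT|O_EXCL makes the create atomic: if another process already holds
    # the lock, os.open() raises OSError with errno EEXIST, which lock() below
    # turns into a LockError.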
    open_flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY)
    fd = os.open(self.lockfile, open_flags, 0o644)
    f = os.fdopen(fd, 'w')
    print >> f, self.pid
    f.close()

  def _remove_lockfile(self):
    """Delete the lockfile. Complains (implicitly) if it doesn't exist."""
    os.remove(self.lockfile)

  def lock(self):
    """Acquire the lock.

    Note: This is a NON-BLOCKING FAIL-FAST operation.
    Do. Or do not. There is no try.
    """
    try:
      self._make_lockfile()
    except OSError as e:
      if e.errno == errno.EEXIST:
        raise LockError("%s is already locked" % self.path)
      else:
        raise LockError("Failed to create %s (err %s)" % (self.path, e.errno))

  def unlock(self):
    """Release the lock."""
    if not self.is_locked():
      raise LockError("%s is not locked" % self.path)
    if not self.i_am_locking():
      raise LockError("%s is locked, but not by me" % self.path)
    self._remove_lockfile()

  def break_lock(self):
    """Remove the lock, even if it was created by someone else."""
    try:
      self._remove_lockfile()
      return True
    except OSError as exc:
      if exc.errno == errno.ENOENT:
        return False
      else:
        raise

  def is_locked(self):
    """Test if the file is locked by anyone.

    Note: This method is potentially racy. By the time it returns the lockfile
    may have been unlocked, removed, or stolen by some other process.
    """
    return os.path.exists(self.lockfile)

  def i_am_locking(self):
    """Test if the file is locked by this process."""
    return self.is_locked() and self.pid == self._read_pid()

  def __enter__(self):
    self.lock()
    return self

  def __exit__(self, *_exc):
    self.unlock()


@subcommand.usage('[url of repo to check for caching]')
def CMDexists(parser, args):
  """Check to see if there already is a cache of the given repo."""
  options, args = parser.parse_args(args)
  if not len(args) == 1:
    parser.error('git cache exists only takes exactly one repo url.')
  url = args[0]
  repo_dir = os.path.join(options.cache_dir, UrlToCacheDir(url))
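  # The bare repo's 'config' file doubles as the "cache is populated" marker:
  # CMDpopulate below only renames a fully-fetched temp dir into place, so a
  # missing config means the cache is absent or was left in a bad state.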
  flag_file = os.path.join(repo_dir, 'config')
  if os.path.isdir(repo_dir) and os.path.isfile(flag_file):
    print repo_dir
    return 0
  return 1


@subcommand.usage('[url of repo to add to or update in cache]')
def CMDpopulate(parser, args):
  """Bare clone or update a repository in the cache."""
iannucci
2014/02/24 19:43:03
I actually think this reveals too much implementation detail.
agable
2014/02/24 22:11:41
Done.
  parser.add_option('--depth', type='int',
                    help='Only cache DEPTH commits of history')
  parser.add_option('--shallow', '-s', action='store_true',
                    help='Only cache 10000 commits of history')
  parser.add_option('--ref', action='append',
                    help='Specify additional refs to be fetched')
  options, args = parser.parse_args(args)
  if options.shallow and not options.depth:
    options.depth = 10000
  if not len(args) == 1:
    parser.error('git cache populate only takes exactly one repo url.')
  url = args[0]

  gclient_utils.safe_makedirs(options.cache_dir)
  repo_dir = os.path.join(options.cache_dir, UrlToCacheDir(url))

  v = []
  filter_fn = lambda l: '[up to date]' not in l
  if options.verbose:
    v = ['-v', '--progress']
    filter_fn = None

  d = []
  if options.depth:
    d = ['--depth', '%d' % options.depth]

  def _config(directory):
    RunGit(['config', 'core.deltaBaseCacheLimit', '2g'],
           cwd=directory)
    RunGit(['config', 'remote.origin.url', NormalizeUrl(url)],
           cwd=directory)
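    # Fetch remote branches directly into this cache's own refs/heads (rather
    # than refs/remotes/*), so the cache dir can serve as an ordinary
    # fetch/clone source.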
    RunGit(['config', '--replace-all', 'remote.origin.fetch',
            '+refs/heads/*:refs/heads/*'],
           cwd=directory)
    for ref in options.ref or []:
      ref = ref.rstrip('/')
      refspec = '+refs/%s/*:refs/%s/*' % (ref, ref)
      RunGit(['config', '--add', 'remote.origin.fetch', refspec],
             cwd=directory)

  with Lockfile(repo_dir):
    # Set up from scratch if the repo is new or is in a bad state.
    if not os.path.exists(os.path.join(repo_dir, 'config')):
      gclient_utils.rmtree(repo_dir)
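      # Build the new cache in a temp dir inside cache_dir and only
      # os.rename() it into place after the initial fetch succeeds; the rename
      # is atomic on the same filesystem, so a half-populated cache never
      # appears under its final name.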
      tempdir = tempfile.mkdtemp(suffix=UrlToCacheDir(url),
                                 dir=options.cache_dir)
      RunGit(['init', '--bare'], cwd=tempdir)
      _config(tempdir)
      fetch_cmd = ['fetch'] + v + d + ['--tags', 'origin']
      RunGit(fetch_cmd, filter_fn=filter_fn, cwd=tempdir, retry=True)
      os.rename(tempdir, repo_dir)
    else:
      _config(repo_dir)
      if options.depth and os.path.exists(os.path.join(repo_dir, 'shallow')):
        logging.warn('Shallow fetch requested, but repo cache already exists.')
      fetch_cmd = ['fetch'] + v + ['--update-shallow', '--tags', 'origin']
      RunGit(fetch_cmd, filter_fn=filter_fn, cwd=repo_dir, retry=True)


@subcommand.usage('[url of repo to unlock, or -a|--all]')
def CMDunlock(parser, args):
  """Unlock one or all repos if their lock files are still around."""
  parser.add_option('--force', '-f', action='store_true',
                    help='Actually perform the action')
  parser.add_option('--all', '-a', action='store_true',
                    help='Unlock all repository caches')
  options, args = parser.parse_args(args)
  if len(args) > 1 or (len(args) == 0 and not options.all):
    parser.error('git cache unlock takes exactly one repo url, or --all')

  if not options.all:
    url = args[0]
    repo_dirs = [os.path.join(options.cache_dir, UrlToCacheDir(url))]
  else:
    repo_dirs = [os.path.join(options.cache_dir, path)
                 for path in os.listdir(options.cache_dir)
                 if os.path.isdir(os.path.join(options.cache_dir, path))]
  lockfiles = [repo_dir + '.lock' for repo_dir in repo_dirs
               if os.path.exists(repo_dir + '.lock')]

  if not options.force:
    parser.error('git cache unlock requires -f|--force to do anything. '
                 'Refusing to unlock the following repo caches: ' +
                 ', '.join(lockfiles))

  unlocked = []
  untouched = []
  for repo_dir in repo_dirs:
    lf = Lockfile(repo_dir)
    if lf.break_lock():
      unlocked.append(repo_dir)
    else:
      untouched.append(repo_dir)

  if unlocked:
    logging.info('Broke locks on these caches: %s' % unlocked)
  if untouched:
    logging.debug('Did not touch these caches: %s' % untouched)


class OptionParser(optparse.OptionParser):
  """Wrapper class for OptionParser to handle global options."""

  def __init__(self, *args, **kwargs):
    optparse.OptionParser.__init__(self, *args, prog='git cache', **kwargs)
    self.add_option('-c', '--cache-dir',
                    help='Path to the directory containing the cache')
    self.add_option('-v', '--verbose', action='count', default=0,
                    help='Increase verbosity (can be passed multiple times)')

  def parse_args(self, args=None, values=None):
    options, args = optparse.OptionParser.parse_args(self, args, values)

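    # The cache directory comes either from the --cache-dir flag or from the
    # user's global git config ('git config --global cache.cachepath'); an
    # explicit flag wins over the config value.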
    try:
      global_cache_dir = subprocess.check_output(
          [GIT_EXECUTABLE, 'config', '--global', 'cache.cachepath']).strip()
      if options.cache_dir:
        logging.warn('Overriding globally-configured cache directory.')
      else:
        options.cache_dir = global_cache_dir
    except subprocess.CalledProcessError:
      if not options.cache_dir:
        self.error('No cache directory specified on command line '
                   'or in cache.cachepath.')
    options.cache_dir = os.path.abspath(options.cache_dir)

    levels = [logging.WARNING, logging.INFO, logging.DEBUG]
    logging.basicConfig(level=levels[min(options.verbose, len(levels) - 1)])

    return options, args


def main(argv):
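  # depot_tools' subcommand module dispatches 'git cache <name>' to the
  # corresponding CMD<name> function defined in this module.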
  dispatcher = subcommand.CommandDispatcher(__name__)
  return dispatcher.execute(OptionParser(), argv)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))