Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(169)

Side by Side Diff: git_cache.py

Issue 229653002: Make git_cache.py import-able. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/tools/depot_tools
Patch Set: Created 6 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« gclient_scm.py ('K') | « gclient_scm.py ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # Copyright 2014 The Chromium Authors. All rights reserved. 2 # Copyright 2014 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be 3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file. 4 # found in the LICENSE file.
5 5
6 """A git command for managing a local cache of git repositories.""" 6 """A git command for managing a local cache of git repositories."""
7 7
8 from __future__ import print_function
8 import errno 9 import errno
9 import logging 10 import logging
10 import optparse 11 import optparse
11 import os 12 import os
12 import tempfile 13 import tempfile
13 import subprocess 14 import subprocess
14 import sys 15 import sys
15 import urlparse 16 import urlparse
16 17
17 from download_from_google_storage import Gsutil 18 from download_from_google_storage import Gsutil
18 import gclient_utils 19 import gclient_utils
19 import subcommand 20 import subcommand
20 21
try:
  # WindowsError only exists on Windows builds of Python; alias it so that
  # except clauses elsewhere can name WinErr unconditionally on any platform.
  # pylint: disable=E0602
  WinErr = WindowsError
except NameError:
  class WinErr(Exception):
    # Stand-in on non-Windows platforms, where WindowsError is undefined.
    pass
def UrlToCacheDir(url):
  """Convert a git url to a normalized form for the cache dir path."""
  parts = urlparse.urlparse(url)
  stripped = parts.netloc + parts.path
  suffix = '.git'
  if stripped.endswith(suffix):
    stripped = stripped[:-len(suffix)]
  # Escape existing dashes first so the subsequent '/' -> '-' mapping
  # remains unambiguous, then lowercase for a canonical directory name.
  return stripped.replace('-', '--').replace('/', '-').lower()
37
def RunGit(cmd, **kwargs):
  """Run git in a subprocess."""
  kwargs.setdefault('cwd', os.getcwd())
  if kwargs.get('filter_fn'):
    # A filter is supplied: wrap it so git progress output is rate-limited,
    # suppress direct stdout, and neuter credential prompts so a fetch can
    # never hang waiting for a password.
    kwargs['filter_fn'] = gclient_utils.GitFilter(kwargs.get('filter_fn'))
    kwargs.setdefault('print_stdout', False)
    env = kwargs.get('env') or kwargs.setdefault('env', os.environ.copy())
    env.setdefault('GIT_ASKPASS', 'true')
    env.setdefault('SSH_ASKPASS', 'true')
  else:
    kwargs.setdefault('print_stdout', True)
  stdout = kwargs.get('stdout', sys.stdout)
  print >> stdout, 'running "git %s" in "%s"' % (' '.join(cmd), kwargs['cwd'])
  gclient_utils.CheckCallAndFilter([GIT_EXECUTABLE] + cmd, **kwargs)
53 28
class LockError(Exception):
  """Error type for git-cache lock handling (see Lockfile)."""
  pass
56 31
57 32
58 class Lockfile(object): 33 class Lockfile(object):
59 """Class to represent a cross-platform process-specific lockfile.""" 34 """Class to represent a cross-platform process-specific lockfile."""
60 35
61 def __init__(self, path): 36 def __init__(self, path):
62 self.path = os.path.abspath(path) 37 self.path = os.path.abspath(path)
(...skipping 11 matching lines...) Expand all
74 pid = int(f.readline().strip()) 49 pid = int(f.readline().strip())
75 except (IOError, ValueError): 50 except (IOError, ValueError):
76 pid = None 51 pid = None
77 return pid 52 return pid
78 53
79 def _make_lockfile(self): 54 def _make_lockfile(self):
80 """Safely creates a lockfile containing the current pid.""" 55 """Safely creates a lockfile containing the current pid."""
81 open_flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY) 56 open_flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY)
82 fd = os.open(self.lockfile, open_flags, 0o644) 57 fd = os.open(self.lockfile, open_flags, 0o644)
83 f = os.fdopen(fd, 'w') 58 f = os.fdopen(fd, 'w')
84 print >> f, self.pid 59 print(self.pid, file=f)
85 f.close() 60 f.close()
86 61
87 def _remove_lockfile(self): 62 def _remove_lockfile(self):
88 """Delete the lockfile. Complains (implicitly) if it doesn't exist.""" 63 """Delete the lockfile. Complains (implicitly) if it doesn't exist."""
89 os.remove(self.lockfile) 64 os.remove(self.lockfile)
90 65
91 def lock(self): 66 def lock(self):
92 """Acquire the lock. 67 """Acquire the lock.
93 68
94 Note: This is a NON-BLOCKING FAIL-FAST operation. 69 Note: This is a NON-BLOCKING FAIL-FAST operation.
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after
131 106
132 def i_am_locking(self): 107 def i_am_locking(self):
133 """Test if the file is locked by this process.""" 108 """Test if the file is locked by this process."""
134 return self.is_locked() and self.pid == self._read_pid() 109 return self.is_locked() and self.pid == self._read_pid()
135 110
136 def __enter__(self): 111 def __enter__(self):
137 self.lock() 112 self.lock()
138 return self 113 return self
139 114
140 def __exit__(self, *_exc): 115 def __exit__(self, *_exc):
141 self.unlock() 116 # Windows is unreliable when it comes to file locking. YMMV.
142 117 try:
118 self.unlock()
119 except WinErr:
120 pass
121
122
class Mirror(object):
  """A local bare-repo mirror of a remote git repository in the git cache."""

  git_exe = 'git.bat' if sys.platform.startswith('win') else 'git'
  gsutil_exe = os.path.join(
      os.path.dirname(os.path.abspath(__file__)),
      'third_party', 'gsutil', 'gsutil')
  bootstrap_bucket = 'chromium-git-cache'

  def __init__(self, url, refs=None, print_func=None):
    """Args:
      url: the remote repository url this mirror caches.
      refs: optional extra refs (beyond refs/heads) to fetch.
      print_func: optional callable used for all status output; defaults to
          the builtin print.
    """
    self.url = url
    self.refs = refs or []
    self.basedir = self.UrlToCacheDir(url)
    self.mirror_path = os.path.join(self.GetCachePath(), self.basedir)
    self.print = print_func or print

  @staticmethod
  def UrlToCacheDir(url):
    """Convert a git url to a normalized form for the cache dir path."""
    parsed = urlparse.urlparse(url)
    norm_url = parsed.netloc + parsed.path
    if norm_url.endswith('.git'):
      norm_url = norm_url[:-len('.git')]
    # '-' is escaped to '--' before '/' becomes '-' so the mapping is
    # unambiguous; lowercase gives a canonical directory name.
    return norm_url.replace('-', '--').replace('/', '-').lower()

  @staticmethod
  def FindExecutable(executable):
    """This mimics the "which" utility: return the absolute path of the
    first executable named |executable| on PATH, or None if not found."""
    path_folders = os.environ.get('PATH').split(os.pathsep)

    for path_folder in path_folders:
      target = os.path.join(path_folder, executable)
      # Just in case we have some ~/blah paths.
      target = os.path.abspath(os.path.expanduser(target))
      if os.path.isfile(target) and os.access(target, os.X_OK):
        return target
    return None

  @classmethod
  def SetCachePath(cls, cachepath):
    """Override the class-wide cache root directory."""
    setattr(cls, 'cachepath', cachepath)

  @classmethod
  def GetCachePath(cls):
    """Return the cache root, reading git's global cache.cachepath once.

    Raises:
      RuntimeError: if no cache.cachepath is configured and none was set.
    """
    if not hasattr(cls, 'cachepath'):
      try:
        cachepath = subprocess.check_output(
            [cls.git_exe, 'config', '--global', 'cache.cachepath']).strip()
      except subprocess.CalledProcessError:
        cachepath = None
      if not cachepath:
        raise RuntimeError('No global cache.cachepath git configuration found.')
      setattr(cls, 'cachepath', cachepath)
    return getattr(cls, 'cachepath')

  def RunGit(self, cmd, **kwargs):
    """Run git in a subprocess."""
    cwd = kwargs.setdefault('cwd', self.mirror_path)
    kwargs.setdefault('print_stdout', False)
    kwargs.setdefault('filter_fn', self.print)
    env = kwargs.get('env') or kwargs.setdefault('env', os.environ.copy())
    # Neuter credential prompts so a fetch can never hang on a password.
    env.setdefault('GIT_ASKPASS', 'true')
    env.setdefault('SSH_ASKPASS', 'true')
    self.print('running "git %s" in "%s"' % (' '.join(cmd), cwd))
    gclient_utils.CheckCallAndFilter([self.git_exe] + cmd, **kwargs)

  def config(self, cwd=None):
    """Write the mirror's git config (origin url and fetch refspecs)."""
    if cwd is None:
      cwd = self.mirror_path
    self.RunGit(['config', 'core.deltaBaseCacheLimit',
                 gclient_utils.DefaultDeltaBaseCacheLimit()], cwd=cwd)
    self.RunGit(['config', 'remote.origin.url', self.url], cwd=cwd)
    self.RunGit(['config', '--replace-all', 'remote.origin.fetch',
                 '+refs/heads/*:refs/heads/*'], cwd=cwd)
    for ref in self.refs:
      ref = ref.lstrip('+').rstrip('/')
      if ref.startswith('refs/'):
        # Fully-qualified ref: mirror it verbatim.
        refspec = '+%s:%s' % (ref, ref)
      else:
        # Short name (e.g. 'branch-heads'): mirror the whole namespace.
        refspec = '+refs/%s/*:refs/%s/*' % (ref, ref)
      self.RunGit(['config', '--add', 'remote.origin.fetch', refspec], cwd=cwd)

  def bootstrap_repo(self, directory):
    """Bootstrap the repo from Google Storage if possible.

    Requires 7z on Windows and Unzip on Linux/Mac.

    Returns True on success, False if bootstrapping was not possible.
    """
    if sys.platform.startswith('win'):
      if not self.FindExecutable('7z'):
        self.print('''
Cannot find 7z in the path. If you want git cache to be able to bootstrap from
Google Storage, please install 7z from:

http://www.7-zip.org/download.html
''')
        return False
    else:
      if not self.FindExecutable('unzip'):
        self.print('''
Cannot find unzip in the path. If you want git cache to be able to bootstrap
from Google Storage, please ensure unzip is present on your system.
''')
        return False

    gs_folder = 'gs://%s/%s' % (self.bootstrap_bucket, self.basedir)
    gsutil = Gsutil(
        self.gsutil_exe, boto_path=os.devnull, bypass_prodaccess=True)
    # Get the most recent version of the zipfile.
    _, ls_out, _ = gsutil.check_call('ls', gs_folder)
    ls_out_sorted = sorted(ls_out.splitlines())
    if not ls_out_sorted:
      # This repo is not on Google Storage.
      return False
    latest_checkout = ls_out_sorted[-1]

    # Download zip file to a temporary directory.  Create the tempdir
    # *before* entering the try block: if mkdtemp() itself failed inside
    # the try, the finally clause would reference an unbound name.
    tempdir = tempfile.mkdtemp()
    try:
      self.print('Downloading %s' % latest_checkout)
      code, out, err = gsutil.check_call('cp', latest_checkout, tempdir)
      if code:
        self.print('%s\n%s' % (out, err))
        return False
      filename = os.path.join(tempdir, latest_checkout.split('/')[-1])

      # Unpack the file with 7z on Windows, or unzip everywhere else.
      if sys.platform.startswith('win'):
        cmd = ['7z', 'x', '-o%s' % directory, '-tzip', filename]
      else:
        cmd = ['unzip', filename, '-d', directory]
      retcode = subprocess.call(cmd)
    finally:
      # Clean up the downloaded zipfile.
      gclient_utils.rmtree(tempdir)

    if retcode:
      self.print(
          'Extracting bootstrap zipfile %s failed.\n'
          'Resuming normal operations.' % filename)
      return False
    return True

  def exists(self):
    """Return True if the mirror directory looks like an initialized repo."""
    return os.path.isfile(os.path.join(self.mirror_path, 'config'))

  def populate(self, depth=None, shallow=False, bootstrap=False,
               verbose=False):
    """Create or refresh the mirror, holding its lockfile throughout.

    Args:
      depth: if set, fetch only that many commits of history.
      shallow: shorthand for depth=10000 when depth is not given.
      bootstrap: allow seeding a brand-new mirror from Google Storage.
      verbose: pass -v/--progress to git fetch.
    """
    if shallow and not depth:
      depth = 10000
    gclient_utils.safe_makedirs(self.GetCachePath())

    v = []
    if verbose:
      v = ['-v', '--progress']

    d = []
    if depth:
      d = ['--depth', str(depth)]

    with Lockfile(self.mirror_path):
      # Setup from scratch if the repo is new or is in a bad state.
      tempdir = None
      if not os.path.exists(os.path.join(self.mirror_path, 'config')):
        gclient_utils.rmtree(self.mirror_path)
        # Build the new mirror in a tempdir next to the final location and
        # rename it into place at the end, so a failed populate never
        # leaves a half-initialized cache directory behind.
        tempdir = tempfile.mkdtemp(
            suffix=self.basedir, dir=self.GetCachePath())
        bootstrapped = not depth and bootstrap and self.bootstrap_repo(tempdir)
        if not bootstrapped:
          self.RunGit(['init', '--bare'], cwd=tempdir)
      else:
        if depth and os.path.exists(os.path.join(self.mirror_path, 'shallow')):
          logging.warn(
              'Shallow fetch requested, but repo cache already exists.')
          d = []

      rundir = tempdir or self.mirror_path
      self.config(rundir)
      fetch_cmd = ['fetch'] + v + d + ['origin']
      fetch_specs = subprocess.check_output(
          [self.git_exe, 'config', '--get-all', 'remote.origin.fetch'],
          cwd=rundir).strip().splitlines()
      # Fetch each refspec separately; one failing spec (e.g. a ref that no
      # longer exists upstream) should not abort the whole populate.
      for spec in fetch_specs:
        try:
          self.RunGit(fetch_cmd + [spec], cwd=rundir, retry=True)
        except subprocess.CalledProcessError:
          logging.warn('Fetch of %s failed' % spec)
      if tempdir:
        os.rename(tempdir, self.mirror_path)

  def update_bootstrap(self):
    """Zip this mirror and upload it to the Google Storage bootstrap bucket."""
    # The files are named <git number>.zip
    gen_number = subprocess.check_output(
        [self.git_exe, 'number', 'master'], cwd=self.mirror_path).strip()
    self.RunGit(['gc'])  # Run Garbage Collect to compress packfile.
    # Creating a temp file and then deleting it ensures we can use this name.
    _, tmp_zipfile = tempfile.mkstemp(suffix='.zip')
    os.remove(tmp_zipfile)
    subprocess.call(['zip', '-r', tmp_zipfile, '.'], cwd=self.mirror_path)
    gsutil = Gsutil(path=self.gsutil_exe, boto_path=None)
    dest_name = 'gs://%s/%s/%s.zip' % (
        self.bootstrap_bucket, self.basedir, gen_number)
    gsutil.call('cp', tmp_zipfile, dest_name)
    os.remove(tmp_zipfile)

  def unlock(self):
    """Forcibly remove this mirror's lockfiles (process lock + config.lock)."""
    lf = Lockfile(self.mirror_path)
    config_lock = os.path.join(self.mirror_path, 'config.lock')
    if os.path.exists(config_lock):
      os.remove(config_lock)
    lf.break_lock()
143 333
@subcommand.usage('[url of repo to check for caching]')
def CMDexists(parser, args):
  """Check to see if there already is a cache of the given repo.

  Returns 0 (and prints the cache path) if cached, 1 otherwise.
  """
  _, args = parser.parse_args(args)
  if not len(args) == 1:
    parser.error('git cache exists only takes exactly one repo url.')
  url = args[0]
  mirror = Mirror(url)
  if mirror.exists():
    # Print the on-disk path of the cache for this url.
    print(mirror.mirror_path)
    return 0
  return 1
157 346
158 347
@subcommand.usage('[url of repo to create a bootstrap zip file]')
def CMDupdate_bootstrap(parser, args):
  """Create and upload a bootstrap zip file for the given repo."""
  # Let's just assert we can't do this on Windows.
  if sys.platform.startswith('win'):
    print('Sorry, update bootstrap will not work on Windows.', file=sys.stderr)
    return 1

  # First, we need to ensure the cache is populated.
  populate_args = args[:]
  populate_args.append('--no_bootstrap')
  CMDpopulate(parser, populate_args)

  # Get the repo directory.
  _, args = parser.parse_args(args)
  url = args[0]
  mirror = Mirror(url)
  mirror.update_bootstrap()
  return 0
192 368
@subcommand.usage('[url of repo to add to or update in cache]')
def CMDpopulate(parser, args):
  """Ensure that the cache has all up-to-date objects for the given repo."""
  parser.add_option('--depth', type='int',
                    help='Only cache DEPTH commits of history')
  parser.add_option('--shallow', '-s', action='store_true',
                    help='Only cache 10000 commits of history')
  parser.add_option('--ref', action='append',
                    help='Specify additional refs to be fetched')
  parser.add_option('--no_bootstrap', action='store_true',
                    help='Don\'t bootstrap from Google Storage')

  options, args = parser.parse_args(args)
  if not len(args) == 1:
    parser.error('git cache populate only takes exactly one repo url.')
  url = args[0]

  mirror = Mirror(url, refs=options.ref)
  # Translate command-line flags into Mirror.populate() keyword arguments.
  kwargs = {
      'verbose': options.verbose,
      'shallow': options.shallow,
      'bootstrap': not options.no_bootstrap,
  }
  if options.depth:
    kwargs['depth'] = options.depth
  mirror.populate(**kwargs)
331 396
@subcommand.usage('[url of repo to unlock, or -a|--all]')
def CMDunlock(parser, args):
  """Unlock one or all repos if their lock files are still around."""
  parser.add_option('--force', '-f', action='store_true',
                    help='Actually perform the action')
  parser.add_option('--all', '-a', action='store_true',
                    help='Unlock all repository caches')
  options, args = parser.parse_args(args)
  if len(args) > 1 or (len(args) == 0 and not options.all):
    parser.error('git cache unlock takes exactly one repo url, or --all')

  # Collect the cache directories to unlock: either the single mirror for
  # the given url, or every directory (and orphaned .lock file) in the cache.
  repo_dirs = []
  if not options.all:
    url = args[0]
    repo_dirs.append(Mirror(url).mirror_path)
  else:
    cachepath = Mirror.GetCachePath()
    repo_dirs = [os.path.join(cachepath, path)
                 for path in os.listdir(cachepath)
                 if os.path.isdir(os.path.join(cachepath, path))]
    # Also include entries for stale .lock files whose repo dir is gone.
    repo_dirs.extend([os.path.join(cachepath,
                                   lockfile.replace('.lock', ''))
                      for lockfile in os.listdir(cachepath)
                      if os.path.isfile(os.path.join(cachepath,
                                                     lockfile))
                      and lockfile.endswith('.lock')
                      and os.path.join(cachepath, lockfile)
                      not in repo_dirs])
  lockfiles = [repo_dir + '.lock' for repo_dir in repo_dirs
               if os.path.exists(repo_dir + '.lock')]

  if not options.force:
    parser.error('git cache unlock requires -f|--force to do anything. '
                 'Refusing to unlock the following repo caches: '
                 ', '.join(lockfiles))

  unlocked_repos = []
  untouched_repos = []
  for repo_dir in repo_dirs:
    lf = Lockfile(repo_dir)
    config_lock = os.path.join(repo_dir, 'config.lock')
    unlocked = False
    if os.path.exists(config_lock):
      os.remove(config_lock)
      unlocked = True
    if lf.break_lock():
      unlocked = True

    if unlocked:
      unlocked_repos.append(repo_dir)
    else:
      untouched_repos.append(repo_dir)

  if unlocked_repos:
    logging.info('Broke locks on these caches:\n  %s' % '\n  '.join(
        unlocked_repos))
  if untouched_repos:
    logging.debug('Did not touch these caches:\n  %s' % '\n  '.join(
        untouched_repos))
387 456
388 457
class OptionParser(optparse.OptionParser):
  """Wrapper class for OptionParser to handle global options."""

  def __init__(self, *args, **kwargs):
    optparse.OptionParser.__init__(self, *args, prog='git cache', **kwargs)
    self.add_option('-c', '--cache-dir',
                    help='Path to the directory containing the cache')
    self.add_option('-v', '--verbose', action='count', default=0,
                    help='Increase verbosity (can be passed multiple times)')

  def parse_args(self, args=None, values=None):
    """Parse args, then push --cache-dir and -v into global state."""
    options, args = optparse.OptionParser.parse_args(self, args, values)

    try:
      global_cache_dir = Mirror.GetCachePath()
    except RuntimeError:
      # No cache.cachepath in the global git config and none set yet.
      global_cache_dir = None
    if options.cache_dir:
      if global_cache_dir and (
          os.path.abspath(options.cache_dir) !=
          os.path.abspath(global_cache_dir)):
        logging.warn('Overriding globally-configured cache directory.')
      Mirror.SetCachePath(options.cache_dir)

    # Map the -v count onto logging levels, capping at DEBUG.
    levels = [logging.WARNING, logging.INFO, logging.DEBUG]
    logging.basicConfig(level=levels[min(options.verbose, len(levels) - 1)])

    return options, args
422 486
423 487
def main(argv):
  """Dispatch argv to the CMD* subcommand handlers in this module."""
  dispatcher = subcommand.CommandDispatcher(__name__)
  return dispatcher.execute(OptionParser(), argv)
427 491
428 492
if __name__ == '__main__':
  # Script entry point: strip the program name and exit with main's status.
  sys.exit(main(sys.argv[1:]))
OLDNEW
« gclient_scm.py ('K') | « gclient_scm.py ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698