Chromium Code Reviews

Side by Side Diff: git_cache.py

Issue 229653002: Make git_cache.py import-able. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/tools/depot_tools
Patch Set: Created 6 years, 8 months ago
OLD | NEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # Copyright 2014 The Chromium Authors. All rights reserved. 2 # Copyright 2014 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be 3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file. 4 # found in the LICENSE file.
5 5
6 """A git command for managing a local cache of git repositories.""" 6 """A git command for managing a local cache of git repositories."""
7 7
8 from __future__ import print_function
8 import errno 9 import errno
9 import logging 10 import logging
10 import optparse 11 import optparse
11 import os 12 import os
12 import tempfile 13 import tempfile
13 import subprocess 14 import subprocess
14 import sys 15 import sys
15 import urlparse 16 import urlparse
16 17
17 from download_from_google_storage import Gsutil 18 from download_from_google_storage import Gsutil
18 import gclient_utils 19 import gclient_utils
19 import subcommand 20 import subcommand
20 21
21 22 try:
22 GIT_EXECUTABLE = 'git.bat' if sys.platform.startswith('win') else 'git' 23 # pylint: disable=E0602
23 BOOTSTRAP_BUCKET = 'chromium-git-cache' 24 WinErr = WindowsError
24 GSUTIL_DEFAULT_PATH = os.path.join( 25 except NameError:
25 os.path.dirname(os.path.abspath(__file__)), 26 class WinErr(Exception):
26 'third_party', 'gsutil', 'gsutil') 27 pass
27
28
29 def UrlToCacheDir(url):
30 """Convert a git url to a normalized form for the cache dir path."""
31 parsed = urlparse.urlparse(url)
32 norm_url = parsed.netloc + parsed.path
33 if norm_url.endswith('.git'):
34 norm_url = norm_url[:-len('.git')]
35 return norm_url.replace('-', '--').replace('/', '-').lower()
36
37
38 def RunGit(cmd, **kwargs):
39 """Run git in a subprocess."""
40 kwargs.setdefault('cwd', os.getcwd())
41 if kwargs.get('filter_fn'):
42 kwargs['filter_fn'] = gclient_utils.GitFilter(kwargs.get('filter_fn'))
43 kwargs.setdefault('print_stdout', False)
44 env = kwargs.get('env') or kwargs.setdefault('env', os.environ.copy())
45 env.setdefault('GIT_ASKPASS', 'true')
46 env.setdefault('SSH_ASKPASS', 'true')
47 else:
48 kwargs.setdefault('print_stdout', True)
49 stdout = kwargs.get('stdout', sys.stdout)
50 print >> stdout, 'running "git %s" in "%s"' % (' '.join(cmd), kwargs['cwd'])
51 gclient_utils.CheckCallAndFilter([GIT_EXECUTABLE] + cmd, **kwargs)
52
53 28
54 class LockError(Exception): 29 class LockError(Exception):
55 pass 30 pass
56 31
57 32
58 class Lockfile(object): 33 class Lockfile(object):
59 """Class to represent a cross-platform process-specific lockfile.""" 34 """Class to represent a cross-platform process-specific lockfile."""
60 35
61 def __init__(self, path): 36 def __init__(self, path):
62 self.path = os.path.abspath(path) 37 self.path = os.path.abspath(path)
(...skipping 11 matching lines...)
74 pid = int(f.readline().strip()) 49 pid = int(f.readline().strip())
75 except (IOError, ValueError): 50 except (IOError, ValueError):
76 pid = None 51 pid = None
77 return pid 52 return pid
78 53
79 def _make_lockfile(self): 54 def _make_lockfile(self):
80 """Safely creates a lockfile containing the current pid.""" 55 """Safely creates a lockfile containing the current pid."""
81 open_flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY) 56 open_flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY)
82 fd = os.open(self.lockfile, open_flags, 0o644) 57 fd = os.open(self.lockfile, open_flags, 0o644)
83 f = os.fdopen(fd, 'w') 58 f = os.fdopen(fd, 'w')
84 print >> f, self.pid 59 print(self.pid, file=f)
85 f.close() 60 f.close()
86 61
87 def _remove_lockfile(self): 62 def _remove_lockfile(self):
88 """Delete the lockfile. Complains (implicitly) if it doesn't exist.""" 63 """Delete the lockfile. Complains (implicitly) if it doesn't exist."""
89 os.remove(self.lockfile) 64 os.remove(self.lockfile)
90 65
91 def lock(self): 66 def lock(self):
92 """Acquire the lock. 67 """Acquire the lock.
93 68
94 Note: This is a NON-BLOCKING FAIL-FAST operation. 69 Note: This is a NON-BLOCKING FAIL-FAST operation.
(...skipping 36 matching lines...)
131 106
132 def i_am_locking(self): 107 def i_am_locking(self):
133 """Test if the file is locked by this process.""" 108 """Test if the file is locked by this process."""
134 return self.is_locked() and self.pid == self._read_pid() 109 return self.is_locked() and self.pid == self._read_pid()
135 110
136 def __enter__(self): 111 def __enter__(self):
137 self.lock() 112 self.lock()
138 return self 113 return self
139 114
140 def __exit__(self, *_exc): 115 def __exit__(self, *_exc):
141 self.unlock() 116 # Windows is unreliable when it comes to file locking. YMMV.
142 117 try:
118 self.unlock()
119 except WinErr:
120 pass
Ryan Tseng 2014/04/09 00:39:44 if os.exists(self.lockfile): raise If the lock...
szager1 2014/04/09 05:22:29 Below is sporadic error I see on Windows. There's...
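The truncated exchange above is about how tolerant __exit__ should be of a failed unlock on Windows. A minimal sketch of the re-check Ryan Tseng appears to be proposing (hypothetical, and it would need os.path.exists rather than os.exists) would swallow the error only when the lockfile really is gone:

    def __exit__(self, *_exc):
      # Windows file locking is flaky; tolerate a failed unlock only if the
      # lockfile has actually been removed, otherwise surface the error.
      try:
        self.unlock()
      except WinErr:
        if os.path.exists(self.lockfile):
          raise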
121
122
123 class Mirror(object):
124
125 git_exe = 'git.bat' if sys.platform.startswith('win') else 'git'
126 gsutil_exe = os.path.join(
127 os.path.dirname(os.path.abspath(__file__)),
128 'third_party', 'gsutil', 'gsutil')
129 bootstrap_bucket = 'chromium-git-cache'
130
131 def __init__(self, url, refs=None, print_func=None):
132 self.url = url
133 self.refs = refs or []
134 self.basedir = self.UrlToCacheDir(url)
135 self.mirror_path = os.path.join(self.GetCachePath(), self.basedir)
136 self.print = print_func or print
137
138 @staticmethod
139 def UrlToCacheDir(url):
140 """Convert a git url to a normalized form for the cache dir path."""
141 parsed = urlparse.urlparse(url)
142 norm_url = parsed.netloc + parsed.path
143 if norm_url.endswith('.git'):
144 norm_url = norm_url[:-len('.git')]
145 return norm_url.replace('-', '--').replace('/', '-').lower()
146
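For reference, a quick example of what this normalization produces (the URL is only illustrative):

    >>> Mirror.UrlToCacheDir('https://chromium.googlesource.com/chromium/src.git')
    'chromium.googlesource.com-chromium-src'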
147 @staticmethod
148 def FindExecutable(executable):
agable 2014/04/11 20:44:58 Why is this a method on Mirror?
149 """This mimics the "which" utility."""
150 path_folders = os.environ.get('PATH').split(os.pathsep)
151
152 for path_folder in path_folders:
153 target = os.path.join(path_folder, executable)
154 # Just in case we have some ~/blah paths.
155 target = os.path.abspath(os.path.expanduser(target))
156 if os.path.isfile(target) and os.access(target, os.X_OK):
157 return target
158 return None
159
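agable's question above is about where this helper belongs; for comparison, a hedged alternative is to drop it entirely in favour of the stdlib lookup (an illustrative sketch, not what this patch does):

    import sys
    from distutils.spawn import find_executable

    # Returns the absolute path of the tool, or None if it is not on PATH.
    archiver = '7z' if sys.platform.startswith('win') else 'unzip'
    if not find_executable(archiver):
      print('Cannot find %s in the path.' % archiver)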
160 @classmethod
161 def SetCachePath(cls, cachepath):
agable 2014/04/11 20:44:58 Could make cache_path a property with a @property...
162 setattr(cls, 'cachepath', cachepath)
163
164 @classmethod
165 def GetCachePath(cls):
Ryan Tseng 2014/04/09 00:39:44 This is rather convoluted, and I feel like it'll c...
szager1 2014/04/09 05:22:29 Unlike url, cachepath is a global setting. It doe...
agable 2014/04/11 20:44:58 Can't do this at __init__ time because it's a clas...
166 if not hasattr(cls, 'cachepath'):
167 try:
168 cachepath = subprocess.check_output(
169 [cls.git_exe, 'config', '--global', 'cache.cachepath']).strip()
170 except subprocess.CalledProcessError:
171 cachepath = None
172 if not cachepath:
173 raise RuntimeError('No global cache.cachepath git configuration found.')
174 setattr(cls, 'cachepath', cachepath)
175 return getattr(cls, 'cachepath')
176
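agable's truncated note above suggests exposing the cache path as a property rather than the SetCachePath/GetCachePath pair. One way to do that is a small class-level descriptor; the sketch below is only an illustration under that assumption (the _ClassProperty helper is invented here and is not part of the patch):

    import subprocess
    import sys

    class _ClassProperty(object):
      """Minimal descriptor so a value computed from the class reads like an
      attribute."""
      def __init__(self, getter):
        self._getter = getter
      def __get__(self, _instance, owner):
        return self._getter(owner)

    class Mirror(object):  # sketch only, not the class defined in this patch
      git_exe = 'git.bat' if sys.platform.startswith('win') else 'git'
      _cachepath = None

      @_ClassProperty
      def cachepath(cls):
        # Lazily read and memoize the global cache.cachepath git setting.
        if cls._cachepath is None:
          try:
            cls._cachepath = subprocess.check_output(
                [cls.git_exe, 'config', '--global', 'cache.cachepath']).strip()
          except subprocess.CalledProcessError:
            cls._cachepath = None
          if not cls._cachepath:
            raise RuntimeError(
                'No global cache.cachepath git configuration found.')
        return cls._cachepath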
177 def RunGit(self, cmd, **kwargs):
178 """Run git in a subprocess."""
179 cwd = kwargs.setdefault('cwd', self.mirror_path)
180 kwargs.setdefault('print_stdout', False)
181 kwargs.setdefault('filter_fn', self.print)
182 env = kwargs.get('env') or kwargs.setdefault('env', os.environ.copy())
183 env.setdefault('GIT_ASKPASS', 'true')
184 env.setdefault('SSH_ASKPASS', 'true')
185 self.print('running "git %s" in "%s"' % (' '.join(cmd), cwd))
186 gclient_utils.CheckCallAndFilter([self.git_exe] + cmd, **kwargs)
187
188 def config(self, cwd=None):
189 if cwd is None:
190 cwd = self.mirror_path
191 self.RunGit(['config', 'core.deltaBaseCacheLimit',
192 gclient_utils.DefaultDeltaBaseCacheLimit()], cwd=cwd)
193 self.RunGit(['config', 'remote.origin.url', self.url], cwd=cwd)
194 self.RunGit(['config', '--replace-all', 'remote.origin.fetch',
195 '+refs/heads/*:refs/heads/*'], cwd=cwd)
196 for ref in self.refs:
197 ref = ref.lstrip('+').rstrip('/')
198 if ref.startswith('refs/'):
199 refspec = '+%s:%s' % (ref, ref)
200 else:
201 refspec = '+refs/%s/*:refs/%s/*' % (ref, ref)
202 self.RunGit(['config', '--add', 'remote.origin.fetch', refspec], cwd=cwd)
203
204 def bootstrap_repo(self, directory):
205 """Bootstrap the repo from Google Stroage if possible.
206
207 Requires 7z on Windows and Unzip on Linux/Mac.
208 """
209 if sys.platform.startswith('win'):
210 if not self.FindExecutable('7z'):
211 self.print('''
agable 2014/04/11 20:44:58 use textwrap.dedent.
212 Cannot find 7z in the path. If you want git cache to be able to bootstrap from
Ryan Tseng 2014/04/09 00:39:44 nit: 2 spaces here, or 1 space down on line 221/11...
szager1 2014/04/09 05:22:29 Done.
213 Google Storage, please install 7z from:
214
215 http://www.7-zip.org/download.html
216 ''')
217 return False
218 else:
219 if not self.FindExecutable('unzip'):
220 self.print('''
221 Cannot find unzip in the path. If you want git cache to be able to bootstrap
222 from Google Storage, please ensure unzip is present on your system.
223 ''')
224 return False
225
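agable's "use textwrap.dedent" note refers to the triple-quoted messages above, which currently have to be written flush-left so they print without stray indentation. A hedged sketch of the dedent style, keeping the same wording while letting the source stay indented (in the real method this would go through self.print):

    from __future__ import print_function
    import textwrap

    print(textwrap.dedent("""\
        Cannot find 7z in the path. If you want git cache to be able to bootstrap
        from Google Storage, please install 7z from:

        http://www.7-zip.org/download.html"""))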
226 gs_folder = 'gs://%s/%s' % (self.bootstrap_bucket, self.basedir)
227 gsutil = Gsutil(
228 self.gsutil_exe, boto_path=os.devnull, bypass_prodaccess=True)
229 # Get the most recent version of the zipfile.
230 _, ls_out, _ = gsutil.check_call('ls', gs_folder)
231 ls_out_sorted = sorted(ls_out.splitlines())
232 if not ls_out_sorted:
233 # This repo is not on Google Storage.
234 return False
235 latest_checkout = ls_out_sorted[-1]
236
237 # Download zip file to a temporary directory.
238 try:
239 tempdir = tempfile.mkdtemp()
240 self.print('Downloading %s' % latest_checkout)
241 code, out, err = gsutil.check_call('cp', latest_checkout, tempdir)
242 if code:
243 self.print('%s\n%s' % (out, err))
244 return False
245 filename = os.path.join(tempdir, latest_checkout.split('/')[-1])
246
247 # Unpack the file with 7z on Windows, or unzip everywhere else.
248 if sys.platform.startswith('win'):
249 cmd = ['7z', 'x', '-o%s' % directory, '-tzip', filename]
250 else:
251 cmd = ['unzip', filename, '-d', directory]
252 retcode = subprocess.call(cmd)
253 finally:
254 # Clean up the downloaded zipfile.
255 gclient_utils.rmtree(tempdir)
256
257 if retcode:
258 self.print(
259 'Extracting bootstrap zipfile %s failed.\n'
260 'Resuming normal operations.' % filename)
261 return False
262 return True
263
264 def exists(self):
265 return os.path.isfile(os.path.join(self.mirror_path, 'config'))
266
267 def populate(self, depth=None, shallow=False, bootstrap=False, noisy=False):
268 if shallow and not depth:
269 depth = 10000
270 gclient_utils.safe_makedirs(self.GetCachePath())
271
272 v = []
273 if noisy:
Ryan Tseng 2014/04/09 00:39:44 why not just call it verbose all the way down the...
szager1 2014/04/09 05:22:29 Done.
274 v = ['-v', '--progress']
275
276 d = []
277 if depth:
278 d = ['--depth', str(depth)]
279
280
281 with Lockfile(self.mirror_path):
282 # Setup from scratch if the repo is new or is in a bad state.
283 tempdir = None
284 if not os.path.exists(os.path.join(self.mirror_path, 'config')):
285 gclient_utils.rmtree(self.mirror_path)
286 tempdir = tempfile.mkdtemp(
287 suffix=self.basedir, dir=self.GetCachePath())
288 bootstrapped = bootstrap and self.bootstrap_repo(tempdir)
Ryan Tseng 2014/04/09 00:39:44 Actually we probably want not depth and bootstrap...
szager1 2014/04/09 05:22:29 Done.
289 if not bootstrapped:
290 self.RunGit(['init', '--bare'], cwd=tempdir)
291 else:
292 if depth and os.path.exists(os.path.join(self.mirror_path, 'shallow')):
293 logging.warn(
294 'Shallow fetch requested, but repo cache already exists.')
295 d = []
296
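Ryan Tseng's truncated comment above ("we probably want not depth and bootstrap ...") reads like a request to skip the Google Storage bootstrap whenever a depth-limited fetch was asked for. A guess at that condition, purely illustrative since the follow-up patch set is not shown here:

        # Only bootstrap a full mirror; a shallow (depth-limited) cache can
        # simply 'git init --bare' and fetch.
        bootstrapped = not depth and bootstrap and self.bootstrap_repo(tempdir)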
297 rundir = tempdir or self.mirror_path
298 self.config(rundir)
299 fetch_cmd = ['fetch'] + v + d + ['origin']
300 fetch_specs = subprocess.check_output(
301 [self.git_exe, 'config', '--get-all', 'remote.origin.fetch'],
302 cwd=rundir).strip().splitlines()
303 for spec in fetch_specs:
304 try:
305 self.RunGit(fetch_cmd + [spec], cwd=rundir, retry=True)
306 except subprocess.CalledProcessError:
307 logging.warn('Fetch of %s failed' % spec)
308 if tempdir:
309 os.rename(tempdir, self.mirror_path)
310
311 def update_bootstrap(self):
312 # The files are named <git number>.zip
313 gen_number = subprocess.check_output(
314 [self.git_exe, 'number', 'master'], cwd=self.mirror_path).strip()
315 self.RunGit(['gc']) # Run Garbage Collect to compress packfile.
316 # Creating a temp file and then deleting it ensures we can use this name.
317 _, tmp_zipfile = tempfile.mkstemp(suffix='.zip')
318 os.remove(tmp_zipfile)
319 subprocess.call(['zip', '-r', tmp_zipfile, '.'], cwd=self.mirror_path)
320 gsutil = Gsutil(path=self.gsutil_exe, boto_path=None)
321 dest_name = 'gs://%s/%s/%s.zip' % (
322 self.bootstrap_bucket, self.basedir, gen_number)
323 gsutil.call('cp', tmp_zipfile, dest_name)
324 os.remove(tmp_zipfile)
325
326 def unlock(self):
327 lf = Lockfile(self.mirror_path)
328 config_lock = os.path.join(self.mirror_path, 'config.lock')
329 if os.path.exists(config_lock):
330 os.remove(config_lock)
331 lf.break_lock()
143 332
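Since the point of this change is making git_cache.py import-able, here is a rough sketch of how a caller such as gclient_scm.py might drive the Mirror class (illustrative only; the ref name and helper are made up, and the real integration lives in the gclient_scm.py half of this review):

    import git_cache

    def ensure_cached(url):
      # The cache root comes from 'git config --global cache.cachepath' unless
      # Mirror.SetCachePath() has been called beforehand.
      mirror = git_cache.Mirror(url, refs=['branch-heads'])
      mirror.populate(bootstrap=True)  # create or refresh the bare cache repo
      return mirror.mirror_path        # local path to use as the fetch source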
144 @subcommand.usage('[url of repo to check for caching]') 333 @subcommand.usage('[url of repo to check for caching]')
145 def CMDexists(parser, args): 334 def CMDexists(parser, args):
146 """Check to see if there already is a cache of the given repo.""" 335 """Check to see if there already is a cache of the given repo."""
147 options, args = parser.parse_args(args) 336 _, args = parser.parse_args(args)
148 if not len(args) == 1: 337 if not len(args) == 1:
149 parser.error('git cache exists only takes exactly one repo url.') 338 parser.error('git cache exists only takes exactly one repo url.')
150 url = args[0] 339 url = args[0]
151 repo_dir = os.path.join(options.cache_dir, UrlToCacheDir(url)) 340 mirror = Mirror(url)
152 flag_file = os.path.join(repo_dir, 'config') 341 if mirror.exists():
153 if os.path.isdir(repo_dir) and os.path.isfile(flag_file): 342 print(mirror.mirror_path)
154 print repo_dir
155 return 0 343 return 0
156 return 1 344 return 1
157 345
158 346
159 @subcommand.usage('[url of repo to create a bootstrap zip file]') 347 @subcommand.usage('[url of repo to create a bootstrap zip file]')
160 def CMDupdate_bootstrap(parser, args): 348 def CMDupdate_bootstrap(parser, args):
161 """Create and uploads a bootstrap tarball.""" 349 """Create and uploads a bootstrap tarball."""
162 # Let's just assert we can't do this on Windows. 350 # Let's just assert we can't do this on Windows.
163 if sys.platform.startswith('win'): 351 if sys.platform.startswith('win'):
164 print >> sys.stderr, 'Sorry, update bootstrap will not work on Windows.' 352 print('Sorry, update bootstrap will not work on Windows.', file=sys.stderr)
165 return 1 353 return 1
166 354
167 # First, we need to ensure the cache is populated. 355 # First, we need to ensure the cache is populated.
168 populate_args = args[:] 356 populate_args = args[:]
169 populate_args.append('--no_bootstrap') 357 populate_args.append('--no_bootstrap')
170 CMDpopulate(parser, populate_args) 358 CMDpopulate(parser, populate_args)
171 359
172 # Get the repo directory. 360 # Get the repo directory.
173 options, args = parser.parse_args(args) 361 _, args = parser.parse_args(args)
174 url = args[0] 362 url = args[0]
175 repo_dir = os.path.join(options.cache_dir, UrlToCacheDir(url)) 363 mirror = Mirror(url)
176 364 mirror.update_bootstrap()
177 # The files are named <git number>.zip 365 return 0
178 gen_number = subprocess.check_output(['git', 'number', 'master'],
179 cwd=repo_dir).strip()
180 RunGit(['gc'], cwd=repo_dir) # Run Garbage Collect to compress packfile.
181 # Creating a temp file and then deleting it ensures we can use this name.
182 _, tmp_zipfile = tempfile.mkstemp(suffix='.zip')
183 os.remove(tmp_zipfile)
184 subprocess.call(['zip', '-r', tmp_zipfile, '.'], cwd=repo_dir)
185 gsutil = Gsutil(path=GSUTIL_DEFAULT_PATH, boto_path=None)
186 dest_name = 'gs://%s/%s/%s.zip' % (BOOTSTRAP_BUCKET,
187 UrlToCacheDir(url),
188 gen_number)
189 gsutil.call('cp', tmp_zipfile, dest_name)
190 os.remove(tmp_zipfile)
191 366
192 367
193 @subcommand.usage('[url of repo to add to or update in cache]') 368 @subcommand.usage('[url of repo to add to or update in cache]')
194 def CMDpopulate(parser, args): 369 def CMDpopulate(parser, args):
195 """Ensure that the cache has all up-to-date objects for the given repo.""" 370 """Ensure that the cache has all up-to-date objects for the given repo."""
196 parser.add_option('--depth', type='int', 371 parser.add_option('--depth', type='int',
197 help='Only cache DEPTH commits of history') 372 help='Only cache DEPTH commits of history')
198 parser.add_option('--shallow', '-s', action='store_true', 373 parser.add_option('--shallow', '-s', action='store_true',
199 help='Only cache 10000 commits of history') 374 help='Only cache 10000 commits of history')
200 parser.add_option('--ref', action='append', 375 parser.add_option('--ref', action='append',
201 help='Specify additional refs to be fetched') 376 help='Specify additional refs to be fetched')
202 parser.add_option('--no_bootstrap', action='store_true', 377 parser.add_option('--no_bootstrap', action='store_true',
203 help='Don\'t bootstrap from Google Storage') 378 help='Don\'t bootstrap from Google Storage')
204 379
205 options, args = parser.parse_args(args) 380 options, args = parser.parse_args(args)
206 if options.shallow and not options.depth: 381 if options.shallow and not options.depth:
207 options.depth = 10000 382 options.depth = 10000
Ryan Tseng 2014/04/09 00:39:44 no longer needed.
szager1 2014/04/09 05:22:29 Done.
208 if not len(args) == 1: 383 if not len(args) == 1:
209 parser.error('git cache populate only takes exactly one repo url.') 384 parser.error('git cache populate only takes exactly one repo url.')
210 url = args[0] 385 url = args[0]
211 386
212 gclient_utils.safe_makedirs(options.cache_dir) 387 mirror = Mirror(url, refs=options.ref)
213 repo_dir = os.path.join(options.cache_dir, UrlToCacheDir(url)) 388 kwargs = {
214 389 'noisy': options.verbose,
215 v = [] 390 'shallow': options.shallow,
216 filter_fn = lambda l: '[up to date]' not in l 391 'bootstrap': not options.no_bootstrap,
217 if options.verbose: 392 }
218 v = ['-v', '--progress']
219 filter_fn = None
220
221 d = []
222 if options.depth: 393 if options.depth:
223 d = ['--depth', '%d' % options.depth] 394 kwargs['depth'] = options.depth
224 395 mirror.populate(**kwargs)
225 def _find(executable):
226 """This mimics the "which" utility."""
227 path_folders = os.environ.get('PATH').split(os.pathsep)
228
229 for path_folder in path_folders:
230 target = os.path.join(path_folder, executable)
231 # Just in case we have some ~/blah paths.
232 target = os.path.abspath(os.path.expanduser(target))
233 if os.path.isfile(target) and os.access(target, os.X_OK):
234 return target
235 return False
236
237 def _maybe_bootstrap_repo(directory):
238 """Bootstrap the repo from Google Stroage if possible.
239
240 Requires 7z on Windows and Unzip on Linux/Mac.
241 """
242 if options.no_bootstrap:
243 return False
244 if sys.platform.startswith('win'):
245 if not _find('7z'):
246 print 'Cannot find 7z in the path.'
247 print 'If you want git cache to be able to bootstrap from '
248 print 'Google Storage, please install 7z from:'
249 print 'http://www.7-zip.org/download.html'
250 return False
251 else:
252 if not _find('unzip'):
253 print 'Cannot find unzip in the path.'
254 print 'If you want git cache to be able to bootstrap from '
255 print 'Google Storage, please ensure unzip is present on your system.'
256 return False
257
258 folder = UrlToCacheDir(url)
259 gs_folder = 'gs://%s/%s' % (BOOTSTRAP_BUCKET, folder)
260 gsutil = Gsutil(GSUTIL_DEFAULT_PATH, boto_path=os.devnull,
261 bypass_prodaccess=True)
262 # Get the most recent version of the zipfile.
263 _, ls_out, _ = gsutil.check_call('ls', gs_folder)
264 ls_out_sorted = sorted(ls_out.splitlines())
265 if not ls_out_sorted:
266 # This repo is not on Google Storage.
267 return False
268 latest_checkout = ls_out_sorted[-1]
269
270 # Download zip file to a temporary directory.
271 tempdir = tempfile.mkdtemp()
272 print 'Downloading %s...' % latest_checkout
273 code, out, err = gsutil.check_call('cp', latest_checkout, tempdir)
274 if code:
275 print '%s\n%s' % (out, err)
276 return False
277 filename = os.path.join(tempdir, latest_checkout.split('/')[-1])
278
279 # Unpack the file with 7z on Windows, or unzip everywhere else.
280 if sys.platform.startswith('win'):
281 cmd = ['7z', 'x', '-o%s' % directory, '-tzip', filename]
282 else:
283 cmd = ['unzip', filename, '-d', directory]
284 retcode = subprocess.call(cmd)
285
286 # Clean up the downloaded zipfile.
287 gclient_utils.rmtree(tempdir)
288 if retcode:
289 print 'Extracting bootstrap zipfile %s failed.' % filename
290 print 'Resuming normal operations'
291 return False
292 return True
293
294 def _config(directory):
295 RunGit(['config', 'core.deltaBaseCacheLimit',
296 gclient_utils.DefaultDeltaBaseCacheLimit()], cwd=directory)
297 RunGit(['config', 'remote.origin.url', url],
298 cwd=directory)
299 RunGit(['config', '--replace-all', 'remote.origin.fetch',
300 '+refs/heads/*:refs/heads/*'],
301 cwd=directory)
302 RunGit(['config', '--add', 'remote.origin.fetch',
303 '+refs/tags/*:refs/tags/*'],
304 cwd=directory)
305 for ref in options.ref or []:
306 ref = ref.rstrip('/')
307 refspec = '+refs/%s/*:refs/%s/*' % (ref, ref)
308 RunGit(['config', '--add', 'remote.origin.fetch', refspec],
309 cwd=directory)
310
311 with Lockfile(repo_dir):
312 # Setup from scratch if the repo is new or is in a bad state.
313 if not os.path.exists(os.path.join(repo_dir, 'config')):
314 gclient_utils.rmtree(repo_dir)
315 tempdir = tempfile.mkdtemp(suffix=UrlToCacheDir(url),
316 dir=options.cache_dir)
317 bootstrapped = _maybe_bootstrap_repo(tempdir)
318 if not bootstrapped:
319 RunGit(['init', '--bare'], cwd=tempdir)
320 _config(tempdir)
321 fetch_cmd = ['fetch'] + v + d + ['origin']
322 RunGit(fetch_cmd, filter_fn=filter_fn, cwd=tempdir, retry=True)
323 os.rename(tempdir, repo_dir)
324 else:
325 _config(repo_dir)
326 if options.depth and os.path.exists(os.path.join(repo_dir, 'shallow')):
327 logging.warn('Shallow fetch requested, but repo cache already exists.')
328 fetch_cmd = ['fetch'] + v + ['origin']
329 RunGit(fetch_cmd, filter_fn=filter_fn, cwd=repo_dir, retry=True)
330 396
331 397
332 @subcommand.usage('[url of repo to unlock, or -a|--all]') 398 @subcommand.usage('[url of repo to unlock, or -a|--all]')
333 def CMDunlock(parser, args): 399 def CMDunlock(parser, args):
334 """Unlock one or all repos if their lock files are still around.""" 400 """Unlock one or all repos if their lock files are still around."""
335 parser.add_option('--force', '-f', action='store_true', 401 parser.add_option('--force', '-f', action='store_true',
336 help='Actually perform the action') 402 help='Actually perform the action')
337 parser.add_option('--all', '-a', action='store_true', 403 parser.add_option('--all', '-a', action='store_true',
338 help='Unlock all repository caches') 404 help='Unlock all repository caches')
339 options, args = parser.parse_args(args) 405 options, args = parser.parse_args(args)
340 if len(args) > 1 or (len(args) == 0 and not options.all): 406 if len(args) > 1 or (len(args) == 0 and not options.all):
341 parser.error('git cache unlock takes exactly one repo url, or --all') 407 parser.error('git cache unlock takes exactly one repo url, or --all')
342 408
409 repo_dirs = []
343 if not options.all: 410 if not options.all:
344 url = args[0] 411 url = args[0]
345 repo_dirs = [os.path.join(options.cache_dir, UrlToCacheDir(url))] 412 repo_dirs.append(Mirror(url).mirror_path)
346 else: 413 else:
347 repo_dirs = [os.path.join(options.cache_dir, path) 414 cachepath = Mirror.GetCachePath()
348 for path in os.listdir(options.cache_dir) 415 repo_dirs = [os.path.join(cachepath, path)
349 if os.path.isdir(os.path.join(options.cache_dir, path))] 416 for path in os.listdir(cachepath)
350 repo_dirs.extend([os.path.join(options.cache_dir, 417 if os.path.isdir(os.path.join(cachepath, path))]
418 repo_dirs.extend([os.path.join(cachepath,
351 lockfile.replace('.lock', '')) 419 lockfile.replace('.lock', ''))
352 for lockfile in os.listdir(options.cache_dir) 420 for lockfile in os.listdir(cachepath)
353 if os.path.isfile(os.path.join(options.cache_dir, 421 if os.path.isfile(os.path.join(cachepath,
354 lockfile)) 422 lockfile))
355 and lockfile.endswith('.lock') 423 and lockfile.endswith('.lock')
356 and os.path.join(options.cache_dir, lockfile) 424 and os.path.join(cachepath, lockfile)
357 not in repo_dirs]) 425 not in repo_dirs])
358 lockfiles = [repo_dir + '.lock' for repo_dir in repo_dirs 426 lockfiles = [repo_dir + '.lock' for repo_dir in repo_dirs
359 if os.path.exists(repo_dir + '.lock')] 427 if os.path.exists(repo_dir + '.lock')]
360 428
361 if not options.force: 429 if not options.force:
362 parser.error('git cache unlock requires -f|--force to do anything. ' 430 parser.error('git cache unlock requires -f|--force to do anything. '
363 'Refusing to unlock the following repo caches: ' 431 'Refusing to unlock the following repo caches: '
364 ', '.join(lockfiles)) 432 ', '.join(lockfiles))
365 433
366 unlocked = [] 434 unlocked_repos = []
367 untouched = [] 435 untouched_repos = []
368 for repo_dir in repo_dirs: 436 for repo_dir in repo_dirs:
369 lf = Lockfile(repo_dir) 437 lf = Lockfile(repo_dir)
370 config_lock = os.path.join(repo_dir, 'config.lock') 438 config_lock = os.path.join(repo_dir, 'config.lock')
371 unlocked = False 439 unlocked = False
372 if os.path.exists(config_lock): 440 if os.path.exists(config_lock):
373 os.remove(config_lock) 441 os.remove(config_lock)
374 unlocked = True 442 unlocked = True
375 if lf.break_lock(): 443 if lf.break_lock():
376 unlocked = True 444 unlocked = True
377 445
378 if unlocked: 446 if unlocked:
379 unlocked.append(repo_dir) 447 unlocked_repos.append(repo_dir)
380 else: 448 else:
381 untouched.append(repo_dir) 449 untouched_repos.append(repo_dir)
382 450
383 if unlocked: 451 if unlocked_repos:
384 logging.info('Broke locks on these caches: %s' % unlocked) 452 logging.info('Broke locks on these caches:\n %s' % '\n '.join(
385 if untouched: 453 unlocked_repos))
386 logging.debug('Did not touch these caches: %s' % untouched) 454 if untouched_repos:
455 logging.debug('Did not touch these caches:\n %s' % '\n '.join(
456 untouched_repos))
387 457
388 458
389 class OptionParser(optparse.OptionParser): 459 class OptionParser(optparse.OptionParser):
390 """Wrapper class for OptionParser to handle global options.""" 460 """Wrapper class for OptionParser to handle global options."""
391 461
392 def __init__(self, *args, **kwargs): 462 def __init__(self, *args, **kwargs):
393 optparse.OptionParser.__init__(self, *args, prog='git cache', **kwargs) 463 optparse.OptionParser.__init__(self, *args, prog='git cache', **kwargs)
394 self.add_option('-c', '--cache-dir', 464 self.add_option('-c', '--cache-dir',
395 help='Path to the directory containing the cache') 465 help='Path to the directory containing the cache')
396 self.add_option('-v', '--verbose', action='count', default=0, 466 self.add_option('-v', '--verbose', action='count', default=0,
397 help='Increase verbosity (can be passed multiple times)') 467 help='Increase verbosity (can be passed multiple times)')
398 468
399 def parse_args(self, args=None, values=None): 469 def parse_args(self, args=None, values=None):
400 options, args = optparse.OptionParser.parse_args(self, args, values) 470 options, args = optparse.OptionParser.parse_args(self, args, values)
401 471
402 try: 472 try:
403 global_cache_dir = subprocess.check_output( 473 global_cache_dir = Mirror.GetCachePath()
404 [GIT_EXECUTABLE, 'config', '--global', 'cache.cachepath']).strip() 474 except RuntimeError:
405 if options.cache_dir: 475 global_cache_dir = None
406 if global_cache_dir and ( 476 if options.cache_dir:
407 os.path.abspath(options.cache_dir) != 477 if global_cache_dir and (
408 os.path.abspath(global_cache_dir)): 478 os.path.abspath(options.cache_dir) !=
409 logging.warn('Overriding globally-configured cache directory.') 479 os.path.abspath(global_cache_dir)):
410 else: 480 logging.warn('Overriding globally-configured cache directory.')
411 options.cache_dir = global_cache_dir 481 Mirror.SetCachePath(options.cache_dir)
412 except subprocess.CalledProcessError:
413 if not options.cache_dir:
414 self.error('No cache directory specified on command line '
415 'or in cache.cachepath.')
416 options.cache_dir = os.path.abspath(options.cache_dir)
417 482
418 levels = [logging.WARNING, logging.INFO, logging.DEBUG] 483 levels = [logging.WARNING, logging.INFO, logging.DEBUG]
419 logging.basicConfig(level=levels[min(options.verbose, len(levels) - 1)]) 484 logging.basicConfig(level=levels[min(options.verbose, len(levels) - 1)])
420 485
421 return options, args 486 return options, args
422 487
423 488
424 def main(argv): 489 def main(argv):
425 dispatcher = subcommand.CommandDispatcher(__name__) 490 dispatcher = subcommand.CommandDispatcher(__name__)
426 return dispatcher.execute(OptionParser(), argv) 491 return dispatcher.execute(OptionParser(), argv)
427 492
428 493
429 if __name__ == '__main__': 494 if __name__ == '__main__':
430 sys.exit(main(sys.argv[1:])) 495 sys.exit(main(sys.argv[1:]))