OLD | NEW |
---|---|
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 # Copyright 2014 The Chromium Authors. All rights reserved. | 2 # Copyright 2014 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 """A git command for managing a local cache of git repositories.""" | 6 """A git command for managing a local cache of git repositories.""" |
7 | 7 |
8 import errno | 8 import errno |
9 import logging | 9 import logging |
10 import optparse | 10 import optparse |
11 import os | 11 import os |
12 import tempfile | 12 import tempfile |
13 import subprocess | 13 import subprocess |
14 import sys | 14 import sys |
15 import urlparse | 15 import urlparse |
16 | 16 |
17 import download_from_google_storage | |
agable
2014/04/01 23:44:52
from download_from_google_storage import Gsutil? E
Ryan Tseng
2014/04/02 00:32:19
Done.
| |
17 import gclient_utils | 18 import gclient_utils |
18 import subcommand | 19 import subcommand |
19 | 20 |
20 | 21 |
21 GIT_EXECUTABLE = 'git.bat' if sys.platform.startswith('win') else 'git' | 22 GIT_EXECUTABLE = 'git.bat' if sys.platform.startswith('win') else 'git' |
23 BOOTSTRAP_BUCKET = 'chromium-git-cache' | |
24 GSUTIL_DEFAULT_PATH = os.path.join( | |
25 os.path.dirname(os.path.abspath(__file__)), | |
26 'third_party', 'gsutil', 'gsutil') | |
22 | 27 |
23 | 28 |
24 def UrlToCacheDir(url): | 29 def UrlToCacheDir(url): |
25 """Convert a git url to a normalized form for the cache dir path.""" | 30 """Convert a git url to a normalized form for the cache dir path.""" |
26 parsed = urlparse.urlparse(url) | 31 parsed = urlparse.urlparse(url) |
27 norm_url = parsed.netloc + parsed.path | 32 norm_url = parsed.netloc + parsed.path |
28 if norm_url.endswith('.git'): | 33 if norm_url.endswith('.git'): |
29 norm_url = norm_url[:-len('.git')] | 34 norm_url = norm_url[:-len('.git')] |
30 return norm_url.replace('-', '--').replace('/', '-').lower() | 35 return norm_url.replace('-', '--').replace('/', '-').lower() |
31 | 36 |
(...skipping 112 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
144 parser.error('git cache exists only takes exactly one repo url.') | 149 parser.error('git cache exists only takes exactly one repo url.') |
145 url = args[0] | 150 url = args[0] |
146 repo_dir = os.path.join(options.cache_dir, UrlToCacheDir(url)) | 151 repo_dir = os.path.join(options.cache_dir, UrlToCacheDir(url)) |
147 flag_file = os.path.join(repo_dir, 'config') | 152 flag_file = os.path.join(repo_dir, 'config') |
148 if os.path.isdir(repo_dir) and os.path.isfile(flag_file): | 153 if os.path.isdir(repo_dir) and os.path.isfile(flag_file): |
149 print repo_dir | 154 print repo_dir |
150 return 0 | 155 return 0 |
151 return 1 | 156 return 1 |
152 | 157 |
153 | 158 |
159 @subcommand.usage('[url of repo to create a bootstrap zip file]') | |
160 def CMDupdate_bootstrap(parser, args): | |
 161 """Creates and uploads a bootstrap zip file.""" | |
 162 # Let's just assert we can't do this on Windows. | |
163 if sys.platform.startswith('win'): | |
164 print >> sys.stderr, 'Sorry, update bootstrap will not work on Windows.' | |
165 return 1 | |
166 | |
167 # First, we need to ensure the cache is populated. | |
168 args.append('--no_bootstrap') | |
agable
2014/04/01 23:44:52
Would prefer to manipulate a copy of args, so that
Ryan Tseng
2014/04/02 00:32:19
Done.
| |
169 CMDpopulate(parser, args) | |
170 | |
171 # Get the repo directory. | |
172 options, args = parser.parse_args(args) | |
173 url = args[0] | |
174 repo_dir = os.path.join(options.cache_dir, UrlToCacheDir(url)) | |
175 | |
176 # The files are named <git number>.zip | |
177 gen_number = subprocess.check_output(['git', 'number', 'master'], | |
178 cwd=repo_dir).strip() | |
179 RunGit(['gc'], cwd=repo_dir) # Run Garbage Collect to compress packfile. | |
180 # Creating a temp file and then deleting it ensures we can use this name. | |
181 _, tmp_zipfile = tempfile.mkstemp(suffix='.zip') | |
182 os.remove(tmp_zipfile) | |
183 subprocess.call(['zip', '-r', tmp_zipfile, '.'], cwd=repo_dir) | |
184 gsutil = download_from_google_storage.Gsutil(path=GSUTIL_DEFAULT_PATH, | |
185 boto_path=None) | |
186 dest_name = 'gs://%s/%s/%s.zip' % (BOOTSTRAP_BUCKET, | |
187 UrlToCacheDir(url), | |
188 gen_number) | |
189 gsutil.call('cp', tmp_zipfile, dest_name) | |
190 os.remove(tmp_zipfile) | |
191 | |
192 | |
154 @subcommand.usage('[url of repo to add to or update in cache]') | 193 @subcommand.usage('[url of repo to add to or update in cache]') |
155 def CMDpopulate(parser, args): | 194 def CMDpopulate(parser, args): |
156 """Ensure that the cache has all up-to-date objects for the given repo.""" | 195 """Ensure that the cache has all up-to-date objects for the given repo.""" |
157 parser.add_option('--depth', type='int', | 196 parser.add_option('--depth', type='int', |
158 help='Only cache DEPTH commits of history') | 197 help='Only cache DEPTH commits of history') |
159 parser.add_option('--shallow', '-s', action='store_true', | 198 parser.add_option('--shallow', '-s', action='store_true', |
160 help='Only cache 10000 commits of history') | 199 help='Only cache 10000 commits of history') |
161 parser.add_option('--ref', action='append', | 200 parser.add_option('--ref', action='append', |
162 help='Specify additional refs to be fetched') | 201 help='Specify additional refs to be fetched') |
202 parser.add_option('--no_bootstrap', action='store_true', | |
203 help='Don\'t bootstrap from Google Storage') | |
204 | |
163 options, args = parser.parse_args(args) | 205 options, args = parser.parse_args(args) |
164 if options.shallow and not options.depth: | 206 if options.shallow and not options.depth: |
165 options.depth = 10000 | 207 options.depth = 10000 |
166 if not len(args) == 1: | 208 if not len(args) == 1: |
167 parser.error('git cache populate only takes exactly one repo url.') | 209 parser.error('git cache populate only takes exactly one repo url.') |
168 url = args[0] | 210 url = args[0] |
169 | 211 |
170 gclient_utils.safe_makedirs(options.cache_dir) | 212 gclient_utils.safe_makedirs(options.cache_dir) |
171 repo_dir = os.path.join(options.cache_dir, UrlToCacheDir(url)) | 213 repo_dir = os.path.join(options.cache_dir, UrlToCacheDir(url)) |
172 | 214 |
173 v = [] | 215 v = [] |
174 filter_fn = lambda l: '[up to date]' not in l | 216 filter_fn = lambda l: '[up to date]' not in l |
175 if options.verbose: | 217 if options.verbose: |
176 v = ['-v', '--progress'] | 218 v = ['-v', '--progress'] |
177 filter_fn = None | 219 filter_fn = None |
178 | 220 |
179 d = [] | 221 d = [] |
180 if options.depth: | 222 if options.depth: |
181 d = ['--depth', '%d' % options.depth] | 223 d = ['--depth', '%d' % options.depth] |
182 | 224 |
225 def _find(executable): | |
226 # Mimics the "which" utility. | |
agable
2014/04/01 23:44:52
Do a real docstring.
Ryan Tseng
2014/04/02 00:32:19
Done.
| |
227 path_folders = os.environ.get('PATH').split(os.pathsep) | |
228 | |
229 for path_folder in path_folders: | |
230 target = os.path.join(path_folder, executable) | |
 231 # Just in case we have some ~/blah paths. | |
232 target = os.path.abspath(os.path.expanduser(target)) | |
233 if os.path.isfile(target) and os.access(target, os.X_OK): | |
234 return target | |
235 return False | |
236 | |
237 def _maybe_bootstrap_repo(directory): | |
 238 # Bootstrap the repo from Google Storage if there is a pre-checked out | |
agable
2014/04/01 23:44:52
Do a real docstring.
Ryan Tseng
2014/04/02 00:32:19
Done.
| |
239 # version already. Uses 7z on windows to inflate, and unzip | |
240 # everywhere else. | |
241 if options.no_bootstrap: | |
242 return False | |
243 if sys.platform.startswith('win'): | |
244 if not _find('7z'): | |
245 print 'Cannot find 7z in the path.' | |
246 print 'Install 7z from http://www.7-zip.org/download.html if you want ' | |
agable
2014/04/01 23:44:52
...if you want git cache to be able to bootstrap..
Ryan Tseng
2014/04/02 00:32:19
Done.
| |
247 print 'git cache to bootstrap from Google Storage.' | |
248 return False | |
249 else: | |
250 if not _find('unzip'): | |
251 print 'Cannot find unzip in the path.' | |
 252 print 'Install unzip if you want to create a git cache to bootstrap ' | |
agable
2014/04/01 23:44:52
...if you want git cache to be able to bootstrap..
Ryan Tseng
2014/04/02 00:32:19
Done.
| |
253 print 'from Google Storage.' | |
254 return False | |
255 | |
256 folder = UrlToCacheDir(url) | |
257 gs_folder = 'gs://%s/%s' % (BOOTSTRAP_BUCKET, folder) | |
258 gsutil = download_from_google_storage.Gsutil(GSUTIL_DEFAULT_PATH, | |
259 boto_path=os.devnull, | |
260 bypass_prodaccess=True) | |
261 # Get the most recent version of the zipfile. | |
262 _, ls_out, _ = gsutil.check_call('ls', gs_folder) | |
263 ls_out_sorted = sorted(ls_out.splitlines()) | |
264 if not ls_out_sorted: | |
265 # This repo is not on Google Storage. | |
266 return False | |
267 latest_checkout = ls_out_sorted[-1] | |
268 | |
269 # Download zip file to a temporary directory. | |
270 tempdir = tempfile.mkdtemp() | |
271 print 'Downloading %s...' % latest_checkout | |
272 code, out, err = gsutil.check_call('cp', latest_checkout, tempdir) | |
273 if code: | |
274 print '%s\n%s' % (out, err) | |
275 return False | |
276 filename = os.path.join(tempdir, latest_checkout.split('/')[-1]) | |
277 | |
278 # Unpack the file with 7z on Windows, or unzip everywhere else. | |
279 if sys.platform.startswith('win'): | |
280 cmd = ['7z', 'x', '-o%s' % directory, '-tzip', filename] | |
281 else: | |
282 cmd = ['unzip', filename, '-d', directory] | |
283 code = subprocess.call(cmd) | |
agable
2014/04/01 23:44:52
retcode
Ryan Tseng
2014/04/02 00:32:19
Done.
| |
284 | |
285 # Clean up the downloaded zipfile. | |
286 gclient_utils.rmtree(tempdir) | |
287 if code: | |
288 print 'Extracting bootstrap zipfile %s failed.' % filename | |
289 print 'Resuming normal operations' | |
290 return False | |
291 return True | |
292 | |
183 def _config(directory): | 293 def _config(directory): |
184 RunGit(['config', 'core.deltaBaseCacheLimit', | 294 RunGit(['config', 'core.deltaBaseCacheLimit', |
185 gclient_utils.DefaultDeltaBaseCacheLimit()], cwd=directory) | 295 gclient_utils.DefaultDeltaBaseCacheLimit()], cwd=directory) |
186 RunGit(['config', 'remote.origin.url', url], | 296 RunGit(['config', 'remote.origin.url', url], |
187 cwd=directory) | 297 cwd=directory) |
188 RunGit(['config', '--replace-all', 'remote.origin.fetch', | 298 RunGit(['config', '--replace-all', 'remote.origin.fetch', |
189 '+refs/heads/*:refs/heads/*'], | 299 '+refs/heads/*:refs/heads/*'], |
190 cwd=directory) | 300 cwd=directory) |
191 RunGit(['config', '--add', 'remote.origin.fetch', | 301 RunGit(['config', '--add', 'remote.origin.fetch', |
192 '+refs/tags/*:refs/tags/*'], | 302 '+refs/tags/*:refs/tags/*'], |
193 cwd=directory) | 303 cwd=directory) |
194 for ref in options.ref or []: | 304 for ref in options.ref or []: |
195 ref = ref.rstrip('/') | 305 ref = ref.rstrip('/') |
196 refspec = '+refs/%s/*:refs/%s/*' % (ref, ref) | 306 refspec = '+refs/%s/*:refs/%s/*' % (ref, ref) |
197 RunGit(['config', '--add', 'remote.origin.fetch', refspec], | 307 RunGit(['config', '--add', 'remote.origin.fetch', refspec], |
198 cwd=directory) | 308 cwd=directory) |
199 | 309 |
200 with Lockfile(repo_dir): | 310 with Lockfile(repo_dir): |
201 # Setup from scratch if the repo is new or is in a bad state. | 311 # Setup from scratch if the repo is new or is in a bad state. |
202 if not os.path.exists(os.path.join(repo_dir, 'config')): | 312 if not os.path.exists(os.path.join(repo_dir, 'config')): |
203 gclient_utils.rmtree(repo_dir) | 313 gclient_utils.rmtree(repo_dir) |
204 tempdir = tempfile.mkdtemp(suffix=UrlToCacheDir(url), | 314 tempdir = tempfile.mkdtemp(suffix=UrlToCacheDir(url), |
205 dir=options.cache_dir) | 315 dir=options.cache_dir) |
206 RunGit(['init', '--bare'], cwd=tempdir) | 316 bootstrapped = _maybe_bootstrap_repo(tempdir) |
317 if not bootstrapped: | |
318 RunGit(['init', '--bare'], cwd=tempdir) | |
207 _config(tempdir) | 319 _config(tempdir) |
208 fetch_cmd = ['fetch'] + v + d + ['origin'] | 320 fetch_cmd = ['fetch'] + v + d + ['origin'] |
209 RunGit(fetch_cmd, filter_fn=filter_fn, cwd=tempdir, retry=True) | 321 RunGit(fetch_cmd, filter_fn=filter_fn, cwd=tempdir, retry=True) |
210 os.rename(tempdir, repo_dir) | 322 os.rename(tempdir, repo_dir) |
211 else: | 323 else: |
212 _config(repo_dir) | 324 _config(repo_dir) |
213 if options.depth and os.path.exists(os.path.join(repo_dir, 'shallow')): | 325 if options.depth and os.path.exists(os.path.join(repo_dir, 'shallow')): |
214 logging.warn('Shallow fetch requested, but repo cache already exists.') | 326 logging.warn('Shallow fetch requested, but repo cache already exists.') |
215 fetch_cmd = ['fetch'] + v + ['origin'] | 327 fetch_cmd = ['fetch'] + v + ['origin'] |
216 RunGit(fetch_cmd, filter_fn=filter_fn, cwd=repo_dir, retry=True) | 328 RunGit(fetch_cmd, filter_fn=filter_fn, cwd=repo_dir, retry=True) |
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
256 lf = Lockfile(repo_dir) | 368 lf = Lockfile(repo_dir) |
257 config_lock = os.path.join(repo_dir, 'config.lock') | 369 config_lock = os.path.join(repo_dir, 'config.lock') |
258 unlocked = False | 370 unlocked = False |
259 if os.path.exists(config_lock): | 371 if os.path.exists(config_lock): |
260 os.remove(config_lock) | 372 os.remove(config_lock) |
261 unlocked = True | 373 unlocked = True |
262 if lf.break_lock(): | 374 if lf.break_lock(): |
263 unlocked = True | 375 unlocked = True |
264 | 376 |
265 if unlocked: | 377 if unlocked: |
266 unlocked.append(repo_dir) | 378 unlocked.append(repo_dir) |
267 else: | 379 else: |
268 untouched.append(repo_dir) | 380 untouched.append(repo_dir) |
269 | 381 |
270 if unlocked: | 382 if unlocked: |
271 logging.info('Broke locks on these caches: %s' % unlocked) | 383 logging.info('Broke locks on these caches: %s' % unlocked) |
272 if untouched: | 384 if untouched: |
273 logging.debug('Did not touch these caches: %s' % untouched) | 385 logging.debug('Did not touch these caches: %s' % untouched) |
274 | 386 |
275 | 387 |
276 class OptionParser(optparse.OptionParser): | 388 class OptionParser(optparse.OptionParser): |
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
308 return options, args | 420 return options, args |
309 | 421 |
310 | 422 |
311 def main(argv): | 423 def main(argv): |
312 dispatcher = subcommand.CommandDispatcher(__name__) | 424 dispatcher = subcommand.CommandDispatcher(__name__) |
313 return dispatcher.execute(OptionParser(), argv) | 425 return dispatcher.execute(OptionParser(), argv) |
314 | 426 |
315 | 427 |
316 if __name__ == '__main__': | 428 if __name__ == '__main__': |
317 sys.exit(main(sys.argv[1:])) | 429 sys.exit(main(sys.argv[1:])) |
OLD | NEW |