Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright 2014 The Chromium Authors. All rights reserved. | 2 # Copyright 2014 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 """A git command for managing a local cache of git repositories.""" | 6 """A git command for managing a local cache of git repositories.""" |
| 7 | 7 |
| 8 import errno | 8 import errno |
| 9 import logging | 9 import logging |
| 10 import optparse | 10 import optparse |
| 11 import os | 11 import os |
| 12 import tempfile | 12 import tempfile |
| 13 import subprocess | 13 import subprocess |
| 14 import sys | 14 import sys |
| 15 import urlparse | 15 import urlparse |
| 16 | 16 |
| 17 import download_from_google_storage | |
| 17 import gclient_utils | 18 import gclient_utils |
| 18 import subcommand | 19 import subcommand |
| 19 | 20 |
| 20 | 21 |
| 21 GIT_EXECUTABLE = 'git.bat' if sys.platform.startswith('win') else 'git' | 22 GIT_EXECUTABLE = 'git.bat' if sys.platform.startswith('win') else 'git' |
| 23 BOOTSTRAP_BUCKET = 'chromium-git-cache' | |
| 24 GSUTIL_DEFAULT_PATH = os.path.join( | |
| 25 os.path.dirname(os.path.abspath(__file__)), | |
| 26 'third_party', 'gsutil', 'gsutil') | |
| 22 | 27 |
| 23 | 28 |
| 24 def UrlToCacheDir(url): | 29 def UrlToCacheDir(url): |
| 25 """Convert a git url to a normalized form for the cache dir path.""" | 30 """Convert a git url to a normalized form for the cache dir path.""" |
| 26 parsed = urlparse.urlparse(url) | 31 parsed = urlparse.urlparse(url) |
| 27 norm_url = parsed.netloc + parsed.path | 32 norm_url = parsed.netloc + parsed.path |
| 28 if norm_url.endswith('.git'): | 33 if norm_url.endswith('.git'): |
| 29 norm_url = norm_url[:-len('.git')] | 34 norm_url = norm_url[:-len('.git')] |
| 30 return norm_url.replace('-', '--').replace('/', '-').lower() | 35 return norm_url.replace('-', '--').replace('/', '-').lower() |
| 31 | 36 |
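For reference, the normalization above maps a remote URL to a flat directory name; a quick illustration (the URL is just an example, not anything the script requires):

```python
# Example only: a typical googlesource.com remote.
print UrlToCacheDir('https://chromium.googlesource.com/chromium/src.git')
# -> chromium.googlesource.com-chromium-src
```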
| (...skipping 112 matching lines...) | |
| 144 parser.error('git cache exists only takes exactly one repo url.') | 149 parser.error('git cache exists only takes exactly one repo url.') |
| 145 url = args[0] | 150 url = args[0] |
| 146 repo_dir = os.path.join(options.cache_dir, UrlToCacheDir(url)) | 151 repo_dir = os.path.join(options.cache_dir, UrlToCacheDir(url)) |
| 147 flag_file = os.path.join(repo_dir, 'config') | 152 flag_file = os.path.join(repo_dir, 'config') |
| 148 if os.path.isdir(repo_dir) and os.path.isfile(flag_file): | 153 if os.path.isdir(repo_dir) and os.path.isfile(flag_file): |
| 149 print repo_dir | 154 print repo_dir |
| 150 return 0 | 155 return 0 |
| 151 return 1 | 156 return 1 |
| 152 | 157 |
| 153 | 158 |
| 159 @subcommand.usage('[url of repo to create a bootstrap zip file]') | |
| 160 def CMDupdate_bootstrap(parser, args): | |
| 161 """Create and uploads a bootstrap tarball.""" | |
| iannucci (2014/04/01 05:47:11): let's just assert this is on !win32 w/ a friendly | |
| Ryan Tseng (2014/04/01 20:18:10): Done. | |
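The revised patchset is not shown in this snapshot; a minimal sketch of the guard iannucci is asking for, assuming it sits at the top of CMDupdate_bootstrap (the wording is hypothetical):

```python
# Hypothetical: bail out early on Windows with a friendly message, since the
# zip step below relies on the POSIX 'zip' utility.
if sys.platform.startswith('win'):
  parser.error('git cache update_bootstrap is not supported on Windows.')
```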
| 162 # First, we need to ensure the cache is populated. | |
| 163 args.append('--no_bootstrap') | |
| 164 CMDpopulate(parser, args) | |
| iannucci (2014/04/01 05:47:11): you'll want to do a full gc repack | |
| Ryan Tseng (2014/04/01 20:18:10): Done. | |
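Likewise, the follow-up repack is not visible here; one plausible shape for it, assuming repo_dir is the cache directory computed further down (git gc --aggressive would be the heavier alternative):

```python
# Hypothetical: collapse the freshly fetched objects into a single tight pack
# so the bootstrap zip ships one pack file instead of many incremental ones.
subprocess.check_call(['git', 'repack', '-a', '-d', '-l'], cwd=repo_dir)
```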
| 165 | |
| 166 # The files are named <git number>.zip | |
| 167 gen_number = subprocess.check_output(['git', 'number']).strip() | |
| iannucci (2014/04/01 05:47:11): git number master | |
| Ryan Tseng (2014/04/01 20:18:10): Done. | |
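The fix applied here is presumably just pinning the committish, along the lines of:

```python
# As suggested: number against master explicitly rather than whatever HEAD
# happens to be in the current working directory.
gen_number = subprocess.check_output(['git', 'number', 'master']).strip()
```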
| 168 options, args = parser.parse_args(args) | |
| 169 | |
| 170 if not len(args) == 1: | |
| 171 parser.error('git cache update_bootstrap only takes exactly one repo url.') | |
| 172 url = args[0] | |
| iannucci (2014/04/01 05:47:11): I don't think these lines are needed b/c CMDpopula | |
| Ryan Tseng (2014/04/01 20:18:10): Done. | |
| 173 | |
| 174 repo_dir = os.path.join(options.cache_dir, UrlToCacheDir(url)) | |
| 175 # Creating a temp file and then deleting it ensures we can use this name. | |
| 176 _, tmp_zipfile = tempfile.mkstemp(suffix='.zip') | |
| 177 os.remove(tmp_zipfile) | |
| 178 if sys.platform.startswith('win'): | |
| 179 cmd = ['7z', 'a', '-r', '-tzip', tmp_zipfile, '.'] | |
| 180 else: | |
| 181 cmd = ['zip', '-r', tmp_zipfile, '.'] | |
| 182 subprocess.call(cmd, cwd=repo_dir) | |
| 183 gsutil = download_from_google_storage.Gsutil(path=GSUTIL_DEFAULT_PATH, | |
| 184 boto_path=None) | |
| 185 dest_name = 'gs://%s/%s/%s.zip' % (BOOTSTRAP_BUCKET, | |
| 186 UrlToCacheDir(url), | |
| 187 gen_number) | |
| 188 gsutil.call('cp', tmp_zipfile, dest_name) | |
| 189 os.remove(tmp_zipfile) | |
| 190 | |
| 191 | |
| 154 @subcommand.usage('[url of repo to add to or update in cache]') | 192 @subcommand.usage('[url of repo to add to or update in cache]') |
| 155 def CMDpopulate(parser, args): | 193 def CMDpopulate(parser, args): |
| 156 """Ensure that the cache has all up-to-date objects for the given repo.""" | 194 """Ensure that the cache has all up-to-date objects for the given repo.""" |
| 157 parser.add_option('--depth', type='int', | 195 parser.add_option('--depth', type='int', |
| 158 help='Only cache DEPTH commits of history') | 196 help='Only cache DEPTH commits of history') |
| 159 parser.add_option('--shallow', '-s', action='store_true', | 197 parser.add_option('--shallow', '-s', action='store_true', |
| 160 help='Only cache 10000 commits of history') | 198 help='Only cache 10000 commits of history') |
| 161 parser.add_option('--ref', action='append', | 199 parser.add_option('--ref', action='append', |
| 162 help='Specify additional refs to be fetched') | 200 help='Specify additional refs to be fetched') |
| 201 parser.add_option('--no_bootstrap', action='store_true', | |
| 202 help='Don\'t bootstrap from Google Storage') | |
| 203 | |
| 163 options, args = parser.parse_args(args) | 204 options, args = parser.parse_args(args) |
| 164 if options.shallow and not options.depth: | 205 if options.shallow and not options.depth: |
| 165 options.depth = 10000 | 206 options.depth = 10000 |
| 166 if not len(args) == 1: | 207 if not len(args) == 1: |
| 167 parser.error('git cache populate only takes exactly one repo url.') | 208 parser.error('git cache populate only takes exactly one repo url.') |
| 168 url = args[0] | 209 url = args[0] |
| 169 | 210 |
| 170 gclient_utils.safe_makedirs(options.cache_dir) | 211 gclient_utils.safe_makedirs(options.cache_dir) |
| 171 repo_dir = os.path.join(options.cache_dir, UrlToCacheDir(url)) | 212 repo_dir = os.path.join(options.cache_dir, UrlToCacheDir(url)) |
| 172 | 213 |
| 173 v = [] | 214 v = [] |
| 174 filter_fn = lambda l: '[up to date]' not in l | 215 filter_fn = lambda l: '[up to date]' not in l |
| 175 if options.verbose: | 216 if options.verbose: |
| 176 v = ['-v', '--progress'] | 217 v = ['-v', '--progress'] |
| 177 filter_fn = None | 218 filter_fn = None |
| 178 | 219 |
| 179 d = [] | 220 d = [] |
| 180 if options.depth: | 221 if options.depth: |
| 181 d = ['--depth', '%d' % options.depth] | 222 d = ['--depth', '%d' % options.depth] |
| 182 | 223 |
| 224 def _find(executable): | |
| 225 # Mimics the "which" utility. | |
| 226 if sys.platform.startswith('win'): | |
| 227 path_folders = os.environ.get('PATH').split(';') | |
| 228 else: | |
| 229 path_folders = os.environ.get('PATH').split(':') | |
| iannucci (2014/04/01 05:47:11): os.pathsep | |
| Ryan Tseng (2014/04/01 20:18:10): Done. | |
| 230 | |
| 231 for path_folder in path_folders: | |
| 232 target = os.path.join(path_folder, executable) | |
| 234 # Just in case we have some ~/blah paths. | |
| 234 target = os.path.abspath(os.path.expanduser(target)) | |
| 235 if os.path.isfile(target) and os.access(target, os.X_OK): | |
| 236 return target | |
| 237 return False | |
| 238 | |
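For reference, the os.pathsep form iannucci points at removes the platform branch entirely; a sketch of _find written that way (not necessarily the exact code that landed):

```python
def _find(executable):
  """Mimics the 'which' utility: returns the full path or False."""
  for path_folder in os.environ.get('PATH', '').split(os.pathsep):
    # Expand things like ~/bin before testing the candidate.
    target = os.path.abspath(os.path.expanduser(
        os.path.join(path_folder, executable)))
    if os.path.isfile(target) and os.access(target, os.X_OK):
      return target
  return False
```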
| 239 def _maybe_bootstrap_repo(directory): | |
| 240 # Bootstrap the repo from Google Storage if there is a pre-checked-out | |
| 241 # version already. Uses 7z on Windows to extract, and unzip | |
| 242 # everywhere else. | |
| 243 if options.no_bootstrap: | |
| 244 return False | |
| 245 if sys.platform.startswith('win'): | |
| 246 if not _find('7z'): | |
| 247 print 'Cannot find 7z in the path.' | |
| 248 print 'Install 7z from http://www.7-zip.org/download.html if you want ' | |
| 249 print 'git cache to bootstrap from Google Storage.' | |
| 250 return False | |
| 251 else: | |
| 252 if not _find('unzip'): | |
| 253 print 'Cannot find unzip in the path.' | |
| 254 print 'Install unzip if you want git cache to bootstrap from ' | |
| 255 print 'Google Storage.' | |
| 256 return False | |
| 257 | |
| 258 folder = UrlToCacheDir(url) | |
| 259 gs_folder = 'gs://%s/%s' % (BOOTSTRAP_BUCKET, folder) | |
| 260 gsutil = download_from_google_storage.Gsutil(GSUTIL_DEFAULT_PATH, | |
| 261 boto_path=os.devnull, | |
| 262 bypass_prodaccess=True) | |
| 263 # Get the most recent version. | |
| 264 _, ls_out, _ = gsutil.check_call('ls', gs_folder) | |
| 265 ls_out_sorted = sorted(ls_out.splitlines()) | |
| 266 if not ls_out_sorted: | |
| 267 # This repo is not on Google Storage. | |
| 268 return False | |
| 269 latest_checkout = ls_out_sorted[-1] | |
| 270 | |
| 271 # Download zip file to tempdir. | |
| 272 tempdir = tempfile.mkdtemp() | |
| 273 print 'Downloading %s...' % latest_checkout | |
| 274 code, out, err = gsutil.check_call('cp', latest_checkout, tempdir) | |
| 275 if code: | |
| 276 print '%s\n%s' % (out, err) | |
| 277 return False | |
| 278 filename = os.path.join(tempdir, latest_checkout.split('/')[-1]) | |
| 279 | |
| 280 # Unpack the file. | |
| 281 if sys.platform.startswith('win'): | |
| 282 cmd = ['7z', 'x', '-o%s' % directory, '-tzip', filename] | |
| 283 else: | |
| 284 # Use the "unzip" utility to inflate. | |
| 285 cmd = ['unzip', filename, '-d', directory] | |
| 286 code = subprocess.call(cmd) | |
| 287 | |
| 288 gclient_utils.rmtree(tempdir) | |
| iannucci (2014/04/01 05:47:11): this whole chunk of code begs to be refactored int | |
| Ryan Tseng (2014/04/01 20:18:10): A whole new class to house 3 lines for zip and 5 l | |
| 289 if code: | |
| 290 print 'Extracting bootstrap zipfile %s failed.' % filename | |
| 291 print 'Resuming normal operations' | |
| 292 return False | |
| 293 return True | |
| 294 | |
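On the refactor being debated: it would not need a whole class; a pair of small helpers wrapping the platform-specific archive commands already used above would do. A sketch, not what actually landed:

```python
# Hypothetical helpers mirroring the exact commands used by
# CMDupdate_bootstrap (compress) and _maybe_bootstrap_repo (extract).
def _zip_directory(src_dir, zip_path):
  if sys.platform.startswith('win'):
    cmd = ['7z', 'a', '-r', '-tzip', zip_path, '.']
  else:
    cmd = ['zip', '-r', zip_path, '.']
  return subprocess.call(cmd, cwd=src_dir)


def _unzip_file(zip_path, dest_dir):
  if sys.platform.startswith('win'):
    cmd = ['7z', 'x', '-o%s' % dest_dir, '-tzip', zip_path]
  else:
    cmd = ['unzip', zip_path, '-d', dest_dir]
  return subprocess.call(cmd)
```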
| 183 def _config(directory): | 295 def _config(directory): |
| 184 RunGit(['config', 'core.deltaBaseCacheLimit', | 296 RunGit(['config', 'core.deltaBaseCacheLimit', |
| 185 gclient_utils.DefaultDeltaBaseCacheLimit()], cwd=directory) | 297 gclient_utils.DefaultDeltaBaseCacheLimit()], cwd=directory) |
| 186 RunGit(['config', 'remote.origin.url', url], | 298 RunGit(['config', 'remote.origin.url', url], |
| 187 cwd=directory) | 299 cwd=directory) |
| 188 RunGit(['config', '--replace-all', 'remote.origin.fetch', | 300 RunGit(['config', '--replace-all', 'remote.origin.fetch', |
| 189 '+refs/heads/*:refs/heads/*'], | 301 '+refs/heads/*:refs/heads/*'], |
| 190 cwd=directory) | 302 cwd=directory) |
| 191 RunGit(['config', '--add', 'remote.origin.fetch', | 303 RunGit(['config', '--add', 'remote.origin.fetch', |
| 192 '+refs/tags/*:refs/tags/*'], | 304 '+refs/tags/*:refs/tags/*'], |
| 193 cwd=directory) | 305 cwd=directory) |
| 194 for ref in options.ref or []: | 306 for ref in options.ref or []: |
| 195 ref = ref.rstrip('/') | 307 ref = ref.rstrip('/') |
| 196 refspec = '+refs/%s/*:refs/%s/*' % (ref, ref) | 308 refspec = '+refs/%s/*:refs/%s/*' % (ref, ref) |
| 197 RunGit(['config', '--add', 'remote.origin.fetch', refspec], | 309 RunGit(['config', '--add', 'remote.origin.fetch', refspec], |
| 198 cwd=directory) | 310 cwd=directory) |
| 199 | 311 |
| 200 with Lockfile(repo_dir): | 312 with Lockfile(repo_dir): |
| 201 # Setup from scratch if the repo is new or is in a bad state. | 313 # Setup from scratch if the repo is new or is in a bad state. |
| 202 if not os.path.exists(os.path.join(repo_dir, 'config')): | 314 if not os.path.exists(os.path.join(repo_dir, 'config')): |
| 203 gclient_utils.rmtree(repo_dir) | 315 gclient_utils.rmtree(repo_dir) |
| 204 tempdir = tempfile.mkdtemp(suffix=UrlToCacheDir(url), | 316 tempdir = tempfile.mkdtemp(suffix=UrlToCacheDir(url), |
| 205 dir=options.cache_dir) | 317 dir=options.cache_dir) |
| 206 RunGit(['init', '--bare'], cwd=tempdir) | 318 bootstrapped = _maybe_bootstrap_repo(tempdir) |
| 319 if not bootstrapped: | |
| 320 RunGit(['init', '--bare'], cwd=tempdir) | |
| 207 _config(tempdir) | 321 _config(tempdir) |
| 208 fetch_cmd = ['fetch'] + v + d + ['origin'] | 322 fetch_cmd = ['fetch'] + v + d + ['origin'] |
| 209 RunGit(fetch_cmd, filter_fn=filter_fn, cwd=tempdir, retry=True) | 323 RunGit(fetch_cmd, filter_fn=filter_fn, cwd=tempdir, retry=True) |
| 210 os.rename(tempdir, repo_dir) | 324 os.rename(tempdir, repo_dir) |
| 211 else: | 325 else: |
| 212 _config(repo_dir) | 326 _config(repo_dir) |
| 213 if options.depth and os.path.exists(os.path.join(repo_dir, 'shallow')): | 327 if options.depth and os.path.exists(os.path.join(repo_dir, 'shallow')): |
| 214 logging.warn('Shallow fetch requested, but repo cache already exists.') | 328 logging.warn('Shallow fetch requested, but repo cache already exists.') |
| 215 fetch_cmd = ['fetch'] + v + ['origin'] | 329 fetch_cmd = ['fetch'] + v + ['origin'] |
| 216 RunGit(fetch_cmd, filter_fn=filter_fn, cwd=repo_dir, retry=True) | 330 RunGit(fetch_cmd, filter_fn=filter_fn, cwd=repo_dir, retry=True) |
| (...skipping 39 matching lines...) | |
| 256 lf = Lockfile(repo_dir) | 370 lf = Lockfile(repo_dir) |
| 257 config_lock = os.path.join(repo_dir, 'config.lock') | 371 config_lock = os.path.join(repo_dir, 'config.lock') |
| 258 broke_lock = False | 372 broke_lock = False |
| 259 if os.path.exists(config_lock): | 373 if os.path.exists(config_lock): |
| 260 os.remove(config_lock) | 374 os.remove(config_lock) |
| 261 broke_lock = True | 375 broke_lock = True |
| 262 if lf.break_lock(): | 376 if lf.break_lock(): |
| 263 broke_lock = True | 377 broke_lock = True |
| 264 | 378 |
| 265 if broke_lock: | 379 if broke_lock: |
| 266 unlocked.append(repo_dir) | 380 unlocked.append(repo_dir) |
| 267 else: | 381 else: |
| 268 untouched.append(repo_dir) | 382 untouched.append(repo_dir) |
| 269 | 383 |
| 270 if unlocked: | 384 if unlocked: |
| 271 logging.info('Broke locks on these caches: %s' % unlocked) | 385 logging.info('Broke locks on these caches: %s' % unlocked) |
| 272 if untouched: | 386 if untouched: |
| 273 logging.debug('Did not touch these caches: %s' % untouched) | 387 logging.debug('Did not touch these caches: %s' % untouched) |
| 274 | 388 |
| 275 | 389 |
| 276 class OptionParser(optparse.OptionParser): | 390 class OptionParser(optparse.OptionParser): |
| (...skipping 31 matching lines...) | |
| 308 return options, args | 422 return options, args |
| 309 | 423 |
| 310 | 424 |
| 311 def main(argv): | 425 def main(argv): |
| 312 dispatcher = subcommand.CommandDispatcher(__name__) | 426 dispatcher = subcommand.CommandDispatcher(__name__) |
| 313 return dispatcher.execute(OptionParser(), argv) | 427 return dispatcher.execute(OptionParser(), argv) |
| 314 | 428 |
| 315 | 429 |
| 316 if __name__ == '__main__': | 430 if __name__ == '__main__': |
| 317 sys.exit(main(sys.argv[1:])) | 431 sys.exit(main(sys.argv[1:])) |
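As a closing usage note, subcommand.CommandDispatcher exposes each CMD* function as a subcommand of the same name, so the module can also be driven directly; a hypothetical invocation (the --cache-dir flag spelling and the URL are assumptions, since the OptionParser body is elided above):

```python
# Hypothetical direct use; normally this runs as "git cache <subcommand>"
# from depot_tools.
import git_cache

rc = git_cache.main(['exists', '--cache-dir', '/tmp/git-cache',
                     'https://chromium.googlesource.com/chromium/src.git'])
# CMDexists prints the mirror path and returns 0 if the cache is populated.
```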