Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright 2014 The Chromium Authors. All rights reserved. | 2 # Copyright 2014 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 """A git command for managing a local cache of git repositories.""" | 6 """A git command for managing a local cache of git repositories.""" |
| 7 | 7 |
| 8 from __future__ import print_function | 8 from __future__ import print_function |
| 9 import errno | 9 import errno |
| 10 import logging | 10 import logging |
| (...skipping 129 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 140 | 140 |
| 141 class Mirror(object): | 141 class Mirror(object): |
| 142 | 142 |
| 143 git_exe = 'git.bat' if sys.platform.startswith('win') else 'git' | 143 git_exe = 'git.bat' if sys.platform.startswith('win') else 'git' |
| 144 gsutil_exe = os.path.join( | 144 gsutil_exe = os.path.join( |
| 145 os.path.dirname(os.path.abspath(__file__)), 'gsutil.py') | 145 os.path.dirname(os.path.abspath(__file__)), 'gsutil.py') |
| 146 cachepath_lock = threading.Lock() | 146 cachepath_lock = threading.Lock() |
| 147 | 147 |
| 148 def __init__(self, url, refs=None, print_func=None): | 148 def __init__(self, url, refs=None, print_func=None): |
| 149 self.url = url | 149 self.url = url |
| 150 self.refs = refs or [] | 150 self.refs = set([ref.lstrip('+').rstrip('/') for ref in (refs or [])]) |
|
iannucci
2015/09/22 20:35:26
can we have a 'parse_fetch_spec' function so the normalization (lstrip('+')/rstrip('/')) isn't duplicated? [reconstructed from truncated comment]
szager1
2015/09/22 22:48:27
Done.
| |
| 151 self.basedir = self.UrlToCacheDir(url) | 151 self.basedir = self.UrlToCacheDir(url) |
| 152 self.mirror_path = os.path.join(self.GetCachePath(), self.basedir) | 152 self.mirror_path = os.path.join(self.GetCachePath(), self.basedir) |
| 153 if print_func: | 153 if print_func: |
| 154 self.print = self.print_without_file | 154 self.print = self.print_without_file |
| 155 self.print_func = print_func | 155 self.print_func = print_func |
| 156 else: | 156 else: |
| 157 self.print = print | 157 self.print = print |
| 158 | 158 |
| 159 def print_without_file(self, message, **kwargs): | 159 def print_without_file(self, message, **kwargs): |
| 160 self.print_func(message) | 160 self.print_func(message) |
| (...skipping 69 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 230 | 230 |
| 231 # Allocate more RAM for cache-ing delta chains, for better performance | 231 # Allocate more RAM for cache-ing delta chains, for better performance |
| 232 # of "Resolving deltas". | 232 # of "Resolving deltas". |
| 233 self.RunGit(['config', 'core.deltaBaseCacheLimit', | 233 self.RunGit(['config', 'core.deltaBaseCacheLimit', |
| 234 gclient_utils.DefaultDeltaBaseCacheLimit()], cwd=cwd) | 234 gclient_utils.DefaultDeltaBaseCacheLimit()], cwd=cwd) |
| 235 | 235 |
| 236 self.RunGit(['config', 'remote.origin.url', self.url], cwd=cwd) | 236 self.RunGit(['config', 'remote.origin.url', self.url], cwd=cwd) |
| 237 self.RunGit(['config', '--replace-all', 'remote.origin.fetch', | 237 self.RunGit(['config', '--replace-all', 'remote.origin.fetch', |
| 238 '+refs/heads/*:refs/heads/*', r'\+refs/heads/\*:.*'], cwd=cwd) | 238 '+refs/heads/*:refs/heads/*', r'\+refs/heads/\*:.*'], cwd=cwd) |
| 239 for ref in self.refs: | 239 for ref in self.refs: |
| 240 ref = ref.lstrip('+').rstrip('/') | |
| 241 if ref.startswith('refs/'): | 240 if ref.startswith('refs/'): |
| 242 refspec = '+%s:%s' % (ref, ref) | 241 refspec = '+%s:%s' % (ref, ref) |
| 243 regex = r'\+%s:.*' % ref.replace('*', r'\*') | 242 regex = r'\+%s:.*' % ref.replace('*', r'\*') |
| 244 else: | 243 else: |
| 245 refspec = '+refs/%s/*:refs/%s/*' % (ref, ref) | 244 refspec = '+refs/%s/*:refs/%s/*' % (ref, ref) |
| 246 regex = r'\+refs/heads/%s:.*' % ref.replace('*', r'\*') | 245 regex = r'\+refs/heads/%s:.*' % ref.replace('*', r'\*') |
| 247 self.RunGit( | 246 self.RunGit( |
| 248 ['config', '--replace-all', 'remote.origin.fetch', refspec, regex], | 247 ['config', '--replace-all', 'remote.origin.fetch', refspec, regex], |
| 249 cwd=cwd) | 248 cwd=cwd) |
| 250 | 249 |
| (...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 307 if retcode: | 306 if retcode: |
| 308 self.print( | 307 self.print( |
| 309 'Extracting bootstrap zipfile %s failed.\n' | 308 'Extracting bootstrap zipfile %s failed.\n' |
| 310 'Resuming normal operations.' % filename) | 309 'Resuming normal operations.' % filename) |
| 311 return False | 310 return False |
| 312 return True | 311 return True |
| 313 | 312 |
| 314 def exists(self): | 313 def exists(self): |
| 315 return os.path.isfile(os.path.join(self.mirror_path, 'config')) | 314 return os.path.isfile(os.path.join(self.mirror_path, 'config')) |
| 316 | 315 |
| 316 def _preserve_fetchspec(self): | |
| 317 """Read and preserve remote.origin.fetch from an existing mirror.""" | |
| 318 if not self.exists(): | |
| 319 return | |
| 320 try: | |
| 321 config_fetchspecs = subprocess.check_output( | |
| 322 [self.git_exe, 'config', '--get-all', 'remote.origin.fetch'], | |
| 323 cwd=self.mirror_path) | |
| 324 for fetchspec in config_fetchspecs.splitlines(): | |
| 325 self.refs.add(fetchspec.split(':')[0].lstrip('+').rstrip('/')) | |
|
iannucci
2015/09/22 20:35:26
and here too
szager1
2015/09/22 22:48:27
Done.
| |
| 326 except subprocess.CalledProcessError: | |
| 327 pass | |
| 328 | |
| 317 def _ensure_bootstrapped(self, depth, bootstrap, force=False): | 329 def _ensure_bootstrapped(self, depth, bootstrap, force=False): |
| 318 tempdir = None | 330 tempdir = None |
| 319 config_file = os.path.join(self.mirror_path, 'config') | |
| 320 pack_dir = os.path.join(self.mirror_path, 'objects', 'pack') | 331 pack_dir = os.path.join(self.mirror_path, 'objects', 'pack') |
| 321 pack_files = [] | 332 pack_files = [] |
| 322 | 333 |
| 323 if os.path.isdir(pack_dir): | 334 if os.path.isdir(pack_dir): |
| 324 pack_files = [f for f in os.listdir(pack_dir) if f.endswith('.pack')] | 335 pack_files = [f for f in os.listdir(pack_dir) if f.endswith('.pack')] |
| 325 | 336 |
| 326 should_bootstrap = (force or | 337 should_bootstrap = (force or |
| 327 not os.path.exists(config_file) or | 338 not self.exists() or |
| 328 len(pack_files) > GC_AUTOPACKLIMIT) | 339 len(pack_files) > GC_AUTOPACKLIMIT) |
| 329 if should_bootstrap: | 340 if should_bootstrap: |
| 341 if self.exists(): | |
| 342 # Re-bootstrapping an existing mirror; preserve existing fetch spec. | |
| 343 self._preserve_fetchspec() | |
| 330 tempdir = tempfile.mkdtemp( | 344 tempdir = tempfile.mkdtemp( |
| 331 prefix='_cache_tmp', suffix=self.basedir, dir=self.GetCachePath()) | 345 prefix='_cache_tmp', suffix=self.basedir, dir=self.GetCachePath()) |
| 332 bootstrapped = not depth and bootstrap and self.bootstrap_repo(tempdir) | 346 bootstrapped = not depth and bootstrap and self.bootstrap_repo(tempdir) |
| 333 if bootstrapped: | 347 if bootstrapped: |
| 334 # Bootstrap succeeded; delete previous cache, if any. | 348 # Bootstrap succeeded; delete previous cache, if any. |
| 335 gclient_utils.rmtree(self.mirror_path) | 349 gclient_utils.rmtree(self.mirror_path) |
| 336 elif not os.path.exists(config_file): | 350 elif not self.exists(): |
| 337 # Bootstrap failed, no previous cache; start with a bare git dir. | 351 # Bootstrap failed, no previous cache; start with a bare git dir. |
| 338 self.RunGit(['init', '--bare'], cwd=tempdir) | 352 self.RunGit(['init', '--bare'], cwd=tempdir) |
| 339 else: | 353 else: |
| 340 # Bootstrap failed, previous cache exists; warn and continue. | 354 # Bootstrap failed, previous cache exists; warn and continue. |
| 341 logging.warn( | 355 logging.warn( |
| 342 'Git cache has a lot of pack files (%d). Tried to re-bootstrap ' | 356 'Git cache has a lot of pack files (%d). Tried to re-bootstrap ' |
| 343 'but failed. Continuing with non-optimized repository.' | 357 'but failed. Continuing with non-optimized repository.' |
| 344 % len(pack_files)) | 358 % len(pack_files)) |
| 345 gclient_utils.rmtree(tempdir) | 359 gclient_utils.rmtree(tempdir) |
| 346 tempdir = None | 360 tempdir = None |
| (...skipping 209 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 556 } | 570 } |
| 557 if options.depth: | 571 if options.depth: |
| 558 kwargs['depth'] = options.depth | 572 kwargs['depth'] = options.depth |
| 559 mirror.populate(**kwargs) | 573 mirror.populate(**kwargs) |
| 560 | 574 |
| 561 | 575 |
| 562 @subcommand.usage('Fetch new commits into cache and current checkout') | 576 @subcommand.usage('Fetch new commits into cache and current checkout') |
| 563 def CMDfetch(parser, args): | 577 def CMDfetch(parser, args): |
| 564 """Update mirror, and fetch in cwd.""" | 578 """Update mirror, and fetch in cwd.""" |
| 565 parser.add_option('--all', action='store_true', help='Fetch all remotes') | 579 parser.add_option('--all', action='store_true', help='Fetch all remotes') |
| 580 parser.add_option('--no_bootstrap', '--no-bootstrap', | |
| 581 action='store_true', | |
| 582 help='Don\'t (re)bootstrap from Google Storage') | |
| 566 options, args = parser.parse_args(args) | 583 options, args = parser.parse_args(args) |
| 567 | 584 |
| 568 # Figure out which remotes to fetch. This mimics the behavior of regular | 585 # Figure out which remotes to fetch. This mimics the behavior of regular |
| 569 # 'git fetch'. Note that in the case of "stacked" or "pipelined" branches, | 586 # 'git fetch'. Note that in the case of "stacked" or "pipelined" branches, |
| 570 # this will NOT try to traverse up the branching structure to find the | 587 # this will NOT try to traverse up the branching structure to find the |
| 571 # ultimate remote to update. | 588 # ultimate remote to update. |
| 572 remotes = [] | 589 remotes = [] |
| 573 if options.all: | 590 if options.all: |
| 574 assert not args, 'fatal: fetch --all does not take a repository argument' | 591 assert not args, 'fatal: fetch --all does not take a repository argument' |
| 575 remotes = subprocess.check_output([Mirror.git_exe, 'remote']).splitlines() | 592 remotes = subprocess.check_output([Mirror.git_exe, 'remote']).splitlines() |
| (...skipping 10 matching lines...) Expand all Loading... | |
| 586 remotes = [upstream] | 603 remotes = [upstream] |
| 587 if not remotes: | 604 if not remotes: |
| 588 remotes = ['origin'] | 605 remotes = ['origin'] |
| 589 | 606 |
| 590 cachepath = Mirror.GetCachePath() | 607 cachepath = Mirror.GetCachePath() |
| 591 git_dir = os.path.abspath(subprocess.check_output( | 608 git_dir = os.path.abspath(subprocess.check_output( |
| 592 [Mirror.git_exe, 'rev-parse', '--git-dir'])) | 609 [Mirror.git_exe, 'rev-parse', '--git-dir'])) |
| 593 git_dir = os.path.abspath(git_dir) | 610 git_dir = os.path.abspath(git_dir) |
| 594 if git_dir.startswith(cachepath): | 611 if git_dir.startswith(cachepath): |
| 595 mirror = Mirror.FromPath(git_dir) | 612 mirror = Mirror.FromPath(git_dir) |
| 596 mirror.populate() | 613 mirror.populate(bootstrap=not options.no_bootstrap) |
| 597 return 0 | 614 return 0 |
| 598 for remote in remotes: | 615 for remote in remotes: |
| 599 remote_url = subprocess.check_output( | 616 remote_url = subprocess.check_output( |
| 600 [Mirror.git_exe, 'config', 'remote.%s.url' % remote]).strip() | 617 [Mirror.git_exe, 'config', 'remote.%s.url' % remote]).strip() |
| 601 if remote_url.startswith(cachepath): | 618 if remote_url.startswith(cachepath): |
| 602 mirror = Mirror.FromPath(remote_url) | 619 mirror = Mirror.FromPath(remote_url) |
| 603 mirror.print = lambda *args: None | 620 mirror.print = lambda *args: None |
| 604 print('Updating git cache...') | 621 print('Updating git cache...') |
| 605 mirror.populate() | 622 mirror.populate(bootstrap=not options.no_bootstrap) |
| 606 subprocess.check_call([Mirror.git_exe, 'fetch', remote]) | 623 subprocess.check_call([Mirror.git_exe, 'fetch', remote]) |
| 607 return 0 | 624 return 0 |
| 608 | 625 |
| 609 | 626 |
| 610 @subcommand.usage('[url of repo to unlock, or -a|--all]') | 627 @subcommand.usage('[url of repo to unlock, or -a|--all]') |
| 611 def CMDunlock(parser, args): | 628 def CMDunlock(parser, args): |
| 612 """Unlock one or all repos if their lock files are still around.""" | 629 """Unlock one or all repos if their lock files are still around.""" |
| 613 parser.add_option('--force', '-f', action='store_true', | 630 parser.add_option('--force', '-f', action='store_true', |
| 614 help='Actually perform the action') | 631 help='Actually perform the action') |
| 615 parser.add_option('--all', '-a', action='store_true', | 632 parser.add_option('--all', '-a', action='store_true', |
| (...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 678 dispatcher = subcommand.CommandDispatcher(__name__) | 695 dispatcher = subcommand.CommandDispatcher(__name__) |
| 679 return dispatcher.execute(OptionParser(), argv) | 696 return dispatcher.execute(OptionParser(), argv) |
| 680 | 697 |
| 681 | 698 |
| 682 if __name__ == '__main__': | 699 if __name__ == '__main__': |
| 683 try: | 700 try: |
| 684 sys.exit(main(sys.argv[1:])) | 701 sys.exit(main(sys.argv[1:])) |
| 685 except KeyboardInterrupt: | 702 except KeyboardInterrupt: |
| 686 sys.stderr.write('interrupted\n') | 703 sys.stderr.write('interrupted\n') |
| 687 sys.exit(1) | 704 sys.exit(1) |
| OLD | NEW |