Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright 2014 The Chromium Authors. All rights reserved. | 2 # Copyright 2014 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 """A git command for managing a local cache of git repositories.""" | 6 """A git command for managing a local cache of git repositories.""" |
| 7 | 7 |
| 8 from __future__ import print_function | 8 from __future__ import print_function |
| 9 import errno | 9 import errno |
| 10 import logging | 10 import logging |
| (...skipping 127 matching lines...) | |
| 138 return self.is_locked() and self.pid == self._read_pid() | 138 return self.is_locked() and self.pid == self._read_pid() |
| 139 | 139 |
| 140 | 140 |
| 141 class Mirror(object): | 141 class Mirror(object): |
| 142 | 142 |
| 143 git_exe = 'git.bat' if sys.platform.startswith('win') else 'git' | 143 git_exe = 'git.bat' if sys.platform.startswith('win') else 'git' |
| 144 gsutil_exe = os.path.join( | 144 gsutil_exe = os.path.join( |
| 145 os.path.dirname(os.path.abspath(__file__)), 'gsutil.py') | 145 os.path.dirname(os.path.abspath(__file__)), 'gsutil.py') |
| 146 cachepath_lock = threading.Lock() | 146 cachepath_lock = threading.Lock() |
| 147 | 147 |
| | 148 @staticmethod |
| | 149 def parse_fetch_spec(spec): |
| | 150 """Parses and canonicalizes a fetch spec. |
| | 151 |
| | 152 Returns (fetchspec, value_regex), where value_regex can be used |
| | 153 with 'git config --replace-all'. |
| | 154 """ |
| | 155 parts = spec.split(':', 1) |
| | 156 src = parts[0].lstrip('+').rstrip('/') |
| | 157 if not src.startswith('refs/'): |
| | 158 src = 'refs/heads/%s' % src |
| | 159 dest = parts[1].rstrip('/') if len(parts) > 1 else src |
| | 160 regex = r'\+%s:.*' % src.replace('*', r'\*') |
| | 161 return ('+%s:%s' % (src, dest), regex) |
| | 162 |
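
For readers skimming the diff, here is a small standalone check of what the new `parse_fetch_spec` helper returns for two typical inputs. The expected tuples below are worked out by hand from the implementation above, not taken from a test in this CL:

```python
def parse_fetch_spec(spec):
    # Standalone copy of the staticmethod above, for illustration only.
    parts = spec.split(':', 1)
    src = parts[0].lstrip('+').rstrip('/')
    if not src.startswith('refs/'):
        src = 'refs/heads/%s' % src
    dest = parts[1].rstrip('/') if len(parts) > 1 else src
    regex = r'\+%s:.*' % src.replace('*', r'\*')
    return ('+%s:%s' % (src, dest), regex)

# A bare branch name is canonicalized under refs/heads/.
assert parse_fetch_spec('master') == (
    '+refs/heads/master:refs/heads/master', r'\+refs/heads/master:.*')

# A full wildcard refspec keeps src and dest; '*' is escaped in value_regex.
assert parse_fetch_spec('+refs/branch-heads/*:refs/branch-heads/*') == (
    '+refs/branch-heads/*:refs/branch-heads/*', r'\+refs/branch-heads/\*:.*')
```

Because `__init__` now stores these tuples in a set (`self.fetch_specs`), duplicate refs collapse to a single canonical entry.
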
| 148 def __init__(self, url, refs=None, print_func=None): | 163 def __init__(self, url, refs=None, print_func=None): |
| 149 self.url = url | 164 self.url = url |
| 150 self.refs = refs or [] | 165 self.fetch_specs = set([self.parse_fetch_spec(ref) for ref in (refs or [])]) |
| 151 self.basedir = self.UrlToCacheDir(url) | 166 self.basedir = self.UrlToCacheDir(url) |
| 152 self.mirror_path = os.path.join(self.GetCachePath(), self.basedir) | 167 self.mirror_path = os.path.join(self.GetCachePath(), self.basedir) |
| 153 if print_func: | 168 if print_func: |
| 154 self.print = self.print_without_file | 169 self.print = self.print_without_file |
| 155 self.print_func = print_func | 170 self.print_func = print_func |
| 156 else: | 171 else: |
| 157 self.print = print | 172 self.print = print |
| 158 | 173 |
| 159 def print_without_file(self, message, **kwargs): | 174 def print_without_file(self, message, **kwargs): |
| 160 self.print_func(message) | 175 self.print_func(message) |
| (...skipping 68 matching lines...) | |
| 229 self.RunGit(['config', 'gc.autopacklimit', '0'], cwd=cwd) | 244 self.RunGit(['config', 'gc.autopacklimit', '0'], cwd=cwd) |
| 230 | 245 |
| 231 # Allocate more RAM for cache-ing delta chains, for better performance | 246 # Allocate more RAM for cache-ing delta chains, for better performance |
| 232 # of "Resolving deltas". | 247 # of "Resolving deltas". |
| 233 self.RunGit(['config', 'core.deltaBaseCacheLimit', | 248 self.RunGit(['config', 'core.deltaBaseCacheLimit', |
| 234 gclient_utils.DefaultDeltaBaseCacheLimit()], cwd=cwd) | 249 gclient_utils.DefaultDeltaBaseCacheLimit()], cwd=cwd) |
| 235 | 250 |
| 236 self.RunGit(['config', 'remote.origin.url', self.url], cwd=cwd) | 251 self.RunGit(['config', 'remote.origin.url', self.url], cwd=cwd) |
| 237 self.RunGit(['config', '--replace-all', 'remote.origin.fetch', | 252 self.RunGit(['config', '--replace-all', 'remote.origin.fetch', |
| 238 '+refs/heads/*:refs/heads/*', r'\+refs/heads/\*:.*'], cwd=cwd) | 253 '+refs/heads/*:refs/heads/*', r'\+refs/heads/\*:.*'], cwd=cwd) |
| 239 for ref in self.refs: | 254 for spec, value_regex in self.fetch_specs: |
| 240 ref = ref.lstrip('+').rstrip('/') | |
| 241 if ref.startswith('refs/'): | |
| 242 refspec = '+%s:%s' % (ref, ref) | |
| 243 regex = r'\+%s:.*' % ref.replace('*', r'\*') | |
| 244 else: | |
| 245 refspec = '+refs/%s/*:refs/%s/*' % (ref, ref) | |
| 246 regex = r'\+refs/heads/%s:.*' % ref.replace('*', r'\*') | |
| 247 self.RunGit( | 255 self.RunGit( |
| 248 ['config', '--replace-all', 'remote.origin.fetch', refspec, regex], | 256 ['config', '--replace-all', 'remote.origin.fetch', spec, value_regex], |
| 249 cwd=cwd) | 257 cwd=cwd) |
| 250 | 258 |
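
The `value_regex` returned by `parse_fetch_spec` is what keeps the loop above idempotent: `git config --replace-all` overwrites any existing `remote.origin.fetch` entries matching that pattern instead of appending duplicates. As a rough illustration, a mirror constructed with `refs=['refs/branch-heads/*']` would issue calls equivalent to the argument vectors below (worked out by hand from `parse_fetch_spec`; the URL is only a placeholder, not captured from a real run):

```python
# Hypothetical git invocations made by the config loop above for
# refs=['refs/branch-heads/*'].  Illustrative only; the URL is a placeholder.
expected_git_calls = [
    ['git', 'config', 'remote.origin.url',
     'https://chromium.googlesource.com/chromium/src.git'],
    ['git', 'config', '--replace-all', 'remote.origin.fetch',
     '+refs/heads/*:refs/heads/*', r'\+refs/heads/\*:.*'],
    ['git', 'config', '--replace-all', 'remote.origin.fetch',
     '+refs/branch-heads/*:refs/branch-heads/*',
     r'\+refs/branch-heads/\*:.*'],
]
```
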
| 251 def bootstrap_repo(self, directory): | 259 def bootstrap_repo(self, directory): |
| 252 """Bootstrap the repo from Google Stroage if possible. | 260 """Bootstrap the repo from Google Stroage if possible. |
| 253 | 261 |
| 254 More apt-ly named bootstrap_repo_from_cloud_if_possible_else_do_nothing(). | 262 More apt-ly named bootstrap_repo_from_cloud_if_possible_else_do_nothing(). |
| 255 """ | 263 """ |
| 256 | 264 |
| 257 python_fallback = False | 265 python_fallback = False |
| 258 if (sys.platform.startswith('win') and | 266 if (sys.platform.startswith('win') and |
| (...skipping 48 matching lines...) | |
| 307 if retcode: | 315 if retcode: |
| 308 self.print( | 316 self.print( |
| 309 'Extracting bootstrap zipfile %s failed.\n' | 317 'Extracting bootstrap zipfile %s failed.\n' |
| 310 'Resuming normal operations.' % filename) | 318 'Resuming normal operations.' % filename) |
| 311 return False | 319 return False |
| 312 return True | 320 return True |
| 313 | 321 |
| 314 def exists(self): | 322 def exists(self): |
| 315 return os.path.isfile(os.path.join(self.mirror_path, 'config')) | 323 return os.path.isfile(os.path.join(self.mirror_path, 'config')) |
| 316 | 324 |
| | 325 def _preserve_fetchspec(self): |
| 326 """Read and preserve remote.origin.fetch from an existing mirror.""" | |
|
iannucci
2015/09/22 22:56:57
I'd mention that this mutates self.fetch_specs as
szager1
2015/09/22 23:11:56
Done.
| |
| | 327 if not self.exists(): |
| | 328 return |
| | 329 try: |
| | 330 config_fetchspecs = subprocess.check_output( |
| | 331 [self.git_exe, 'config', '--get-all', 'remote.origin.fetch'], |
| | 332 cwd=self.mirror_path) |
| | 333 for fetchspec in config_fetchspecs.splitlines(): |
| | 334 self.fetch_specs.add(self.parse_fetch_spec(fetchspec)) |
| | 335 except subprocess.CalledProcessError: |
| | 336 pass |

> iannucci (2015/09/22 22:56:57): This /seems/ like it could cause data loss on an e
> szager1 (2015/09/22 23:11:56): Well, "data loss" has an interesting meaning here.

| | 337 |
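
Per the review exchange above, `_preserve_fetchspec` mutates `self.fetch_specs` in place, folding in whatever `remote.origin.fetch` entries the existing mirror already carries. A minimal standalone sketch of the read-and-parse half, with illustrative names (`parse_spec` stands in for `Mirror.parse_fetch_spec`):

```python
import subprocess

def read_existing_fetch_specs(git_exe, mirror_path, parse_spec):
    """Sketch of the read side of _preserve_fetchspec (names are illustrative).

    Returns the set of (spec, value_regex) tuples already configured in the
    mirror; the real method adds these to self.fetch_specs in place.
    """
    try:
        output = subprocess.check_output(
            [git_exe, 'config', '--get-all', 'remote.origin.fetch'],
            cwd=mirror_path)
    except subprocess.CalledProcessError:
        # No remote.origin.fetch entries, or git failed: nothing to preserve.
        return set()
    return set(parse_spec(line) for line in output.splitlines())
```
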
| 317 def _ensure_bootstrapped(self, depth, bootstrap, force=False): | 338 def _ensure_bootstrapped(self, depth, bootstrap, force=False): |
| 318 tempdir = None | 339 tempdir = None |
| 319 config_file = os.path.join(self.mirror_path, 'config') | |
| 320 pack_dir = os.path.join(self.mirror_path, 'objects', 'pack') | 340 pack_dir = os.path.join(self.mirror_path, 'objects', 'pack') |
| 321 pack_files = [] | 341 pack_files = [] |
| 322 | 342 |
| 323 if os.path.isdir(pack_dir): | 343 if os.path.isdir(pack_dir): |
| 324 pack_files = [f for f in os.listdir(pack_dir) if f.endswith('.pack')] | 344 pack_files = [f for f in os.listdir(pack_dir) if f.endswith('.pack')] |
| 325 | 345 |
| 326 should_bootstrap = (force or | 346 should_bootstrap = (force or |
| 327 not os.path.exists(config_file) or | 347 not self.exists() or |
| 328 len(pack_files) > GC_AUTOPACKLIMIT) | 348 len(pack_files) > GC_AUTOPACKLIMIT) |
| 329 if should_bootstrap: | 349 if should_bootstrap: |
| | 350 if self.exists(): |
| | 351 # Re-bootstrapping an existing mirror; preserve existing fetch spec. |
| | 352 self._preserve_fetchspec() |
| 330 tempdir = tempfile.mkdtemp( | 353 tempdir = tempfile.mkdtemp( |
| 331 prefix='_cache_tmp', suffix=self.basedir, dir=self.GetCachePath()) | 354 prefix='_cache_tmp', suffix=self.basedir, dir=self.GetCachePath()) |
| 332 bootstrapped = not depth and bootstrap and self.bootstrap_repo(tempdir) | 355 bootstrapped = not depth and bootstrap and self.bootstrap_repo(tempdir) |
| 333 if bootstrapped: | 356 if bootstrapped: |
| 334 # Bootstrap succeeded; delete previous cache, if any. | 357 # Bootstrap succeeded; delete previous cache, if any. |
| 335 gclient_utils.rmtree(self.mirror_path) | 358 gclient_utils.rmtree(self.mirror_path) |
| 336 elif not os.path.exists(config_file): | 359 elif not self.exists(): |
| 337 # Bootstrap failed, no previous cache; start with a bare git dir. | 360 # Bootstrap failed, no previous cache; start with a bare git dir. |
| 338 self.RunGit(['init', '--bare'], cwd=tempdir) | 361 self.RunGit(['init', '--bare'], cwd=tempdir) |
| 339 else: | 362 else: |
| 340 # Bootstrap failed, previous cache exists; warn and continue. | 363 # Bootstrap failed, previous cache exists; warn and continue. |
| 341 logging.warn( | 364 logging.warn( |
| 342 'Git cache has a lot of pack files (%d). Tried to re-bootstrap ' | 365 'Git cache has a lot of pack files (%d). Tried to re-bootstrap ' |
| 343 'but failed. Continuing with non-optimized repository.' | 366 'but failed. Continuing with non-optimized repository.' |
| 344 % len(pack_files)) | 367 % len(pack_files)) |
| 345 gclient_utils.rmtree(tempdir) | 368 gclient_utils.rmtree(tempdir) |
| 346 tempdir = None | 369 tempdir = None |
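
Condensed, the re-bootstrap decision above comes down to three triggers; the helper below is only a restatement for readability, not code from this CL:

```python
def should_bootstrap(force, mirror_exists, num_pack_files, autopacklimit):
    """Re-bootstrap when forced, when no mirror exists yet, or when the
    object store has fragmented into more pack files than gc allows."""
    return force or not mirror_exists or num_pack_files > autopacklimit

# When the mirror already exists, its fetch spec is preserved first, so a
# successful re-bootstrap (which deletes the old cache) does not drop it.
```
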
| (...skipping 209 matching lines...) | |
| 556 } | 579 } |
| 557 if options.depth: | 580 if options.depth: |
| 558 kwargs['depth'] = options.depth | 581 kwargs['depth'] = options.depth |
| 559 mirror.populate(**kwargs) | 582 mirror.populate(**kwargs) |
| 560 | 583 |
| 561 | 584 |
| 562 @subcommand.usage('Fetch new commits into cache and current checkout') | 585 @subcommand.usage('Fetch new commits into cache and current checkout') |
| 563 def CMDfetch(parser, args): | 586 def CMDfetch(parser, args): |
| 564 """Update mirror, and fetch in cwd.""" | 587 """Update mirror, and fetch in cwd.""" |
| 565 parser.add_option('--all', action='store_true', help='Fetch all remotes') | 588 parser.add_option('--all', action='store_true', help='Fetch all remotes') |
| | 589 parser.add_option('--no_bootstrap', '--no-bootstrap', |
| | 590 action='store_true', |
| | 591 help='Don\'t (re)bootstrap from Google Storage') |
| 566 options, args = parser.parse_args(args) | 592 options, args = parser.parse_args(args) |
| 567 | 593 |
| 568 # Figure out which remotes to fetch. This mimics the behavior of regular | 594 # Figure out which remotes to fetch. This mimics the behavior of regular |
| 569 # 'git fetch'. Note that in the case of "stacked" or "pipelined" branches, | 595 # 'git fetch'. Note that in the case of "stacked" or "pipelined" branches, |
| 570 # this will NOT try to traverse up the branching structure to find the | 596 # this will NOT try to traverse up the branching structure to find the |
| 571 # ultimate remote to update. | 597 # ultimate remote to update. |
| 572 remotes = [] | 598 remotes = [] |
| 573 if options.all: | 599 if options.all: |
| 574 assert not args, 'fatal: fetch --all does not take a repository argument' | 600 assert not args, 'fatal: fetch --all does not take a repository argument' |
| 575 remotes = subprocess.check_output([Mirror.git_exe, 'remote']).splitlines() | 601 remotes = subprocess.check_output([Mirror.git_exe, 'remote']).splitlines() |
| (...skipping 10 matching lines...) | |
| 586 remotes = [upstream] | 612 remotes = [upstream] |
| 587 if not remotes: | 613 if not remotes: |
| 588 remotes = ['origin'] | 614 remotes = ['origin'] |
| 589 | 615 |
| 590 cachepath = Mirror.GetCachePath() | 616 cachepath = Mirror.GetCachePath() |
| 591 git_dir = os.path.abspath(subprocess.check_output( | 617 git_dir = os.path.abspath(subprocess.check_output( |
| 592 [Mirror.git_exe, 'rev-parse', '--git-dir'])) | 618 [Mirror.git_exe, 'rev-parse', '--git-dir'])) |
| 593 git_dir = os.path.abspath(git_dir) | 619 git_dir = os.path.abspath(git_dir) |
| 594 if git_dir.startswith(cachepath): | 620 if git_dir.startswith(cachepath): |
| 595 mirror = Mirror.FromPath(git_dir) | 621 mirror = Mirror.FromPath(git_dir) |
| 596 mirror.populate() | 622 mirror.populate(bootstrap=not options.no_bootstrap) |
| 597 return 0 | 623 return 0 |
| 598 for remote in remotes: | 624 for remote in remotes: |
| 599 remote_url = subprocess.check_output( | 625 remote_url = subprocess.check_output( |
| 600 [Mirror.git_exe, 'config', 'remote.%s.url' % remote]).strip() | 626 [Mirror.git_exe, 'config', 'remote.%s.url' % remote]).strip() |
| 601 if remote_url.startswith(cachepath): | 627 if remote_url.startswith(cachepath): |
| 602 mirror = Mirror.FromPath(remote_url) | 628 mirror = Mirror.FromPath(remote_url) |
| 603 mirror.print = lambda *args: None | 629 mirror.print = lambda *args: None |
| 604 print('Updating git cache...') | 630 print('Updating git cache...') |
| 605 mirror.populate() | 631 mirror.populate(bootstrap=not options.no_bootstrap) |
| 606 subprocess.check_call([Mirror.git_exe, 'fetch', remote]) | 632 subprocess.check_call([Mirror.git_exe, 'fetch', remote]) |
| 607 return 0 | 633 return 0 |
| 608 | 634 |
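
The new `--no_bootstrap`/`--no-bootstrap` flag maps onto a single optparse destination, and leaving it off keeps bootstrapping enabled. A small sketch of how the flag feeds `populate()`; the parser setup is copied from the diff, while the assertions are inferred from standard optparse behavior rather than from a test in this CL:

```python
import optparse

parser = optparse.OptionParser()
parser.add_option('--all', action='store_true', help='Fetch all remotes')
parser.add_option('--no_bootstrap', '--no-bootstrap',
                  action='store_true',
                  help='Don\'t (re)bootstrap from Google Storage')

# Either spelling sets the same destination (derived from the first string).
options, _ = parser.parse_args(['--no-bootstrap'])
assert options.no_bootstrap is True       # -> mirror.populate(bootstrap=False)

# With the flag omitted the value stays falsy, so bootstrapping remains on.
options, _ = parser.parse_args([])
assert not options.no_bootstrap           # -> mirror.populate(bootstrap=True)
```
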
| 609 | 635 |
| 610 @subcommand.usage('[url of repo to unlock, or -a|--all]') | 636 @subcommand.usage('[url of repo to unlock, or -a|--all]') |
| 611 def CMDunlock(parser, args): | 637 def CMDunlock(parser, args): |
| 612 """Unlock one or all repos if their lock files are still around.""" | 638 """Unlock one or all repos if their lock files are still around.""" |
| 613 parser.add_option('--force', '-f', action='store_true', | 639 parser.add_option('--force', '-f', action='store_true', |
| 614 help='Actually perform the action') | 640 help='Actually perform the action') |
| 615 parser.add_option('--all', '-a', action='store_true', | 641 parser.add_option('--all', '-a', action='store_true', |
| (...skipping 62 matching lines...) | |
| 678 dispatcher = subcommand.CommandDispatcher(__name__) | 704 dispatcher = subcommand.CommandDispatcher(__name__) |
| 679 return dispatcher.execute(OptionParser(), argv) | 705 return dispatcher.execute(OptionParser(), argv) |
| 680 | 706 |
| 681 | 707 |
| 682 if __name__ == '__main__': | 708 if __name__ == '__main__': |
| 683 try: | 709 try: |
| 684 sys.exit(main(sys.argv[1:])) | 710 sys.exit(main(sys.argv[1:])) |
| 685 except KeyboardInterrupt: | 711 except KeyboardInterrupt: |
| 686 sys.stderr.write('interrupted\n') | 712 sys.stderr.write('interrupted\n') |
| 687 sys.exit(1) | 713 sys.exit(1) |