| OLD | NEW |
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright 2014 The Chromium Authors. All rights reserved. | 2 # Copyright 2014 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 """A git command for managing a local cache of git repositories.""" | 6 """A git command for managing a local cache of git repositories.""" |
| 7 | 7 |
| 8 from __future__ import print_function | 8 from __future__ import print_function |
| 9 import errno | 9 import errno |
| 10 import logging | 10 import logging |
| 11 import optparse | 11 import optparse |
| 12 import os | 12 import os |
| 13 import re | 13 import re |
| 14 import tempfile | 14 import tempfile |
| 15 import time | 15 import time |
| 16 import subprocess | 16 import subprocess |
| 17 import sys | 17 import sys |
| 18 import urlparse | 18 import urlparse |
| 19 import zipfile | 19 import zipfile |
| 20 | 20 |
| 21 from download_from_google_storage import Gsutil | 21 from download_from_google_storage import Gsutil |
| 22 import gclient_utils | 22 import gclient_utils |
| 23 import subcommand | 23 import subcommand |
| 24 | 24 |
| 25 # Analogous to gc.autopacklimit git config. |
| 26 GC_AUTOPACKLIMIT = 50 |
| 27 |
| 25 try: | 28 try: |
| 26 # pylint: disable=E0602 | 29 # pylint: disable=E0602 |
| 27 WinErr = WindowsError | 30 WinErr = WindowsError |
| 28 except NameError: | 31 except NameError: |
| 29 class WinErr(Exception): | 32 class WinErr(Exception): |
| 30 pass | 33 pass |
| 31 | 34 |
| 32 class LockError(Exception): | 35 class LockError(Exception): |
| 33 pass | 36 pass |
| 34 | 37 |
| (...skipping 184 matching lines...) |
| 219 kwargs.setdefault('filter_fn', self.print) | 222 kwargs.setdefault('filter_fn', self.print) |
| 220 env = kwargs.get('env') or kwargs.setdefault('env', os.environ.copy()) | 223 env = kwargs.get('env') or kwargs.setdefault('env', os.environ.copy()) |
| 221 env.setdefault('GIT_ASKPASS', 'true') | 224 env.setdefault('GIT_ASKPASS', 'true') |
| 222 env.setdefault('SSH_ASKPASS', 'true') | 225 env.setdefault('SSH_ASKPASS', 'true') |
| 223 self.print('running "git %s" in "%s"' % (' '.join(cmd), cwd)) | 226 self.print('running "git %s" in "%s"' % (' '.join(cmd), cwd)) |
| 224 gclient_utils.CheckCallAndFilter([self.git_exe] + cmd, **kwargs) | 227 gclient_utils.CheckCallAndFilter([self.git_exe] + cmd, **kwargs) |
| 225 | 228 |
| 226 def config(self, cwd=None): | 229 def config(self, cwd=None): |
| 227 if cwd is None: | 230 if cwd is None: |
| 228 cwd = self.mirror_path | 231 cwd = self.mirror_path |
| 232 |
| 233 # Don't run git-gc in a daemon. Bad things can happen if it gets killed. |
| 234 self.RunGit(['config', 'gc.autodetach', '0'], cwd=cwd) |
| 235 |
| 237 # Don't combine pack files into one big pack file. It's really slow for |
| 238 # large repositories, and there's no way to track progress and make sure |
| 239 # it's not stuck. |
| 239 self.RunGit(['config', 'gc.autopacklimit', '0'], cwd=cwd) |
| 240 |
| 241 # Allocate more RAM for caching delta chains, for better performance |
| 242 # of "Resolving deltas". |
| 229 self.RunGit(['config', 'core.deltaBaseCacheLimit', | 243 self.RunGit(['config', 'core.deltaBaseCacheLimit', |
| 230 gclient_utils.DefaultDeltaBaseCacheLimit()], cwd=cwd) | 244 gclient_utils.DefaultDeltaBaseCacheLimit()], cwd=cwd) |
| 245 |
| 231 self.RunGit(['config', 'remote.origin.url', self.url], cwd=cwd) | 246 self.RunGit(['config', 'remote.origin.url', self.url], cwd=cwd) |
| 232 self.RunGit(['config', '--replace-all', 'remote.origin.fetch', | 247 self.RunGit(['config', '--replace-all', 'remote.origin.fetch', |
| 233 '+refs/heads/*:refs/heads/*'], cwd=cwd) | 248 '+refs/heads/*:refs/heads/*'], cwd=cwd) |
| 234 for ref in self.refs: | 249 for ref in self.refs: |
| 235 ref = ref.lstrip('+').rstrip('/') | 250 ref = ref.lstrip('+').rstrip('/') |
| 236 if ref.startswith('refs/'): | 251 if ref.startswith('refs/'): |
| 237 refspec = '+%s:%s' % (ref, ref) | 252 refspec = '+%s:%s' % (ref, ref) |
| 238 else: | 253 else: |
| 239 refspec = '+refs/%s/*:refs/%s/*' % (ref, ref) | 254 refspec = '+refs/%s/*:refs/%s/*' % (ref, ref) |
| 240 self.RunGit(['config', '--add', 'remote.origin.fetch', refspec], cwd=cwd) | 255 self.RunGit(['config', '--add', 'remote.origin.fetch', refspec], cwd=cwd) |
| (...skipping 71 matching lines...) |
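For readers skimming the config() hunk above: the `remote.origin.fetch` loop normalizes each extra ref before turning it into a refspec. The standalone sketch below is illustrative only; `ref_to_refspec` is a hypothetical helper that simply mirrors the string logic of that loop, it is not part of the change.

```python
# Illustrative sketch of the ref -> refspec mapping done by the loop in
# config(). The helper name is hypothetical.
def ref_to_refspec(ref):
  # Strip a leading '+' and any trailing '/' before building the refspec.
  ref = ref.lstrip('+').rstrip('/')
  if ref.startswith('refs/'):
    # Fully-qualified refs are fetched one-to-one.
    return '+%s:%s' % (ref, ref)
  # Bare names like 'branch-heads' expand to a refs/<name>/* glob.
  return '+refs/%s/*:refs/%s/*' % (ref, ref)

assert ref_to_refspec('branch-heads') == '+refs/branch-heads/*:refs/branch-heads/*'
assert ref_to_refspec('+refs/tags/*') == '+refs/tags/*:refs/tags/*'
```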
| 312 v = ['-v', '--progress'] | 327 v = ['-v', '--progress'] |
| 313 | 328 |
| 314 d = [] | 329 d = [] |
| 315 if depth: | 330 if depth: |
| 316 d = ['--depth', str(depth)] | 331 d = ['--depth', str(depth)] |
| 317 | 332 |
| 318 | 333 |
| 319 with Lockfile(self.mirror_path): | 334 with Lockfile(self.mirror_path): |
| 320 # Setup from scratch if the repo is new or is in a bad state. | 335 # Setup from scratch if the repo is new or is in a bad state. |
| 321 tempdir = None | 336 tempdir = None |
| 322 if not os.path.exists(os.path.join(self.mirror_path, 'config')): | 337 config_file = os.path.join(self.mirror_path, 'config') |
| 323 gclient_utils.rmtree(self.mirror_path) | 338 pack_dir = os.path.join(self.mirror_path, 'objects', 'pack') |
| 339 pack_files = [] |
| 340 if os.path.isdir(pack_dir): |
| 341 pack_files = [f for f in os.listdir(pack_dir) if f.endswith('.pack')] |
| 342 |
| 343 should_bootstrap = (not os.path.exists(config_file) or |
| 344 len(pack_files) > GC_AUTOPACKLIMIT) |
| 345 if should_bootstrap: |
| 324 tempdir = tempfile.mkdtemp( | 346 tempdir = tempfile.mkdtemp( |
| 325 suffix=self.basedir, dir=self.GetCachePath()) | 347 suffix=self.basedir, dir=self.GetCachePath()) |
| 326 bootstrapped = not depth and bootstrap and self.bootstrap_repo(tempdir) | 348 bootstrapped = not depth and bootstrap and self.bootstrap_repo(tempdir) |
| 327 if not bootstrapped: | 349 if bootstrapped: |
| 350 # Bootstrap succeeded; delete previous cache, if any. |
| 351 gclient_utils.rmtree(self.mirror_path) |
| 352 elif not os.path.exists(config_file): |
| 353 # Bootstrap failed, no previous cache; start with a bare git dir. |
| 328 self.RunGit(['init', '--bare'], cwd=tempdir) | 354 self.RunGit(['init', '--bare'], cwd=tempdir) |
| 355 else: |
| 356 # Bootstrap failed, previous cache exists; warn and continue. |
| 357 logging.warn( |
| 358 'Git cache has a lot of pack files (%d). Tried to re-bootstrap ' |
| 359 'but failed. Continuing with a non-optimized repository.' |
| 360 % len(pack_files)) |
| 361 gclient_utils.rmtree(tempdir) |
| 362 tempdir = None |
| 329 else: | 363 else: |
| 330 if depth and os.path.exists(os.path.join(self.mirror_path, 'shallow')): | 364 if depth and os.path.exists(os.path.join(self.mirror_path, 'shallow')): |
| 331 logging.warn( | 365 logging.warn( |
| 332 'Shallow fetch requested, but repo cache already exists.') | 366 'Shallow fetch requested, but repo cache already exists.') |
| 333 d = [] | 367 d = [] |
| 334 | 368 |
| 335 rundir = tempdir or self.mirror_path | 369 rundir = tempdir or self.mirror_path |
| 336 self.config(rundir) | 370 self.config(rundir) |
| 337 fetch_cmd = ['fetch'] + v + d + ['origin'] | 371 fetch_cmd = ['fetch'] + v + d + ['origin'] |
| 338 fetch_specs = subprocess.check_output( | 372 fetch_specs = subprocess.check_output( |
| (...skipping 200 matching lines...) |
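As a rough summary of the populate() changes earlier in this hunk, the re-bootstrap decision reduces to a small check on the mirror directory. The sketch below restates that heuristic in isolation; `needs_bootstrap` is a hypothetical helper that only mirrors the logic introduced by the change.

```python
import os

GC_AUTOPACKLIMIT = 50  # Same threshold the change introduces at the top of the file.

# Hypothetical helper restating the re-bootstrap heuristic from populate():
# rebuild the mirror when it has no git config yet, or when disabling git-gc
# has let loose pack files accumulate past the autopack limit.
def needs_bootstrap(mirror_path, autopacklimit=GC_AUTOPACKLIMIT):
  config_file = os.path.join(mirror_path, 'config')
  pack_dir = os.path.join(mirror_path, 'objects', 'pack')
  pack_files = []
  if os.path.isdir(pack_dir):
    pack_files = [f for f in os.listdir(pack_dir) if f.endswith('.pack')]
  return not os.path.exists(config_file) or len(pack_files) > autopacklimit
```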
| 539 return options, args | 573 return options, args |
| 540 | 574 |
| 541 | 575 |
| 542 def main(argv): | 576 def main(argv): |
| 543 dispatcher = subcommand.CommandDispatcher(__name__) | 577 dispatcher = subcommand.CommandDispatcher(__name__) |
| 544 return dispatcher.execute(OptionParser(), argv) | 578 return dispatcher.execute(OptionParser(), argv) |
| 545 | 579 |
| 546 | 580 |
| 547 if __name__ == '__main__': | 581 if __name__ == '__main__': |
| 548 sys.exit(main(sys.argv[1:])) | 582 sys.exit(main(sys.argv[1:])) |