Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 """Gclient-specific SCM-specific operations.""" | 5 """Gclient-specific SCM-specific operations.""" |
| 6 | 6 |
| 7 import collections | 7 import collections |
| 8 import logging | 8 import logging |
| 9 import os | 9 import os |
| 10 import posixpath | 10 import posixpath |
| (...skipping 380 matching lines...) | |
| 391 url != 'git://foo' and | 391 url != 'git://foo' and |
| 392 subprocess2.capture( | 392 subprocess2.capture( |
| 393 ['git', 'config', 'remote.origin.gclient-auto-fix-url'], | 393 ['git', 'config', 'remote.origin.gclient-auto-fix-url'], |
| 394 cwd=self.checkout_path).strip() != 'False'): | 394 cwd=self.checkout_path).strip() != 'False'): |
| 395 print('_____ switching %s to a new upstream' % self.relpath) | 395 print('_____ switching %s to a new upstream' % self.relpath) |
| 396 # Make sure it's clean | 396 # Make sure it's clean |
| 397 self._CheckClean(rev_str) | 397 self._CheckClean(rev_str) |
| 398 # Switch over to the new upstream | 398 # Switch over to the new upstream |
| 399 self._Run(['remote', 'set-url', 'origin', url], options) | 399 self._Run(['remote', 'set-url', 'origin', url], options) |
| 400 self._FetchAndReset(revision, file_list, options) | 400 self._FetchAndReset(revision, file_list, options) |
| 401 | |
| 402 # If we switched from a cached git repo to an uncached one or vice versa, | |
| 403 # set up the objects link and pack appropriately. | |
| 404 altfile = os.path.join( | |
| 405 self.checkout_path, '.git', 'objects', 'info', 'alternates') | |
| 406 if self.cache_dir: | |
| 407 with open(altfile, 'w') as f: | |
| 408 f.write(os.path.join(url, 'objects')) | |
| 409 self._Run(['repack', '-ad'], options) | |
| 410 self._Run(['repack', '-adl'], options) | |
| 411 elif os.path.exists(altfile): | |
| 412 self._Run(['repack', '-a'], options) | |
| 413 os.remove(altfile) | |
iannucci (2013/07/12 23:32:10): Actually, we should do this in the non-switch case
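For readers outside the review: the added hunk above (new lines 401–413) toggles a checkout between borrowing objects from a local cache repo, via `.git/objects/info/alternates`, and owning its objects outright. Below is a minimal standalone sketch of that pattern using plain `subprocess` calls rather than gclient's `_Run` helper; `switch_object_store` and its arguments are illustrative assumptions, not depot_tools API.

```python
import os
import subprocess

def switch_object_store(checkout_path, cache_path, use_cache):
    """Point a checkout at a cache's object store, or detach it from one."""
    altfile = os.path.join(checkout_path, '.git', 'objects', 'info', 'alternates')
    if use_cache:
        # Borrow objects from the cache's object directory...
        with open(altfile, 'w') as f:
            f.write(os.path.join(cache_path, 'objects'))
        # ...then repack; the second pass with -l (--local) drops local copies
        # of objects that are now reachable through the alternates link.
        subprocess.check_call(['git', 'repack', '-ad'], cwd=checkout_path)
        subprocess.check_call(['git', 'repack', '-adl'], cwd=checkout_path)
    elif os.path.exists(altfile):
        # Leaving cache mode: copy every borrowed object into a local pack
        # before severing the link, so the checkout stays self-contained.
        subprocess.check_call(['git', 'repack', '-a'], cwd=checkout_path)
        os.remove(altfile)
```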
| 401 return | 414 return |
| 402 | 415 |
| 403 if not self._IsValidGitRepo(): | 416 if not self._IsValidGitRepo(): |
| 404 # .git directory is hosed for some reason, set it back up. | 417 # .git directory is hosed for some reason, set it back up. |
| 405 print('_____ %s/.git is corrupted, rebuilding' % self.relpath) | 418 print('_____ %s/.git is corrupted, rebuilding' % self.relpath) |
| 406 self._Run(['init'], options) | 419 self._Run(['init'], options) |
| 407 self._Run(['remote', 'set-url', 'origin', url], options) | 420 self._Run(['remote', 'set-url', 'origin', url], options) |
| 408 | 421 |
| 409 if not self._HasHead(): | 422 if not self._HasHead(): |
| 410 # Previous checkout was aborted before branches could be created in repo, | 423 # Previous checkout was aborted before branches could be created in repo, |
| (...skipping 335 matching lines...) | |
| 746 | 759 |
| 747 # Replace - with -- to avoid ambiguity. / with - to flatten folder structure | 760 # Replace - with -- to avoid ambiguity. / with - to flatten folder structure |
| 748 folder = os.path.join( | 761 folder = os.path.join( |
| 749 self.cache_dir, | 762 self.cache_dir, |
| 750 self._NormalizeGitURL(url).replace('-', '--').replace('/', '-')) | 763 self._NormalizeGitURL(url).replace('-', '--').replace('/', '-')) |
| 751 | 764 |
| 752 v = ['-v'] if options.verbose else [] | 765 v = ['-v'] if options.verbose else [] |
| 753 filter_fn = lambda l: '[up to date]' not in l | 766 filter_fn = lambda l: '[up to date]' not in l |
| 754 with self.cache_locks[folder]: | 767 with self.cache_locks[folder]: |
| 755 gclient_utils.safe_makedirs(self.cache_dir) | 768 gclient_utils.safe_makedirs(self.cache_dir) |
| 769 do_fetch = True | |
| 756 if not os.path.exists(os.path.join(folder, 'config')): | 770 if not os.path.exists(os.path.join(folder, 'config')): |
| 771 do_fetch = False | |
| 757 gclient_utils.rmtree(folder) | 772 gclient_utils.rmtree(folder) |
| 758 self._Run(['clone'] + v + ['-c', 'core.deltaBaseCacheLimit=2g', | 773 cmd = ['clone'] + v + ['-c', 'core.deltaBaseCacheLimit=2g', |
| 759 '--progress', '--mirror', url, folder], | 774 '--progress', '--mirror'] |
| 775 if os.path.exists(self.checkout_path): | |
| 776 cmd += ['--reference', os.path.abspath(self.checkout_path)] | |
| 777 self._Run(cmd + [url, folder], | |
| 760 options, git_filter=True, filter_fn=filter_fn, | 778 options, git_filter=True, filter_fn=filter_fn, |
| 761 cwd=self.cache_dir) | 779 cwd=self.cache_dir) |
| 762 else: | 780 |
| 781 # If the clone has an object dependency on the existing repo, break it | |
| 782 # with repack and remove the linkage. | |
| 783 cache_alt_file = os.path.join(folder, 'objects', 'info', 'alternates') | |
| 784 if os.path.exists(cache_alt_file): | |
| 785 self._Run(['repack', '-a'], options, cwd=folder) | |
| 786 os.remove(cache_alt_file) | |
| 787 | |
| 788 if do_fetch: | |
| 763 # For now, assert that host/path/to/repo.git is identical. We may want | 789 # For now, assert that host/path/to/repo.git is identical. We may want |
| 764 # to relax this restriction in the future to allow for smarter cache | 790 # to relax this restriction in the future to allow for smarter cache |
| 765 # repo update schemes (such as pulling the same repo, but from a | 791 # repo update schemes (such as pulling the same repo, but from a |
| 766 # different host). | 792 # different host). |
| 767 existing_url = self._Capture(['config', 'remote.origin.url'], | 793 existing_url = self._Capture(['config', 'remote.origin.url'], |
| 768 cwd=folder) | 794 cwd=folder) |
| 769 assert self._NormalizeGitURL(existing_url) == self._NormalizeGitURL(url) | 795 assert self._NormalizeGitURL(existing_url) == self._NormalizeGitURL(url) |
| 770 | 796 |
| 771 # Would normally use `git remote update`, but it doesn't support | 797 # Would normally use `git remote update`, but it doesn't support |
| 772 # --progress, so use fetch instead. | 798 # --progress, so use fetch instead. |
| 773 self._Run(['fetch'] + v + ['--multiple', '--progress', '--all'], | 799 self._Run(['fetch'] + v + ['--multiple', '--progress', '--all'], |
| 774 options, git_filter=True, filter_fn=filter_fn, cwd=folder) | 800 options, git_filter=True, filter_fn=filter_fn, cwd=folder) |
| 775 return folder | 801 return folder |
| 776 | 802 |
| 777 def _Clone(self, revision, url, options): | 803 def _Clone(self, revision, url, options): |
| 778 """Clone a git repository from the given URL. | 804 """Clone a git repository from the given URL. |
| (...skipping 646 matching lines...) | |
| 1425 new_command.append('--force') | 1451 new_command.append('--force') |
| 1426 if command[0] != 'checkout' and scm.SVN.AssertVersion('1.6')[0]: | 1452 if command[0] != 'checkout' and scm.SVN.AssertVersion('1.6')[0]: |
| 1427 new_command.extend(('--accept', 'theirs-conflict')) | 1453 new_command.extend(('--accept', 'theirs-conflict')) |
| 1428 elif options.manually_grab_svn_rev: | 1454 elif options.manually_grab_svn_rev: |
| 1429 new_command.append('--force') | 1455 new_command.append('--force') |
| 1430 if command[0] != 'checkout' and scm.SVN.AssertVersion('1.6')[0]: | 1456 if command[0] != 'checkout' and scm.SVN.AssertVersion('1.6')[0]: |
| 1431 new_command.extend(('--accept', 'postpone')) | 1457 new_command.extend(('--accept', 'postpone')) |
| 1432 elif command[0] != 'checkout' and scm.SVN.AssertVersion('1.6')[0]: | 1458 elif command[0] != 'checkout' and scm.SVN.AssertVersion('1.6')[0]: |
| 1433 new_command.extend(('--accept', 'postpone')) | 1459 new_command.extend(('--accept', 'postpone')) |
| 1434 return new_command | 1460 return new_command |
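The second large hunk (the cache-update logic around the `--mirror` clone, new lines 769–800) seeds the bare cache from an existing working checkout with `--reference`, then repacks and removes the resulting alternates link so the cache never depends on that checkout's objects. The same flow reduced to plain git commands is sketched below; `bootstrap_mirror` and its parameters are assumptions for illustration, not the actual gclient method.

```python
import os
import subprocess

def bootstrap_mirror(url, cache_folder, existing_checkout=None):
    """Create a standalone --mirror cache, optionally seeded from a checkout."""
    cmd = ['git', 'clone', '--mirror', '--progress',
           '-c', 'core.deltaBaseCacheLimit=2g']
    if existing_checkout and os.path.exists(existing_checkout):
        # Reuse objects already on disk so the initial clone is mostly local.
        cmd += ['--reference', os.path.abspath(existing_checkout)]
    subprocess.check_call(cmd + [url, cache_folder])

    # --reference leaves objects/info/alternates pointing at the checkout;
    # repack everything into the cache's own packs and remove the link so
    # the cache survives even if that checkout is later deleted or moved.
    altfile = os.path.join(cache_folder, 'objects', 'info', 'alternates')
    if os.path.exists(altfile):
        subprocess.check_call(['git', 'repack', '-a'], cwd=cache_folder)
        os.remove(altfile)
    return cache_folder
```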