OLD | NEW |
---|---|
1 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
4 | 4 |
5 """Gclient-specific SCM-specific operations.""" | 5 """Gclient-specific SCM-specific operations.""" |
6 | 6 |
7 import collections | 7 import collections |
8 import logging | 8 import logging |
9 import os | 9 import os |
10 import posixpath | 10 import posixpath |
(...skipping 204 matching lines...) | |
215 result, version = scm.GIT.AssertVersion('1.7') | 215 result, version = scm.GIT.AssertVersion('1.7') |
216 if not result: | 216 if not result: |
217 raise gclient_utils.Error('Git version is older than 1.7: %s' % version) | 217 raise gclient_utils.Error('Git version is older than 1.7: %s' % version) |
218 return result | 218 return result |
219 except OSError: | 219 except OSError: |
220 return False | 220 return False |
221 | 221 |
222 def GetCheckoutRoot(self): | 222 def GetCheckoutRoot(self): |
223 return scm.GIT.GetCheckoutRoot(self.checkout_path) | 223 return scm.GIT.GetCheckoutRoot(self.checkout_path) |
224 | 224 |
225 def GetRemoteURL(self, options, cwd=None): | |
226 try: | |
227 return self._Capture(['config', 'remote.%s.url' % self.remote], | |
228 cwd=cwd or self.checkout_path).rstrip() | |
229 except (OSError, subprocess2.CalledProcessError): | |
230 return None | |
231 | |
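For context, the new `GetRemoteURL` centralizes the `git config remote.<remote>.url` lookup that call sites previously inlined (see the cache-folder check further down in this diff), and returns None instead of raising when the checkout or config key is missing. A minimal standalone sketch of the same lookup, assuming plain `subprocess` and a remote named `origin` (both illustrative, not the wrapper's actual plumbing):

```python
import subprocess

def get_remote_url(checkout_path, remote='origin'):
    """Return the configured fetch URL for `remote`, or None if unavailable."""
    try:
        return subprocess.check_output(
            ['git', 'config', 'remote.%s.url' % remote],
            cwd=checkout_path).strip()
    except (OSError, subprocess.CalledProcessError):
        # No git on PATH, not a git checkout, or the remote isn't configured.
        return None
```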
225 def GetRevisionDate(self, _revision): | 232 def GetRevisionDate(self, _revision): |
226 """Returns the given revision's date in ISO-8601 format (which contains the | 233 """Returns the given revision's date in ISO-8601 format (which contains the |
227 time zone).""" | 234 time zone).""" |
228 # TODO(floitsch): get the time-stamp of the given revision and not just the | 235 # TODO(floitsch): get the time-stamp of the given revision and not just the |
229 # time-stamp of the currently checked out revision. | 236 # time-stamp of the currently checked out revision. |
230 return self._Capture(['log', '-n', '1', '--format=%ai']) | 237 return self._Capture(['log', '-n', '1', '--format=%ai']) |
231 | 238 |
232 @staticmethod | 239 @staticmethod |
233 def cleanup(options, args, file_list): | 240 def cleanup(options, args, file_list): |
234 """'Cleanup' the repo. | 241 """'Cleanup' the repo. |
(...skipping 37 matching lines...) | |
272 # Not a fatal error, or even very interesting in a non-git-submodule | 279 # Not a fatal error, or even very interesting in a non-git-submodule |
273 # world. So just keep it quiet. | 280 # world. So just keep it quiet. |
274 pass | 281 pass |
275 try: | 282 try: |
276 gclient_utils.CheckCallAndFilter(cmd3, **kwargs) | 283 gclient_utils.CheckCallAndFilter(cmd3, **kwargs) |
277 except subprocess2.CalledProcessError: | 284 except subprocess2.CalledProcessError: |
278 gclient_utils.CheckCallAndFilter(cmd3 + ['always'], **kwargs) | 285 gclient_utils.CheckCallAndFilter(cmd3 + ['always'], **kwargs) |
279 | 286 |
280 gclient_utils.CheckCallAndFilter(cmd4, **kwargs) | 287 gclient_utils.CheckCallAndFilter(cmd4, **kwargs) |
281 | 288 |
282 def _FetchAndReset(self, revision, file_list, options): | |
283 """Equivalent to git fetch; git reset.""" | |
284 quiet = [] | |
285 if not options.verbose: | |
286 quiet = ['--quiet'] | |
287 self._UpdateBranchHeads(options, fetch=False) | |
288 | |
289 fetch_cmd = [ | |
290 '-c', 'core.deltaBaseCacheLimit=2g', 'fetch', self.remote, '--prune'] | |
291 self._Run(fetch_cmd + quiet, options, retry=True) | |
292 self._Run(['reset', '--hard', revision] + quiet, options) | |
293 self.UpdateSubmoduleConfig() | |
294 if file_list is not None: | |
295 files = self._Capture(['ls-files']).splitlines() | |
296 file_list.extend([os.path.join(self.checkout_path, f) for f in files]) | |
297 | |
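For reference, the `_FetchAndReset` helper deleted above amounted to a prune-fetch from the configured remote followed by a hard reset to the requested revision, then re-listing the tracked files. A rough standalone equivalent of that sequence (remote name and the `deltaBaseCacheLimit` tweak are copied from the hunk above; branch-head updating and error handling are omitted):

```python
import os
import subprocess

def fetch_and_reset(checkout_path, revision, remote='origin', quiet=True):
    """Rough equivalent of the removed helper: prune-fetch then hard reset."""
    q = ['--quiet'] if quiet else []
    subprocess.check_call(
        ['git', '-c', 'core.deltaBaseCacheLimit=2g', 'fetch', remote, '--prune'] + q,
        cwd=checkout_path)
    subprocess.check_call(['git', 'reset', '--hard', revision] + q,
                          cwd=checkout_path)
    # The original also refreshed submodule config and appended the tracked
    # files to file_list; return the file list here instead.
    files = subprocess.check_output(['git', 'ls-files'],
                                    cwd=checkout_path).splitlines()
    return [os.path.join(checkout_path, f) for f in files]
```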
298 def update(self, options, args, file_list): | 289 def update(self, options, args, file_list): |
299 """Runs git to update or transparently checkout the working copy. | 290 """Runs git to update or transparently checkout the working copy. |
300 | 291 |
301 All updated files will be appended to file_list. | 292 All updated files will be appended to file_list. |
302 | 293 |
303 Raises: | 294 Raises: |
304 Error: if can't get URL for relative path. | 295 Error: if can't get URL for relative path. |
305 """ | 296 """ |
306 if args: | 297 if args: |
307 raise gclient_utils.Error("Unsupported argument(s): %s" % ",".join(args)) | 298 raise gclient_utils.Error("Unsupported argument(s): %s" % ",".join(args)) |
(...skipping 66 matching lines...) | |
374 return self._Capture(['rev-parse', '--verify', 'HEAD']) | 365 return self._Capture(['rev-parse', '--verify', 'HEAD']) |
375 | 366 |
376 if not os.path.exists(os.path.join(self.checkout_path, '.git')): | 367 if not os.path.exists(os.path.join(self.checkout_path, '.git')): |
377 raise gclient_utils.Error('\n____ %s%s\n' | 368 raise gclient_utils.Error('\n____ %s%s\n' |
378 '\tPath is not a git repo. No .git dir.\n' | 369 '\tPath is not a git repo. No .git dir.\n' |
379 '\tTo resolve:\n' | 370 '\tTo resolve:\n' |
380 '\t\trm -rf %s\n' | 371 '\t\trm -rf %s\n' |
381 '\tAnd run gclient sync again\n' | 372 '\tAnd run gclient sync again\n' |
382 % (self.relpath, rev_str, self.relpath)) | 373 % (self.relpath, rev_str, self.relpath)) |
383 | 374 |
384 # See if the url has changed (the unittests use git://foo for the url, let | |
385 # that through). | |
386 current_url = self._Capture(['config', 'remote.%s.url' % self.remote]) | |
387 return_early = False | |
388 # TODO(maruel): Delete url != 'git://foo' since it's just to make the | |
389 # unit test pass. (and update the comment above) | |
390 # Skip url auto-correction if remote.origin.gclient-auto-fix-url is set. | |
391 # This allows devs to use experimental repos which have a different url | |
392 # but whose branch(s) are the same as official repos. | |
393 if (current_url != url and | |
394 url != 'git://foo' and | |
395 subprocess2.capture( | |
396 ['git', 'config', 'remote.%s.gclient-auto-fix-url' % self.remote], | |
397 cwd=self.checkout_path).strip() != 'False'): | |
398 print('_____ switching %s to a new upstream' % self.relpath) | |
399 # Make sure it's clean | |
400 self._CheckClean(rev_str) | |
401 # Switch over to the new upstream | |
402 self._Run(['remote', 'set-url', self.remote, url], options) | |
403 self._FetchAndReset(revision, file_list, options) | |
404 return_early = True | |
405 | |
406 # Need to do this in the normal path as well as in the post-remote-switch | 375 # Need to do this in the normal path as well as in the post-remote-switch |
407 # path. | 376 # path. |
408 self._PossiblySwitchCache(url, options) | 377 self._PossiblySwitchCache(url, options) |
409 | 378 |
410 if return_early: | |
411 return self._Capture(['rev-parse', '--verify', 'HEAD']) | |
412 | |
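The block removed here compared the configured remote URL against the URL from DEPS and, unless `remote.<remote>.gclient-auto-fix-url` was set to `False`, repointed the remote at the new URL, re-synced, and returned early. A sketch of that removed decision only (not of the surviving code path); `get_remote_url` and `fetch_and_reset` refer to the illustrative helpers sketched earlier, and the unset-key handling here is an assumption:

```python
import subprocess

def maybe_switch_upstream(checkout_path, deps_url, revision, remote='origin'):
    """Sketch of the removed auto-fix: repoint the remote when DEPS moved."""
    current = get_remote_url(checkout_path, remote)   # sketched earlier
    try:
        opt_out = subprocess.check_output(
            ['git', 'config', 'remote.%s.gclient-auto-fix-url' % remote],
            cwd=checkout_path).strip()
    except subprocess.CalledProcessError:
        opt_out = ''  # key unset: treat auto-fix as enabled
    if current != deps_url and opt_out != 'False':
        subprocess.check_call(['git', 'remote', 'set-url', remote, deps_url],
                              cwd=checkout_path)
        fetch_and_reset(checkout_path, revision, remote)  # sketched earlier
        return True   # the removed code returned HEAD early at this point
    return False
```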
413 cur_branch = self._GetCurrentBranch() | 379 cur_branch = self._GetCurrentBranch() |
414 | 380 |
415 # Cases: | 381 # Cases: |
416 # 0) HEAD is detached. Probably from our initial clone. | 382 # 0) HEAD is detached. Probably from our initial clone. |
417 # - make sure HEAD is contained by a named ref, then update. | 383 # - make sure HEAD is contained by a named ref, then update. |
418 # Cases 1-4. HEAD is a branch. | 384 # Cases 1-4. HEAD is a branch. |
419 # 1) current branch is not tracking a remote branch (could be git-svn) | 385 # 1) current branch is not tracking a remote branch (could be git-svn) |
420 # - try to rebase onto the new hash or branch | 386 # - try to rebase onto the new hash or branch |
421 # 2) current branch is tracking a remote branch with local committed | 387 # 2) current branch is tracking a remote branch with local committed |
422 # changes, but the DEPS file switched to point to a hash | 388 # changes, but the DEPS file switched to point to a hash |
(...skipping 385 matching lines...) | |
808 if use_reference: | 774 if use_reference: |
809 cmd += ['--reference', os.path.abspath(self.checkout_path)] | 775 cmd += ['--reference', os.path.abspath(self.checkout_path)] |
810 | 776 |
811 self._Run(cmd + [url, folder], | 777 self._Run(cmd + [url, folder], |
812 options, filter_fn=filter_fn, cwd=self.cache_dir, retry=True) | 778 options, filter_fn=filter_fn, cwd=self.cache_dir, retry=True) |
813 else: | 779 else: |
814 # For now, assert that host/path/to/repo.git is identical. We may want | 780 # For now, assert that host/path/to/repo.git is identical. We may want |
815 # to relax this restriction in the future to allow for smarter cache | 781 # to relax this restriction in the future to allow for smarter cache |
816 # repo update schemes (such as pulling the same repo, but from a | 782 # repo update schemes (such as pulling the same repo, but from a |
817 # different host). | 783 # different host). |
818 existing_url = self._Capture(['config', 'remote.%s.url' % self.remote], | 784 existing_url = self.GetRemoteURL(options, cwd=folder) |
819 cwd=folder) | |
820 assert self._NormalizeGitURL(existing_url) == self._NormalizeGitURL(url) | 785 assert self._NormalizeGitURL(existing_url) == self._NormalizeGitURL(url) |
821 | 786 |
822 if use_reference: | 787 if use_reference: |
823 with open(altfile, 'w') as f: | 788 with open(altfile, 'w') as f: |
824 f.write(os.path.abspath(checkout_objects)) | 789 f.write(os.path.abspath(checkout_objects)) |
825 | 790 |
826 # Would normally use `git remote update`, but it doesn't support | 791 # Would normally use `git remote update`, but it doesn't support |
827 # --progress, so use fetch instead. | 792 # --progress, so use fetch instead. |
828 self._Run(['fetch'] + v + ['--multiple', '--progress', '--all'], | 793 self._Run(['fetch'] + v + ['--multiple', '--progress', '--all'], |
829 options, filter_fn=filter_fn, cwd=folder, retry=True) | 794 options, filter_fn=filter_fn, cwd=folder, retry=True) |
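This hunk also swaps the inlined `git config` capture for the new `GetRemoteURL` helper when asserting that the cache folder already mirrors the same repository after normalization. The real `_NormalizeGitURL` is not part of this hunk; the normalization below is purely illustrative of the kind of comparison that assert performs (compare by host and path, ignoring a trailing slash or `.git` suffix):

```python
import urlparse  # Python 2, matching the rest of this file

def normalize_git_url(url):
    """Illustrative normalization: compare git URLs by host and path only."""
    parsed = urlparse.urlparse(url)
    path = parsed.path.rstrip('/')
    if path.endswith('.git'):
        path = path[:-len('.git')]
    return parsed.netloc.lower() + path

# 'repo' and 'repo.git' on the same host compare equal; different hosts do not.
assert normalize_git_url('https://host/path/to/repo.git') == \
       normalize_git_url('https://host/path/to/repo/')
assert normalize_git_url('https://host-a/repo') != \
       normalize_git_url('https://host-b/repo')
```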
(...skipping 248 matching lines...) | |
1078 result, version = scm.SVN.AssertVersion('1.4') | 1043 result, version = scm.SVN.AssertVersion('1.4') |
1079 if not result: | 1044 if not result: |
1080 raise gclient_utils.Error('SVN version is older than 1.4: %s' % version) | 1045 raise gclient_utils.Error('SVN version is older than 1.4: %s' % version) |
1081 return result | 1046 return result |
1082 except OSError: | 1047 except OSError: |
1083 return False | 1048 return False |
1084 | 1049 |
1085 def GetCheckoutRoot(self): | 1050 def GetCheckoutRoot(self): |
1086 return scm.SVN.GetCheckoutRoot(self.checkout_path) | 1051 return scm.SVN.GetCheckoutRoot(self.checkout_path) |
1087 | 1052 |
1053 def GetRemoteURL(self, options): | |
1054 try: | |
1055 local_info = scm.SVN.CaptureLocalInfo([os.curdir], self.checkout_path) | |
1056 except (OSError, subprocess2.CalledProcessError): | |
1057 return None | |
1058 return local_info.get('URL') | |
1059 | |
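The SVN counterpart added here reads the working copy's `URL` field from the local `svn info` data, again returning None on failure so callers can treat "no remote URL" uniformly across the git and svn wrappers. A standalone sketch of the same idea driving `svn info` directly (the wrapper goes through scm.SVN.CaptureLocalInfo instead; the parsing below is illustrative):

```python
import subprocess

def get_svn_remote_url(checkout_path):
    """Return the URL of an svn working copy, or None if it can't be read."""
    try:
        out = subprocess.check_output(['svn', 'info'], cwd=checkout_path)
    except (OSError, subprocess.CalledProcessError):
        return None
    for line in out.splitlines():
        if line.startswith('URL: '):
            return line[len('URL: '):].strip()
    return None
```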
1088 def GetRevisionDate(self, revision): | 1060 def GetRevisionDate(self, revision): |
1089 """Returns the given revision's date in ISO-8601 format (which contains the | 1061 """Returns the given revision's date in ISO-8601 format (which contains the |
1090 time zone).""" | 1062 time zone).""" |
1091 date = scm.SVN.Capture( | 1063 date = scm.SVN.Capture( |
1092 ['propget', '--revprop', 'svn:date', '-r', revision], | 1064 ['propget', '--revprop', 'svn:date', '-r', revision], |
1093 os.path.join(self.checkout_path, '.')) | 1065 os.path.join(self.checkout_path, '.')) |
1094 return date.strip() | 1066 return date.strip() |
1095 | 1067 |
1096 def cleanup(self, options, args, _file_list): | 1068 def cleanup(self, options, args, _file_list): |
1097 """Cleanup working copy.""" | 1069 """Cleanup working copy.""" |
(...skipping 19 matching lines...) | |
1117 filter_fn=SvnDiffFilterer(self.relpath).Filter) | 1089 filter_fn=SvnDiffFilterer(self.relpath).Filter) |
1118 | 1090 |
1119 def update(self, options, args, file_list): | 1091 def update(self, options, args, file_list): |
1120 """Runs svn to update or transparently checkout the working copy. | 1092 """Runs svn to update or transparently checkout the working copy. |
1121 | 1093 |
1122 All updated files will be appended to file_list. | 1094 All updated files will be appended to file_list. |
1123 | 1095 |
1124 Raises: | 1096 Raises: |
1125 Error: if can't get URL for relative path. | 1097 Error: if can't get URL for relative path. |
1126 """ | 1098 """ |
1127 # Only update if git or hg is not controlling the directory. | |
1128 git_path = os.path.join(self.checkout_path, '.git') | |
1129 if os.path.exists(git_path): | |
1130 print('________ found .git directory; skipping %s' % self.relpath) | |
1131 return | |
1132 | |
1133 hg_path = os.path.join(self.checkout_path, '.hg') | |
1134 if os.path.exists(hg_path): | |
1135 print('________ found .hg directory; skipping %s' % self.relpath) | |
1136 return | |
1137 | |
1138 if args: | 1099 if args: |
1139 raise gclient_utils.Error("Unsupported argument(s): %s" % ",".join(args)) | 1100 raise gclient_utils.Error("Unsupported argument(s): %s" % ",".join(args)) |
1140 | 1101 |
1141 # revision is the revision to match. It is None if no revision is specified, | 1102 # revision is the revision to match. It is None if no revision is specified, |
1142 # i.e. the 'deps ain't pinned'. | 1103 # i.e. the 'deps ain't pinned'. |
1143 url, revision = gclient_utils.SplitUrlRevision(self.url) | 1104 url, revision = gclient_utils.SplitUrlRevision(self.url) |
1144 # Keep the original unpinned url for reference in case the repo is switched. | |
1145 base_url = url | |
1146 managed = True | 1105 managed = True |
1147 if options.revision: | 1106 if options.revision: |
1148 # Override the revision number. | 1107 # Override the revision number. |
1149 revision = str(options.revision) | 1108 revision = str(options.revision) |
1150 if revision: | 1109 if revision: |
1151 if revision != 'unmanaged': | 1110 if revision != 'unmanaged': |
1152 forced_revision = True | 1111 forced_revision = True |
1153 # Reconstruct the url. | 1112 # Reconstruct the url. |
1154 url = '%s@%s' % (url, revision) | 1113 url = '%s@%s' % (url, revision) |
1155 rev_str = ' at %s' % revision | 1114 rev_str = ' at %s' % revision |
(...skipping 126 matching lines...) | |
1282 if d[0][0] == '!': | 1241 if d[0][0] == '!': |
1283 print 'You can pass --force to enable automatic removal.' | 1242 print 'You can pass --force to enable automatic removal.' |
1284 raise e | 1243 raise e |
1285 | 1244 |
1286 # Retrieve the current HEAD version because svn is slow at null updates. | 1245 # Retrieve the current HEAD version because svn is slow at null updates. |
1287 if options.manually_grab_svn_rev and not revision: | 1246 if options.manually_grab_svn_rev and not revision: |
1288 from_info_live = scm.SVN.CaptureRemoteInfo(from_info['URL']) | 1247 from_info_live = scm.SVN.CaptureRemoteInfo(from_info['URL']) |
1289 revision = str(from_info_live['Revision']) | 1248 revision = str(from_info_live['Revision']) |
1290 rev_str = ' at %s' % revision | 1249 rev_str = ' at %s' % revision |
1291 | 1250 |
1292 if from_info['URL'] != base_url: | |
1293 # The repository url changed, need to switch. | |
1294 try: | |
1295 to_info = scm.SVN.CaptureRemoteInfo(url) | |
1296 except (gclient_utils.Error, subprocess2.CalledProcessError): | |
1297 # The url is invalid or the server is not accessible, it's safer to bail | |
1298 # out right now. | |
1299 raise gclient_utils.Error('This url is unreachable: %s' % url) | |
1300 can_switch = ((from_info['Repository Root'] != to_info['Repository Root']) | |
1301 and (from_info['UUID'] == to_info['UUID'])) | |
1302 if can_switch: | |
1303 print('\n_____ relocating %s to a new checkout' % self.relpath) | |
1304 # We have different roots, so check if we can switch --relocate. | |
1305 # Subversion only permits this if the repository UUIDs match. | |
1306 # Perform the switch --relocate, then rewrite the from_url | |
1307 # to reflect where we "are now." (This is the same way that | |
1308 # Subversion itself handles the metadata when switch --relocate | |
1309 # is used.) This makes the checks below for whether we | |
1310 # can update to a revision or have to switch to a different | |
1311 # branch work as expected. | |
1312 # TODO(maruel): TEST ME ! | |
1313 command = ['switch', '--relocate', | |
1314 from_info['Repository Root'], | |
1315 to_info['Repository Root'], | |
1316 self.relpath] | |
1317 self._Run(command, options, cwd=self._root_dir) | |
1318 from_info['URL'] = from_info['URL'].replace( | |
1319 from_info['Repository Root'], | |
1320 to_info['Repository Root']) | |
1321 else: | |
1322 if not options.force and not options.reset: | |
1323 # Look for local modifications but ignore unversioned files. | |
1324 for status in scm.SVN.CaptureStatus(None, self.checkout_path): | |
1325 if status[0][0] != '?': | |
1326 raise gclient_utils.Error( | |
1327 ('Can\'t switch the checkout to %s; UUID don\'t match and ' | |
1328 'there is local changes in %s. Delete the directory and ' | |
1329 'try again.') % (url, self.checkout_path)) | |
1330 # Ok delete it. | |
1331 print('\n_____ switching %s to a new checkout' % self.relpath) | |
1332 gclient_utils.rmtree(self.checkout_path) | |
1333 # We need to checkout. | |
1334 command = ['checkout', url, self.checkout_path] | |
1335 command = self._AddAdditionalUpdateFlags(command, options, revision) | |
1336 self._RunAndGetFileList(command, options, file_list, self._root_dir) | |
1337 return self.Svnversion() | |
1338 | |
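The large block removed above handled a DEPS URL change for svn checkouts: when the repository roots differed but the UUIDs matched it ran `svn switch --relocate`, otherwise it deleted the directory (refusing if local modifications were present without --force/--reset) and checked out fresh. The decision boiled down to the sketch below; `from_info` and `to_info` stand in for the dicts that the info captures produce, with 'Repository Root' and 'UUID' keys as shown in the removed code:

```python
def plan_svn_url_change(from_info, to_info, has_local_mods, force):
    """Sketch of the removed control flow: relocate, re-checkout, or refuse."""
    roots_differ = from_info['Repository Root'] != to_info['Repository Root']
    same_uuid = from_info['UUID'] == to_info['UUID']
    if roots_differ and same_uuid:
        # Same repository served from a new root: svn switch --relocate.
        return 'relocate'
    if has_local_mods and not force:
        # Different repository and a dirty checkout: the removed code raised.
        return 'refuse'
    # Different repository: wipe the directory and check out the new URL.
    return 'recheckout'
```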
1339 # If the provided url has a revision number that matches the revision | 1251 # If the provided url has a revision number that matches the revision |
1340 # number of the existing directory, then we don't need to bother updating. | 1252 # number of the existing directory, then we don't need to bother updating. |
1341 if not options.force and str(from_info['Revision']) == revision: | 1253 if not options.force and str(from_info['Revision']) == revision: |
1342 if options.verbose or not forced_revision: | 1254 if options.verbose or not forced_revision: |
1343 print('\n_____ %s%s' % (self.relpath, rev_str)) | 1255 print('\n_____ %s%s' % (self.relpath, rev_str)) |
1344 else: | 1256 else: |
1345 command = ['update', self.checkout_path] | 1257 command = ['update', self.checkout_path] |
1346 command = self._AddAdditionalUpdateFlags(command, options, revision) | 1258 command = self._AddAdditionalUpdateFlags(command, options, revision) |
1347 self._RunAndGetFileList(command, options, file_list, self._root_dir) | 1259 self._RunAndGetFileList(command, options, file_list, self._root_dir) |
1348 | 1260 |
(...skipping 46 matching lines...) | |
1395 if not os.path.isdir(self.checkout_path): | 1307 if not os.path.isdir(self.checkout_path): |
1396 if os.path.exists(self.checkout_path): | 1308 if os.path.exists(self.checkout_path): |
1397 gclient_utils.rmtree(self.checkout_path) | 1309 gclient_utils.rmtree(self.checkout_path) |
1398 # svn revert won't work if the directory doesn't exist. It needs to | 1310 # svn revert won't work if the directory doesn't exist. It needs to |
1399 # checkout instead. | 1311 # checkout instead. |
1400 print('\n_____ %s is missing, synching instead' % self.relpath) | 1312 print('\n_____ %s is missing, synching instead' % self.relpath) |
1401 # Don't reuse the args. | 1313 # Don't reuse the args. |
1402 return self.update(options, [], file_list) | 1314 return self.update(options, [], file_list) |
1403 | 1315 |
1404 if not os.path.isdir(os.path.join(self.checkout_path, '.svn')): | 1316 if not os.path.isdir(os.path.join(self.checkout_path, '.svn')): |
1405 if os.path.isdir(os.path.join(self.checkout_path, '.git')): | |
borenet 2014/01/14 21:49:45: Seems like this needs to go, too. | |
1406 print('________ found .git directory; skipping %s' % self.relpath) | |
1407 return | |
1408 if os.path.isdir(os.path.join(self.checkout_path, '.hg')): | |
1409 print('________ found .hg directory; skipping %s' % self.relpath) | |
1410 return | |
1411 if not options.force: | 1317 if not options.force: |
1412 raise gclient_utils.Error('Invalid checkout path, aborting') | 1318 raise gclient_utils.Error('Invalid checkout path, aborting') |
1413 print( | 1319 print( |
1414 '\n_____ %s is not a valid svn checkout, synching instead' % | 1320 '\n_____ %s is not a valid svn checkout, synching instead' % |
1415 self.relpath) | 1321 self.relpath) |
1416 gclient_utils.rmtree(self.checkout_path) | 1322 gclient_utils.rmtree(self.checkout_path) |
1417 # Don't reuse the args. | 1323 # Don't reuse the args. |
1418 return self.update(options, [], file_list) | 1324 return self.update(options, [], file_list) |
1419 | 1325 |
1420 def printcb(file_status): | 1326 def printcb(file_status): |
(...skipping 97 matching lines...) | |
1518 new_command.append('--force') | 1424 new_command.append('--force') |
1519 if command[0] != 'checkout' and scm.SVN.AssertVersion('1.6')[0]: | 1425 if command[0] != 'checkout' and scm.SVN.AssertVersion('1.6')[0]: |
1520 new_command.extend(('--accept', 'theirs-conflict')) | 1426 new_command.extend(('--accept', 'theirs-conflict')) |
1521 elif options.manually_grab_svn_rev: | 1427 elif options.manually_grab_svn_rev: |
1522 new_command.append('--force') | 1428 new_command.append('--force') |
1523 if command[0] != 'checkout' and scm.SVN.AssertVersion('1.6')[0]: | 1429 if command[0] != 'checkout' and scm.SVN.AssertVersion('1.6')[0]: |
1524 new_command.extend(('--accept', 'postpone')) | 1430 new_command.extend(('--accept', 'postpone')) |
1525 elif command[0] != 'checkout' and scm.SVN.AssertVersion('1.6')[0]: | 1431 elif command[0] != 'checkout' and scm.SVN.AssertVersion('1.6')[0]: |
1526 new_command.extend(('--accept', 'postpone')) | 1432 new_command.extend(('--accept', 'postpone')) |
1527 return new_command | 1433 return new_command |