| OLD | NEW |
| (Empty) |
| 1 #!/usr/bin/env python | |
| 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | |
| 3 # Use of this source code is governed by a BSD-style license that can be | |
| 4 # found in the LICENSE file. | |
| 5 | |
| 6 import datetime | |
| 7 import optparse | |
| 8 import os | |
| 9 import re | |
| 10 import sys | |
| 11 import urlparse | |
| 12 | |
| 13 | |
| 14 import gclient_utils | |
| 15 import subprocess2 | |
| 16 | |
| 17 USAGE = """ | |
| 18 WARNING: Please use this tool in an empty directory | |
| 19 (or at least one that you don't mind clobbering.) | |
| 20 | |
| 21 REQUIRES: SVN 1.5+ | |
| 22 NOTE: NO NEED TO CHECKOUT ANYTHING IN ADVANCE OF USING THIS TOOL. | |
| 23 Valid parameters: | |
| 24 | |
| 25 [Merge from trunk to branch] | |
| 26 --merge <revision> --branch <branch_num> | |
| 27 Example: %(app)s --merge 12345 --branch 187 | |
| 28 | |
| 29 [Merge from trunk to local copy] | |
| 30 --merge <revision> --local | |
| 31 Example: %(app)s --merge 12345 --local | |
| 32 | |
| 33 [Merge from branch to branch] | |
| 34 --merge <revision> --sbranch <branch_num> --branch <branch_num> | |
| 35 Example: %(app)s --merge 12345 --sbranch 248 --branch 249 | |
| 36 | |
| 37 [Revert from trunk] | |
| 38 --revert <revision> | |
| 39 Example: %(app)s --revert 12345 | |
| 40 | |
| 41 [Revert from branch] | |
| 42 --revert <revision> --branch <branch_num> | |
| 43 Example: %(app)s --revert 12345 --branch 187 | |
| 44 """ | |
| 45 | |
| 46 export_map_ = None | |
| 47 files_info_ = None | |
| 48 delete_map_ = None | |
| 49 file_pattern_ = r"[ ]+([MADUC])[ ]+/((?:trunk|branches/.*?)/src(.*)/(.*))" | |
| 50 depot_tools_dir_ = os.path.dirname(os.path.abspath(__file__)) | |
| 51 | |
| 52 | |
| 53 def runGcl(subcommand): | |
| 54 gcl_path = os.path.join(depot_tools_dir_, "gcl") | |
| 55 if not os.path.exists(gcl_path): | |
| 56 print "WARNING: gcl not found beside drover.py. Using system gcl instead..." | |
| 57 gcl_path = 'gcl' | |
| 58 | |
| 59 command = "%s %s" % (gcl_path, subcommand) | |
| 60 return os.system(command) | |
| 61 | |
| 62 def gclUpload(revision, author): | |
| 63 command = ("upload " + str(revision) + | |
| 64 " --send_mail --no_presubmit --reviewers=" + author) | |
| 65 return runGcl(command) | |
| 66 | |
| 67 def getSVNInfo(url, revision): | |
| 68 info = {} | |
| 69 svn_info = subprocess2.capture( | |
| 70 ['svn', 'info', '--non-interactive', '%s@%s' % (url, revision)], | |
| 71 stderr=subprocess2.VOID).splitlines() | |
| 72 for line in svn_info: | |
| 73 match = re.search(r"(.*?):(.*)", line) | |
| 74 if match: | |
| 75 info[match.group(1).strip()] = match.group(2).strip() | |
| 76 return info | |
| 77 | |
| 78 def isSVNDirty(): | |
| 79 svn_status = subprocess2.check_output(['svn', 'status']).splitlines() | |
| 80 for line in svn_status: | |
| 81 match = re.search(r"^[^X?]", line) | |
| 82 if match: | |
| 83 return True | |
| 84 | |
| 85 return False | |
| 86 | |
| 87 def getAuthor(url, revision): | |
| 88 info = getSVNInfo(url, revision) | |
| 89 if (info.has_key("Last Changed Author")): | |
| 90 return info["Last Changed Author"] | |
| 91 return None | |
| 92 | |
| 93 def isSVNFile(url, revision): | |
| 94 info = getSVNInfo(url, revision) | |
| 95 if (info.has_key("Node Kind")): | |
| 96 if (info["Node Kind"] == "file"): | |
| 97 return True | |
| 98 return False | |
| 99 | |
| 100 def isSVNDirectory(url, revision): | |
| 101 info = getSVNInfo(url, revision) | |
| 102 if (info.has_key("Node Kind")): | |
| 103 if (info["Node Kind"] == "directory"): | |
| 104 return True | |
| 105 return False | |
| 106 | |
| 107 def inCheckoutRoot(path): | |
| 108 info = getSVNInfo(path, "HEAD") | |
| 109 if (not info.has_key("Repository Root")): | |
| 110 return False | |
| 111 repo_root = info["Repository Root"] | |
| 112 info = getSVNInfo(os.path.dirname(os.path.abspath(path)), "HEAD") | |
| 113 if (info.get("Repository Root", None) != repo_root): | |
| 114 return True | |
| 115 return False | |
| 116 | |
| 117 def getRevisionLog(url, revision): | |
| 118   """Returns the commit log message of the given revision at the svn url.""" | |
| 119 svn_log = subprocess2.check_output( | |
| 120 ['svn', 'log', url, '-r', str(revision)], | |
| 121 universal_newlines=True).splitlines(True) | |
| 122 # Don't include the header lines and the trailing "---..." line. | |
| 123 return ''.join(svn_log[3:-1]) | |
| 124 | |
| 125 def getSVNVersionInfo(): | |
| 126 """Extract version information from SVN""" | |
| 127 svn_info = subprocess2.check_output(['svn', '--version']).splitlines() | |
| 128 info = {} | |
| 129 for line in svn_info: | |
| 130 match = re.search(r"svn, version ((\d+)\.(\d+)\.(\d+))", line) | |
| 131 if match: | |
| 132 info['version'] = match.group(1) | |
| 133 info['major'] = int(match.group(2)) | |
| 134 info['minor'] = int(match.group(3)) | |
| 135 info['patch'] = int(match.group(4)) | |
| 136 return info | |
| 137 | |
| 138 return None | |
| 139 | |
| 140 def isMinimumSVNVersion(major, minor, patch=0): | |
| 141 """Test for minimum SVN version""" | |
| 142 return _isMinimumSVNVersion(getSVNVersionInfo(), major, minor, patch) | |
| 143 | |
| 144 def _isMinimumSVNVersion(version, major, minor, patch=0): | |
| 145 """Test for minimum SVN version, internal method""" | |
| 146 if not version: | |
| 147 return False | |
| 148 | |
| 149 if (version['major'] > major): | |
| 150 return True | |
| 151 elif (version['major'] < major): | |
| 152 return False | |
| 153 | |
| 154 if (version['minor'] > minor): | |
| 155 return True | |
| 156 elif (version['minor'] < minor): | |
| 157 return False | |
| 158 | |
| 159 if (version['patch'] >= patch): | |
| 160 return True | |
| 161 else: | |
| 162 return False | |
| 163 | |
| 164 def checkoutRevision(url, revision, branch_url, revert=False, pop=True): | |
| 165 files_info = getFileInfo(url, revision) | |
| 166 paths = getBestMergePaths2(files_info, revision) | |
| 167 export_map = getBestExportPathsMap2(files_info, revision) | |
| 168 | |
| 169 command = 'svn checkout -N ' + branch_url | |
| 170 print command | |
| 171 os.system(command) | |
| 172 | |
| 173 match = re.search(r"^[a-z]+://.*/(.*)", branch_url) | |
| 174 | |
| 175 if match: | |
| 176 os.chdir(match.group(1)) | |
| 177 | |
| 178 # This line is extremely important due to the way svn behaves in the | |
| 179 # set-depths action. If parents aren't handled before children, the child | |
| 180 # directories get clobbered and the merge step fails. | |
| 181 paths.sort() | |
| 182 | |
| 183 # Checkout the directories that already exist | |
| 184 for path in paths: | |
| 185 if (export_map.has_key(path) and not revert): | |
| 186 print "Exclude new directory " + path | |
| 187 continue | |
| 188 subpaths = path.split('/') | |
| 189     # In the normal case, where no url override is specified and it's just | |
| 190 # chromium source, it's necessary to remove the 'trunk' from the filepath, | |
| 191 # since in the checkout we include 'trunk' or 'branch/\d+'. | |
| 192 # | |
| 193 # However, when a url is specified we want to preserve that because it's | |
| 194 # a part of the filepath and necessary for path operations on svn (because | |
| 195 # frankly, we are checking out the correct top level, and not hacking it). | |
| 196 if pop: | |
| 197 subpaths.pop(0) | |
| 198 base = '' | |
| 199 for subpath in subpaths: | |
| 200 base += '/' + subpath | |
| 201 # This logic ensures that you don't empty out any directories | |
| 202 if not os.path.exists("." + base): | |
| 203 command = ('svn update --depth empty ' + "." + base) | |
| 204 print command | |
| 205 os.system(command) | |
| 206 | |
| 207 if (revert): | |
| 208 files = getAllFilesInRevision(files_info) | |
| 209 else: | |
| 210 files = getExistingFilesInRevision(files_info) | |
| 211 | |
| 212 for f in files: | |
| 213 # Prevent the tool from clobbering the src directory | |
| 214 if (f == ""): | |
| 215 continue | |
| 216 command = ('svn up ".' + f + '"') | |
| 217 print command | |
| 218 os.system(command) | |
| 219 | |
| 220 def mergeRevision(url, revision): | |
| 221 paths = getBestMergePaths(url, revision) | |
| 222 export_map = getBestExportPathsMap(url, revision) | |
| 223 | |
| 224 for path in paths: | |
| 225 if export_map.has_key(path): | |
| 226 continue | |
| 227 command = ('svn merge -N -r ' + str(revision-1) + ":" + str(revision) + " ") | |
| 228 command += " --ignore-ancestry " | |
| 229 command += " -x --ignore-eol-style " | |
| 230 command += url + path + "@" + str(revision) + " ." + path | |
| 231 | |
| 232 print command | |
| 233 os.system(command) | |
| 234 | |
| 235 def exportRevision(url, revision): | |
| 236 paths = getBestExportPathsMap(url, revision).keys() | |
| 237 paths.sort() | |
| 238 | |
| 239 for path in paths: | |
| 240 command = ('svn export -N ' + url + path + "@" + str(revision) + " ." + | |
| 241 path) | |
| 242 print command | |
| 243 os.system(command) | |
| 244 | |
| 245 command = 'svn add .' + path | |
| 246 print command | |
| 247 os.system(command) | |
| 248 | |
| 249 def deleteRevision(url, revision): | |
| 250 paths = getBestDeletePathsMap(url, revision).keys() | |
| 251 paths.sort() | |
| 252 paths.reverse() | |
| 253 | |
| 254 for path in paths: | |
| 255 command = "svn delete ." + path | |
| 256 print command | |
| 257 os.system(command) | |
| 258 | |
| 259 def revertExportRevision(url, revision): | |
| 260 paths = getBestExportPathsMap(url, revision).keys() | |
| 261 paths.sort() | |
| 262 paths.reverse() | |
| 263 | |
| 264 for path in paths: | |
| 265 command = "svn delete ." + path | |
| 266 print command | |
| 267 os.system(command) | |
| 268 | |
| 269 def revertRevision(url, revision): | |
| 270 command = ('svn merge --ignore-ancestry -c -%d %s .' % (revision, url)) | |
| 271 print command | |
| 272 os.system(command) | |
| 273 | |
| 274 def getFileInfo(url, revision): | |
| 275 global files_info_ | |
| 276 | |
| 277 if (files_info_ != None): | |
| 278 return files_info_ | |
| 279 | |
| 280 svn_log = subprocess2.check_output( | |
| 281 ['svn', 'log', url, '-r', str(revision), '-v']).splitlines() | |
| 282 | |
| 283 info = [] | |
| 284 for line in svn_log: | |
| 285     # Strip any trailing " (from ...)" copy annotation first; the | |
| 286     # file_pattern_ regex applied below does not handle it. | |
| 287 match = re.search(r"(.*) \(from.*\)", line) | |
| 288 if match: | |
| 289 line = match.group(1) | |
| 290 match = re.search(file_pattern_, line) | |
| 291 if match: | |
| 292 info.append([match.group(1).strip(), match.group(2).strip(), | |
| 293 match.group(3).strip(),match.group(4).strip()]) | |
| 294 | |
| 295 files_info_ = info | |
| 296 return info | |
| 297 | |
| 298 def getBestMergePaths(url, revision): | |
| 299   """Returns the unique directories touched by the given revision.""" | |
| 300 return getBestMergePaths2(getFileInfo(url, revision), revision) | |
| 301 | |
| 302 def getBestMergePaths2(files_info, revision): | |
| 303   """Returns the unique touched directories from already-parsed file info.""" | |
| 304 return list(set([f[2] for f in files_info])) | |
| 305 | |
| 306 def getBestExportPathsMap(url, revision): | |
| 307 return getBestExportPathsMap2(getFileInfo(url, revision), revision) | |
| 308 | |
| 309 def getBestExportPathsMap2(files_info, revision): | |
| 310   """Returns a map of the directories added in the revision (need export).""" | |
| 311 global export_map_ | |
| 312 | |
| 313 if export_map_: | |
| 314 return export_map_ | |
| 315 | |
| 316 result = {} | |
| 317 for file_info in files_info: | |
| 318 if (file_info[0] == "A"): | |
| 319 if(isSVNDirectory("svn://svn.chromium.org/chrome/" + file_info[1], | |
| 320 revision)): | |
| 321 result[file_info[2] + "/" + file_info[3]] = "" | |
| 322 | |
| 323 export_map_ = result | |
| 324 return result | |
| 325 | |
| 326 def getBestDeletePathsMap(url, revision): | |
| 327 return getBestDeletePathsMap2(getFileInfo(url, revision), revision) | |
| 328 | |
| 329 def getBestDeletePathsMap2(files_info, revision): | |
| 330   """Returns a map of the directories deleted in the revision.""" | |
| 331 global delete_map_ | |
| 332 | |
| 333 if delete_map_: | |
| 334 return delete_map_ | |
| 335 | |
| 336 result = {} | |
| 337 for file_info in files_info: | |
| 338 if (file_info[0] == "D"): | |
| 339 if(isSVNDirectory("svn://svn.chromium.org/chrome/" + file_info[1], | |
| 340 revision)): | |
| 341 result[file_info[2] + "/" + file_info[3]] = "" | |
| 342 | |
| 343 delete_map_ = result | |
| 344 return result | |
| 345 | |
| 346 | |
| 347 def getExistingFilesInRevision(files_info): | |
| 348   """Returns the files in the revision that already exist in the tree. | |
| 349 | |
| 350 Anything that's A will require special treatment (either a merge or an | |
| 351 export + add) | |
| 352 """ | |
| 353 return ['%s/%s' % (f[2], f[3]) for f in files_info if f[0] != 'A'] | |
| 354 | |
| 355 | |
| 356 def getAllFilesInRevision(files_info): | |
| 357   """Returns every file touched in the revision. | |
| 358  | |
| 359   Unlike getExistingFilesInRevision, files that were added ('A') are | |
| 360   included as well. | |
| 361   """ | |
| 362 return ['%s/%s' % (f[2], f[3]) for f in files_info] | |
| 363 | |
| 364 | |
| 365 def getSVNAuthInfo(folder=None): | |
| 366 """Fetches SVN authorization information in the subversion auth folder and | |
| 367 returns it as a dictionary of dictionaries.""" | |
| 368 if not folder: | |
| 369 if sys.platform == 'win32': | |
| 370       folder = '%APPDATA%\\Subversion\\auth' | |
| 371 else: | |
| 372 folder = '~/.subversion/auth' | |
| 373 folder = os.path.expandvars(os.path.expanduser(folder)) | |
| 374 svn_simple_folder = os.path.join(folder, 'svn.simple') | |
| 375 results = {} | |
| 376 try: | |
| 377 for auth_file in os.listdir(svn_simple_folder): | |
| 378 # Read the SVN auth file, convert it into a dictionary, and store it. | |
| 379 results[auth_file] = dict(re.findall(r'K [0-9]+\n(.*)\nV [0-9]+\n(.*)\n', | |
| 380 open(os.path.join(svn_simple_folder, auth_file)).read())) | |
| 381 except Exception as _: | |
| 382 pass | |
| 383 return results | |
| 384 | |
| 385 | |
| 386 def getCurrentSVNUsers(url): | |
| 387   """Tries to fetch the current SVN users of the current checkout by scanning | |
| 388   the SVN authorization folder for a match with the current SVN URL.""" | |
| 389 netloc = urlparse.urlparse(url)[1] | |
| 390 auth_infos = getSVNAuthInfo() | |
| 391 results = [] | |
| 392 for _, auth_info in auth_infos.iteritems(): | |
| 393 if ('svn:realmstring' in auth_info | |
| 394 and netloc in auth_info['svn:realmstring']): | |
| 395 username = auth_info['username'] | |
| 396 results.append(username) | |
| 397 if 'google.com' in username: | |
| 398 results.append(username.replace('google.com', 'chromium.org')) | |
| 399 return results | |
| 400 | |
| 401 | |
| 402 def prompt(question): | |
| 403 while True: | |
| 404 print question + " [y|n]:", | |
| 405 answer = sys.stdin.readline() | |
| 406 if answer.lower().startswith('n'): | |
| 407 return False | |
| 408 elif answer.lower().startswith('y'): | |
| 409 return True | |
| 410 | |
| 411 | |
| 412 def text_prompt(question, default): | |
| 413 print question + " [" + default + "]:" | |
| 414 answer = sys.stdin.readline() | |
| 415 if answer.strip() == "": | |
| 416 return default | |
| 417 return answer | |
| 418 | |
| 419 | |
| 420 def drover(options, args): | |
| 421 revision = options.revert or options.merge | |
| 422 | |
| 423 # Initialize some variables used below. They can be overwritten by | |
| 424 # the drover.properties file. | |
| 425 BASE_URL = "svn://svn.chromium.org/chrome" | |
| 426 REVERT_ALT_URLS = ['svn://svn.chromium.org/blink', | |
| 427 'svn://svn.chromium.org/chrome-internal', | |
| 428 'svn://svn.chromium.org/native_client'] | |
| 429 TRUNK_URL = BASE_URL + "/trunk/src" | |
| 430 BRANCH_URL = BASE_URL + "/branches/$branch/src" | |
| 431 SKIP_CHECK_WORKING = True | |
| 432 PROMPT_FOR_AUTHOR = False | |
| 433 NO_ALT_URLS = options.no_alt_urls | |
| 434 | |
| 435 DEFAULT_WORKING = "drover_" + str(revision) | |
| 436 if options.branch: | |
| 437 DEFAULT_WORKING += ("_" + options.branch) | |
| 438 | |
| 439 if not isMinimumSVNVersion(1, 5): | |
| 440 print "You need to use at least SVN version 1.5.x" | |
| 441 return 1 | |
| 442 | |
| 443 # Override the default properties if there is a drover.properties file. | |
| 444 global file_pattern_ | |
| 445 if os.path.exists("drover.properties"): | |
| 446 print 'Using options from %s' % os.path.join( | |
| 447 os.getcwd(), 'drover.properties') | |
| 448 FILE_PATTERN = file_pattern_ | |
| 449 f = open("drover.properties") | |
| 450 exec(f) | |
| 451 f.close() | |
| 452 if FILE_PATTERN: | |
| 453 file_pattern_ = FILE_PATTERN | |
| 454 NO_ALT_URLS = True | |
| 455 | |
| 456 if options.revert and options.branch: | |
| 457 print 'Note: --branch is usually not needed for reverts.' | |
| 458 url = BRANCH_URL.replace("$branch", options.branch) | |
| 459 elif options.merge and options.sbranch: | |
| 460 url = BRANCH_URL.replace("$branch", options.sbranch) | |
| 461 elif options.revert: | |
| 462 url = options.url or BASE_URL | |
| 463 file_pattern_ = r"[ ]+([MADUC])[ ]+((/.*)/(.*))" | |
| 464 else: | |
| 465 url = TRUNK_URL | |
| 466 | |
| 467 working = options.workdir or DEFAULT_WORKING | |
| 468 | |
| 469 if options.local: | |
| 470 working = os.getcwd() | |
| 471 if not inCheckoutRoot(working): | |
| 472 print "'%s' appears not to be the root of a working copy" % working | |
| 473 return 1 | |
| 474 if (isSVNDirty() and not | |
| 475 prompt("Working copy contains uncommitted files. Continue?")): | |
| 476 return 1 | |
| 477 | |
| 478 if options.revert and not NO_ALT_URLS and not options.url: | |
| 479 for cur_url in [url] + REVERT_ALT_URLS: | |
| 480 try: | |
| 481 commit_date_str = getSVNInfo( | |
| 482 cur_url, options.revert).get('Last Changed Date', 'x').split()[0] | |
| 483 commit_date = datetime.datetime.strptime(commit_date_str, '%Y-%m-%d') | |
| 484 if (datetime.datetime.now() - commit_date).days < 180: | |
| 485 if cur_url != url: | |
| 486 print 'Guessing svn repo: %s.' % cur_url, | |
| 487 print 'Use --no-alt-urls to disable heuristic.' | |
| 488 url = cur_url | |
| 489 break | |
| 490 except ValueError: | |
| 491 pass | |
| 492 command = 'svn log ' + url + " -r "+str(revision) + " -v" | |
| 493 os.system(command) | |
| 494 | |
| 495 if not (options.revertbot or prompt("Is this the correct revision?")): | |
| 496 return 0 | |
| 497 | |
| 498 if (os.path.exists(working)) and not options.local: | |
| 499 if not (options.revertbot or SKIP_CHECK_WORKING or | |
| 500 prompt("Working directory: '%s' already exists, clobber?" % working)): | |
| 501 return 0 | |
| 502 gclient_utils.rmtree(working) | |
| 503 | |
| 504 if not options.local: | |
| 505 os.makedirs(working) | |
| 506 os.chdir(working) | |
| 507 | |
| 508 if options.merge: | |
| 509 action = "Merge" | |
| 510 if not options.local: | |
| 511 branch_url = BRANCH_URL.replace("$branch", options.branch) | |
| 512 # Checkout everything but stuff that got added into a new dir | |
| 513 checkoutRevision(url, revision, branch_url) | |
| 514 # Merge everything that changed | |
| 515 mergeRevision(url, revision) | |
| 516 # "Export" files that were added from the source and add them to branch | |
| 517 exportRevision(url, revision) | |
| 518 # Delete directories that were deleted (file deletes are handled in the | |
| 519 # merge). | |
| 520 deleteRevision(url, revision) | |
| 521 elif options.revert: | |
| 522 action = "Revert" | |
| 523 pop_em = not options.url | |
| 524 checkoutRevision(url, revision, url, True, pop_em) | |
| 525 revertRevision(url, revision) | |
| 526 revertExportRevision(url, revision) | |
| 527 | |
| 528 # Check the base url so we actually find the author who made the change | |
| 529 if options.auditor: | |
| 530 author = options.auditor | |
| 531 else: | |
| 532 author = getAuthor(url, revision) | |
| 533 if not author: | |
| 534 author = getAuthor(TRUNK_URL, revision) | |
| 535 | |
| 536 # Check that the author of the CL is different than the user making | |
| 537 # the revert. If they're the same, then we'll want to prompt the user | |
| 538 # for a different reviewer to TBR. | |
| 539 current_users = getCurrentSVNUsers(BASE_URL) | |
| 540 is_self_revert = options.revert and author in current_users | |
| 541 | |
| 542 filename = str(revision)+".txt" | |
| 543 out = open(filename,"w") | |
| 544 drover_title = '%s %s' % (action, revision) | |
| 545 revision_log = getRevisionLog(url, revision).splitlines() | |
| 546 if revision_log: | |
| 547 commit_title = revision_log[0] | |
| 548 # Limit title to 68 chars so git log --oneline is <80 chars. | |
| 549 max_commit_title = 68 - (len(drover_title) + 3) | |
| 550 if len(commit_title) > max_commit_title: | |
| 551 commit_title = commit_title[:max_commit_title-3] + '...' | |
| 552 drover_title += ' "%s"' % commit_title | |
| 553 out.write(drover_title + '\n\n') | |
| 554 for line in revision_log: | |
| 555 out.write('> %s\n' % line) | |
| 556 if author: | |
| 557 out.write("\nTBR=" + author) | |
| 558 out.close() | |
| 559 | |
| 560 change_cmd = 'change ' + str(revision) + " " + filename | |
| 561 if options.revertbot: | |
| 562 if sys.platform == 'win32': | |
| 563 os.environ['SVN_EDITOR'] = 'cmd.exe /c exit' | |
| 564 else: | |
| 565 os.environ['SVN_EDITOR'] = 'true' | |
| 566 runGcl(change_cmd) | |
| 567 os.unlink(filename) | |
| 568 | |
| 569 if options.local: | |
| 570 return 0 | |
| 571 | |
| 572 print author | |
| 573 print revision | |
| 574 print ("gcl upload " + str(revision) + | |
| 575 " --send_mail --no_presubmit --reviewers=" + author) | |
| 576 | |
| 577 if options.revertbot or prompt("Would you like to upload?"): | |
| 578 if PROMPT_FOR_AUTHOR or is_self_revert: | |
| 579 author = text_prompt("Enter new author or press enter to accept default", | |
| 580 author) | |
| 581 if options.revertbot and options.revertbot_reviewers: | |
| 582 author += "," | |
| 583 author += options.revertbot_reviewers | |
| 584 gclUpload(revision, author) | |
| 585 else: | |
| 586 print "Deleting the changelist." | |
| 587 print "gcl delete " + str(revision) | |
| 588 runGcl("delete " + str(revision)) | |
| 589 return 0 | |
| 590 | |
| 591   # We commit if the revertbot is set to commit automatically, or if this is | |
| 592 # not the revertbot and the user agrees. | |
| 593 if options.revertbot_commit or (not options.revertbot and | |
| 594 prompt("Would you like to commit?")): | |
| 595 print "gcl commit " + str(revision) + " --no_presubmit --force" | |
| 596 return runGcl("commit " + str(revision) + " --no_presubmit --force") | |
| 597 else: | |
| 598 return 0 | |
| 599 | |
| 600 | |
| 601 def main(): | |
| 602 option_parser = optparse.OptionParser(usage=USAGE % {"app": sys.argv[0]}) | |
| 603 option_parser.add_option('-m', '--merge', type="int", | |
| 604 help='Revision to merge from trunk to branch') | |
| 605 option_parser.add_option('-b', '--branch', | |
| 606 help='Branch to revert or merge from') | |
| 607 option_parser.add_option('-l', '--local', action='store_true', | |
| 608 help='Local working copy to merge to') | |
| 609 option_parser.add_option('-s', '--sbranch', | |
| 610 help='Source branch for merge') | |
| 611 option_parser.add_option('-r', '--revert', type="int", | |
| 612 help='Revision to revert') | |
| 613 option_parser.add_option('-w', '--workdir', | |
| 614 help='subdir to use for the revert') | |
| 615 option_parser.add_option('-u', '--url', | |
| 616 help='svn url to use for the revert') | |
| 617 option_parser.add_option('-a', '--auditor', | |
| 618 help='overrides the author for reviewer') | |
| 619 option_parser.add_option('--revertbot', action='store_true', | |
| 620 default=False) | |
| 621 option_parser.add_option('--no-alt-urls', action='store_true', | |
| 622 help='Disable heuristics used to determine svn url') | |
| 623 option_parser.add_option('--revertbot-commit', action='store_true', | |
| 624 default=False) | |
| 625 option_parser.add_option('--revertbot-reviewers') | |
| 626 options, args = option_parser.parse_args() | |
| 627 | |
| 628 if not options.merge and not options.revert: | |
| 629 option_parser.error("You need at least --merge or --revert") | |
| 630 return 1 | |
| 631 | |
| 632 if options.merge and not (options.branch or options.local): | |
| 633 option_parser.error("--merge requires --branch or --local") | |
| 634 return 1 | |
| 635 | |
| 636 if options.local and (options.revert or options.branch): | |
| 637 option_parser.error("--local cannot be used with --revert or --branch") | |
| 638 return 1 | |
| 639 | |
| 640 return drover(options, args) | |
| 641 | |
| 642 | |
| 643 if __name__ == "__main__": | |
| 644 try: | |
| 645 sys.exit(main()) | |
| 646 except KeyboardInterrupt: | |
| 647 sys.stderr.write('interrupted\n') | |
| 648 sys.exit(1) | |
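
For reference, a minimal self-contained sketch (the revision and file paths are made up for illustration) of what the `file_pattern_` regex near the top of the file extracts from `svn log -v` "Changed paths" lines, and how those records are then reduced to paths the way `getBestMergePaths2` and `getExistingFilesInRevision` do:

```python
import re

# Same pattern as file_pattern_ above.
file_pattern_ = r"[ ]+([MADUC])[ ]+/((?:trunk|branches/.*?)/src(.*)/(.*))"

# Hypothetical "Changed paths" lines from `svn log -v` output.
changed_paths = [
    "   M /trunk/src/chrome/browser/foo.cc",
    "   A /trunk/src/chrome/browser/bar/baz.cc",
]

# Each record is [action, repo_path, dir_under_src, file_name], the same
# shape that getFileInfo builds.
files_info = []
for line in changed_paths:
    m = re.search(file_pattern_, line)
    if m:
        files_info.append([g.strip() for g in m.groups()])

print(files_info)
# [['M', 'trunk/src/chrome/browser/foo.cc', '/chrome/browser', 'foo.cc'],
#  ['A', 'trunk/src/chrome/browser/bar/baz.cc', '/chrome/browser/bar', 'baz.cc']]

# Directories to merge (as in getBestMergePaths2); set order may vary.
print(list(set(f[2] for f in files_info)))
# e.g. ['/chrome/browser', '/chrome/browser/bar']

# Files that already exist on the target branch, i.e. everything not added
# (as in getExistingFilesInRevision).
print(['%s/%s' % (f[2], f[3]) for f in files_info if f[0] != 'A'])
# ['/chrome/browser/foo.cc']
```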