Index: drover.py |
=================================================================== |
--- drover.py (revision 29200) |
+++ drover.py (working copy) |
@@ -1,77 +1,95 @@ |
-# Copyright (c) 2009 The Chromium Authors. All rights reserved. |
-# Use of this source code is governed by a BSD-style license that can be |
-# found in the LICENSE file. |
- |
-import os |
-import re |
import subprocess |
import sys |
+import re |
+import os |
import webbrowser |
+export_map_ = None |
+files_info_ = None |
+delete_map_ = None |
+file_pattern_ = r"[ ]+([MADUC])[ ]+/((?:trunk|branches/\d+)/src(.*)/(.*))" |
def deltree(root): |
- """Removes a given directory.""" |
+ """ |
+ Removes a given directory |
+ """ |
if (not os.path.exists(root)): |
return |
- |
+ |
if sys.platform == 'win32': |
os.system('rmdir /S /Q ' + root.replace('/','\\')) |
else: |
for name in os.listdir(root): |
- path = os.path.join(root, name) |
- if os.path.isdir(path): |
- deltree(path) |
- else: |
- os.unlink(path) |
+ path = os.path.join(root, name) |
+ if os.path.isdir(path): |
+ deltree(path) |
+ else: |
+ os.unlink(path) |
os.rmdir(root) |
- |
def clobberDir(dir): |
- """Removes a given directory.""" |
- |
+ """ |
+ Removes a given directory |
+ """ |
+ |
if (os.path.exists(dir)): |
- print dir + " directory found, deleting" |
- # The following line was removed due to access controls in Windows |
- # which make os.unlink(path) calls impossible. |
- # deltree(dir) |
- os.system('rmdir /S /Q ' + dir.replace('/','\\')) |
+ print dir + " directory found, deleting" |
+ #The following line was removed due to access controls in Windows |
+ #which make os.unlink(path) calls impossible. |
+ #deltree(dir) |
+ os.system('rmdir /S /Q ' + dir.replace('/','\\')) |
- |
def gclUpload(revision, author): |
- command = ("gcl upload " + str(revision) + |
+ command = ("gcl upload " + str(revision) + |
" --send_mail --no_try --no_presubmit --reviewers=" + author) |
os.system(command) |
-# subprocess.Popen(command, |
-# shell=True, |
-# stdout=None, |
-# stderr=subprocess.PIPE) |
-# stderr=subprocess.PIPE).stdout.readlines() |
-# for line in svn_info: |
-# match = re.search(r"Issue created. URL: (http://.+)", line) |
-# if match: |
-# return match.group(1) |
- return None |
- |
-def getAuthor(url, revision): |
+def getSVNInfo(url, revision): |
command = 'svn info ' + url + "@"+str(revision) |
- svn_info = subprocess.Popen(command, |
- shell=True, |
- stdout=subprocess.PIPE, |
- stderr=subprocess.PIPE).stdout.readlines() |
+ svn_info = subprocess.Popen(command, |
+ shell=True, |
+ stdout=subprocess.PIPE, |
+ stderr=subprocess.PIPE).stdout.readlines() |
+ rtn = {} |
for line in svn_info: |
- match = re.search(r"Last Changed Author: (.+)", line) |
+ match = re.search(r"(.*?):(.*)", line) |
if match: |
- return match.group(1) |
+ rtn[match.group(1).strip()]=match.group(2).strip() |
+ |
+ return rtn |
+ |
+def getAuthor(url, revision): |
+ info = getSVNInfo(url, revision) |
+ |
+ if (info.has_key("Last Changed Author")): |
+ return info["Last Changed Author"] |
+ |
return None |
+def isSVNFile(url, revision): |
+ info = getSVNInfo(url, revision) |
+ |
+ if (info.has_key("Node Kind")): |
+ if (info["Node Kind"] == "file"): return True |
+ |
+ return False |
+def isSVNDirectory(url, revision): |
+ info = getSVNInfo(url, revision) |
+ |
+ if (info.has_key("Node Kind")): |
+ if (info["Node Kind"] == "directory"): return True |
+ |
+ return False |
+ |
def getRevisionLog(url, revision): |
- """Takes an svn url and gets the associated revision. """ |
+ """ |
+ Takes an svn url and revision and returns the associated log message. |
+ """ |
command = 'svn log ' + url + " -r"+str(revision) |
- svn_info = subprocess.Popen(command, |
- shell=True, |
- stdout=subprocess.PIPE, |
+ svn_info = subprocess.Popen(command, |
+ shell=True, |
+ stdout=subprocess.PIPE, |
stderr=subprocess.PIPE).stdout.readlines() |
rtn= "" |
pos = 0 |
@@ -83,136 +101,225 @@ |
return rtn |
+def checkoutRevision(url, revision, branch_url, revert=False): |
+ files_info = getFileInfo(url, revision) |
+ paths = getBestMergePaths2(files_info, revision) |
+ export_map = getBestExportPathsMap2(files_info, revision) |
+ |
+ command = 'svn checkout -N ' + branch_url |
+ print command |
+ os.system(command) |
-def checkoutRevision(url, revision, branch_url): |
+ match = re.search(r"svn://.*/(.*)", branch_url) |
+ |
+ if match: |
+ os.chdir(match.group(1)) |
+ |
+ #This sort is extremely important due to the way svn behaves with the |
+ #--set-depth option. If parents aren't handled before children, the child |
+ #directories get clobbered and the merge step fails. |
+ paths.sort() |
+ |
+ #Checkout the directories that already exist |
+ for path in paths: |
+ if (export_map.has_key(path) and not revert): |
+ print "Exclude new directory " + path |
+ continue |
+ subpaths = path.split('/') |
+ subpaths.pop(0) |
+ base = '' |
+ for subpath in subpaths: |
+ base += '/' + subpath |
+ #This logic ensures that you don't empty out any directories |
+ if not os.path.exists("." + base): |
+ command = ('svn update --depth empty ' + "." + base) |
+ print command |
+ os.system(command) |
+ |
+ if (revert): |
+ files = getAllFilesInRevision(files_info) |
+ else: |
+ files = getExistingFilesInRevision(files_info) |
+ |
+ for file in files: |
+ #Prevent the tool from clobbering the src directory |
+ if (file == ""): |
+ continue |
+ command = ('svn up ".' + file + '"') |
+ print command |
+ os.system(command) |
+ |
+def mergeRevision(url, revision): |
paths = getBestMergePaths(url, revision) |
- deltree('./src') |
- if not os.path.exists('./src'): |
- command = 'svn checkout -N ' + branch_url |
+ export_map = getBestExportPathsMap(url, revision) |
+ |
+ for path in paths: |
+ if export_map.has_key(path): |
+ continue |
+ command = ('svn merge -N -r ' + str(revision-1) + ":" + str(revision) + " ") |
+ command = command + url + path + "@" + str(revision) + " ." + path |
+ |
print command |
os.system(command) |
- # This line is extremely important due to the way svn behaves in the |
- # set-depths action. If parents aren't handled before children, the child |
- # directories get clobbered and the merge step fails. |
+def exportRevision(url, revision): |
+ paths = getBestExportPathsMap(url, revision).keys() |
+ |
paths.sort() |
+ |
for path in paths: |
- subpaths = path.split('/') |
- subpaths.pop(0) |
- base = './src' |
- for subpath in subpaths: |
- base += '/' + subpath |
- if not os.path.exists(base): |
- command = ('svn update --depth empty ' + base) |
- print command |
- os.system(command) |
- else: |
- print "Found " + base |
+ command = ('svn export -N ' + url + path + "@" + str(revision) + " ." |
+ + path) |
+ print command |
+ os.system(command) |
+ |
+ command = ('svn add .' + path) |
+ print command |
+ os.system(command) |
- for file in getFilesInRevision(url, revision): |
- # Prevent the tool from clobbering the src directory. |
- if (file == ""): |
- continue |
- command = ('svn up ./src' + file) |
+def deleteRevision(url, revision): |
+ paths = getBestDeletePathsMap(url, revision).keys() |
+ paths.sort() |
+ paths.reverse() |
+ |
+ for path in paths: |
+ command = ("svn delete ." + path) |
print command |
os.system(command) |
-def mergeRevision(url, revision, ignoreAncestry=False): |
- paths = getBestMergePaths(url, revision) |
+def revertExportRevision(url, revision): |
+ paths = getBestExportPathsMap(url, revision).keys() |
+ paths.sort() |
+ paths.reverse() |
+ |
for path in paths: |
- command = ('svn merge -N -r ' + str(revision-1) + ":" + str(revision) + " ") |
- if (ignoreAncestry): |
- command = command + " --ignore-ancestry " |
- command = command + url + path + " ./src" + path |
+ command = ("svn delete ." + path) |
print command |
os.system(command) |
- |
def revertRevision(url, revision): |
paths = getBestMergePaths(url, revision) |
for path in paths: |
- command = ('svn merge -N -r ' + str(revision) + ":" + str(revision-1) + |
- " " + url + path + " ./src" + path) |
+ command = ('svn merge -N -r ' + str(revision) + ":" + str(revision-1) + |
+ " " + url + path + " ." + path) |
print command |
os.system(command) |
+ |
+def getFileInfo(url, revision): |
+ global files_info_, file_pattern_ |
+ |
+ if (files_info_ != None): |
+ return files_info_ |
- |
-def getBestMergePaths(url, revision): |
- """Takes an svn url and gets the associated revision.""" |
- command = 'svn log ' + url + " -r "+str(revision) + " -v" |
- svn_info = subprocess.Popen(command, |
- shell=True, |
- stdout=subprocess.PIPE, |
+ command = 'svn log ' + url + " -r " + str(revision) + " -v" |
+ svn_log = subprocess.Popen(command, |
+ shell=True, |
+ stdout=subprocess.PIPE, |
stderr=subprocess.PIPE).stdout.readlines() |
- map = {} |
- for line in svn_info: |
- match = re.search(r"[\n\r ]+[MADUC][\n\r ]+/(?:trunk|branches/\d+)/src([^ ]*)/[^ ]+", line) |
+ |
+ rtn = [] |
+ for line in svn_log: |
+ #Workaround: strip the trailing " (from ...)" copy annotation first, since |
+ #the file pattern regex does not handle it in the second pass. |
+ match = re.search(r"(.*) \(from.*\)", line) |
if match: |
- map[match.group(1)] = match.group(1) |
+ line = match.group(1) |
+ match = re.search(file_pattern_, line) |
+ if match: |
+ rtn.append([match.group(1).strip(), match.group(2).strip(), |
+ match.group(3).strip(),match.group(4).strip()]) |
- return map.keys() |
+ files_info_ = rtn |
+ return rtn |
+def getBestMergePaths(url, revision): |
+ """ |
+ Takes an svn url and revision and returns the changed paths to merge. |
+ """ |
+ return getBestMergePaths2(getFileInfo(url, revision), revision) |
-def getFilesInRevision(url, revision): |
- """Takes an svn url and gets the associated revision.""" |
- command = 'svn log ' + url + " -r "+str(revision) + " -v" |
- svn_info = subprocess.Popen(command, |
- shell=True, |
- stdout=subprocess.PIPE, |
- stderr=subprocess.PIPE).stdout.readlines() |
- map = {} |
- for line in svn_info: |
- match = re.search(r"[\n\r ]+[MADUC][\n\r ]+/(?:trunk|branches/\d+)/src([^ ]*)/([^ ]+)", line) |
- if match: |
- map[match.group(1) + "/" + match.group(2)] = match.group(1) + "/" + match.group(2) |
+def getBestMergePaths2(files_info, revision): |
+ """ |
+ Takes the revision file info and returns the unique changed directory paths. |
+ """ |
+ map = dict() |
+ for file_info in files_info: |
+ map[file_info[2]] = file_info[2] |
+ |
return map.keys() |
+def getBestExportPathsMap(url, revision): |
+ return getBestExportPathsMap2(getFileInfo(url, revision), revision) |
+ |
+def getBestExportPathsMap2(files_info, revision): |
+ """ |
+ Takes the revision file info and returns a map of added directories that need export+add. |
+ """ |
+ global export_map_ |
+ |
+ if export_map_: |
+ return export_map_ |
-def getBestMergePath(url, revision): |
- """Takes an svn url and gets the associated revision.""" |
- command = 'svn log ' + url + " -r "+str(revision) + " -v" |
- svn_info = subprocess.Popen(command, |
- shell=True, |
- stdout=subprocess.PIPE, |
- stderr=subprocess.PIPE).stdout.readlines() |
- best_path = None |
- for line in svn_info: |
- match = re.search(r"[\n\r ]+[MADUC][\n\r ]+/.*/src(.*)/.+", line) |
- if match: |
- if (best_path == None): |
- best_path = match.group(1) |
- else: |
- best_path = leastPath(match.group(1),best_path) |
+ map = dict() |
+ for file_info in files_info: |
+ if (file_info[0] == "A"): |
+ if(isSVNDirectory("svn://chrome-svn/chrome/" + file_info[1], revision)): |
+ map[file_info[2] + "/" + file_info[3]] = "" |
- return best_path |
+ export_map_ = map |
+ |
+ return map |
+def getBestDeletePathsMap(url, revision): |
+ return getBestDeletePathsMap2(getFileInfo(url, revision), revision) |
-def leastPath(a, b): |
- if (not a) or (a == ""): |
- return "" |
- if (b == ""): |
- return "" |
- if (not b): |
- return a |
+def getBestDeletePathsMap2(files_info, revision): |
+ """ |
+ Takes the revision file info and returns a map of deleted directories. |
+ """ |
+ global delete_map_ |
+ |
+ if delete_map_: |
+ return delete_map_ |
- a_list = a.lstrip("/").split("/") |
- b_list = b.lstrip("/").split("/") |
- last_match = "" |
- while((len(a_list) != 0) and (len(b_list) != 0)): |
- a_value = a_list.pop(0) |
- b_value = b_list.pop(0) |
- if (a_value == b_value): |
- last_match = last_match + "/" + a_value |
- else: |
- break |
+ map = dict() |
+ for file_info in files_info: |
+ if (file_info[0] == "D"): |
+ if(isSVNDirectory("svn://chrome-svn/chrome/" + file_info[1], revision)): |
+ map[file_info[2] + "/" + file_info[3]] = "" |
- return last_match |
+ delete_map_ = map |
+ |
+ return map |
+ |
+def getExistingFilesInRevision(files_info): |
+ """ |
+ Checks for existing files in the revision, anything that's A will require |
+ special treatment (either a merge or an export + add) |
+ """ |
+ map = [] |
+ for file_info in files_info: |
+ if file_info[0] != "A": |
+ map.append(file_info[2] + "/" + file_info[3]) |
+ return map |
+def getAllFilesInRevision(files_info): |
+ """ |
+ Returns every file path touched in the revision, regardless of its |
+ action (modified, added, or deleted). |
+ """ |
+ map = [] |
+ for file_info in files_info: |
+ map.append(file_info[2] + "/" + file_info[3]) |
+ |
+ return map |
+ |
def prompt(question): |
p = None |
+ |
while not p: |
print question + " [y|n]:" |
p = sys.stdin.readline() |
@@ -221,13 +328,30 @@ |
elif p.lower().startswith('y'): |
return True |
else: |
- p = None |
+ p = None |
- |
+def text_prompt(question, default): |
+ print question + " [" + default + "]:" |
+ p = sys.stdin.readline() |
+ if p.strip() == "": |
+ return default |
+ return p |
+ |
def main(argv=None): |
BASE_URL = "svn://chrome-svn/chrome" |
TRUNK_URL = BASE_URL + "/trunk/src" |
- BRANCH_URL = None |
+ BRANCH_URL = BASE_URL + "/branches/$branch/src" |
+ DEFAULT_WORKING = "working" |
+ SKIP_CHECK_WORKING = True |
+ PROMPT_FOR_AUTHOR = False |
+ |
+ global file_pattern_ |
+ if os.path.exists("drover.properties"): |
+ file = open("drover.properties") |
+ exec(file) |
+ file.close() |
+ if FILE_PATTERN: |
+ file_pattern_ = FILE_PATTERN |
if (len(sys.argv) == 1): |
print "WARNING: Please use this tool in an empty directory (or at least one" |
@@ -251,45 +375,57 @@ |
revision = int(sys.argv[1]) |
if ((len(sys.argv) >= 4) and (sys.argv[2] in ['--revert','-r'])): |
- BRANCH_URL = BASE_URL + "/branches/" + sys.argv[3] + "/src" |
- url = BRANCH_URL |
+ url = BRANCH_URL.replace("$branch", sys.argv[3]) |
else: |
url = TRUNK_URL |
action = "Merge" |
+ |
+ working = DEFAULT_WORKING |
+ |
command = 'svn log ' + url + " -r "+str(revision) + " -v" |
os.system(command) |
+ |
if not prompt("Is this the correct revision?"): |
sys.exit(0) |
- |
+ |
+ if (os.path.exists(working)): |
+ if not (SKIP_CHECK_WORKING or prompt("Working directory: '" + working + "' already exists, clobber?")): |
+ sys.exit(0) |
+ deltree(working) |
+ |
+ os.makedirs(working) |
+ os.chdir(working) |
+ |
if (len(sys.argv) > 1): |
if sys.argv[2] in ['--merge','-m']: |
if (len(sys.argv) != 4): |
print "Please specify the branch # you want (i.e. 182) after --merge" |
sys.exit(0) |
- |
- branch_url = "svn://chrome-svn/chrome/branches/" + sys.argv[3] + "/src" |
+ |
+ branch_url = BRANCH_URL.replace("$branch", sys.argv[3]) |
+ #Checkout everything but stuff that got added into a new dir |
checkoutRevision(url, revision, branch_url) |
+ #Merge everything that changed |
mergeRevision(url, revision) |
- elif sys.argv[2] in ['--mplus','-p']: |
- if (len(sys.argv) != 4): |
- print "Please specify the branch # you want (i.e. 182) after --merge" |
- sys.exit(0) |
- branch_url = "svn://chrome-svn/chrome/branches/" + sys.argv[3] + "/src" |
- checkoutRevision(url, revision, branch_url) |
- mergeRevision(url, revision, True) |
+ #"Export" files that were added from the source and add them to branch |
+ exportRevision(url, revision) |
+ #Delete directories that were deleted (file deletes are handled in the |
+ #merge). |
+ deleteRevision(url, revision) |
elif sys.argv[2] in ['--revert','-r']: |
if (len(sys.argv) == 4): |
- url = "svn://chrome-svn/chrome/branches/" + sys.argv[3] + "/src" |
- checkoutRevision(url, revision, url) |
+ url = BRANCH_URL.replace("$branch", sys.argv[3]) |
+ checkoutRevision(url, revision, url, True) |
revertRevision(url, revision) |
+ revertExportRevision(url, revision) |
action = "Revert" |
else: |
print "Unknown parameter " + sys.argv[2] |
sys.exit(0) |
- |
- os.chdir('./src') |
- # Check the base url so we actually find the author who made the change. |
- author = getAuthor(BASE_URL, revision) |
+ |
+ #Check the base url so we actually find the author who made the change |
+ author = getAuthor(TRUNK_URL, revision) |
+ |
filename = str(revision)+".txt" |
out = open(filename,"w") |
out.write(action +" " + str(revision) + " - ") |
@@ -297,15 +433,19 @@ |
if (author): |
out.write("TBR=" + author) |
out.close() |
+ |
os.system('gcl change ' + str(revision) + " " + filename) |
os.unlink(filename) |
print author |
print revision |
- print "gcl upload " + str(revision) + " --send_mail --no_try --no_presubmit --reviewers=" + author |
+ print ("gcl upload " + str(revision) + |
+ " --send_mail --no_try --no_presubmit --reviewers=" + author) |
print "gcl commit " + str(revision) + " --no_presubmit --force" |
print "gcl delete " + str(revision) |
if prompt("Would you like to upload?"): |
+ if PROMPT_FOR_AUTHOR: |
+ author = text_prompt("Enter a new author or press enter to accept default", author) |
gclUpload(revision, author) |
else: |
print "Deleting the changelist." |
@@ -316,7 +456,6 @@ |
os.system("gcl commit " + str(revision) + " --no_presubmit --force") |
else: |
sys.exit(0) |
- |
- |
+ |
if __name__ == "__main__": |
- sys.exit(main()) |
+ sys.exit(main()) |