| Index: recipe_modules/bot_update/resources/bot_update.py |
| diff --git a/recipe_modules/bot_update/resources/bot_update.py b/recipe_modules/bot_update/resources/bot_update.py |
| index d53320413ceaed04a7389d1a1bf321d0dda5bfea..f31ad7cba9a61691a10fcda6ccc419b4fc774842 100755 |
| --- a/recipe_modules/bot_update/resources/bot_update.py |
| +++ b/recipe_modules/bot_update/resources/bot_update.py |
| @@ -89,7 +89,7 @@ CHROMIUM_SRC_URL = CHROMIUM_GIT_HOST + '/chromium/src.git' |
| # Official builds use buildspecs, so this is a special case. |
| BUILDSPEC_TYPE = collections.namedtuple('buildspec', |
| ('container', 'version')) |
| -BUILDSPEC_RE = (r'^/chrome-internal/trunk/tools/buildspec/' |
hinoka, 2016/08/26 21:59:54:
    If this doesn't work / isn't invoked to begin with
agable, 2016/08/29 19:09:58:
    Deleted a bunch of buildspec code now.
| +BUILDSPEC_RE = (r'^/chrome/tools/buildspec/\+/master/' |
| '(build|branches|releases)/(.+)$') |
| GIT_BUILDSPEC_PATH = ('https://chrome-internal.googlesource.com/chrome/tools/' |
| 'buildspec') |
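A minimal sketch of how the rewritten BUILDSPEC_RE is meant to be consumed. The regex and namedtuple mirror the definitions above; the example path and version number are hypothetical.

import collections
import re

BUILDSPEC_TYPE = collections.namedtuple('buildspec', ('container', 'version'))
BUILDSPEC_RE = re.compile(r'^/chrome/tools/buildspec/\+/master/'
                          r'(build|branches|releases)/(.+)$')

def parse_buildspec(parsed_path):
  """Returns a BUILDSPEC_TYPE if the path names a buildspec, else None."""
  m = BUILDSPEC_RE.match(parsed_path)
  if not m:
    return None
  return BUILDSPEC_TYPE(container=m.group(1), version=m.group(2))

spec = parse_buildspec('/chrome/tools/buildspec/+/master/releases/54.0.2840.10')
# spec == buildspec(container='releases', version='54.0.2840.10');
# modify_solutions() below then points deps_file at <container>/<version>/DEPS.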
| @@ -113,49 +113,10 @@ COMMIT_POSITION_RE = re.compile(r'(.+)@\{#(\d+)\}') |
| # Regular expression to parse gclient's revinfo entries. |
| REVINFO_RE = re.compile(r'^([^:]+):\s+([^@]+)@(.+)$') |
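For reference, a sketch of the strings these two patterns are expected to pick apart; both sample strings are made up for illustration.

import re

COMMIT_POSITION_RE = re.compile(r'(.+)@\{#(\d+)\}')
REVINFO_RE = re.compile(r'^([^:]+):\s+([^@]+)@(.+)$')

cp = COMMIT_POSITION_RE.match('refs/heads/master@{#12345}')
# cp.group(1) == 'refs/heads/master', cp.group(2) == '12345'

ri = REVINFO_RE.match(
    'src/v8: https://chromium.googlesource.com/v8/v8.git@deadbeef')
# ri.group(1) == 'src/v8'                                       (gclient path)
# ri.group(2) == 'https://chromium.googlesource.com/v8/v8.git'  (repo url)
# ri.group(3) == 'deadbeef'                                     (pinned rev)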
| -# Used by 'ResolveSvnRevisionFromGitiles' |
| -GIT_SVN_PROJECT_MAP = { |
| - 'webkit': { |
| - 'svn_url': 'svn://svn.chromium.org/blink', |
| - 'branch_map': [ |
| - (r'trunk', r'refs/heads/master'), |
| - (r'branches/([^/]+)', r'refs/branch-heads/\1'), |
| - ], |
| - }, |
| - 'v8': { |
| - 'svn_url': 'https://v8.googlecode.com/svn', |
| - 'branch_map': [ |
| - (r'trunk', r'refs/heads/candidates'), |
| - (r'branches/bleeding_edge', r'refs/heads/master'), |
| - (r'branches/([^/]+)', r'refs/branch-heads/\1'), |
| - ], |
| - }, |
| - 'nacl': { |
| - 'svn_url': 'svn://svn.chromium.org/native_client', |
| - 'branch_map': [ |
| - (r'trunk/src/native_client', r'refs/heads/master'), |
| - ], |
| - }, |
| -} |
| - |
| -# Key for the 'git-svn' ID metadata commit footer entry. |
| -GIT_SVN_ID_FOOTER_KEY = 'git-svn-id' |
| -# e.g., git-svn-id: https://v8.googlecode.com/svn/trunk@23117 |
| -# ce2b1a6d-e550-0410-aec6-3dcde31c8c00 |
| -GIT_SVN_ID_RE = re.compile(r'((?:\w+)://[^@]+)@(\d+)\s+(?:[a-zA-Z0-9\-]+)') |
| - |
| - |
| -# This is the git mirror of the buildspecs repository. We could rely on the svn |
| -# checkout, now that the git buildspecs are checked in alongside the svn |
| -# buildspecs, but we're going to want to pull all the buildspecs from here |
| -# eventually anyhow, and there's already some logic to pull from git (for the |
| -# old git_buildspecs.git repo), so just stick with that. |
| -GIT_BUILDSPEC_REPO = ( |
| - 'https://chrome-internal.googlesource.com/chrome/tools/buildspec') |
| # Copied from scripts/recipes/chromium.py. |
| GOT_REVISION_MAPPINGS = { |
| - '/chrome/trunk/src': { |
| + CHROMIUM_SRC_URL: { |
| 'src/': 'got_revision', |
| 'src/native_client/': 'got_nacl_revision', |
| 'src/tools/swarm_client/': 'got_swarm_client_revision', |
| @@ -183,18 +144,6 @@ step has two main advantages over them: |
| * it is a slave-side script, so its behavior can be modified without |
| restarting the master. |
| -Why Git, you ask? Because that is the direction that the Chromium project is |
| -heading. This step is an integral part of the transition from using the SVN repo |
| -at chrome/trunk/src to using the Git repo src.git. Please pardon the dust while |
| -we fully convert everything to Git. This message will get out of your way |
| -eventually, and the waterfall will be a happier place because of it. |
| - |
| -This step can be activated or deactivated independently on every builder on |
| -every master. When it is active, the "gclient revert" and "update" steps become |
| -no-ops. When it is inactive, it prints this message, cleans up after itself, and |
| -lets everything else continue as though nothing has changed. Eventually, when |
| -everything is stable enough, this step will replace them entirely. |
| - |
| Debugging information: |
| (master/builder/slave may be unspecified on recipes) |
| master: %(master)s |
| @@ -244,16 +193,6 @@ if BUILD_INTERNAL_DIR: |
| print 'If this is an internal bot, this step may be erroneously inactive.' |
| internal_data = local_vars |
| -RECOGNIZED_PATHS = { |
| - # If SVN path matches key, the entire URL is rewritten to the Git url. |
| - '/chrome/trunk/src': |
| - CHROMIUM_SRC_URL, |
| - '/chrome/trunk/src/tools/cros.DEPS': |
| - CHROMIUM_GIT_HOST + '/chromium/src/tools/cros.DEPS.git', |
| - '/chrome-internal/trunk/src-internal': |
| - 'https://chrome-internal.googlesource.com/chrome/src-internal.git', |
| -} |
| -RECOGNIZED_PATHS.update(internal_data.get('RECOGNIZED_PATHS', {})) |
| ENABLED_MASTERS = [ |
| 'bot_update.always_on', |
| @@ -337,17 +276,6 @@ DISABLED_BUILDERS.update(internal_data.get('DISABLED_BUILDERS', {})) |
| DISABLED_SLAVES = {} |
| DISABLED_SLAVES.update(internal_data.get('DISABLED_SLAVES', {})) |
| -# These masters work only in Git, meaning for got_revision, always output |
| -# a git hash rather than a SVN rev. |
| -GIT_MASTERS = [ |
| - 'client.v8', |
| - 'client.v8.branches', |
| - 'client.v8.ports', |
| - 'tryserver.v8', |
| -] |
| -GIT_MASTERS += internal_data.get('GIT_MASTERS', []) |
| - |
| - |
| # How many times to try before giving up. |
| ATTEMPTS = 5 |
| @@ -384,10 +312,6 @@ class GclientSyncFailed(SubprocessFailed): |
| pass |
| -class SVNRevisionNotFound(Exception): |
| - pass |
| - |
| - |
| class InvalidDiff(Exception): |
| pass |
| @@ -601,10 +525,10 @@ def solutions_printer(solutions): |
| -def solutions_to_git(input_solutions): |
| +def modify_solutions(input_solutions): |
| """Modifies urls in solutions to point at Git repos. |
| - returns: (git solution, svn root of first solution) tuple. |
| + returns: (git solution, buildspec) tuple. |
| """ |
| assert input_solutions |
| solutions = copy.deepcopy(input_solutions) |
| @@ -623,11 +547,8 @@ def solutions_to_git(input_solutions): |
| container=buildspec_m.group(1), |
| version=buildspec_m.group(2), |
| ) |
| - solution['deps_file'] = path.join(buildspec.container, buildspec.version, |
| - 'DEPS') |
| - elif parsed_path in RECOGNIZED_PATHS: |
| - solution['url'] = RECOGNIZED_PATHS[parsed_path] |
| - solution['deps_file'] = '.DEPS.git' |
| + solution['deps_file'] = path.join( |
| + buildspec.container, buildspec.version, 'DEPS') |
| elif parsed_url.scheme == 'https' and 'googlesource' in parsed_url.netloc: |
| pass |
| else: |
| @@ -643,10 +564,6 @@ def solutions_to_git(input_solutions): |
| new_custom_deps[deps_name] = deps_value |
| solution['custom_deps'] = new_custom_deps |
| - if first_solution: |
| - root = parsed_path |
| - first_solution = False |
| - |
| solution['managed'] = False |
| # We don't want gclient to be using a safesync URL. Instead it should |
| # be using the lkgr/lkcr branch/tags. |
| @@ -654,7 +571,9 @@ def solutions_to_git(input_solutions): |
| print 'Removing safesync url %s from %s' % (solution['safesync_url'], |
| parsed_path) |
| del solution['safesync_url'] |
| - return solutions, root, buildspec |
| + first_solution = False |
| + |
| + return solutions, buildspec |
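A hypothetical before/after sketch for modify_solutions() as it now stands: a plain googlesource solution passes through with 'managed' forced to False and any safesync_url dropped, and buildspec stays None because the URL is not a buildspec path. The field values below are illustrative only.

input_solutions = [{
    'name': 'src',
    'url': 'https://chromium.googlesource.com/chromium/src.git',
    'deps_file': 'DEPS',
    'managed': True,
    'safesync_url': 'https://example.com/lkgr',  # hypothetical
}]
solutions, buildspec = modify_solutions(input_solutions)
# solutions[0]['managed'] is now False and 'safesync_url' has been removed;
# buildspec is None since the URL is not under /chrome/tools/buildspec/.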
| def remove(target): |
| @@ -665,28 +584,16 @@ def remove(target): |
| os.rename(target, path.join(dead_folder, uuid.uuid4().hex)) |
| -def ensure_no_checkout(dir_names, scm_dirname): |
| - """Ensure that there is no undesired checkout under build/. |
| - |
| - If there is an incorrect checkout under build/, then |
| - move build/ to build.dead/ |
| - This function will check each directory in dir_names. |
| - |
| - scm_dirname is expected to be either ['.svn', '.git'] |
| - """ |
| - assert scm_dirname in ['.svn', '.git', '*'] |
| - has_checkout = any(path.exists(path.join(os.getcwd(), dir_name, scm_dirname)) |
| +def ensure_no_checkout(dir_names): |
| + """Ensure that there is no undesired checkout under build/.""" |
| + has_checkout = any(path.exists(path.join(os.getcwd(), dir_name, '.git')) |
| for dir_name in dir_names) |
| - if has_checkout or scm_dirname == '*': |
| + if has_checkout: |
| build_dir = os.getcwd() |
| - prefix = '' |
| - if scm_dirname != '*': |
| - prefix = '%s detected in checkout, ' % scm_dirname |
| - |
| for filename in os.listdir(build_dir): |
| deletion_target = path.join(build_dir, filename) |
| - print '%sdeleting %s...' % (prefix, deletion_target), |
| + print '.git detected in checkout, deleting %s...' % deletion_target, |
| remove(deletion_target) |
| print 'done' |
| @@ -780,32 +687,6 @@ def get_commit_message_footer(message, key): |
| return get_commit_message_footer_map(message).get(key) |
| -def get_svn_rev(git_hash, dir_name): |
| - log = git('log', '-1', git_hash, cwd=dir_name) |
| - git_svn_id = get_commit_message_footer(log, GIT_SVN_ID_FOOTER_KEY) |
| - if not git_svn_id: |
| - return None |
| - m = GIT_SVN_ID_RE.match(git_svn_id) |
| - if not m: |
| - return None |
| - return int(m.group(2)) |
| - |
| - |
| -def get_git_hash(revision, branch, sln_dir): |
| - """We want to search for the SVN revision on the git-svn branch. |
| - |
| - Note that git will search backwards from origin/master. |
| - """ |
| - match = "^%s: [^ ]*@%s " % (GIT_SVN_ID_FOOTER_KEY, revision) |
| - ref = branch if branch.startswith('refs/') else 'origin/%s' % branch |
| - cmd = ['log', '-E', '--grep', match, '--format=%H', '--max-count=1', ref] |
| - result = git(*cmd, cwd=sln_dir).strip() |
| - if result: |
| - return result |
| - raise SVNRevisionNotFound('We can\'t resolve svn r%s into a git hash in %s' % |
| - (revision, sln_dir)) |
| - |
| - |
| def emit_log_lines(name, lines): |
| for line in lines.splitlines(): |
| print '@@@STEP_LOG_LINE@%s@%s@@@' % (name, line) |
| @@ -860,17 +741,12 @@ def force_revision(folder_name, revision): |
| branch, revision = split_revision |
| if revision and revision.upper() != 'HEAD': |
| - if revision and revision.isdigit() and len(revision) < 40: |
| - # rev_num is really a svn revision number, convert it into a git hash. |
| - git_ref = get_git_hash(int(revision), branch, folder_name) |
| - else: |
| - # rev_num is actually a git hash or ref, we can just use it. |
| - git_ref = revision |
| - git('checkout', '--force', git_ref, cwd=folder_name) |
| + git('checkout', '--force', revision, cwd=folder_name) |
| else: |
| ref = branch if branch.startswith('refs/') else 'origin/%s' % branch |
| git('checkout', '--force', ref, cwd=folder_name) |
| + |
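A sketch of the simplified resolution in force_revision(): with the svn-number special case gone, whatever follows the optional '<branch>:' prefix goes straight to 'git checkout --force'. This assumes the '<branch>:<rev>' split and the 'master' default from the part of force_revision() not shown in this hunk; the example values are hypothetical.

def resolve_checkout_target(revision):
  """Returns the ref that force_revision() would check out."""
  branch = 'master'
  split_revision = revision.split(':', 1)
  if len(split_revision) == 2:
    branch, revision = split_revision
  if revision and revision.upper() != 'HEAD':
    return revision  # a git hash or any other git ref, used as-is
  return branch if branch.startswith('refs/') else 'origin/%s' % branch

resolve_checkout_target('0123456789abcdef0123456789abcdef01234567')  # the hash
resolve_checkout_target('HEAD')                               # 'origin/master'
resolve_checkout_target('refs/branch-heads/2840:HEAD')  # 'refs/branch-heads/2840'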
| def git_checkout(solutions, revisions, shallow, refs, git_cache_dir): |
| build_dir = os.getcwd() |
| # Before we do anything, break all git_cache locks. |
| @@ -931,16 +807,6 @@ def git_checkout(solutions, revisions, shallow, refs, git_cache_dir): |
| else: |
| raise |
| remove(sln_dir) |
| - except SVNRevisionNotFound: |
| - tries_left -= 1 |
| - if tries_left > 0: |
| - # If we don't have the correct revision, wait and try again. |
| - print 'We can\'t find revision %s.' % revision |
| - print 'The svn to git replicator is probably falling behind.' |
| - print 'waiting 5 seconds and trying again...' |
| - time.sleep(5) |
| - else: |
| - raise |
| git('clean', '-dff', cwd=sln_dir) |
| @@ -961,80 +827,6 @@ def _download(url): |
| raise |
| -def parse_diff(diff): |
| - """Takes a unified diff and returns a list of diffed files and their diffs. |
| - |
| - The return format is a list of pairs of: |
| - (<filename>, <diff contents>) |
| - <diff contents> is inclusive of the diff line. |
| - """ |
| - result = [] |
| - current_diff = '' |
| - current_header = None |
| - for line in diff.splitlines(): |
| - # "diff" is for git style patches, and "Index: " is for SVN style patches. |
| - if line.startswith('diff') or line.startswith('Index: '): |
| - if current_header: |
| - # If we are in a diff portion, then save the diff. |
| - result.append((current_header, '%s\n' % current_diff)) |
| - git_header_match = re.match(r'diff (?:--git )?(\S+) (\S+)', line) |
| - svn_header_match = re.match(r'Index: (.*)', line) |
| - |
| - if git_header_match: |
| - # First, see if its a git style header. |
| - from_file = git_header_match.group(1) |
| - to_file = git_header_match.group(2) |
| - if from_file != to_file and from_file.startswith('a/'): |
| - # Sometimes git prepends 'a/' and 'b/' in front of file paths. |
| - from_file = from_file[2:] |
| - current_header = from_file |
| - |
| - elif svn_header_match: |
| - # Otherwise, check if its an SVN style header. |
| - current_header = svn_header_match.group(1) |
| - |
| - else: |
| - # Otherwise... I'm not really sure what to do with this. |
| - raise InvalidDiff('Can\'t process header: %s\nFull diff:\n%s' % |
| - (line, diff)) |
| - |
| - current_diff = '' |
| - current_diff += '%s\n' % line |
| - if current_header: |
| - # We hit EOF, gotta save the last diff. |
| - result.append((current_header, current_diff)) |
| - return result |
| - |
| - |
| -def get_svn_patch(patch_url): |
| - """Fetch patch from patch_url, return list of (filename, diff)""" |
| - svn_exe = 'svn.bat' if sys.platform.startswith('win') else 'svn' |
| - patch_data = call(svn_exe, 'cat', patch_url) |
| - return parse_diff(patch_data) |
| - |
| - |
| -def apply_svn_patch(patch_root, patches, whitelist=None, blacklist=None): |
| - """Expects a list of (filename, diff), applies it on top of patch_root.""" |
| - if whitelist: |
| - patches = [(name, diff) for name, diff in patches if name in whitelist] |
| - elif blacklist: |
| - patches = [(name, diff) for name, diff in patches if name not in blacklist] |
| - diffs = [diff for _, diff in patches] |
| - patch = ''.join(diffs) |
| - |
| - if patch: |
| - print '===Patching files===' |
| - for filename, _ in patches: |
| - print 'Patching %s' % filename |
| - try: |
| - call(PATCH_TOOL, '-p0', '--remove-empty-files', '--force', '--forward', |
| - stdin_data=patch, cwd=patch_root, tries=1) |
| - for filename, _ in patches: |
| - full_filename = path.abspath(path.join(patch_root, filename)) |
| - git('add', full_filename, cwd=path.dirname(full_filename)) |
| - except SubprocessFailed as e: |
| - raise PatchFailed(e.message, e.code, e.output) |
| - |
| def apply_rietveld_issue(issue, patchset, root, server, _rev_map, _revision, |
| email_file, key_file, whitelist=None, blacklist=None): |
| apply_issue_bin = ('apply_issue.bat' if sys.platform.startswith('win') |
| @@ -1130,51 +922,11 @@ def emit_flag(flag_file): |
| f.write('Success!') |
| -def get_commit_position_for_git_svn(url, revision): |
| - """Generates a commit position string for a 'git-svn' URL/revision. |
| - |
| - If the 'git-svn' URL maps to a known project, we will construct a commit |
| - position branch value by applying substitution on the SVN URL. |
| - """ |
| - # Identify the base URL so we can strip off trunk/branch name |
| - project_config = branch = None |
| - for _, project_config in GIT_SVN_PROJECT_MAP.iteritems(): |
| - if url.startswith(project_config['svn_url']): |
| - branch = url[len(project_config['svn_url']):] |
| - break |
| - |
| - if branch: |
| - # Strip any leading slashes |
| - branch = branch.lstrip('/') |
| - |
| - # Try and map the branch |
| - for pattern, repl in project_config.get('branch_map', ()): |
| - nbranch, subn = re.subn(pattern, repl, branch, count=1) |
| - if subn: |
| - print 'INFO: Mapped SVN branch to Git branch [%s] => [%s]' % ( |
| - branch, nbranch) |
| - branch = nbranch |
| - break |
| - else: |
| - # Use generic 'svn' branch |
| - print 'INFO: Could not resolve project for SVN URL %r' % (url,) |
| - branch = 'svn' |
| - return '%s@{#%s}' % (branch, revision) |
| - |
| - |
| def get_commit_position(git_path, revision='HEAD'): |
| """Dumps the 'git' log for a specific revision and parses out the commit |
| position. |
| If a commit position metadata key is found, its value will be returned. |
| - |
| - Otherwise, we will search for a 'git-svn' metadata entry. If one is found, |
| - we will compose a commit position from it, using its SVN revision value as |
| - the revision. |
| - |
| - If the 'git-svn' URL maps to a known project, we will construct a commit |
| - position branch value by truncating the URL, mapping 'trunk' to |
| - "refs/heads/master". Otherwise, we will return the generic branch, 'svn'. |
| """ |
| git_log = git('log', '--format=%B', '-n1', revision, cwd=git_path) |
| footer_map = get_commit_message_footer_map(git_log) |
| @@ -1184,23 +936,11 @@ def get_commit_position(git_path, revision='HEAD'): |
| footer_map.get(COMMIT_ORIGINAL_POSITION_FOOTER_KEY)) |
| if value: |
| return value |
| - |
| - # Compose a commit position from 'git-svn' metadata |
| - value = footer_map.get(GIT_SVN_ID_FOOTER_KEY) |
| - if value: |
| - m = GIT_SVN_ID_RE.match(value) |
| - if not m: |
| - raise ValueError("Invalid 'git-svn' value: [%s]" % (value,)) |
| - return get_commit_position_for_git_svn(m.group(1), m.group(2)) |
| return None |
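A minimal sketch of the only lookup get_commit_position() still performs: reading the commit-position footer out of the message (assuming the footer key is the usual Cr-Commit-Position). The helper below is a simplified stand-in for get_commit_message_footer_map(), and the commit message is hypothetical.

message = 'Fix a thing\n\nCr-Commit-Position: refs/heads/master@{#123456}\n'

def get_footer(message, key):
  # Simplified stand-in for get_commit_message_footer_map(message).get(key).
  for line in reversed(message.splitlines()):
    if line.startswith(key + ':'):
      return line.split(':', 1)[1].strip()
  return None

get_footer(message, 'Cr-Commit-Position')
# -> 'refs/heads/master@{#123456}', which get_commit_position() returns as-is.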
| -def parse_got_revision(gclient_output, got_revision_mapping, use_svn_revs): |
| - """Translate git gclient revision mapping to build properties. |
| - |
| - If use_svn_revs is True, then translate git hashes in the revision mapping |
| - to svn revision numbers. |
| - """ |
| +def parse_got_revision(gclient_output, got_revision_mapping): |
| + """Translate git gclient revision mapping to build properties.""" |
| properties = {} |
| solutions_output = { |
| # Make sure path always ends with a single slash. |
| @@ -1219,13 +959,7 @@ def parse_got_revision(gclient_output, got_revision_mapping, use_svn_revs): |
| else: |
| # Since we are using .DEPS.git, everything had better be git. |
| assert solution_output.get('scm') == 'git' |
| - git_revision = git('rev-parse', 'HEAD', cwd=dir_name).strip() |
| - if use_svn_revs: |
| - revision = get_svn_rev(git_revision, dir_name) |
| - if not revision: |
| - revision = git_revision |
| - else: |
| - revision = git_revision |
| + revision = git('rev-parse', 'HEAD', cwd=dir_name).strip() |
| commit_position = get_commit_position(dir_name) |
| properties[property_name] = revision |
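A hypothetical sketch of what parse_got_revision() now produces: with the svn translation gone, each mapped directory's property is simply the git HEAD hash of its checkout (plus a commit position when the footer is present).

gclient_output = {'solutions': {
    'src/': {'scm': 'git',
             'url': 'https://chromium.googlesource.com/chromium/src.git'},
}}
got_revision_mapping = {'src/': 'got_revision'}
# parse_got_revision(gclient_output, got_revision_mapping) would return
# something like:
#   {'got_revision': '0123456789abcdef0123456789abcdef01234567'}
# where the hash comes from 'git rev-parse HEAD' run in the src/ checkout.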
| @@ -1257,13 +991,12 @@ def ensure_deps_revisions(deps_url_mapping, solutions, revisions): |
| revisions) |
| if not revision: |
| continue |
| - # TODO(hinoka): Catch SVNRevisionNotFound error maybe? |
| git('fetch', 'origin', cwd=deps_name) |
| force_revision(deps_name, revision) |
| def ensure_checkout(solutions, revisions, first_sln, target_os, target_os_only, |
| - patch_root, issue, patchset, patch_url, rietveld_server, |
| + patch_root, issue, patchset, rietveld_server, |
| gerrit_repo, gerrit_ref, gerrit_rebase_patch_ref, |
| revision_mapping, apply_issue_email_file, |
| apply_issue_key_file, buildspec, gyp_env, shallow, runhooks, |
| @@ -1276,8 +1009,6 @@ def ensure_checkout(solutions, revisions, first_sln, target_os, target_os_only, |
| git_ref = git_checkout(solutions, revisions, shallow, refs, git_cache_dir) |
| patches = None |
| - if patch_url: |
| - patches = get_svn_patch(patch_url) |
| print '===Processing patch solutions===' |
| already_patched = [] |
| @@ -1290,10 +1021,7 @@ def ensure_checkout(solutions, revisions, first_sln, target_os, target_os_only, |
| relative_root = solution['name'][len(patch_root) + 1:] |
| target = '/'.join([relative_root, 'DEPS']).lstrip('/') |
| print ' relative root is %r, target is %r' % (relative_root, target) |
| - if patches: |
| - apply_svn_patch(patch_root, patches, whitelist=[target]) |
| - already_patched.append(target) |
| - elif issue: |
| + if issue: |
| apply_rietveld_issue(issue, patchset, patch_root, rietveld_server, |
| revision_mapping, git_ref, apply_issue_email_file, |
| apply_issue_key_file, whitelist=[target]) |
| @@ -1328,9 +1056,7 @@ def ensure_checkout(solutions, revisions, first_sln, target_os, target_os_only, |
| ensure_deps_revisions(gclient_output.get('solutions', {}), |
| dir_names, revisions) |
| # Apply the rest of the patch here (sans DEPS) |
| - if patches: |
| - apply_svn_patch(patch_root, patches, blacklist=already_patched) |
| - elif issue: |
| + if issue: |
| apply_rietveld_issue(issue, patchset, patch_root, rietveld_server, |
| revision_mapping, git_ref, apply_issue_email_file, |
| apply_issue_key_file, blacklist=already_patched) |
| @@ -1366,15 +1092,9 @@ def parse_revisions(revisions, root): |
| # This is an alt_root@revision argument. |
| current_root, current_rev = split_revision |
| - # We want to normalize svn/git urls into .git urls. |
| parsed_root = urlparse.urlparse(current_root) |
| - if parsed_root.scheme == 'svn': |
| - if parsed_root.path in RECOGNIZED_PATHS: |
| - normalized_root = RECOGNIZED_PATHS[parsed_root.path] |
| - else: |
| - print 'WARNING: SVN path %s not recognized, ignoring' % current_root |
| - continue |
| - elif parsed_root.scheme in ['http', 'https']: |
| + if parsed_root.scheme in ['http', 'https']: |
| + # We want to normalize git urls into .git urls. |
| normalized_root = 'https://%s/%s' % (parsed_root.netloc, |
| parsed_root.path) |
| if not normalized_root.endswith('.git'): |
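A sketch of the normalization that remains in parse_revisions() once the svn branch is deleted: http(s) roots are canonicalized to an https .git URL. The example URL is hypothetical, and non-http roots are handled by code outside this hunk.

import urlparse  # urllib.parse in Python 3

def normalize_root(current_root):
  parsed_root = urlparse.urlparse(current_root)
  if parsed_root.scheme not in ['http', 'https']:
    return current_root  # e.g. a filesystem path; left to the caller
  normalized_root = 'https://' + parsed_root.netloc + parsed_root.path
  if not normalized_root.endswith('.git'):
    normalized_root += '.git'
  return normalized_root

normalize_root('http://chromium.googlesource.com/v8/v8')
# -> 'https://chromium.googlesource.com/v8/v8.git'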
| @@ -1404,7 +1124,7 @@ def parse_args(): |
| parse.add_option('--apply_issue_key_file', |
| help='--private-key-file option passthrough for ' |
| 'apply_patch.py.') |
| - parse.add_option('--patch_url', help='Optional URL to SVN patch.') |
| + parse.add_option('--patch_url', help='DEPRECATED') |
| parse.add_option('--root', dest='patch_root', |
| help='DEPRECATED: Use --patch_root.') |
| parse.add_option('--patch_root', help='Directory to patch on top of.') |
| @@ -1431,13 +1151,10 @@ def parse_args(): |
| help=('Same as revision_mapping, except its a path to a json' |
| ' file containing that format.')) |
| parse.add_option('--revision', action='append', default=[], |
| - help='Revision to check out. Can be an SVN revision number, ' |
| - 'git hash, or any form of git ref. Can prepend ' |
| - 'root@<rev> to specify which repository, where root ' |
| - 'is either a filesystem path, git https url, or ' |
| - 'svn url. To specify Tip of Tree, set rev to HEAD.' |
| - 'To specify a git branch and an SVN rev, <rev> can be ' |
| - 'set to <branch>:<revision>.') |
| + help='Revision to check out. Can be any form of git ref. ' |
| + 'Can prepend root@<rev> to specify which repository, ' |
| + 'where root is either a filesystem path or git https ' |
| + 'url. To specify Tip of Tree, set rev to HEAD. ') |
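Hypothetical examples of --revision values accepted after this change (collected into a list because of action='append'); the hash and branch below are made up.

example_revisions = [
    'HEAD',                                                   # tip of tree
    'src@0123456789abcdef0123456789abcdef01234567',           # root by path
    'https://chromium.googlesource.com/v8/v8@refs/branch-heads/5.4',  # root by url
]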
| parse.add_option('--output_manifest', action='store_true', |
| help=('Add manifest json to the json output.')) |
| parse.add_option('--slave_name', default=socket.getfqdn().split('.')[0], |
| @@ -1511,20 +1228,12 @@ def prepare(options, git_slns, active): |
| dir_names = [sln.get('name') for sln in git_slns if 'name' in sln] |
| # If we're active now, but the flag file doesn't exist (we weren't active |
| # last run) or vice versa, blow away all checkouts. |
| - if bool(active) != bool(check_flag(options.flag_file)): |
| - ensure_no_checkout(dir_names, '*') |
| + if options.clobber or (bool(active) != bool(check_flag(options.flag_file))): |
| + ensure_no_checkout(dir_names) |
| if options.output_json: |
| # Make sure we tell recipes that we didn't run if the script exits here. |
| emit_json(options.output_json, did_run=active) |
| - if active: |
| - if options.clobber: |
| - ensure_no_checkout(dir_names, '*') |
| - else: |
| - ensure_no_checkout(dir_names, '.svn') |
| - emit_flag(options.flag_file) |
| - else: |
| - delete_flag(options.flag_file) |
| - raise Inactive # This is caught in main() and we exit cleanly. |
| + emit_flag(options.flag_file) |
| # Do a shallow checkout if the disk is less than 100GB. |
| total_disk_space, free_disk_space = get_total_disk_space() |
| @@ -1551,8 +1260,7 @@ def prepare(options, git_slns, active): |
| return revisions, step_text |
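A sketch of the simplified decision at the top of prepare(): an explicit --clobber and an active-state flip (flag file missing or stale) now funnel into the same ensure_no_checkout() wipe, and emit_flag() runs unconditionally afterwards.

def should_wipe(clobber, active, flag_file_present):
  return clobber or (bool(active) != bool(flag_file_present))

should_wipe(clobber=False, active=True, flag_file_present=True)   # False
should_wipe(clobber=False, active=True, flag_file_present=False)  # True
should_wipe(clobber=True, active=True, flag_file_present=True)    # True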
| -def checkout(options, git_slns, specs, buildspec, master, |
| - svn_root, revisions, step_text): |
| +def checkout(options, git_slns, specs, buildspec, master, revisions, step_text): |
| first_sln = git_slns[0]['name'] |
| dir_names = [sln.get('name') for sln in git_slns if 'name' in sln] |
| try: |
| @@ -1573,7 +1281,6 @@ def checkout(options, git_slns, specs, buildspec, master, |
| patch_root=options.patch_root, |
| issue=options.issue, |
| patchset=options.patchset, |
| - patch_url=options.patch_url, |
| rietveld_server=options.rietveld_server, |
| gerrit_repo=options.gerrit_repo, |
| gerrit_ref=options.gerrit_ref, |
| @@ -1595,7 +1302,7 @@ def checkout(options, git_slns, specs, buildspec, master, |
| gclient_output = ensure_checkout(**checkout_parameters) |
| except GclientSyncFailed: |
| print 'We failed gclient sync, lets delete the checkout and retry.' |
| - ensure_no_checkout(dir_names, '*') |
| + ensure_no_checkout(dir_names) |
| gclient_output = ensure_checkout(**checkout_parameters) |
| except PatchFailed as e: |
| if options.output_json: |
| @@ -1615,11 +1322,8 @@ def checkout(options, git_slns, specs, buildspec, master, |
| print '@@@STEP_TEXT@%s PATCH FAILED@@@' % step_text |
| raise |
| - # Revision is an svn revision, unless it's a git master. |
| - use_svn_rev = master not in GIT_MASTERS |
| - |
| # Take care of got_revisions outputs. |
| - revision_mapping = dict(GOT_REVISION_MAPPINGS.get(svn_root, {})) |
| + revision_mapping = GOT_REVISION_MAPPINGS.get(git_slns[0]['url'], {}) |
| if options.revision_mapping: |
| revision_mapping.update(options.revision_mapping) |
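A sketch of the new got_revision lookup: GOT_REVISION_MAPPINGS is now keyed by the git URL of the first solution rather than by the old svn root (assuming CHROMIUM_SRC_URL resolves to the public chromium.googlesource.com URL). Note that, unlike the old dict(...) wrapper, .get() here hands back the shared inner dict, so the update() call just above mutates the module-level mapping in place.

GOT_REVISION_MAPPINGS = {
    'https://chromium.googlesource.com/chromium/src.git': {
        'src/': 'got_revision',
        'src/native_client/': 'got_nacl_revision',
    },
}
git_slns = [{'name': 'src',
             'url': 'https://chromium.googlesource.com/chromium/src.git'}]
revision_mapping = GOT_REVISION_MAPPINGS.get(git_slns[0]['url'], {})
# -> {'src/': 'got_revision', 'src/native_client/': 'got_nacl_revision'}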
| @@ -1629,8 +1333,7 @@ def checkout(options, git_slns, specs, buildspec, master, |
| if not revision_mapping: |
| revision_mapping[first_sln] = 'got_revision' |
| - got_revisions = parse_got_revision(gclient_output, revision_mapping, |
| - use_svn_rev) |
| + got_revisions = parse_got_revision(gclient_output, revision_mapping) |
| if not got_revisions: |
| # TODO(hinoka): We should probably bail out here, but in the interest |
| @@ -1705,8 +1408,8 @@ def main(): |
| # Parse, manipulate, and print the gclient solutions. |
| specs = {} |
| exec(options.specs, specs) |
| - svn_solutions = specs.get('solutions', []) |
| - git_slns, svn_root, buildspec = solutions_to_git(svn_solutions) |
| + orig_solutions = specs.get('solutions', []) |
| + git_slns, buildspec = modify_solutions(orig_solutions) |
| options.revision = maybe_ignore_revision(options.revision, buildspec) |
| solutions_printer(git_slns) |
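A hypothetical example of the --specs payload that main() exec()s; the 'solutions' list it defines is what modify_solutions() receives above.

options_specs = """
solutions = [{
    'name': 'src',
    'url': 'https://chromium.googlesource.com/chromium/src.git',
    'deps_file': 'DEPS',
    'managed': True,
}]
"""
specs = {}
exec(options_specs, specs)
orig_solutions = specs.get('solutions', [])
# orig_solutions == the single-element list defined in the string above.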
| @@ -1714,8 +1417,7 @@ def main(): |
| try: |
| # Dun dun dun, the main part of bot_update. |
| revisions, step_text = prepare(options, git_slns, active) |
| - checkout(options, git_slns, specs, buildspec, master, svn_root, revisions, |
| - step_text) |
| + checkout(options, git_slns, specs, buildspec, master, revisions, step_text) |
| except Inactive: |
hinoka, 2016/08/26 21:59:54:
    I don't think "Inactive" is referenced anymore aft
agable, 2016/08/29 19:09:58:
    Right, deleted.
| # Not active, should count as passing. |