| Index: prebuilt.py
|
| diff --git a/prebuilt.py b/prebuilt.py
|
| index ee9ecf1887d3e416f6744b5c995d6eace7f39d56..c422cbe6f617009e43ca0f64d2ecc17d9a4f9653 100755
|
| --- a/prebuilt.py
|
| +++ b/prebuilt.py
|
| @@ -13,6 +13,7 @@ import tempfile
|
| import time
|
|
|
| from chromite.lib import cros_build_lib
|
| +from chromite.lib.binpkg import GrabRemotePackageIndex, PackageIndex
|
| """
|
| This script is used to upload host prebuilts as well as board BINHOSTS.
|
|
|
| @@ -117,7 +118,7 @@ def UpdateLocalFile(filename, value, key='PORTAGE_BINHOST'):
|
| file_fh.close()
|
| # write out new file
|
| new_file_fh = open(filename, 'w')
|
| - new_file_fh.write('\n'.join(file_lines))
|
| + new_file_fh.write('\n'.join(file_lines) + '\n')
|
| new_file_fh.close()
|
|
|
|
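| For context, a quick sketch of how UpdateLocalFile() is meant to be used; the
| make.conf path and URL are hypothetical, and the KEY="value" line format is
| inferred from the commit description that RevGitFile() builds further down.
| With the change above, the rewritten file now ends in a trailing newline.
|
| # Hypothetical call: rewrite the PORTAGE_BINHOST="..." line in make.conf.
| UpdateLocalFile('src/overlays/overlay-x86-generic/make.conf',
|                 'gs://chromeos-prebuilt/host/1.2.3/',
|                 key='PORTAGE_BINHOST')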
|
| @@ -143,7 +144,8 @@ def RevGitPushWithRetry(retries=5):
|
| raise GitPushFailed('Failed to push change after %s retries' % retries)
|
|
|
|
|
| -def RevGitFile(filename, value, retries=5):
|
| +def RevGitFile(filename, value, retries=5, key='PORTAGE_BINHOST',
|
| + skip_push=False):
|
| """Update and push the git file.
|
|
|
| Args:
|
| @@ -151,6 +153,8 @@ def RevGitFile(filename, value, retries=5):
|
| value: string representing the version of the prebuilt that has been
|
| uploaded.
|
| retries: The number of times to retry before giving up, default: 5
|
| + key: The variable key to update in the git file.
|
| + (Default: PORTAGE_BINHOST)
|
| """
|
| prebuilt_branch = 'prebuilt_branch'
|
| old_cwd = os.getcwd()
|
| @@ -162,15 +166,18 @@ def RevGitFile(filename, value, retries=5):
|
| 'git config url.ssh://git@gitrw.chromium.org:9222.pushinsteadof '
|
| 'http://git.chromium.org/git')
|
| cros_build_lib.RunCommand(git_ssh_config_cmd, shell=True)
|
| - description = 'Update PORTAGE_BINHOST="%s" in %s' % (value, filename)
|
| + description = 'Update %s="%s" in %s' % (key, value, filename)
|
| print description
|
| try:
|
| - UpdateLocalFile(filename, value)
|
| + UpdateLocalFile(filename, value, key)
|
| cros_build_lib.RunCommand('git config push.default tracking', shell=True)
|
| cros_build_lib.RunCommand('git commit -am "%s"' % description, shell=True)
|
| - RevGitPushWithRetry(retries)
|
| + if not skip_push:
|
| + RevGitPushWithRetry(retries)
|
| finally:
|
| - cros_build_lib.RunCommand('repo abandon %s .' % prebuilt_branch, shell=True)
|
| + if not skip_push:
|
| + cros_build_lib.RunCommand('repo abandon %s .' % prebuilt_branch,
|
| + shell=True)
|
| os.chdir(old_cwd)
|
|
|
|
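| For reference, a minimal sketch of a call that exercises the new key and
| skip_push parameters; the file path and URL are hypothetical:
|
| # Commit the updated value locally but skip the push (and the matching
| # 'repo abandon' cleanup), as happens when --skip-push is passed.
| RevGitFile('src/overlays/overlay-x86-generic/make.conf',
|            'gs://chromeos-prebuilt/board/x86-generic/1.2.3/',
|            key='PORTAGE_BINHOST',
|            skip_push=True)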
|
| @@ -225,64 +232,120 @@ def ShouldFilterPackage(file_path):
|
| return False
|
|
|
|
|
| -def _ShouldFilterPackageFileSection(section):
|
| - """Return whether an section in the package file should be filtered out.
|
| +def FilterPackagesIndex(pkgindex):
|
| + """Remove filtered packages from pkgindex.
|
|
|
| Args:
|
| - section: The section, as a list of strings.
|
| + pkgindex: The PackageIndex object.
|
| + """
|
|
|
| - Returns:
|
| - True if the section should be excluded.
|
| + filtered = [p for p in pkgindex.packages if not ShouldFilterPackage(p['CPV'])]
|
| + if filtered != pkgindex.packages:
|
| + pkgindex.modified = True
|
| + pkgindex.packages = filtered
|
| +
|
| +
|
| +def ReadLocalPackageIndex(package_path):
|
| + """Read a local packages file from disk into a PackageIndex() object.
|
| +
|
| + Args:
|
| + package_path: Directory containing Packages file.
|
| """
|
| + packages_file = file(os.path.join(package_path, 'Packages'))
|
| + pkgindex = PackageIndex()
|
| + pkgindex.Read(packages_file)
|
| + packages_file.close()
|
| + return pkgindex
|
|
|
| - for line in section:
|
| - if line.startswith("CPV: "):
|
| - package = line.replace("CPV: ", "").rstrip()
|
| - if ShouldFilterPackage(package):
|
| - return True
|
| - else:
|
| - return False
|
|
|
| +def RebasePackageIndex(pkgindex, binhost_base_url, path_prefix):
|
| + """Update package index to point at specified location.
|
|
|
| -def FilterPackagesFile(packages_filename):
|
| - """Read a portage Packages file and filter out private packages.
|
| + Args:
|
| + pkgindex: PackageIndex object to update.
|
| + binhost_base_url: The base URI to use for the packages file.
|
| + path_prefix: The path prefix to prepend to each package path.
|
| + """
|
| + pkgindex.header['URI'] = binhost_base_url
|
| + for pkg in pkgindex.packages:
|
| + pkg['PATH'] = os.path.join(path_prefix, pkg['CPV'] + '.tbz2')
|
|
|
| - The new, filtered packages file is written to a temporary file.
|
| +
|
| +def WritePackageIndex(pkgindex):
|
| + """Write pkgindex to a temporary file.
|
|
|
| Args:
|
| - packages_filename: The filename of the Packages file.
|
| + pkgindex: The PackageIndex object.
|
|
|
| Returns:
|
| - filtered_packages: A filtered Packages file, as a NamedTemporaryFile.
|
| + A temporary file containing the packages from pkgindex.
|
| """
|
|
|
| - packages_file = open(packages_filename)
|
| - filtered_packages = tempfile.NamedTemporaryFile()
|
| - section = []
|
| - for line in packages_file:
|
| - if line == "\n":
|
| - if not _ShouldFilterPackageFileSection(section):
|
| - # Looks like this section doesn't contain a private package. Write it
|
| - # out.
|
| - filtered_packages.write("".join(section))
|
| + f = tempfile.NamedTemporaryFile()
|
| + pkgindex.Write(f)
|
| + f.flush()
|
| + f.seek(0)
|
| + return f
|
|
|
| - # Start next section.
|
| - section = []
|
|
|
| - section.append(line)
|
| - else:
|
| - if not _ShouldFilterPackageFileSection(section):
|
| - filtered_packages.write("".join(section))
|
| - packages_file.close()
|
| +def GrabPackageURLs(pkgindex):
|
| + """Grab the SHA1s and URLs from the specified package index.
|
| +
|
| + Args:
|
| + pkgindex: A package index.
|
| +
|
| + Returns:
|
| + A dict mapping SHA1 hashes to URLs.
|
| + """
|
| + db = {}
|
| + base_uri = pkgindex.header['URI']
|
| + for package in pkgindex.packages:
|
| + cpv, sha1 = package['CPV'], package['SHA1']
|
| + path = package.get('PATH', cpv + '.tbz2')
|
| + db[sha1] = os.path.join(base_uri, path)
|
| + return db
|
| +
|
|
|
| - # Flush contents to disk.
|
| - filtered_packages.flush()
|
| - filtered_packages.seek(0)
|
| +def FindDuplicates(pkgindex, dupdb):
|
| + """Find duplicates in the specified package index.
|
| +
|
| + Args:
|
| + pkgindex: The PackageIndex object.
|
| + dupdb: A dictionary mapping SHA1s to URLs, as output by GrabPackageURLs.
|
| +
|
| + Returns:
|
| + orig: The packages from pkgindex that were not found in dupdb.
|
| + dups: A list of tuples (pkg, uri) where pkg is a duplicated package, and
|
| + uri is where the package can be found.
|
| + """
|
| + orig, dups = [], []
|
| + origdb = GrabPackageURLs(pkgindex)
|
| + for pkg in pkgindex.packages:
|
| + sha1 = pkg['SHA1']
|
| + uri = dupdb.get(sha1)
|
| + if uri:
|
| + dups.append((pkg, uri))
|
| + else:
|
| + orig.append(pkg)
|
| + return orig, dups
|
|
|
| - return filtered_packages
|
|
|
| +def ResolveDuplicates(pkgindex, dups):
|
| + """Point duplicate packages in the index at their existing uploads.
|
|
|
| -def _RetryRun(cmd, print_cmd=True, shell=False):
|
| + Args:
|
| + pkgindex: The PackageIndex object.
|
| + dups: A list of duplicates, as returned by FindDuplicates.
|
| + """
|
| + prefix = pkgindex.header['URI']
|
| + if dups:
|
| + pkgindex.modified = True
|
| + for pkg, uri in dups:
|
| + assert uri.startswith(prefix)
|
| + pkg['PATH'] = uri.replace(prefix, '').lstrip('/')
|
| +
|
| +
|
| +def _RetryRun(cmd, print_cmd=True, shell=False, cwd=None):
|
| """Run the specified command, retrying if necessary.
|
|
|
| Args:
|
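| The helpers above all work on the same in-memory shape: pkgindex.header and
| each entry of pkgindex.packages are used as plain dicts of strings. A minimal,
| self-contained sketch of the duplicate-resolution flow, with invented CPVs,
| SHA1s and URLs (normally the index would be populated by Read() or
| GrabRemotePackageIndex() rather than by hand):
|
| new_index = PackageIndex()
| new_index.header = {'URI': 'gs://chromeos-prebuilt'}
| new_index.packages = [
|     {'CPV': 'sys-apps/portage-2.1.9', 'SHA1': 'aaa111',
|      'PATH': 'host/1.2.3/sys-apps/portage-2.1.9.tbz2'},
|     {'CPV': 'dev-lang/python-2.6.6', 'SHA1': 'bbb222',
|      'PATH': 'host/1.2.3/dev-lang/python-2.6.6.tbz2'},
| ]
|
| # Suppose an earlier binhost already serves an identical python tarball.
| old_urls = {'bbb222':
|             'gs://chromeos-prebuilt/host/1.2.2/dev-lang/python-2.6.6.tbz2'}
|
| orig, dups = FindDuplicates(new_index, old_urls)
| # orig -> [the portage entry]; dups -> [(python entry, old URL)]
| ResolveDuplicates(new_index, dups)
| # The python entry's PATH now points back into host/1.2.2/, so only the
| # portage tarball needs to be uploaded for this run.
|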
| @@ -298,7 +361,8 @@ def _RetryRun(cmd, print_cmd=True, shell=False):
|
| # cros_build_lib.
|
| for attempt in range(_RETRIES):
|
| try:
|
| - output = cros_build_lib.RunCommand(cmd, print_cmd=print_cmd, shell=shell)
|
| + output = cros_build_lib.RunCommand(cmd, print_cmd=print_cmd, shell=shell,
|
| + cwd=cwd)
|
| return True
|
| except cros_build_lib.RunCommandError:
|
| print 'Failed to run %s' % cmd
|
| @@ -317,12 +381,6 @@ def _GsUpload(args):
|
| Return the arg tuple of two if the upload failed
|
| """
|
| (local_file, remote_file) = args
|
| - if ShouldFilterPackage(local_file):
|
| - return
|
| -
|
| - if local_file.endswith("/Packages"):
|
| - filtered_packages_file = FilterPackagesFile(local_file)
|
| - local_file = filtered_packages_file.name
|
|
|
| cmd = '%s cp -a public-read %s %s' % (_GSUTIL_BIN, local_file, remote_file)
|
| if not _RetryRun(cmd, print_cmd=False, shell=True):
|
| @@ -356,22 +414,24 @@ def RemoteUpload(files, pool=10):
|
| pass
|
|
|
|
|
| -def GenerateUploadDict(local_path, gs_path):
|
| - """Build a dictionary of local remote file key pairs for gsutil to upload.
|
| +def GenerateUploadDict(base_local_path, base_remote_path, pkgs):
|
| + """Build a dictionary of local remote file key pairs to upload.
|
|
|
| Args:
|
| - local_path: A path to the file on the local hard drive.
|
| - gs_path: Path to upload in Google Storage.
|
| + base_local_path: The base path to the files on the local hard drive.
|
| + base_remote_path: The base path to the files on the remote server.
|
| + pkgs: The packages to upload.
|
|
|
| Returns:
|
| - Returns a dictionary of file path/gs_dest_path pairs
|
| + Returns a dictionary of local_path/remote_path pairs
|
| """
|
| - files_to_sync = cros_build_lib.ListFiles(local_path)
|
| upload_files = {}
|
| - for file_path in files_to_sync:
|
| - filename = file_path.replace(local_path, '').lstrip('/')
|
| - gs_file_path = os.path.join(gs_path, filename)
|
| - upload_files[file_path] = gs_file_path
|
| + for pkg in pkgs:
|
| + suffix = pkg['CPV'] + '.tbz2'
|
| + local_path = os.path.join(base_local_path, suffix)
|
| + assert os.path.exists(local_path)
|
| + remote_path = os.path.join(base_remote_path, suffix)
|
| + upload_files[local_path] = remote_path
|
|
|
| return upload_files
|
|
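| With hypothetical paths, the mapping produced by the rewritten function looks
| like this (the assert above means the .tbz2 must already exist locally):
|
| pkgs = [{'CPV': 'sys-apps/portage-2.1.9'}]
| GenerateUploadDict('/build/x86-generic/packages',
|                    'gs://chromeos-prebuilt/board/x86-generic/1.2.3', pkgs)
| # => {'/build/x86-generic/packages/sys-apps/portage-2.1.9.tbz2':
| #     'gs://chromeos-prebuilt/board/x86-generic/1.2.3/sys-apps/portage-2.1.9.tbz2'}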
|
| @@ -406,19 +466,21 @@ def DetermineMakeConfFile(target):
|
|
|
|
|
| def UploadPrebuilt(build_path, upload_location, version, binhost_base_url,
|
| - board=None, git_sync=False, git_sync_retries=5):
|
| + board=None, git_sync=False, git_sync_retries=5,
|
| + key='PORTAGE_BINHOST', skip_push=False, old_urls={}):
|
| """Upload Host prebuilt files to Google Storage space.
|
|
|
| Args:
|
| build_path: The path to the root of the chroot.
|
| upload_location: The upload location.
|
| - board: The board to upload to Google Storage, if this is None upload
|
| + board: The board to upload to Google Storage. If this is None, upload
|
| host packages.
|
| git_sync: If set, update make.conf of target to reference the latest
|
| - prebuilt packages genereated here.
|
| + prebuilt packages generated here.
|
| git_sync_retries: How many times to retry pushing when updating git files.
|
| This helps avoid failures when multiple bots are modifying the same Repo.
|
| default: 5
|
| + key: The variable key to update in the git file. (Default: PORTAGE_BINHOST)
|
| """
|
|
|
| if not board:
|
| @@ -436,8 +498,21 @@ def UploadPrebuilt(build_path, upload_location, version, binhost_base_url,
|
| git_file = os.path.join(build_path, DetermineMakeConfFile(board))
|
| remote_location = os.path.join(upload_location, url_suffix)
|
|
|
| + # Process Packages file, removing duplicates and filtered packages.
|
| + pkgindex = ReadLocalPackageIndex(package_path)
|
| + RebasePackageIndex(pkgindex, binhost_base_url, url_suffix)
|
| + FilterPackagesIndex(pkgindex)
|
| + orig, dups = FindDuplicates(pkgindex, old_urls)
|
| + ResolveDuplicates(pkgindex, dups)
|
| +
|
| + # Write Packages file.
|
| + tmp_packages_file = WritePackageIndex(pkgindex)
|
| +
|
| if upload_location.startswith('gs://'):
|
| - upload_files = GenerateUploadDict(package_path, remote_location)
|
| + # Build list of files to upload.
|
| + upload_files = GenerateUploadDict(package_path, remote_location, orig)
|
| + remote_file = os.path.join(remote_location, 'Packages')
|
| + upload_files[tmp_packages_file.name] = remote_file
|
|
|
| print 'Uploading %s' % package_string
|
| failed_uploads = RemoteUpload(upload_files)
|
| @@ -445,16 +520,25 @@ def UploadPrebuilt(build_path, upload_location, version, binhost_base_url,
|
| error_msg = ['%s -> %s\n' % args for args in failed_uploads]
|
| raise UploadFailed('Error uploading:\n%s' % error_msg)
|
| else:
|
| + pkgs = ' '.join(p['CPV'] + '.tbz2' for p in orig)
|
| ssh_server, remote_path = remote_location.split(':', 1)
|
| - cmds = ['ssh %s mkdir -p %s' % (ssh_server, remote_path),
|
| - 'rsync -av %s/ %s/' % (package_path, remote_location)]
|
| + d = { 'pkgindex': tmp_packages_file.name,
|
| + 'pkgs': pkgs,
|
| + 'remote_path': remote_path,
|
| + 'remote_location': remote_location,
|
| + 'ssh_server': ssh_server }
|
| + cmds = ['ssh %(ssh_server)s mkdir -p %(remote_path)s' % d,
|
| + 'rsync -av %(pkgindex)s %(remote_location)s/Packages' % d]
|
| + if pkgs:
|
| + cmds.append('rsync -Rav %(pkgs)s %(remote_location)s/' % d)
|
| for cmd in cmds:
|
| - if not _RetryRun(cmd, shell=True):
|
| + if not _RetryRun(cmd, shell=True, cwd=package_path):
|
| raise UploadFailed('Could not run %s' % cmd)
|
|
|
| if git_sync:
|
| url_value = '%s/%s/' % (binhost_base_url, url_suffix)
|
| - RevGitFile(git_file, url_value, retries=git_sync_retries)
|
| + RevGitFile(git_file, url_value, retries=git_sync_retries, key=key,
|
| + skip_push=skip_push)
|
|
|
|
|
| def usage(parser, msg):
|
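| With made-up values substituted into the dictionary above, the command list
| comes out roughly as below ('chromeos-images' and the paths are invented, and
| /tmp/tmpPkgIdx stands in for tmp_packages_file.name). _RetryRun() now runs
| these with cwd=package_path, which is what lets 'rsync -R' recreate the
| relative sys-apps/ style paths on the server:
|
| cmds = [
|     'ssh chromeos-images mkdir -p /var/www/prebuilt/x86-generic/1.2.3',
|     'rsync -av /tmp/tmpPkgIdx chromeos-images:/var/www/prebuilt/x86-generic/1.2.3/Packages',
|     'rsync -Rav sys-apps/portage-2.1.9.tbz2 chromeos-images:/var/www/prebuilt/x86-generic/1.2.3/',
| ]
|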
| @@ -469,6 +553,12 @@ def main():
|
| parser.add_option('-H', '--binhost-base-url', dest='binhost_base_url',
|
| default=_BINHOST_BASE_URL,
|
| help='Base URL to use for binhost in make.conf updates')
|
| + parser.add_option('', '--previous-board-binhost-url', action='append',
|
| + default=[], dest='previous_board_binhost_url',
|
| + help='Previous board binhost URL')
|
| + parser.add_option('', '--previous-host-binhost-url', action='append',
|
| + default=[], dest='previous_host_binhost_url',
|
| + help='Previous host binhost URL')
|
| parser.add_option('-b', '--board', dest='board', default=None,
|
| help='Board type that was built on this machine')
|
| parser.add_option('-p', '--build-path', dest='build_path',
|
| @@ -476,6 +566,9 @@ def main():
|
| parser.add_option('-s', '--sync-host', dest='sync_host',
|
| default=False, action='store_true',
|
| help='Sync host prebuilts')
|
| + parser.add_option('-n', '--skip-push', dest='skip_push',
|
| + default=False, action='store_true',
|
| + help='Don\'t push git changes.')
|
| parser.add_option('-g', '--git-sync', dest='git_sync',
|
| default=False, action='store_true',
|
| help='Enable git version sync (This commits to a repo)')
|
| @@ -488,6 +581,9 @@ def main():
|
| parser.add_option('-f', '--filters', dest='filters', action='store_true',
|
| default=False,
|
| help='Turn on filtering of private ebuild packages')
|
| + parser.add_option('-k', '--key', dest='key',
|
| + default='PORTAGE_BINHOST',
|
| + help='Key to update in make.conf')
|
|
|
| options, args = parser.parse_args()
|
| # Setup boto environment for gsutil to use
|
| @@ -499,10 +595,6 @@ def main():
|
| usage(parser, 'Error: you need to provide an upload location using -u')
|
|
|
| if options.filters:
|
| - # TODO(davidjames): It might be nice to be able to filter private ebuilds
|
| - # from rsync uploads as well, some day. But for now it's not needed.
|
| - if not options.upload.startswith("gs://"):
|
| - usage(parser, 'Error: filtering only works with gs:// paths')
|
| LoadPrivateFilters(options.build_path)
|
|
|
| version = GetVersion()
|
| @@ -510,13 +602,26 @@ def main():
|
| version = '%s-%s' % (options.prepend_version, version)
|
|
|
| if options.sync_host:
|
| + old_urls = {}
|
| + for url in options.previous_host_binhost_url:
|
| + old_pkgindex = GrabRemotePackageIndex(url)
|
| + old_pkgindex.header.setdefault('URI', url)
|
| + old_urls.update(GrabPackageURLs(old_pkgindex))
|
| UploadPrebuilt(options.build_path, options.upload, version,
|
| - options.binhost_base_url, git_sync=options.git_sync)
|
| + options.binhost_base_url, git_sync=options.git_sync,
|
| + key=options.key, skip_push=options.skip_push,
|
| + old_urls=old_urls)
|
|
|
| if options.board:
|
| + old_urls = {}
|
| + for url in options.previous_board_binhost_url:
|
| + old_pkgindex = GrabRemotePackageIndex(url)
|
| + old_pkgindex.header.setdefault('URI', url)
|
| + old_urls.update(GrabPackageURLs(old_pkgindex))
|
| UploadPrebuilt(options.build_path, options.upload, version,
|
| options.binhost_base_url, board=options.board,
|
| - git_sync=options.git_sync)
|
| + git_sync=options.git_sync, key=options.key,
|
| + skip_push=options.skip_push, old_urls=old_urls)
|
|
|
|
|
| if __name__ == '__main__':
|
|
|
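| Taken together, a board upload that reuses packages already published at a
| previous binhost might be invoked along these lines (the board name, paths and
| URLs are hypothetical):
|
| ./prebuilt.py -p /home/chrome/chromeos -b x86-generic -u gs://chromeos-prebuilt \
|     --previous-board-binhost-url http://binhost.example.com/board/x86-generic/1.2.2/ \
|     -g -k PORTAGE_BINHOST -n
|
| The --previous-board-binhost-url and --previous-host-binhost-url options may be
| repeated; each index is fetched with GrabRemotePackageIndex() and merged into
| old_urls before UploadPrebuilt() runs.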