| OLD | NEW |
| (Empty) |
| 1 #!/usr/bin/python | |
| 2 | |
| 3 # Copyright (c) 2010 The Chromium OS Authors. All rights reserved. | |
| 4 # Use of this source code is governed by a BSD-style license that can be | |
| 5 # found in the LICENSE file. | |
| 6 | |
"""CBuildbot is a wrapper around the build process used by the pre-flight queue."""
| 8 | |
| 9 import errno | |
| 10 import heapq | |
| 11 import re | |
| 12 import optparse | |
| 13 import os | |
| 14 import shutil | |
| 15 import sys | |
| 16 | |
| 17 import cbuildbot_comm | |
| 18 from cbuildbot_config import config | |
| 19 | |
| 20 sys.path.append(os.path.join(os.path.dirname(__file__), '../lib')) | |
| 21 from cros_build_lib import (Die, Info, ReinterpretPathForChroot, RunCommand, | |
| 22 Warning) | |
| 23 | |
| 24 _DEFAULT_RETRIES = 3 | |
| 25 _PACKAGE_FILE = '%(buildroot)s/src/scripts/cbuildbot_package.list' | |
| 26 ARCHIVE_BASE = '/var/www/archive' | |
| 27 ARCHIVE_COUNT = 10 | |
| 28 PUBLIC_OVERLAY = '%(buildroot)s/src/third_party/chromiumos-overlay' | |
| 29 PRIVATE_OVERLAY = '%(buildroot)s/src/private-overlays/chromeos-overlay' | |
| 30 CHROME_KEYWORDS_FILE = ('/build/%(board)s/etc/portage/package.keywords/chrome') | |
| 31 | |
| 32 # Currently, both the full buildbot and the preflight buildbot store their | |
| 33 # data in a variable named PORTAGE_BINHOST, but they're in different files. | |
| 34 # We're planning on joining the two files soon and renaming the full binhost | |
| 35 # to FULL_BINHOST. | |
| 36 _FULL_BINHOST = 'PORTAGE_BINHOST' | |
| 37 _PREFLIGHT_BINHOST = 'PORTAGE_BINHOST' | |
| 38 | |
| 39 # ======================== Utility functions ================================ | |
| 40 | |
| 41 def _PrintFile(path): | |
| 42 """Prints out the contents of a file to stderr.""" | |
| 43 file_handle = open(path) | |
| 44 print >> sys.stderr, file_handle.read() | |
| 45 file_handle.close() | |
| 46 sys.stderr.flush() | |
| 47 | |
| 48 | |
def MakeDir(path, parents=False):
  """Basic wrapper around os.makedirs.

  Keyword arguments:
  path -- Path to create.
  parents -- Follow mkdir -p logic: do not fail if the directory already
    exists.
  """
  try:
    os.makedirs(path)
  except OSError as e:
    # With parents=True, mirror `mkdir -p` and tolerate an existing
    # directory; re-raise everything else (including EEXIST without parents).
    if not (e.errno == errno.EEXIST and parents):
      raise
| 64 | |
| 65 | |
def RepoSync(buildroot, retries=_DEFAULT_RETRIES):
  """Uses repo to checkout the source code.

  Keyword arguments:
  buildroot -- Directory that contains the repo checkout.
  retries -- Number of retries to try before failing on the sync.
  """
  while retries > 0:
    try:
      # The --trace option ensures that repo shows the output from git. This
      # is needed so that the buildbot can kill us if git is not making
      # progress.
      RunCommand(['repo', '--trace', 'sync'], cwd=buildroot)
      RunCommand(['repo', 'forall', '-c', 'git', 'config',
                  'url.ssh://git@gitrw.chromium.org:9222.insteadof',
                  'http://git.chromium.org/git'], cwd=buildroot)
      retries = 0
    except Exception:
      # Catch Exception rather than using a bare except so that
      # KeyboardInterrupt/SystemExit abort immediately instead of retrying.
      retries -= 1
      if retries > 0:
        Warning('CBUILDBOT -- Repo Sync Failed, retrying')
      else:
        Warning('CBUILDBOT -- Retries exhausted')
        raise

  # Record the exact (pinned) manifest that was synced.
  RunCommand(['repo', 'manifest', '-r', '-o', '/dev/stderr'], cwd=buildroot)
| 91 | |
| 92 # =========================== Command Helpers ================================= | |
| 93 | |
def _GetAllGitRepos(buildroot, debug=False):
  """Returns a list of tuples containing [git_repo, src_path]."""
  # Dump the full repo manifest, then keep only the lines describing projects.
  manifest_xml = RunCommand(['repo', 'manifest', '-o', '-'], cwd=buildroot,
                            redirect_stdout=True, redirect_stderr=True,
                            print_cmd=debug)
  project_lines = RunCommand(['grep', 'project name='], cwd=buildroot,
                             input=manifest_xml, redirect_stdout=True,
                             print_cmd=debug)

  # Pull the (name, path) attribute pair out of each project element.
  matches = re.findall('.+name=\"([\w-]+)\".+path=\"(\S+)".+', project_lines)

  manifest_tuples = []
  for match in matches:
    if len(match) != 2:
      Warning('Found incorrect xml object %s' % match)
    else:
      # Remove pre-pended src directory from manifest.
      manifest_tuples.append([match[0], match[1].replace('src/', '')])
  return manifest_tuples
| 118 | |
| 119 | |
def _GetCrosWorkOnSrcPath(buildroot, board, package, debug=False):
  """Returns ${CROS_WORKON_SRC_PATH} for given package."""
  cwd = os.path.join(buildroot, 'src', 'scripts')
  # Ask equery for the ebuild backing this package; empty output means the
  # package is unknown, so there is nothing to inspect.
  equery_cmd = ('equery-%s which %s' % (board, package)).split()
  ebuild_path = RunCommand(equery_cmd, cwd=cwd, redirect_stdout=True,
                           redirect_stderr=True, enter_chroot=True,
                           error_ok=True, print_cmd=debug)
  if not ebuild_path:
    return None

  ebuild_cmd = ('ebuild-%s %s info' % (board, ebuild_path)).split()
  info_output = RunCommand(ebuild_cmd, cwd=cwd, redirect_stdout=True,
                           redirect_stderr=True, enter_chroot=True,
                           print_cmd=debug)
  match = re.findall('CROS_WORKON_SRCDIR="(\S+)"', info_output)
  return match[0] if match else None
| 138 | |
| 139 | |
def _CreateRepoDictionary(buildroot, board, debug=False):
  """Returns the repo->list_of_ebuilds dictionary.

  Keyword arguments:
  buildroot -- Root directory of the checkout.
  board -- Board used to resolve cros_workon source paths.
  debug -- Pass print_cmd through to RunCommand.
  """
  repo_dictionary = {}
  manifest_tuples = _GetAllGitRepos(buildroot)
  Info('Creating dictionary of git repos to portage packages ...')

  cwd = os.path.join(buildroot, 'src', 'scripts')
  get_all_workon_pkgs_cmd = './cros_workon list --all'.split()
  packages = RunCommand(get_all_workon_pkgs_cmd, cwd=cwd,
                        redirect_stdout=True, redirect_stderr=True,
                        enter_chroot=True, print_cmd=debug)
  for package in packages.split():
    cros_workon_src_path = _GetCrosWorkOnSrcPath(buildroot, board, package)
    if cros_workon_src_path:
      # `manifest_tuple` avoids shadowing the builtin `tuple`.
      for manifest_tuple in manifest_tuples:
        # This path tends to have the user's home_dir prepended to it.
        if cros_workon_src_path.endswith(manifest_tuple[1]):
          Info('For %s found matching package %s' % (manifest_tuple[0],
                                                     package))
          # setdefault replaces the deprecated has_key() branching.
          repo_dictionary.setdefault(manifest_tuple[0], []).append(package)

  return repo_dictionary
| 164 | |
| 165 | |
| 166 def _ParseRevisionString(revision_string, repo_dictionary): | |
| 167 """Parses the given revision_string into a revision dictionary. | |
| 168 | |
| 169 Returns a list of tuples that contain [portage_package_name, commit_id] to | |
| 170 update. | |
| 171 | |
| 172 Keyword arguments: | |
| 173 revision_string -- revision_string with format | |
| 174 'repo1.git@commit_1 repo2.git@commit2 ...'. | |
| 175 repo_dictionary -- dictionary with git repository names as keys (w/out git) | |
| 176 to portage package names. | |
| 177 | |
| 178 """ | |
| 179 # Using a dictionary removes duplicates. | |
| 180 revisions = {} | |
| 181 for revision in revision_string.split(): | |
| 182 # Format 'package@commit-id'. | |
| 183 revision_tuple = revision.split('@') | |
| 184 if len(revision_tuple) != 2: | |
| 185 Warning('Incorrectly formatted revision %s' % revision) | |
| 186 | |
| 187 repo_name = revision_tuple[0].replace('.git', '') | |
| 188 # Might not have entry if no matching ebuild. | |
| 189 if repo_dictionary.has_key(repo_name): | |
| 190 # May be many corresponding packages to a given git repo e.g. kernel). | |
| 191 for package in repo_dictionary[repo_name]: | |
| 192 revisions[package] = revision_tuple[1] | |
| 193 | |
| 194 return revisions.items() | |
| 195 | |
| 196 | |
def _UprevFromRevisionList(buildroot, tracking_branch, revision_list, board,
                           overlays):
  """Uprevs based on revision list."""
  if not revision_list:
    Info('No packages found to uprev')
    return

  # Collect package names, rejecting any that would break the ':'-joined
  # argument below.
  packages = []
  for package, _revision in revision_list:
    assert ':' not in package, 'Invalid package name: %s' % package
    packages.append(package)

  chroot_overlays = [ReinterpretPathForChroot(path) for path in overlays]
  drop_file = ReinterpretPathForChroot(_PACKAGE_FILE % {'buildroot': buildroot})

  RunCommand(['./cros_mark_as_stable',
              '--board=%s' % board,
              '--tracking_branch=%s' % tracking_branch,
              '--overlays=%s' % ':'.join(chroot_overlays),
              '--packages=%s' % ':'.join(packages),
              '--drop_file=%s' % drop_file,
              'commit'],
             cwd=os.path.join(buildroot, 'src', 'scripts'), enter_chroot=True)
| 221 | |
| 222 | |
def _MarkChromeAsStable(buildroot, tracking_branch, chrome_rev, board):
  """Returns the portage atom for the revved chrome ebuild - see man emerge."""
  cwd = os.path.join(buildroot, 'src', 'scripts')
  portage_atom_string = RunCommand(['bin/cros_mark_chrome_as_stable',
                                    '--tracking_branch=%s' % tracking_branch,
                                    chrome_rev], cwd=cwd, redirect_stdout=True,
                                   enter_chroot=True).rstrip()
  if not portage_atom_string:
    Info('Found nothing to rev.')
    return None

  chrome_atom = portage_atom_string.split('=')[1]
  keywords_file = CHROME_KEYWORDS_FILE % {'board': board}
  # TODO(sosa): Workaround to build unstable chrome ebuild we uprevved.
  RunCommand(['sudo', 'mkdir', '-p', os.path.dirname(keywords_file)],
             enter_chroot=True, cwd=cwd)
  RunCommand(['sudo', 'tee', keywords_file], input='=%s\n' % chrome_atom,
             enter_chroot=True, cwd=cwd)
  return chrome_atom
| 242 | |
| 243 | |
def _UprevAllPackages(buildroot, tracking_branch, board, overlays):
  """Uprevs all packages that have been updated since last uprev."""
  drop_file = ReinterpretPathForChroot(_PACKAGE_FILE % {'buildroot': buildroot})
  chroot_overlays = [ReinterpretPathForChroot(p) for p in overlays]
  cmd = ['./cros_mark_as_stable', '--all',
         '--board=%s' % board,
         '--overlays=%s' % ':'.join(chroot_overlays),
         '--tracking_branch=%s' % tracking_branch,
         '--drop_file=%s' % drop_file,
         'commit']
  RunCommand(cmd, cwd=os.path.join(buildroot, 'src', 'scripts'),
             enter_chroot=True)
| 256 | |
| 257 | |
def _GetVMConstants(buildroot):
  """Returns minimum (vdisk_size, statefulfs_size) recommended for VM's."""
  constants_dir = os.path.join(buildroot, 'src', 'scripts', 'lib')
  source_cmd = 'source %s/cros_vm_constants.sh' % constants_dir

  def _Lookup(variable):
    # Source the constants file in a subshell and echo the requested value.
    return RunCommand(
        ['/bin/bash', '-c', '%s && echo $%s' % (source_cmd, variable)],
        redirect_stdout=True).strip()

  return (_Lookup('MIN_VDISK_SIZE_FULL'), _Lookup('MIN_STATEFUL_FS_SIZE_FULL'))
| 269 | |
| 270 | |
def _GitCleanup(buildroot, board, tracking_branch, overlays):
  """Clean up git branch after previous uprev attempt."""
  scripts_dir = os.path.join(buildroot, 'src', 'scripts')
  # Nothing to clean if no checkout exists yet.
  if not os.path.exists(scripts_dir):
    return
  RunCommand(['./cros_mark_as_stable', '--srcroot=..',
              '--board=%s' % board,
              '--overlays=%s' % ':'.join(overlays),
              '--tracking_branch=%s' % tracking_branch, 'clean'],
             cwd=scripts_dir, error_ok=True)
| 280 | |
| 281 | |
def _CleanUpMountPoints(buildroot):
  """Cleans up any stale mount points from previous runs."""
  # `mount` output lines look like: <dev> on <mount_pt> type <fs> (<opts>).
  mount_output = RunCommand(['mount'], redirect_stdout=True)
  # grep exits non-zero when nothing matches, hence error_ok=True.
  mount_pts_in_buildroot = RunCommand(['grep', buildroot], input=mount_output,
                                      redirect_stdout=True, error_ok=True)

  for mount_pt_str in mount_pts_in_buildroot.splitlines():
    # Strip everything before ' on ' and after ' type ' to isolate the path.
    mount_pt = mount_pt_str.rpartition(' type ')[0].partition(' on ')[2]
    # Lazy unmount (-l) so a busy mount cannot wedge the cleanup.
    RunCommand(['sudo', 'umount', '-l', mount_pt], error_ok=True)
| 291 | |
| 292 | |
def _WipeOldOutput(buildroot):
  """Wipes out build output directories."""
  image_dir = 'src/build/images'
  RunCommand(['rm', '-rf', image_dir], cwd=buildroot)
| 296 | |
| 297 | |
| 298 # =========================== Main Commands =================================== | |
| 299 | |
| 300 | |
def _PreFlightRinse(buildroot, board, tracking_branch, overlays):
  """Cleans up any leftover state from previous runs."""
  # Abandon stale uprev branches, detach stale mounts, then kill any VMs
  # left over from an earlier test run (killall may find none, hence
  # error_ok=True).
  _GitCleanup(buildroot, board, tracking_branch, overlays)
  _CleanUpMountPoints(buildroot)
  RunCommand(['sudo', 'killall', 'kvm'], error_ok=True)
| 306 | |
| 307 | |
def _FullCheckout(buildroot, tracking_branch,
                  retries=_DEFAULT_RETRIES,
                  url='http://git.chromium.org/git/manifest'):
  """Performs a full checkout and clobbers any previous checkouts.

  Keyword arguments:
  buildroot -- Directory to wipe and check out into.
  tracking_branch -- e.g. 'cros/master'; only the final component is used
    as the repo branch.
  retries -- Retry count passed through to RepoSync.
  url -- Manifest URL for `repo init`.
  """
  RunCommand(['sudo', 'rm', '-rf', buildroot])
  MakeDir(buildroot, parents=True)
  # 'cros/master' -> 'master' (drops the stray semicolon and the redundant
  # '%s' formatting of the old code).
  branch = tracking_branch.split('/')[-1]
  RunCommand(['repo', 'init', '-u', url, '-b', branch],
             cwd=buildroot, input='\n\ny\n')
  RepoSync(buildroot, retries)
| 319 | |
| 320 | |
def _IncrementalCheckout(buildroot, retries=_DEFAULT_RETRIES):
  """Performs a checkout without clobbering previous checkout."""
  RepoSync(buildroot, retries=retries)
| 324 | |
| 325 | |
def _MakeChroot(buildroot, replace=False):
  """Wrapper around make_chroot."""
  # --replace rebuilds the chroot from scratch instead of reusing it.
  cmd = ['./make_chroot', '--fast'] + (['--replace'] if replace else [])
  RunCommand(cmd, cwd=os.path.join(buildroot, 'src', 'scripts'))
| 336 | |
| 337 | |
def _GetPortageEnvVar(buildroot, board, envvar):
  """Get a portage environment variable for the specified board, if any.

  buildroot: The root directory where the build occurs. Must be an absolute
    path.
  board: Board type that was built on this machine. E.g. x86-generic. If this
    is None, get the env var from the host.
  envvar: The environment variable to get. E.g. 'PORTAGE_BINHOST'.

  Returns:
    The value of the environment variable, as a string. If no such variable
    can be found, return the empty string.
  """
  cwd = os.path.join(buildroot, 'src', 'scripts')
  # Board builds use the portageq-$BOARD wrapper; the host uses plain
  # portageq.
  portageq = 'portageq-%s' % board if board else 'portageq'
  binhost = RunCommand([portageq, 'envvar', envvar], cwd=cwd,
                       redirect_stdout=True, enter_chroot=True, error_ok=True)
  return binhost.rstrip('\n')
| 358 | |
| 359 | |
def _SetupBoard(buildroot, board='x86-generic'):
  """Wrapper around setup_board."""
  setup_cmd = ['./setup_board', '--fast', '--default', '--board=%s' % board]
  RunCommand(setup_cmd, cwd=os.path.join(buildroot, 'src', 'scripts'),
             enter_chroot=True)
| 365 | |
| 366 | |
def _Build(buildroot, emptytree, build_autotest=True, usepkg=True):
  """Wrapper around build_packages."""
  if emptytree:
    # An empty tree forces portage to rebuild everything from source.
    cmd = ['sh', '-c', 'EXTRA_BOARD_FLAGS=--emptytree ./build_packages']
  else:
    cmd = ['./build_packages']

  if not build_autotest:
    cmd.append('--nowithautotest')
  if not usepkg:
    cmd.append('--nousepkg')

  RunCommand(cmd, cwd=os.path.join(buildroot, 'src', 'scripts'),
             enter_chroot=True)
| 382 | |
| 383 | |
def _EnableLocalAccount(buildroot):
  """Sets up the chronos local account for test images."""
  RunCommand(['./enable_localaccount.sh', 'chronos'], print_cmd=False,
             cwd=os.path.join(buildroot, 'src', 'scripts'))
| 390 | |
| 391 | |
def _BuildImage(buildroot):
  """Builds a fresh image, wiping any previous build output first."""
  _WipeOldOutput(buildroot)
  scripts_dir = os.path.join(buildroot, 'src', 'scripts')
  RunCommand(['./build_image', '--replace'], cwd=scripts_dir,
             enter_chroot=True)
| 397 | |
| 398 | |
def _BuildVMImageForTesting(buildroot):
  """Converts the latest image into a VM test image."""
  vdisk_size, statefulfs_size = _GetVMConstants(buildroot)
  RunCommand(['./image_to_vm.sh',
              '--test_image',
              '--full',
              '--vdisk_size=%s' % vdisk_size,
              '--statefulfs_size=%s' % statefulfs_size,
              ], cwd=os.path.join(buildroot, 'src', 'scripts'),
             enter_chroot=True)
| 408 | |
| 409 | |
def _RunUnitTests(buildroot):
  """Runs unit tests for the packages listed in the drop file."""
  package_file = ReinterpretPathForChroot(
      _PACKAGE_FILE % {'buildroot': buildroot})
  RunCommand(['./cros_run_unit_tests', '--package_file=%s' % package_file],
             cwd=os.path.join(buildroot, 'src', 'scripts'), enter_chroot=True)
| 416 | |
| 417 | |
def _RunSmokeSuite(buildroot, results_dir):
  """Runs the Smoke suite in a VM, clearing any stale results first."""
  chroot_results = os.path.join(buildroot, 'chroot',
                                results_dir.lstrip('/'))
  if os.path.exists(chroot_results):
    shutil.rmtree(chroot_results)

  RunCommand(['bin/cros_run_vm_test',
              '--no_graphics',
              '--results_dir_root=%s' % results_dir,
              'suite_Smoke',
              ], cwd=os.path.join(buildroot, 'src', 'scripts'), error_ok=False)
| 430 | |
| 431 | |
def _RunAUTest(buildroot, board):
  """Runs a basic update test from the au test harness."""
  image_path = os.path.join(buildroot, 'src', 'build', 'images', board,
                            'latest', 'chromiumos_test_image.bin')
  # The same image serves as both base and target for the simple update test.
  RunCommand(['bin/cros_au_test_harness',
              '--no_graphics',
              '--no_delta',
              '--board=%s' % board,
              '--test_prefix=SimpleTest',
              '--verbose',
              '--base_image=%s' % image_path,
              '--target_image=%s' % image_path,
              ], cwd=os.path.join(buildroot, 'src', 'scripts'), error_ok=False)
| 446 | |
| 447 | |
def _UprevPackages(buildroot, tracking_branch, revisionfile, board, overlays):
  """Uprevs a package based on given revisionfile.

  If revisionfile is set to None or does not resolve to an actual file, this
  function will uprev all packages.

  Keyword arguments:
  revisionfile -- string specifying a file that contains a list of revisions to
                  uprev.
  """
  # Purposefully set to None as it means Force Build was pressed.
  revisions = 'None'
  if revisionfile:
    try:
      rev_file = open(revisionfile)
      try:
        revisions = rev_file.read()
      finally:
        # Close the handle even if read() fails (the old code leaked it).
        rev_file.close()
    except Exception:
      Warning('Error reading %s, revving all' % revisionfile)
      revisions = 'None'

  revisions = revisions.strip()

  # TODO(sosa): Un-comment once we close individual trees.
  # revisions == "None" indicates a Force Build.
  #if revisions != 'None':
  #  print >> sys.stderr, 'CBUILDBOT Revision list found %s' % revisions
  #  revision_list = _ParseRevisionString(revisions,
  #      _CreateRepoDictionary(buildroot, board))
  #  _UprevFromRevisionList(buildroot, tracking_branch, revision_list, board,
  #                         overlays)
  #else:
  Info('CBUILDBOT Revving all')
  _UprevAllPackages(buildroot, tracking_branch, board, overlays)
| 482 | |
| 483 | |
def _UprevPush(buildroot, tracking_branch, board, overlays, dryrun):
  """Pushes uprev changes to the main line."""
  cmd = ['./cros_mark_as_stable',
         '--srcroot=%s' % os.path.join(buildroot, 'src'),
         '--board=%s' % board,
         '--overlays=%s' % ':'.join(overlays),
         '--tracking_branch=%s' % tracking_branch]
  if dryrun:
    cmd.append('--dryrun')
  cmd.append('push')
  RunCommand(cmd, cwd=os.path.join(buildroot, 'src', 'scripts'))
| 498 | |
| 499 | |
def _LegacyArchiveBuild(buildroot, bot_id, buildconfig, buildnumber,
                        debug=False):
  """Adds a step to the factory to archive a build.

  Keyword arguments:
  buildroot -- Root directory of the checkout.
  bot_id -- Name of this bot's configuration; used in archive paths.
  buildconfig -- Build configuration dictionary.
  buildnumber -- Buildbot build number.
  debug -- If True, only log the command instead of running it.
  """

  # Fixed properties
  keep_max = 3
  gsutil_archive = 'gs://chromeos-archive/' + bot_id
  cwd = os.path.join(buildroot, 'src', 'scripts')

  cmd = ['./archive_build.sh',
         '--build_number', str(buildnumber),
         '--to', '/var/www/archive/' + bot_id,
         '--keep_max', str(keep_max),
         '--prebuilt_upload',
         '--board', buildconfig['board'],

         '--acl', '/home/chrome-bot/slave_archive_acl',
         '--gsutil_archive', gsutil_archive,
         '--gsd_gen_index',
         '/b/scripts/gsd_generate_index/gsd_generate_index.py',
         '--gsutil', '/b/scripts/slave/gsutil',
         ]

  # '--test_mod' is appended only here; the old unconditional copy in the
  # base command defeated the buildconfig['test_mod'] setting.
  if buildconfig.get('test_mod', True):
    cmd.append('--test_mod')

  if buildconfig.get('factory_install_mod', True):
    cmd.append('--factory_install_mod')

  if buildconfig.get('factory_test_mod', True):
    cmd.append('--factory_test_mod')

  if debug:
    Warning('***** ***** LegacyArchiveBuild CMD: ' + ' '.join(cmd))
  else:
    RunCommand(cmd, cwd=cwd)
| 537 | |
def _ArchiveTestResults(buildroot, board, test_results_dir,
                        gsutil, archive_dir, acl):
  """Archives the test results into Google Storage

  Takes the results from the test_results_dir and the last qemu image and
  uploads them to Google Storage.

  Arguments:
    buildroot: Root directory where build occurs
    board: Board to find the qemu image.
    test_results_dir: Path from buildroot/chroot to find test results.
      This must a subdir of /tmp.
    gsutil: Location of gsutil
    archive_dir: Google Storage path to store the archive
    acl: ACL to set on archive in Google Storage
  """
  num_gsutil_retries = 5
  results_path = os.path.join(buildroot, 'chroot',
                              test_results_dir.lstrip('/'))
  # Results are written as root inside the chroot; make them readable first.
  RunCommand(['sudo', 'chmod', '-R', '+r', results_path])
  try:
    # gsutil has the ability to resume an upload when the command is retried
    RunCommand([gsutil, 'cp', '-R', results_path, archive_dir],
               num_retries=num_gsutil_retries)
    RunCommand([gsutil, 'setacl', acl, archive_dir])

    image_path = os.path.join(buildroot, 'src', 'build', 'images', board,
                              'latest', 'chromiumos_qemu_image.bin')
    RunCommand(['gzip', '-f', '--fast', image_path])
    RunCommand([gsutil, 'cp', image_path + '.gz', archive_dir],
               num_retries=num_gsutil_retries)
  except Exception as e:
    # Archiving is best-effort: warn and continue rather than fail the build.
    Warning('Could not archive test results (error=%s)' % str(e))
| 572 | |
| 573 | |
def _GetConfig(config_name):
  """Gets the configuration for the build.

  Exits the process with an error listing the valid names when config_name
  is unknown.
  """
  # 'in' replaces the deprecated has_key(); the old unused buildconfig = {}
  # local is dropped.
  if config_name not in config:
    Warning('Non-existent configuration specified.')
    Warning('Please specify one of:')
    for name in sorted(config.keys()):
      Warning(' %s' % name)
    sys.exit(1)

  return config[config_name]
| 587 | |
| 588 | |
def _ResolveOverlays(buildroot, overlays):
  """Return the list of overlays to use for a given buildbot.

  Args:
    buildroot: The root directory where the build occurs. Must be an absolute
      path.
    overlays: A string describing which overlays you want.
              'private': Just the private overlay.
              'public': Just the public overlay.
              'both': Both the public and private overlays.
  """
  public_overlay = PUBLIC_OVERLAY % {'buildroot': buildroot}
  private_overlay = PRIVATE_OVERLAY % {'buildroot': buildroot}
  # Dispatch table replaces the if/elif chain; unknown selectors yield [].
  overlay_map = {
      'private': [private_overlay],
      'public': [public_overlay],
      'both': [public_overlay, private_overlay],
  }
  paths = overlay_map.get(overlays)
  if paths is None:
    Info('No overlays found.')
    paths = []
  return paths
| 612 | |
| 613 | |
def _UploadPrebuilts(buildroot, board, overlay_config, binhosts):
  """Upload prebuilts.

  Args:
    buildroot: The root directory where the build occurs.
    board: Board type that was built on this machine
    overlay_config: A string describing which overlays you want.
      'private': Just the private overlay.
      'public': Just the public overlay.
      'both': Both the public and private overlays.
    binhosts: The URLs of the current binhosts. Binaries that are already
      present will not be uploaded twice. Empty URLs will be ignored.
  """
  cwd = os.path.join(buildroot, 'src', 'scripts')
  cmd = [os.path.join(cwd, 'prebuilt.py'),
         '--sync-binhost-conf',
         '--build-path', buildroot,
         '--board', board,
         '--prepend-version', 'preflight',
         '--key', _PREFLIGHT_BINHOST]
  # Only pass non-empty binhost URLs through to prebuilt.py.
  for binhost in binhosts:
    if binhost:
      cmd += ['--previous-binhost-url', binhost]
  if overlay_config == 'public':
    cmd += ['--upload', 'gs://chromeos-prebuilt']
  else:
    assert overlay_config in ('private', 'both')
    cmd += ['--upload', 'chromeos-images:/var/www/prebuilt/',
            '--binhost-base-url', 'http://chromeos-prebuilt']

  RunCommand(cmd, cwd=cwd)
| 646 | |
| 647 | |
def main():
  """Entry point: parse options, run the configured build, report status."""
  # Parse options
  usage = "usage: %prog [options] cbuildbot_config"
  parser = optparse.OptionParser(usage=usage)
  parser.add_option('-a', '--acl', default='private',
                    help='ACL to set on GSD archives')
  parser.add_option('-r', '--buildroot',
                    help='root directory where build occurs', default=".")
  parser.add_option('-n', '--buildnumber',
                    help='build number', type='int', default=0)
  parser.add_option('--chrome_rev', default=None, type='string',
                    dest='chrome_rev',
                    help=('Chrome_rev of type [tot|latest_release|'
                          'sticky_release]'))
  parser.add_option('-g', '--gsutil', default='', help='Location of gsutil')
  parser.add_option('-c', '--gsutil_archive', default='',
                    help='Datastore archive location')
  parser.add_option('--clobber', action='store_true', dest='clobber',
                    default=False,
                    help='Clobbers an old checkout before syncing')
  parser.add_option('--debug', action='store_true', dest='debug',
                    default=False,
                    help='Override some options to run as a developer.')
  parser.add_option('--nobuild', action='store_false', dest='build',
                    default=True,
                    help="Don't actually build (for cbuildbot dev")
  parser.add_option('--noprebuilts', action='store_false', dest='prebuilts',
                    default=True,
                    help="Don't upload prebuilts.")
  parser.add_option('--nosync', action='store_false', dest='sync',
                    default=True,
                    help="Don't sync before building.")
  parser.add_option('--notests', action='store_false', dest='tests',
                    default=True,
                    help='Override values from buildconfig and run no tests.')
  parser.add_option('-f', '--revisionfile',
                    help='file where new revisions are stored')
  parser.add_option('-t', '--tracking-branch', dest='tracking_branch',
                    default='cros/master', help='Run the buildbot on a branch')
  parser.add_option('-u', '--url', dest='url',
                    default='http://git.chromium.org/git/manifest',
                    help='Run the buildbot on internal manifest')

  (options, args) = parser.parse_args()

  buildroot = os.path.abspath(options.buildroot)
  revisionfile = options.revisionfile
  tracking_branch = options.tracking_branch
  chrome_atom_to_build = None

  # The bot configuration name is the last positional argument.
  if len(args) >= 1:
    bot_id = args[-1]
    buildconfig = _GetConfig(bot_id)
  else:
    Warning('Missing configuration description')
    parser.print_usage()
    sys.exit(1)

  try:
    # Calculate list of overlay directories.
    rev_overlays = _ResolveOverlays(buildroot, buildconfig['rev_overlays'])
    push_overlays = _ResolveOverlays(buildroot, buildconfig['push_overlays'])
    # We cannot push to overlays that we don't rev.
    assert set(push_overlays).issubset(set(rev_overlays))
    # Either has to be a master or not have any push overlays.
    assert buildconfig['master'] or not push_overlays

    board = buildconfig['board']
    old_binhost = None

    _PreFlightRinse(buildroot, buildconfig['board'], tracking_branch,
                    rev_overlays)
    chroot_path = os.path.join(buildroot, 'chroot')
    boardpath = os.path.join(chroot_path, 'build', board)
    if options.sync:
      if options.clobber or not os.path.isdir(buildroot):
        _FullCheckout(buildroot, tracking_branch, url=options.url)
      else:
        # Remember the binhost before syncing so we can detect a change.
        old_binhost = _GetPortageEnvVar(buildroot, board, _FULL_BINHOST)
        _IncrementalCheckout(buildroot)

    # A changed binhost means the full builder rebuilt the world; force an
    # emptytree rebuild so we pick up the new binaries.
    new_binhost = _GetPortageEnvVar(buildroot, board, _FULL_BINHOST)
    emptytree = (old_binhost and old_binhost != new_binhost)

    # Check that all overlays can be found.
    for path in rev_overlays:
      if not os.path.isdir(path):
        Die('Missing overlay: %s' % path)

    if not os.path.isdir(chroot_path) or buildconfig['chroot_replace']:
      _MakeChroot(buildroot, buildconfig['chroot_replace'])

    if not os.path.isdir(boardpath):
      _SetupBoard(buildroot, board=buildconfig['board'])

    # Perform chrome uprev.
    if options.chrome_rev:
      chrome_atom_to_build = _MarkChromeAsStable(buildroot, tracking_branch,
                                                 options.chrome_rev, board)
    # Perform other uprevs.
    if buildconfig['uprev']:
      _UprevPackages(buildroot, tracking_branch, revisionfile,
                     buildconfig['board'], rev_overlays)
    elif options.chrome_rev and not chrome_atom_to_build:
      # We found nothing to rev, we're done here.
      return

    _EnableLocalAccount(buildroot)

    if options.build:
      _Build(buildroot,
             emptytree,
             build_autotest=(buildconfig['vm_tests'] and options.tests),
             usepkg=buildconfig['usepkg'])

    if buildconfig['unittests'] and options.tests:
      _RunUnitTests(buildroot)

    _BuildImage(buildroot)

    if buildconfig['vm_tests'] and options.tests:
      _BuildVMImageForTesting(buildroot)
      test_results_dir = '/tmp/run_remote_tests.%s' % options.buildnumber
      try:
        _RunSmokeSuite(buildroot, test_results_dir)
        _RunAUTest(buildroot, buildconfig['board'])
      finally:
        # Archive results even when the tests failed, except in debug runs.
        if not options.debug:
          archive_full_path = os.path.join(options.gsutil_archive,
                                           str(options.buildnumber))
          _ArchiveTestResults(buildroot, buildconfig['board'],
                              test_results_dir=test_results_dir,
                              gsutil=options.gsutil,
                              archive_dir=archive_full_path,
                              acl=options.acl)

    if buildconfig['uprev']:
      # Don't push changes for developers.
      if buildconfig['master']:
        # Master bot needs to check if the other slaves completed.
        if cbuildbot_comm.HaveSlavesCompleted(config):
          if not options.debug and options.prebuilts:
            _UploadPrebuilts(buildroot, board, buildconfig['rev_overlays'],
                             [new_binhost])
          _UprevPush(buildroot, tracking_branch, buildconfig['board'],
                     push_overlays, options.debug)
        else:
          Die('CBUILDBOT - One of the slaves has failed!!!')

      else:
        # Publish my status to the master if its expecting it.
        if buildconfig['important'] and not options.debug:
          cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_COMPLETE)

    if buildconfig['archive_build']:
      _LegacyArchiveBuild(buildroot,
                          bot_id,
                          buildconfig,
                          options.buildnumber,
                          options.debug)
  except:
    # Deliberately bare: publish the failure status to the master for *any*
    # abort, then re-raise so the build still fails loudly.
    # Send failure to master bot.
    if not buildconfig['master'] and buildconfig['important']:
      cbuildbot_comm.PublishStatus(cbuildbot_comm.STATUS_BUILD_FAILED)

    raise
| 814 | |
| 815 | |
# Standard script entry-point guard.
if __name__ == '__main__':
  main()
| OLD | NEW |