OLD | NEW |
1 #!/usr/bin/python | 1 #!/usr/bin/python |
2 # Copyright (c) 2010 The Chromium OS Authors. All rights reserved. | 2 # Copyright (c) 2010 The Chromium OS Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 import datetime | 6 import datetime |
7 import multiprocessing | 7 import multiprocessing |
8 import optparse | 8 import optparse |
9 import os | 9 import os |
10 import re | 10 import re |
11 import sys | 11 import sys |
12 import tempfile | 12 import tempfile |
13 | 13 |
14 from chromite.lib import cros_build_lib | 14 from chromite.lib import cros_build_lib |
15 """ | 15 """ |
16 This script is used to upload host prebuilts as well as board BINHOSTS to | 16 This script is used to upload host prebuilts as well as board BINHOSTS. |
17 Google Storage. | 17 |
| 18 If the URL starts with 'gs://', we upload using gsutil to Google Storage. |
| 19 Otherwise, rsync is used. |
18 | 20 |
19 After a build is successfully uploaded a file is updated with the proper | 21 After a build is successfully uploaded a file is updated with the proper |
20 BINHOST version as well as the target board. This file is defined in GIT_FILE | 22 BINHOST version as well as the target board. This file is defined in GIT_FILE |
21 | 23 |
22 | 24 |
23 To read more about prebuilts/binhost binary packages please refer to: | 25 To read more about prebuilts/binhost binary packages please refer to: |
24 http://sites/chromeos/for-team-members/engineering/releng/prebuilt-binaries-for-streamlining-the-build-process | 26 http://sites/chromeos/for-team-members/engineering/releng/prebuilt-binaries-for-streamlining-the-build-process |
25 | 27 |
26 | 28 |
27 Example of uploading prebuilt amd64 host files | 29 Example of uploading prebuilt amd64 host files to Google Storage: |
28 ./prebuilt.py -p /b/cbuild/build -s -u gs://chromeos-prebuilt | 30 ./prebuilt.py -p /b/cbuild/build -s -u gs://chromeos-prebuilt |
29 | 31 |
30 Example of uploading x86-dogfood binhosts | 32 Example of uploading x86-dogfood binhosts to Google Storage: |
31 ./prebuilt.py -b x86-dogfood -p /b/cbuild/build/ -u gs://chromeos-prebuilt -g | 33 ./prebuilt.py -b x86-dogfood -p /b/cbuild/build/ -u gs://chromeos-prebuilt -g |
| 34 |
| 35 Example of uploading prebuilt amd64 host files using rsync: |
| 36 ./prebuilt.py -p /b/cbuild/build -s -u codf30.jail:/tmp |
32 """ | 37 """ |
33 | 38 |
34 # as per http://crosbug.com/5855 always filter the below packages | 39 # as per http://crosbug.com/5855 always filter the below packages |
35 _FILTER_PACKAGES = set() | 40 _FILTER_PACKAGES = set() |
36 _RETRIES = 3 | 41 _RETRIES = 3 |
37 _GSUTIL_BIN = '/b/third_party/gsutil/gsutil' | 42 _GSUTIL_BIN = '/b/third_party/gsutil/gsutil' |
38 _HOST_PACKAGES_PATH = 'chroot/var/lib/portage/pkgs' | 43 _HOST_PACKAGES_PATH = 'chroot/var/lib/portage/pkgs' |
39 _HOST_TARGET = 'amd64' | 44 _HOST_TARGET = 'amd64' |
40 _BOARD_PATH = 'chroot/build/%(board)s' | 45 _BOARD_PATH = 'chroot/build/%(board)s' |
41 _BOTO_CONFIG = '/home/chrome-bot/external-boto' | 46 _BOTO_CONFIG = '/home/chrome-bot/external-boto' |
42 # board/board-target/version' | 47 # board/board-target/version/packages/' |
43 _GS_BOARD_PATH = 'board/%(board)s/%(version)s/' | 48 _REL_BOARD_PATH = 'board/%(board)s/%(version)s/packages' |
44 # We only support amd64 right now | 49 # host/host-target/version/packages/' |
45 _GS_HOST_PATH = 'host/%s' % _HOST_TARGET | 50 _REL_HOST_PATH = 'host/%(target)s/%(version)s/packages' |
46 # Private overlays to look at for builds to filter | 51 # Private overlays to look at for builds to filter |
47 # relative to build path | 52 # relative to build path |
48 _PRIVATE_OVERLAY_DIR = 'src/private-overlays' | 53 _PRIVATE_OVERLAY_DIR = 'src/private-overlays' |
49 _BINHOST_BASE_DIR = 'src/overlays' | 54 _BINHOST_BASE_DIR = 'src/overlays' |
50 #_BINHOST_BASE_URL = 'http://commondatastorage.googleapis.com/chromeos-prebuilt' | 55 #_BINHOST_BASE_URL = 'http://commondatastorage.googleapis.com/chromeos-prebuilt' |
51 _BINHOST_BASE_URL = 'http://gsdview.appspot.com/chromeos-prebuilt' | 56 _BINHOST_BASE_URL = 'http://gsdview.appspot.com/chromeos-prebuilt' |
52 _PREBUILT_BASE_DIR = 'src/third_party/chromiumos-overlay/chromeos/config/' | 57 _PREBUILT_BASE_DIR = 'src/third_party/chromiumos-overlay/chromeos/config/' |
53 # Created in the event of new host targets becoming available | 58 # Created in the event of new host targets becoming available |
54 _PREBUILT_MAKE_CONF = {'amd64': os.path.join(_PREBUILT_BASE_DIR, | 59 _PREBUILT_MAKE_CONF = {'amd64': os.path.join(_PREBUILT_BASE_DIR, |
55 'make.conf.amd64-host')} | 60 'make.conf.amd64-host')} |
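_REL_BOARD_PATH and _REL_HOST_PATH are %-interpolation templates for the path suffix placed under the upload location. An illustration of the suffixes they produce (the board and version values below are examples only):

    _REL_BOARD_PATH = 'board/%(board)s/%(version)s/packages'
    _REL_HOST_PATH = 'host/%(target)s/%(version)s/packages'

    print(_REL_BOARD_PATH % {'board': 'x86-dogfood', 'version': 'chrome-bot-1234'})
    # board/x86-dogfood/chrome-bot-1234/packages
    print(_REL_HOST_PATH % {'target': 'amd64', 'version': 'chrome-bot-1234'})
    # host/amd64/chrome-bot-1234/packages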
(...skipping 186 matching lines...)
242 filtered_packages.write("".join(section)) | 247 filtered_packages.write("".join(section)) |
243 packages_file.close() | 248 packages_file.close() |
244 | 249 |
245 # Flush contents to disk. | 250 # Flush contents to disk. |
246 filtered_packages.flush() | 251 filtered_packages.flush() |
247 filtered_packages.seek(0) | 252 filtered_packages.seek(0) |
248 | 253 |
249 return filtered_packages | 254 return filtered_packages |
250 | 255 |
251 | 256 |
| 257 def _RetryRun(cmd, print_cmd=True, shell=False): |
| 258 """Run the specified command, retrying if necessary. |
| 259 |
| 260 Args: |
| 261 cmd: The command to run. |
| 262 print_cmd: Whether to print out the cmd. |
| 263 shell: Whether to treat the command as a shell. |
| 264 |
| 265 Returns: |
| 266 True if the command succeeded. Otherwise, returns False. |
| 267 """ |
| 268 |
| 269 # TODO(scottz): port to use _Run or similar when it is available in |
| 270 # cros_build_lib. |
| 271 for attempt in range(_RETRIES): |
| 272 try: |
| 273 output = cros_build_lib.RunCommand(cmd, print_cmd=print_cmd, shell=shell) |
| 274 return True |
| 275 except cros_build_lib.RunCommandError: |
| 276 print 'Failed to run %s' % cmd |
| 277 else: |
| 278 print 'Retry failed run %s, giving up' % cmd |
| 279 return False |
| 280 |
| 281 |
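The new _RetryRun helper simply reissues the command a fixed number of times before giving up. A standalone sketch of the same pattern, substituting subprocess for cros_build_lib.RunCommand so it can run outside a Chromium OS checkout (that substitution is an assumption of the sketch, not how the script works):

    import subprocess

    _RETRIES = 3

    def retry_run(cmd, shell=False):
        """Run cmd up to _RETRIES times; return True on the first success."""
        for attempt in range(_RETRIES):
            try:
                subprocess.check_call(cmd, shell=shell)
                return True
            except subprocess.CalledProcessError:
                print('Failed to run %s (attempt %d)' % (cmd, attempt + 1))
        print('Retry failed run %s, giving up' % cmd)
        return False

    retry_run('true', shell=True)   # succeeds on the first attempt
    retry_run('false', shell=True)  # exhausts all retries, returns False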
252 def _GsUpload(args): | 282 def _GsUpload(args): |
253 """Upload to GS bucket. | 283 """Upload to GS bucket. |
254 | 284 |
255 Args: | 285 Args: |
256 args: a tuple of two arguments that contains local_file and remote_file. | 286 args: a tuple of two arguments that contains local_file and remote_file. |
257 | 287 |
258 Returns: | 288 Returns: |
259 Return the arg tuple of two if the upload failed | 289 Return the arg tuple of two if the upload failed |
260 """ | 290 """ |
261 (local_file, remote_file) = args | 291 (local_file, remote_file) = args |
262 if ShouldFilterPackage(local_file): | 292 if ShouldFilterPackage(local_file): |
263 return | 293 return |
264 | 294 |
265 if local_file.endswith("/Packages"): | 295 if local_file.endswith("/Packages"): |
266 filtered_packages_file = FilterPackagesFile(local_file) | 296 filtered_packages_file = FilterPackagesFile(local_file) |
267 local_file = filtered_packages_file.name | 297 local_file = filtered_packages_file.name |
268 | 298 |
269 cmd = '%s cp -a public-read %s %s' % (_GSUTIL_BIN, local_file, remote_file) | 299 cmd = '%s cp -a public-read %s %s' % (_GSUTIL_BIN, local_file, remote_file) |
270 # TODO(scottz): port to use _Run or similar when it is available in | 300 if not _RetryRun(cmd, print_cmd=False, shell=True): |
271 # cros_build_lib. | 301 return (local_file, remote_file) |
272 for attempt in range(_RETRIES): | |
273 try: | |
274 output = cros_build_lib.RunCommand(cmd, print_cmd=False, shell=True) | |
275 break | |
276 except cros_build_lib.RunCommandError: | |
277 print 'Failed to sync %s -> %s, retrying' % (local_file, remote_file) | |
278 else: | |
279 # TODO(scottz): potentially return what failed so we can do something with | |
280 # with it but for now just print an error. | |
281 print 'Retry failed uploading %s -> %s, giving up' % (local_file, | |
282 remote_file) | |
283 return args | |
284 | 302 |
285 | 303 |
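For each file that survives filtering, _GsUpload shells out to gsutil with a public-read ACL. The command string it builds looks like this (the file names below are hypothetical; nothing is executed here):

    _GSUTIL_BIN = '/b/third_party/gsutil/gsutil'
    local_file = '/b/cbuild/build/chroot/var/lib/portage/pkgs/Packages'
    remote_file = 'gs://chromeos-prebuilt/host/amd64/chrome-bot-1234/packages/Packages'
    print('%s cp -a public-read %s %s' % (_GSUTIL_BIN, local_file, remote_file))
    # /b/third_party/gsutil/gsutil cp -a public-read .../pkgs/Packages gs://.../packages/Packages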
286 def RemoteUpload(files, pool=10): | 304 def RemoteUpload(files, pool=10): |
287 """Upload to google storage. | 305 """Upload to google storage. |
288 | 306 |
289 Create a pool of processes and call _GsUpload with the proper arguments. | 307 Create a pool of processes and call _GsUpload with the proper arguments. |
290 | 308 |
291 Args: | 309 Args: |
292 files: dictionary with keys to local files and values to remote path. | 310 files: dictionary with keys to local files and values to remote path. |
293 pool: integer of maximum processes to have at the same time. | 311 pool: integer of maximum processes to have at the same time. |
294 | 312 |
295 Returns: | 313 Returns: |
296 Return a set of tuple arguments of the failed uploads | 314 Return a set of tuple arguments of the failed uploads |
297 """ | 315 """ |
298 # TODO(scottz) port this to use _RunManyParallel when it is available in | 316 # TODO(scottz) port this to use _RunManyParallel when it is available in |
299 # cros_build_lib | 317 # cros_build_lib |
300 pool = multiprocessing.Pool(processes=pool) | 318 pool = multiprocessing.Pool(processes=pool) |
301 workers = [] | 319 workers = [] |
302 for local_file, remote_path in files.iteritems(): | 320 for local_file, remote_path in files.iteritems(): |
303 workers.append((local_file, remote_path)) | 321 workers.append((local_file, remote_path)) |
304 | 322 |
305 result = pool.map_async(_GsUpload, workers, chunksize=1) | 323 result = pool.map_async(_GsUpload, workers, chunksize=1) |
306 while True: | 324 while True: |
307 try: | 325 try: |
308 return set(result.get(60*60)) | 326 return set(result.get(60*60)) |
309 except multiprocessing.TimeoutError: | 327 except multiprocessing.TimeoutError: |
310 pass | 328 pass |
311 | 329 |
312 | 330 |
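RemoteUpload fans the (local, remote) pairs out to a multiprocessing pool and polls map_async with a one-hour timeout instead of blocking indefinitely. A reduced sketch of that pattern, with a stand-in worker in place of _GsUpload (illustration only):

    import multiprocessing

    def _fake_upload(args):
        local_file, remote_path = args
        return None  # None mirrors _GsUpload's return value on success

    def upload_all(files, pool_size=10):
        pool = multiprocessing.Pool(processes=pool_size)
        result = pool.map_async(_fake_upload, list(files.items()), chunksize=1)
        while True:
            try:
                # Poll with a timeout so the parent stays responsive to signals.
                return set(result.get(60 * 60))
            except multiprocessing.TimeoutError:
                pass

    if __name__ == '__main__':
        print(upload_all({'/tmp/a.tbz2': 'gs://bucket/a.tbz2'}))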
313 def GenerateUploadDict(local_path, gs_path, strip_str): | 331 def GenerateUploadDict(local_path, gs_path): |
314 """Build a dictionary of local remote file key pairs for gsutil to upload. | 332 """Build a dictionary of local remote file key pairs for gsutil to upload. |
315 | 333 |
316 Args: | 334 Args: |
317 local_path: A path to the file on the local hard drive. | 335 local_path: A path to the file on the local hard drive. |
318 gs_path: Path to upload in Google Storage. | 336 gs_path: Path to upload in Google Storage. |
319 strip_str: String to remove from the local_path so that the relative | |
320 file path can be tacked on to the gs_path. | |
321 | 337 |
322 Returns: | 338 Returns: |
323 Returns a dictionary of file path/gs_dest_path pairs | 339 Returns a dictionary of file path/gs_dest_path pairs |
324 """ | 340 """ |
325 files_to_sync = cros_build_lib.ListFiles(local_path) | 341 files_to_sync = cros_build_lib.ListFiles(local_path) |
326 upload_files = {} | 342 upload_files = {} |
327 for file_path in files_to_sync: | 343 for file_path in files_to_sync: |
328 filename = file_path.replace(strip_str, '').lstrip('/') | 344 filename = file_path.replace(local_path, '').lstrip('/') |
329 gs_file_path = os.path.join(gs_path, filename) | 345 gs_file_path = os.path.join(gs_path, filename) |
330 upload_files[file_path] = gs_file_path | 346 upload_files[file_path] = gs_file_path |
331 | 347 |
332 return upload_files | 348 return upload_files |
333 | 349 |
334 | 350 |
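GenerateUploadDict builds the local-to-remote mapping by stripping the local prefix from each file path and joining the remainder onto the remote location. A one-file illustration with hypothetical paths:

    import os

    local_path = '/b/cbuild/build/chroot/var/lib/portage/pkgs'
    remote_location = 'gs://chromeos-prebuilt/host/amd64/chrome-bot-1234/packages'
    file_path = local_path + '/dev-lang/python-2.6.4.tbz2'

    filename = file_path.replace(local_path, '').lstrip('/')
    print(os.path.join(remote_location, filename))
    # gs://chromeos-prebuilt/host/amd64/chrome-bot-1234/packages/dev-lang/python-2.6.4.tbz2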
335 def DetermineMakeConfFile(target): | 351 def DetermineMakeConfFile(target): |
336 """Determine the make.conf file that needs to be updated for prebuilts. | 352 """Determine the make.conf file that needs to be updated for prebuilts. |
337 | 353 |
338 Args: | 354 Args: |
(...skipping 15 matching lines...)
354 make_path = os.path.join(_BINHOST_BASE_DIR, overlay_str, 'make.conf') | 370 make_path = os.path.join(_BINHOST_BASE_DIR, overlay_str, 'make.conf') |
355 elif re.match('.*?-\w+', target): | 371 elif re.match('.*?-\w+', target): |
356 overlay_str = 'overlay-%s' % target | 372 overlay_str = 'overlay-%s' % target |
357 make_path = os.path.join(_BINHOST_BASE_DIR, overlay_str, 'make.conf') | 373 make_path = os.path.join(_BINHOST_BASE_DIR, overlay_str, 'make.conf') |
358 else: | 374 else: |
359 raise UnknownBoardFormat('Unknown format: %s' % target) | 375 raise UnknownBoardFormat('Unknown format: %s' % target) |
360 | 376 |
361 return os.path.join(make_path) | 377 return os.path.join(make_path) |
362 | 378 |
363 | 379 |
364 def UploadPrebuilt(build_path, bucket, version, board=None, git_sync=False): | 380 def UploadPrebuilt(build_path, upload_location, version, binhost_base_url, |
| 381 board=None, git_sync=False): |
365 """Upload Host prebuilt files to Google Storage space. | 382 """Upload Host prebuilt files to Google Storage space. |
366 | 383 |
367 Args: | 384 Args: |
368 build_path: The path to the root of the chroot. | 385 build_path: The path to the root of the chroot. |
369 bucket: The Google Storage bucket to upload to. | 386 upload_location: The upload location. |
370 board: The board to upload to Google Storage; if this is None, upload | 387 board: The board to upload to Google Storage; if this is None, upload |
371 host packages. | 388 host packages. |
372 git_sync: If set, update make.conf of target to reference the latest | 389 git_sync: If set, update make.conf of target to reference the latest |
373 prebuilt packages generated here. | 390 prebuilt packages generated here. |
374 """ | 391 """ |
375 | 392 |
376 if not board: | 393 if not board: |
377 # We are uploading host packages | 394 # We are uploading host packages |
378 # TODO(scottz): eventually add support for different host_targets | 395 # TODO(scottz): eventually add support for different host_targets |
379 package_path = os.path.join(build_path, _HOST_PACKAGES_PATH) | 396 package_path = os.path.join(build_path, _HOST_PACKAGES_PATH) |
380 gs_path = os.path.join(bucket, _GS_HOST_PATH, version) | 397 url_suffix = _REL_HOST_PATH % {'version': version, 'target': _HOST_TARGET} |
381 strip_pattern = package_path | |
382 package_string = _HOST_TARGET | 398 package_string = _HOST_TARGET |
383 git_file = os.path.join(build_path, _PREBUILT_MAKE_CONF[_HOST_TARGET]) | 399 git_file = os.path.join(build_path, _PREBUILT_MAKE_CONF[_HOST_TARGET]) |
384 url_suffix = '%s/%s/' % (_GS_HOST_PATH, version) | |
385 else: | 400 else: |
386 board_path = os.path.join(build_path, _BOARD_PATH % {'board': board}) | 401 board_path = os.path.join(build_path, _BOARD_PATH % {'board': board}) |
387 package_path = os.path.join(board_path, 'packages') | 402 package_path = os.path.join(board_path, 'packages') |
388 package_string = board | 403 package_string = board |
389 strip_pattern = board_path | 404 url_suffix = _REL_BOARD_PATH % {'board': board, 'version': version} |
390 remote_board_path = _GS_BOARD_PATH % {'board': board, 'version': version} | |
391 gs_path = os.path.join(bucket, remote_board_path) | |
392 git_file = os.path.join(build_path, DetermineMakeConfFile(board)) | 405 git_file = os.path.join(build_path, DetermineMakeConfFile(board)) |
393 url_suffix = remote_board_path | 406 remote_location = os.path.join(upload_location, url_suffix) |
394 | 407 |
395 upload_files = GenerateUploadDict(package_path, gs_path, strip_pattern) | 408 if upload_location.startswith('gs://'): |
| 409 upload_files = GenerateUploadDict(package_path, remote_location) |
396 | 410 |
397 print 'Uploading %s' % package_string | 411 print 'Uploading %s' % package_string |
398 failed_uploads = RemoteUpload(upload_files) | 412 failed_uploads = RemoteUpload(upload_files) |
399 if len(failed_uploads) > 1 or (None not in failed_uploads): | 413 if len(failed_uploads) > 1 or (None not in failed_uploads): |
400 error_msg = ['%s -> %s\n' % args for args in failed_uploads] | 414 error_msg = ['%s -> %s\n' % args for args in failed_uploads] |
401 raise UploadFailed('Error uploading:\n%s' % error_msg) | 415 raise UploadFailed('Error uploading:\n%s' % error_msg) |
| 416 else: |
| 417 ssh_server, remote_path = remote_location.split(':', 1) |
| 418 cmds = ['ssh %s mkdir -p %s' % (ssh_server, remote_path), |
| 419 'rsync -av %s/ %s/' % (package_path, remote_location)] |
| 420 for cmd in cmds: |
| 421 if not _RetryRun(cmd, shell=True): |
| 422 raise UploadFailed('Could not run %s' % cmd) |
402 | 423 |
403 if git_sync: | 424 if git_sync: |
404 url_value = '%s/%s' % (_BINHOST_BASE_URL, url_suffix) | 425 url_value = '%s/%s/' % (binhost_base_url, url_suffix) |
405 RevGitFile(git_file, url_value) | 426 RevGitFile(git_file, url_value) |
406 | 427 |
407 | 428 |
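When the upload location is not a gs:// URL, UploadPrebuilt splits the host from the path and issues an ssh mkdir -p followed by an rsync. The two command strings it would build for the rsync example in the module docstring (the version value is illustrative; nothing is executed here):

    package_path = '/b/cbuild/build/chroot/var/lib/portage/pkgs'
    remote_location = 'codf30.jail:/tmp/host/amd64/chrome-bot-1234/packages'

    ssh_server, remote_path = remote_location.split(':', 1)
    print('ssh %s mkdir -p %s' % (ssh_server, remote_path))
    # ssh codf30.jail mkdir -p /tmp/host/amd64/chrome-bot-1234/packages
    print('rsync -av %s/ %s/' % (package_path, remote_location))
    # rsync -av /b/cbuild/build/chroot/var/lib/portage/pkgs/ codf30.jail:/tmp/host/amd64/chrome-bot-1234/packages/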
408 def usage(parser, msg): | 429 def usage(parser, msg): |
409 """Display usage message and parser help then exit with 1.""" | 430 """Display usage message and parser help then exit with 1.""" |
410 print >> sys.stderr, msg | 431 print >> sys.stderr, msg |
411 parser.print_help() | 432 parser.print_help() |
412 sys.exit(1) | 433 sys.exit(1) |
413 | 434 |
414 | 435 |
415 def main(): | 436 def main(): |
416 parser = optparse.OptionParser() | 437 parser = optparse.OptionParser() |
| 438 parser.add_option('-H', '--binhost-base-url', dest='binhost_base_url', |
| 439 default=_BINHOST_BASE_URL, |
| 440 help='Base URL to use for binhost in make.conf updates') |
417 parser.add_option('-b', '--board', dest='board', default=None, | 441 parser.add_option('-b', '--board', dest='board', default=None, |
418 help='Board type that was built on this machine') | 442 help='Board type that was built on this machine') |
419 parser.add_option('-p', '--build-path', dest='build_path', | 443 parser.add_option('-p', '--build-path', dest='build_path', |
420 help='Path to the chroot') | 444 help='Path to the chroot') |
421 parser.add_option('-s', '--sync-host', dest='sync_host', | 445 parser.add_option('-s', '--sync-host', dest='sync_host', |
422 default=False, action='store_true', | 446 default=False, action='store_true', |
423 help='Sync host prebuilts') | 447 help='Sync host prebuilts') |
424 parser.add_option('-g', '--git-sync', dest='git_sync', | 448 parser.add_option('-g', '--git-sync', dest='git_sync', |
425 default=False, action='store_true', | 449 default=False, action='store_true', |
426 help='Enable git version sync (This commits to a repo)') | 450 help='Enable git version sync (This commits to a repo)') |
427 parser.add_option('-u', '--upload', dest='upload', | 451 parser.add_option('-u', '--upload', dest='upload', |
428 default=None, | 452 default=None, |
429 help='Upload to GS bucket') | 453 help='Upload location') |
430 parser.add_option('-V', '--prepend-version', dest='prepend_version', | 454 parser.add_option('-V', '--prepend-version', dest='prepend_version', |
431 default=None, | 455 default=None, |
432 help='Add an identifier to the front of the version') | 456 help='Add an identifier to the front of the version') |
433 parser.add_option('-f', '--filters', dest='filters', action='store_true', | 457 parser.add_option('-f', '--filters', dest='filters', action='store_true', |
434 default=False, | 458 default=False, |
435 help='Turn on filtering of private ebuild packages') | 459 help='Turn on filtering of private ebuild packages') |
436 | 460 |
437 options, args = parser.parse_args() | 461 options, args = parser.parse_args() |
438 # Setup boto environment for gsutil to use | 462 # Setup boto environment for gsutil to use |
439 os.environ['BOTO_CONFIG'] = _BOTO_CONFIG | 463 os.environ['BOTO_CONFIG'] = _BOTO_CONFIG |
440 if not options.build_path: | 464 if not options.build_path: |
441 usage(parser, 'Error: you need to provide a chroot path') | 465 usage(parser, 'Error: you need to provide a chroot path') |
442 | 466 |
443 if not options.upload: | 467 if not options.upload: |
444 usage(parser, 'Error: you need to provide a gsutil upload bucket -u') | 468 usage(parser, 'Error: you need to provide an upload location using -u') |
445 | 469 |
446 if options.filters: | 470 if options.filters: |
| 471 # TODO(davidjames): It might be nice to be able to filter private ebuilds |
| 472 # from rsync uploads as well, some day. But for now it's not needed. |
| 473 if not options.upload.startswith("gs://"): |
| 474 usage(parser, 'Error: filtering only works with gs:// paths') |
447 LoadPrivateFilters(options.build_path) | 475 LoadPrivateFilters(options.build_path) |
448 | 476 |
449 version = GetVersion() | 477 version = GetVersion() |
450 if options.prepend_version: | 478 if options.prepend_version: |
451 version = '%s-%s' % (options.prepend_version, version) | 479 version = '%s-%s' % (options.prepend_version, version) |
452 | 480 |
453 if options.sync_host: | 481 if options.sync_host: |
454 UploadPrebuilt(options.build_path, options.upload, version, | 482 UploadPrebuilt(options.build_path, options.upload, version, |
455 git_sync=options.git_sync) | 483 options.binhost_base_url, git_sync=options.git_sync) |
456 | 484 |
457 if options.board: | 485 if options.board: |
458 UploadPrebuilt(options.build_path, options.upload, version, | 486 UploadPrebuilt(options.build_path, options.upload, version, |
459 board=options.board, git_sync=options.git_sync) | 487 options.binhost_base_url, board=options.board, |
| 488 git_sync=options.git_sync) |
460 | 489 |
461 | 490 |
462 if __name__ == '__main__': | 491 if __name__ == '__main__': |
463 main() | 492 main() |