Chromium Code Reviews

Unified Diff: prebuilt.py

Issue 4969003: Update cbuildbot.py to upload prebuilts from preflight buildbot. (Closed) Base URL: ssh://git@gitrw.chromium.org:9222/crosutils.git@master
Patch Set: Add more cbuildbot unit tests. Created 10 years, 1 month ago
 #!/usr/bin/python
 # Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import datetime
 import multiprocessing
 import optparse
 import os
 import re
 import sys
 import tempfile
 import time
 
 from chromite.lib import cros_build_lib
+from chromite.lib.binpkg import GrabRemotePackageIndex, PackageIndex
 """
 This script is used to upload host prebuilts as well as board BINHOSTS.
 
 If the URL starts with 'gs://', we upload using gsutil to Google Storage.
 Otherwise, rsync is used.
 
 After a build is successfully uploaded a file is updated with the proper
 BINHOST version as well as the target board. This file is defined in GIT_FILE
 
 
(...skipping 84 matching lines...)
       file_lines.append(keyval_str % {'key': key, 'value': value})
     else:
       file_lines.append(keyval_str % {'key': file_var, 'value': file_val})
 
   if not found:
     file_lines.append(keyval_str % {'key': key, 'value': value})
 
   file_fh.close()
   # write out new file
   new_file_fh = open(filename, 'w')
-  new_file_fh.write('\n'.join(file_lines))
+  new_file_fh.write('\n'.join(file_lines) + '\n')
   new_file_fh.close()
 
 
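For context, UpdateLocalFile (its opening lines are in the elided region above) rewrites one key=value entry in a config file, appending the entry if the key is absent. A minimal sketch of a call, with a hypothetical make.conf path and URL; the key argument is what this CL threads through:

    # Hypothetical values; rewrites the PORTAGE_BINHOST= line in place,
    # or appends it if the key is not present.
    UpdateLocalFile('/path/to/overlay/make.conf',
                    'http://commondatastorage.googleapis.com/chromeos-prebuilt/host/20101130/',
                    'PORTAGE_BINHOST')
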
 def RevGitPushWithRetry(retries=5):
   """Repo sync and then push git changes in flight.
 
   Args:
     retries: The number of times to retry before giving up, default: 5
 
   Raises:
     GitPushFailed if push was unsuccessful after retries
   """
   for retry in range(1, retries+1):
     try:
       cros_build_lib.RunCommand('repo sync .', shell=True)
       cros_build_lib.RunCommand('git push', shell=True)
       break
     except cros_build_lib.RunCommandError:
       if retry < retries:
         print 'Error pushing changes trying again (%s/%s)' % (retry, retries)
         time.sleep(5*retry)
       else:
         raise GitPushFailed('Failed to push change after %s retries' % retries)
 
 
-def RevGitFile(filename, value, retries=5):
+def RevGitFile(filename, value, retries=5, key='PORTAGE_BINHOST',
+               skip_push=False):
   """Update and push the git file.
 
   Args:
     filename: file to modify that is in a git repo already
     value: string representing the version of the prebuilt that has been
       uploaded.
     retries: The number of times to retry before giving up, default: 5
+    key: The variable key to update in the git file.
+      (Default: PORTAGE_BINHOST)
   """
   prebuilt_branch = 'prebuilt_branch'
   old_cwd = os.getcwd()
   os.chdir(os.path.dirname(filename))
 
   cros_build_lib.RunCommand('repo sync .', shell=True)
   cros_build_lib.RunCommand('repo start %s .' % prebuilt_branch, shell=True)
   git_ssh_config_cmd = (
       'git config url.ssh://git@gitrw.chromium.org:9222.pushinsteadof '
       'http://git.chromium.org/git')
   cros_build_lib.RunCommand(git_ssh_config_cmd, shell=True)
-  description = 'Update PORTAGE_BINHOST="%s" in %s' % (value, filename)
+  description = 'Update %s="%s" in %s' % (key, value, filename)
   print description
   try:
-    UpdateLocalFile(filename, value)
+    UpdateLocalFile(filename, value, key)
     cros_build_lib.RunCommand('git config push.default tracking', shell=True)
     cros_build_lib.RunCommand('git commit -am "%s"' % description, shell=True)
-    RevGitPushWithRetry(retries)
+    if not skip_push:
+      RevGitPushWithRetry(retries)
   finally:
-    cros_build_lib.RunCommand('repo abandon %s .' % prebuilt_branch, shell=True)
+    if not skip_push:
+      cros_build_lib.RunCommand('repo abandon %s .' % prebuilt_branch,
+                                shell=True)
     os.chdir(old_cwd)
 
 
 def GetVersion():
   """Get the version to put in LATEST and update the git version with."""
   return datetime.datetime.now().strftime('%d.%m.%y.%H%M%S')
 
 
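GetVersion uses a day-first timestamp. A quick worked example of the format string:

    # '%d.%m.%y.%H%M%S' renders 2010-11-30 16:05:09 as '30.11.10.160509':
    datetime.datetime(2010, 11, 30, 16, 5, 9).strftime('%d.%m.%y.%H%M%S')
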
 def LoadPrivateFilters(build_path):
   """Load private filters based on ebuilds found under _PRIVATE_OVERLAY_DIR.
(...skipping 34 matching lines...)
     False otherwise.
   """
   for name in _FILTER_PACKAGES:
     if name in file_path:
       print 'FILTERING %s' % file_path
       return True
 
   return False
 
 
-def _ShouldFilterPackageFileSection(section):
-  """Return whether an section in the package file should be filtered out.
-
-  Args:
-    section: The section, as a list of strings.
+def FilterPackagesIndex(pkgindex):
+  """Remove filtered packages from pkgindex.
+
+  Args:
+    pkgindex: The PackageIndex object.
+  """
+
+  filtered = [p for p in pkgindex.packages if not ShouldFilterPackage(p['CPV'])]
+  if filtered != pkgindex.packages:
+    pkgindex.modified = True
+    pkgindex.packages = filtered
+
+
+def ReadLocalPackageIndex(package_path):
+  """Read a local packages file from disk into a PackageIndex() object.
+
+  Args:
+    package_path: Directory containing Packages file.
+  """
+  packages_file = file(os.path.join(package_path, 'Packages'))
+  pkgindex = PackageIndex()
+  pkgindex.Read(packages_file)
+  packages_file.close()
+  return pkgindex
+
+
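For reference, the Packages file read here is portage's binary-package index: a header stanza, then blank-line-separated stanzas per package. A sketch of the layout, inferred from the keys this CL uses (URI in the header; CPV, SHA1 and PATH per package; the exact field set is whatever binpkg.PackageIndex emits, and all values here are illustrative):

    URI: http://commondatastorage.googleapis.com/chromeos-prebuilt/host/20101130

    CPV: app-shells/bash-4.0
    SHA1: f572d396fae9206628714fb2ce00f72e94f2258f
    PATH: host/20101130/app-shells/bash-4.0.tbz2
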
+def RebasePackageIndex(pkgindex, binhost_base_url, path_prefix):
+  """Update package index to point at specified location.
+
+  Args:
+    pkgindex: PackageIndex object to update.
+    binhost_base_url: The base URI to use for the packages file.
+    path_prefix: The path prefix to use prior to each package.
+  """
+  pkgindex.header['URI'] = binhost_base_url
+  for pkg in pkgindex.packages:
+    pkg['PATH'] = os.path.join(path_prefix, pkg['CPV'] + '.tbz2')
+
+
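A runnable sketch of the rebase, using a stand-in for the index object (the real code uses chromite.lib.binpkg.PackageIndex; all values illustrative):

    class _FakeIndex(object):
      header = {}
      packages = [{'CPV': 'app-shells/bash-4.0'}]

    idx = _FakeIndex()
    RebasePackageIndex(idx, 'gs://chromeos-prebuilt', 'host/20101130')
    print idx.header['URI']        # gs://chromeos-prebuilt
    print idx.packages[0]['PATH']  # host/20101130/app-shells/bash-4.0.tbz2
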
+def WritePackageIndex(pkgindex):
+  """Write pkgindex to a temporary file.
+
+  Args:
+    pkgindex: The PackageIndex object.
 
   Returns:
-    True if the section should be excluded.
+    A temporary file containing the packages from pkgindex.
   """
 
-  for line in section:
-    if line.startswith("CPV: "):
-      package = line.replace("CPV: ", "").rstrip()
-      if ShouldFilterPackage(package):
-        return True
-  else:
-    return False
+  f = tempfile.NamedTemporaryFile()
+  pkgindex.Write(f)
+  f.flush()
+  f.seek(0)
+  return f
 
 
-def FilterPackagesFile(packages_filename):
-  """Read a portage Packages file and filter out private packages.
-
-  The new, filtered packages file is written to a temporary file.
+def GrabPackageURLs(pkgindex):
+  """Grab the SHA1s and URLs from the specified package index.
 
   Args:
-    packages_filename: The filename of the Packages file.
+    pkgindex: A package index.
 
   Returns:
-    filtered_packages: A filtered Packages file, as a NamedTemporaryFile.
+    A dict mapping SHA1 hashes to URLs.
   """
-
-  packages_file = open(packages_filename)
-  filtered_packages = tempfile.NamedTemporaryFile()
-  section = []
-  for line in packages_file:
-    if line == "\n":
-      if not _ShouldFilterPackageFileSection(section):
-        # Looks like this section doesn't contain a private package. Write it
-        # out.
-        filtered_packages.write("".join(section))
-
-      # Start next section.
-      section = []
-
-    section.append(line)
-  else:
-    if not _ShouldFilterPackageFileSection(section):
-      filtered_packages.write("".join(section))
-  packages_file.close()
-
-  # Flush contents to disk.
-  filtered_packages.flush()
-  filtered_packages.seek(0)
-
-  return filtered_packages
+  db = {}
+  base_uri = pkgindex.header['URI']
+  for package in pkgindex.packages:
+    cpv, sha1 = package['CPV'], package['SHA1']
+    path = package.get('PATH', cpv + '.tbz2')
+    db[sha1] = os.path.join(base_uri, path)
+  return db
 
 
-def _RetryRun(cmd, print_cmd=True, shell=False):
+def FindDuplicates(pkgindex, dupdb):
+  """Find duplicates in the specified package index.
+
+  Args:
+    pkgindex: The PackageIndex object.
+    dupdb: A dictionary mapping SHA1's to URLs, as output by GrabPackageURLs.
+
+  Returns:
+    orig: Original packages from pkgindex.
+    dups: A list of tuples (pkg, uri) where pkg is a duplicated package, and
+      uri is where the package can be found.
+  """
+  orig, dups = [], []
+  origdb = GrabPackageURLs(pkgindex)
+  for pkg in pkgindex.packages:
+    sha1 = pkg['SHA1']
+    uri = dupdb.get(sha1)
+    if uri:
+      dups.append((pkg, uri))
+    else:
+      orig.append(pkg)
+  return orig, dups
+
+
+def ResolveDuplicates(pkgindex, dups):
+  """Remove duplicates from the specified package index.
+
+  Args:
+    pkgindex: The PackageIndex object.
+    dups: A list of duplicates, as returned by FindDuplicates.
+  """
+  prefix = pkgindex.header['URI']
+  if dups:
+    pkgindex.modified = True
+  for pkg, uri in dups:
+    assert uri.startswith(prefix)
+    pkg['PATH'] = uri.replace(prefix, '').lstrip('/')
 
 
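To see the dedup flow end to end, a sketch with made-up hashes and paths: FindDuplicates splits the index into packages that still need uploading (orig) and packages whose SHA1 already exists on a previous binhost (dups); ResolveDuplicates then rewrites each duplicate's PATH so the index points at the already-uploaded copy.

    class _FakeIndex(object):
      header = {'URI': 'gs://chromeos-prebuilt'}
      packages = [{'CPV': 'sys-apps/dbus-1.2', 'SHA1': 'aaaa'}]

    idx = _FakeIndex()
    old_urls = {'aaaa':
        'gs://chromeos-prebuilt/board/x86/20101129/sys-apps/dbus-1.2.tbz2'}
    orig, dups = FindDuplicates(idx, old_urls)   # orig == [], one dup
    ResolveDuplicates(idx, dups)
    print idx.packages[0]['PATH']  # board/x86/20101129/sys-apps/dbus-1.2.tbz2

Only orig is handed to the upload step below, so unchanged binaries are never re-uploaded.
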
+def _RetryRun(cmd, print_cmd=True, shell=False, cwd=None):
   """Run the specified command, retrying if necessary.
 
   Args:
     cmd: The command to run.
     print_cmd: Whether to print out the cmd.
     shell: Whether to treat the command as a shell.
 
   Returns:
     True if the command succeeded. Otherwise, returns False.
   """
 
   # TODO(scottz): port to use _Run or similar when it is available in
   # cros_build_lib.
   for attempt in range(_RETRIES):
     try:
-      output = cros_build_lib.RunCommand(cmd, print_cmd=print_cmd, shell=shell)
+      output = cros_build_lib.RunCommand(cmd, print_cmd=print_cmd, shell=shell,
+                                         cwd=cwd)
       return True
     except cros_build_lib.RunCommandError:
       print 'Failed to run %s' % cmd
   else:
     print 'Retry failed run %s, giving up' % cmd
     return False
 
 
 def _GsUpload(args):
   """Upload to GS bucket.
 
   Args:
     args: a tuple of two arguments that contains local_file and remote_file.
 
   Returns:
     Return the arg tuple of two if the upload failed
   """
   (local_file, remote_file) = args
-  if ShouldFilterPackage(local_file):
-    return
-
-  if local_file.endswith("/Packages"):
-    filtered_packages_file = FilterPackagesFile(local_file)
-    local_file = filtered_packages_file.name
 
   cmd = '%s cp -a public-read %s %s' % (_GSUTIL_BIN, local_file, remote_file)
   if not _RetryRun(cmd, print_cmd=False, shell=True):
     return (local_file, remote_file)
 
 
 def RemoteUpload(files, pool=10):
   """Upload to google storage.
 
   Create a pool of processes and call _GsUpload with the proper arguments.
(...skipping 13 matching lines...)
     workers.append((local_file, remote_path))
 
   result = pool.map_async(_GsUpload, workers, chunksize=1)
   while True:
     try:
       return set(result.get(60*60))
     except multiprocessing.TimeoutError:
       pass
 
 
-def GenerateUploadDict(local_path, gs_path):
-  """Build a dictionary of local remote file key pairs for gsutil to upload.
+def GenerateUploadDict(base_local_path, base_remote_path, pkgs):
+  """Build a dictionary of local remote file key pairs to upload.
 
   Args:
-    local_path: A path to the file on the local hard drive.
-    gs_path: Path to upload in Google Storage.
+    base_local_path: The base path to the files on the local hard drive.
+    base_remote_path: The base path to the remote paths.
+    pkgs: The packages to upload.
 
   Returns:
-    Returns a dictionary of file path/gs_dest_path pairs
+    Returns a dictionary of local_path/remote_path pairs
   """
-  files_to_sync = cros_build_lib.ListFiles(local_path)
   upload_files = {}
-  for file_path in files_to_sync:
-    filename = file_path.replace(local_path, '').lstrip('/')
-    gs_file_path = os.path.join(gs_path, filename)
-    upload_files[file_path] = gs_file_path
+  for pkg in pkgs:
+    suffix = pkg['CPV'] + '.tbz2'
+    local_path = os.path.join(base_local_path, suffix)
+    assert os.path.exists(local_path)
+    remote_path = os.path.join(base_remote_path, suffix)
+    upload_files[local_path] = remote_path
 
   return upload_files
 
 
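For example, with illustrative paths, one package in pkgs yields one local-to-remote pair (the local .tbz2 must already exist, per the assert):

    # GenerateUploadDict('/build/x86-generic/packages',
    #                    'gs://chromeos-prebuilt/board/x86-generic/20101130',
    #                    [{'CPV': 'app-shells/bash-4.0'}])
    # returns:
    # {'/build/x86-generic/packages/app-shells/bash-4.0.tbz2':
    #  'gs://chromeos-prebuilt/board/x86-generic/20101130/app-shells/bash-4.0.tbz2'}
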
 def DetermineMakeConfFile(target):
   """Determine the make.conf file that needs to be updated for prebuilts.
 
   Args:
     target: String representation of the board. This includes host and board
       targets
(...skipping 14 matching lines...)
   elif re.match('.*?-\w+', target):
     overlay_str = 'overlay-%s' % target
     make_path = os.path.join(_BINHOST_BASE_DIR, overlay_str, 'make.conf')
   else:
     raise UnknownBoardFormat('Unknown format: %s' % target)
 
   return os.path.join(make_path)
 
 
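So for a typical board target the mapping is (the host branch lives in the elided lines above; the board name is illustrative):

    # DetermineMakeConfFile('x86-generic')
    #   -> os.path.join(_BINHOST_BASE_DIR, 'overlay-x86-generic', 'make.conf')
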
 def UploadPrebuilt(build_path, upload_location, version, binhost_base_url,
-                   board=None, git_sync=False, git_sync_retries=5):
+                   board=None, git_sync=False, git_sync_retries=5,
+                   key='PORTAGE_BINHOST', skip_push=False, old_urls={}):
   """Upload Host prebuilt files to Google Storage space.
 
   Args:
     build_path: The path to the root of the chroot.
     upload_location: The upload location.
-    board: The board to upload to Google Storage, if this is None upload
+    board: The board to upload to Google Storage. If this is None, upload
       host packages.
     git_sync: If set, update make.conf of target to reference the latest
-      prebuilt packages genereated here.
+      prebuilt packages generated here.
     git_sync_retries: How many times to retry pushing when updating git files.
       This helps avoid failures when multiple bots are modifying the same Repo.
       default: 5
+    key: The variable key to update in the git file. (Default: PORTAGE_BINHOST)
   """
 
   if not board:
     # We are uploading host packages
     # TODO(scottz): eventually add support for different host_targets
     package_path = os.path.join(build_path, _HOST_PACKAGES_PATH)
     url_suffix = _REL_HOST_PATH % {'version': version, 'target': _HOST_TARGET}
     package_string = _HOST_TARGET
     git_file = os.path.join(build_path, _PREBUILT_MAKE_CONF[_HOST_TARGET])
   else:
     board_path = os.path.join(build_path, _BOARD_PATH % {'board': board})
     package_path = os.path.join(board_path, 'packages')
     package_string = board
     url_suffix = _REL_BOARD_PATH % {'board': board, 'version': version}
     git_file = os.path.join(build_path, DetermineMakeConfFile(board))
   remote_location = os.path.join(upload_location, url_suffix)
 
+  # Process Packages file, removing duplicates and filtered packages.
+  pkgindex = ReadLocalPackageIndex(package_path)
+  RebasePackageIndex(pkgindex, binhost_base_url, url_suffix)
+  FilterPackagesIndex(pkgindex)
+  orig, dups = FindDuplicates(pkgindex, old_urls)
+  ResolveDuplicates(pkgindex, dups)
+
+  # Write Packages file.
+  tmp_packages_file = WritePackageIndex(pkgindex)
+
   if upload_location.startswith('gs://'):
-    upload_files = GenerateUploadDict(package_path, remote_location)
+    # Build list of files to upload.
+    upload_files = GenerateUploadDict(package_path, remote_location, orig)
+    remote_file = os.path.join(remote_location, 'Packages')
+    upload_files[tmp_packages_file.name] = remote_file
 
     print 'Uploading %s' % package_string
     failed_uploads = RemoteUpload(upload_files)
     if len(failed_uploads) > 1 or (None not in failed_uploads):
       error_msg = ['%s -> %s\n' % args for args in failed_uploads]
       raise UploadFailed('Error uploading:\n%s' % error_msg)
   else:
+    pkgs = ' '.join(p['CPV'] + '.tbz2' for p in orig)
     ssh_server, remote_path = remote_location.split(':', 1)
-    cmds = ['ssh %s mkdir -p %s' % (ssh_server, remote_path),
-            'rsync -av %s/ %s/' % (package_path, remote_location)]
+    d = { 'pkgindex': tmp_packages_file.name,
+          'pkgs': pkgs,
+          'remote_path': remote_path,
+          'remote_location': remote_location,
+          'ssh_server': ssh_server }
+    cmds = ['ssh %(ssh_server)s mkdir -p %(remote_path)s' % d,
+            'rsync -av %(pkgindex)s %(remote_location)s/Packages' % d]
+    if pkgs:
+      cmds.append('rsync -Rav %(pkgs)s %(remote_location)s/' % d)
     for cmd in cmds:
-      if not _RetryRun(cmd, shell=True):
+      if not _RetryRun(cmd, shell=True, cwd=package_path):
         raise UploadFailed('Could not run %s' % cmd)
 
   if git_sync:
     url_value = '%s/%s/' % (binhost_base_url, url_suffix)
-    RevGitFile(git_file, url_value, retries=git_sync_retries)
+    RevGitFile(git_file, url_value, retries=git_sync_retries, key=key,
+               skip_push=skip_push)
 
 
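In the rsync branch above, the %(...)s templates expand to commands like the following (hypothetical host and paths). Because _RetryRun now runs with cwd=package_path, the arguments to rsync -Rav are CPV-relative paths, and -R (--relative) recreates the category directories on the remote side:

    ssh prebuilt-host mkdir -p /var/www/prebuilt/board/x86-generic/20101130
    rsync -av /tmp/tmpPkgIdx prebuilt-host:/var/www/prebuilt/board/x86-generic/20101130/Packages
    rsync -Rav app-shells/bash-4.0.tbz2 prebuilt-host:/var/www/prebuilt/board/x86-generic/20101130/
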
 def usage(parser, msg):
   """Display usage message and parser help then exit with 1."""
   print >> sys.stderr, msg
   parser.print_help()
   sys.exit(1)
 
 
 def main():
   parser = optparse.OptionParser()
   parser.add_option('-H', '--binhost-base-url', dest='binhost_base_url',
                     default=_BINHOST_BASE_URL,
                     help='Base URL to use for binhost in make.conf updates')
+  parser.add_option('', '--previous-board-binhost-url', action='append',
+                    default=[], dest='previous_board_binhost_url',
+                    help='Previous board binhost URL')
+  parser.add_option('', '--previous-host-binhost-url', action='append',
+                    default=[], dest='previous_host_binhost_url',
+                    help='Previous host binhost URL')
   parser.add_option('-b', '--board', dest='board', default=None,
                     help='Board type that was built on this machine')
   parser.add_option('-p', '--build-path', dest='build_path',
                     help='Path to the chroot')
   parser.add_option('-s', '--sync-host', dest='sync_host',
                     default=False, action='store_true',
                     help='Sync host prebuilts')
+  parser.add_option('-n', '--skip-push', dest='skip_push',
+                    default=False, action='store_true',
+                    help='Don\'t push git changes.')
   parser.add_option('-g', '--git-sync', dest='git_sync',
                     default=False, action='store_true',
                     help='Enable git version sync (This commits to a repo)')
   parser.add_option('-u', '--upload', dest='upload',
                     default=None,
                     help='Upload location')
   parser.add_option('-V', '--prepend-version', dest='prepend_version',
                     default=None,
                     help='Add an identifier to the front of the version')
   parser.add_option('-f', '--filters', dest='filters', action='store_true',
                     default=False,
                     help='Turn on filtering of private ebuild packages')
+  parser.add_option('-k', '--key', dest='key',
+                    default='PORTAGE_BINHOST',
+                    help='Key to update in make.conf')
 
   options, args = parser.parse_args()
   # Setup boto environment for gsutil to use
   os.environ['BOTO_CONFIG'] = _BOTO_CONFIG
   if not options.build_path:
     usage(parser, 'Error: you need to provide a chroot path')
 
   if not options.upload:
     usage(parser, 'Error: you need to provide an upload location using -u')
 
   if options.filters:
-    # TODO(davidjames): It might be nice to be able to filter private ebuilds
-    # from rsync uploads as well, some day. But for now it's not needed.
-    if not options.upload.startswith("gs://"):
-      usage(parser, 'Error: filtering only works with gs:// paths')
     LoadPrivateFilters(options.build_path)
 
   version = GetVersion()
   if options.prepend_version:
     version = '%s-%s' % (options.prepend_version, version)
 
   if options.sync_host:
+    old_urls = {}
+    for url in options.previous_host_binhost_url:
+      old_pkgindex = GrabRemotePackageIndex(url)
+      old_pkgindex.header.setdefault('URI', url)
+      old_urls.update(GrabPackageURLs(old_pkgindex))
     UploadPrebuilt(options.build_path, options.upload, version,
-                   options.binhost_base_url, git_sync=options.git_sync)
+                   options.binhost_base_url, git_sync=options.git_sync,
+                   key=options.key, skip_push=options.skip_push,
+                   old_urls=old_urls)
 
   if options.board:
+    old_urls = {}
+    for url in options.previous_board_binhost_url:
+      old_pkgindex = GrabRemotePackageIndex(url)
+      old_pkgindex.header.setdefault('URI', url)
+      old_urls.update(GrabPackageURLs(old_pkgindex))
     UploadPrebuilt(options.build_path, options.upload, version,
-                   options.binhost_base_url, board=options.board,
-                   git_sync=options.git_sync)
+                   options.binhost_base_url, board=options.board,
+                   git_sync=options.git_sync, key=options.key,
+                   skip_push=options.skip_push, old_urls=old_urls)
 
 
 if __name__ == '__main__':
   main()
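Putting it together, a hypothetical preflight-buildbot invocation of the updated script (all values illustrative):

    ./prebuilt.py --build-path /b/chroot --board x86-generic \
        --upload gs://chromeos-prebuilt \
        --git-sync --key PORTAGE_BINHOST \
        --previous-board-binhost-url http://commondatastorage.googleapis.com/chromeos-prebuilt/board/x86-generic/20101129

Each --previous-*-binhost-url flag may be repeated; every URL contributes its SHA1-to-URL map to old_urls for deduplication.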