OLD | NEW |
---|---|
1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
2 # Copyright 2014 The Chromium Authors. All rights reserved. | 2 # Copyright 2014 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 """A utility script for downloading versioned Syzygy binaries.""" | 6 """A utility script for downloading versioned Syzygy binaries.""" |
7 | 7 |
8 import cStringIO | 8 import cStringIO |
9 import hashlib | 9 import hashlib |
10 import errno | 10 import errno |
(...skipping 232 matching lines...) | |
243 if os.path.exists(p) and _DirIsEmpty(p): | 243 if os.path.exists(p) and _DirIsEmpty(p): |
244 _LOGGER.debug('Deleting empty directory "%s".', p) | 244 _LOGGER.debug('Deleting empty directory "%s".', p) |
245 if not dry_run: | 245 if not dry_run: |
246 _RmTree(p) | 246 _RmTree(p) |
247 | 247 |
248 return deleted | 248 return deleted |
249 | 249 |
250 | 250 |
251 def _Download(url): | 251 def _Download(url): |
252 """Downloads the given URL and returns the contents as a string.""" | 252 """Downloads the given URL and returns the contents as a string.""" |
253 response = urllib2.urlopen(url) | 253 _LOGGER.debug('Downloading "%s".', url) |
254 if response.code != 200: | 254 retries = 3 |
255 raise RuntimeError('Failed to download "%s".' % url) | 255 while retries > 0: |
256 return response.read() | 256 code = 0 |
257 try: | |
258 response = urllib2.urlopen(url) | |
259 if response.code == 200: | |
260 return response.read() | |
261 code = response.code | |
262 except urllib2.HTTPError, e: | |
263 code = e.code | |
264 | |
265 _LOGGER.debug('Download failed with a %d response, retrying.', | |
266 code) | |
267 retries -= 1 | |
Nico 2015/07/10 15:04:39: Trying again immediately likely won't work. Try sl
chrisha 2015/07/14 17:51:38: Err, yup. Meant to do that.
| |
268 | |
269 raise RuntimeError('Failed to download "%s".' % url) | |
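The review comment above suggests sleeping between attempts rather than retrying immediately. A minimal sketch of that approach, assuming a linear back-off (the `_DownloadWithBackoff` name, the retry count, and the `delay_s` value are illustrative assumptions, not part of this change):

```python
import time
import urllib2


def _DownloadWithBackoff(url, retries=3, delay_s=5):
  """Downloads |url|, sleeping between failed attempts.

  Sketch of the retry-with-sleep approach suggested in the review; the
  retry count and linear back-off are assumptions.
  """
  for attempt in range(retries):
    try:
      response = urllib2.urlopen(url)
      if response.code == 200:
        return response.read()
    except urllib2.HTTPError:
      pass
    if attempt + 1 < retries:
      # Give a transient server error some time to clear before retrying.
      time.sleep(delay_s * (attempt + 1))
  raise RuntimeError('Failed to download "%s".' % url)
```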
257 | 270 |
258 | 271 |
259 def _InstallBinaries(options, deleted={}): | 272 def _InstallBinaries(options, deleted={}): |
260 """Installs Syzygy binaries. This assumes that the output directory has | 273 """Installs Syzygy binaries. This assumes that the output directory has |
261 already been cleaned, as it will refuse to overwrite existing files.""" | 274 already been cleaned, as it will refuse to overwrite existing files.""" |
262 contents = {} | 275 contents = {} |
263 state = { 'revision': options.revision, 'contents': contents } | 276 state = { 'revision': options.revision, 'contents': contents } |
264 archive_url = _SYZYGY_ARCHIVE_URL % { 'revision': options.revision } | 277 archive_url = _SYZYGY_ARCHIVE_URL % { 'revision': options.revision } |
265 if options.resources: | 278 if options.resources: |
266 resources = [(resource, resource, '', None) | 279 resources = [(resource, resource, '', None) |
267 for resource in options.resources] | 280 for resource in options.resources] |
268 else: | 281 else: |
269 resources = _RESOURCES | 282 resources = _RESOURCES |
270 for (base, name, subdir, filt) in resources: | 283 for (base, name, subdir, filt) in resources: |
271 # Create the output directory if it doesn't exist. | 284 # Create the output directory if it doesn't exist. |
272 fulldir = os.path.join(options.output_dir, subdir) | 285 fulldir = os.path.join(options.output_dir, subdir) |
273 if os.path.isfile(fulldir): | 286 if os.path.isfile(fulldir): |
274 raise Exception('File exists where a directory needs to be created: %s' % | 287 raise Exception('File exists where a directory needs to be created: %s' % |
275 fulldir) | 288 fulldir) |
276 if not os.path.exists(fulldir): | 289 if not os.path.exists(fulldir): |
277 _LOGGER.debug('Creating directory: %s', fulldir) | 290 _LOGGER.debug('Creating directory: %s', fulldir) |
278 if not options.dry_run: | 291 if not options.dry_run: |
279 os.makedirs(fulldir) | 292 os.makedirs(fulldir) |
280 | 293 |
281 # Download the archive. | 294 # Download and unzip the archive. Try this a few times if it fails, as the |
282 url = archive_url + '/' + base | 295 # cloud storage server occasionally flakes and sends incomplete data. |
283 _LOGGER.debug('Retrieving %s archive at "%s".', name, url) | 296 retries = 0 |
284 data = _Download(url) | 297 while True: |
298 url = archive_url + '/' + base | |
299 _LOGGER.debug('Retrieving %s archive at "%s".', name, url) | |
300 data = _Download(url) | |
285 | 301 |
286 _LOGGER.debug('Unzipping %s archive.', name) | 302 try: |
287 archive = zipfile.ZipFile(cStringIO.StringIO(data)) | 303 _LOGGER.debug('Unzipping %s archive.', name) |
304 archive = zipfile.ZipFile(cStringIO.StringIO(data)) | |
305 except zipfile.BadZipfile, e: | |
306 # If retried too often then let the error continue. | |
307 retries += 1 | |
308 if retries == 3: | |
309 raise e | |
310 | |
311 # Otherwise retry the download to see if the archive comes down clean | |
312 # this time. | |
313 _LOGGER.debug('Bad zip file, retrying download.') | |
314 continue | |
315 | |
316 # Successfully unzipped the archive, so continue to extract files. | |
317 break | |
318 | |
319 # Extract desired contents and install to disk. | |
288 for entry in archive.infolist(): | 320 for entry in archive.infolist(): |
289 if not filt or filt(entry): | 321 if not filt or filt(entry): |
290 fullpath = os.path.normpath(os.path.join(fulldir, entry.filename)) | 322 fullpath = os.path.normpath(os.path.join(fulldir, entry.filename)) |
291 relpath = os.path.relpath(fullpath, options.output_dir) | 323 relpath = os.path.relpath(fullpath, options.output_dir) |
292 if os.path.exists(fullpath): | 324 if os.path.exists(fullpath): |
293 # If in a dry-run take into account the fact that the file *would* | 325 # If in a dry-run take into account the fact that the file *would* |
294 # have been deleted. | 326 # have been deleted. |
295 if options.dry_run and relpath in deleted: | 327 if options.dry_run and relpath in deleted: |
296 pass | 328 pass |
297 else: | 329 else: |
(...skipping 11 matching lines...) | |
309 return state | 341 return state |
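The download-and-unzip loop above retries on zipfile.BadZipfile because the storage server occasionally sends incomplete data. A stricter check would CRC-verify every archive member with testzip() before extracting, so a truncated download is rejected up front. A sketch under that assumption (`_OpenArchive` is a hypothetical helper, not part of this change):

```python
import cStringIO
import zipfile


def _OpenArchive(data):
  """Opens |data| as an in-memory zip and CRC-checks every member.

  Hypothetical helper: testzip() reads each member, so a truncated or
  corrupt download is caught here rather than during extraction.
  """
  archive = zipfile.ZipFile(cStringIO.StringIO(data))
  bad_entry = archive.testzip()
  if bad_entry is not None:
    raise zipfile.BadZipfile('Corrupt archive member: %s' % bad_entry)
  return archive
```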
310 | 342 |
311 | 343 |
312 def _ParseCommandLine(): | 344 def _ParseCommandLine(): |
313 """Parses the command-line and returns an options structure.""" | 345 """Parses the command-line and returns an options structure.""" |
314 option_parser = optparse.OptionParser() | 346 option_parser = optparse.OptionParser() |
315 option_parser.add_option('--dry-run', action='store_true', default=False, | 347 option_parser.add_option('--dry-run', action='store_true', default=False, |
316 help='If true then will simply list actions that would be performed.') | 348 help='If true then will simply list actions that would be performed.') |
317 option_parser.add_option('--force', action='store_true', default=False, | 349 option_parser.add_option('--force', action='store_true', default=False, |
318 help='Force an installation even if the binaries are up to date.') | 350 help='Force an installation even if the binaries are up to date.') |
351 option_parser.add_option('--no-cleanup', action='store_true', default=False, | |
352 help='Allow installation on non-Windows platforms, and skip the forced ' | |
353 'cleanup step.') | |
319 option_parser.add_option('--output-dir', type='string', | 354 option_parser.add_option('--output-dir', type='string', |
320 help='The path where the binaries will be replaced. Existing binaries ' | 355 help='The path where the binaries will be replaced. Existing binaries ' |
321 'will only be overwritten if not up to date.') | 356 'will only be overwritten if not up to date.') |
322 option_parser.add_option('--overwrite', action='store_true', default=False, | 357 option_parser.add_option('--overwrite', action='store_true', default=False, |
323 help='If specified then the installation will happily delete and rewrite ' | 358 help='If specified then the installation will happily delete and rewrite ' |
324 'the entire output directory, blasting any local changes.') | 359 'the entire output directory, blasting any local changes.') |
325 option_parser.add_option('--revision', type='string', | 360 option_parser.add_option('--revision', type='string', |
326 help='The SVN revision or GIT hash associated with the required version.') | 361 help='The SVN revision or GIT hash associated with the required version.') |
327 option_parser.add_option('--revision-file', type='string', | 362 option_parser.add_option('--revision-file', type='string', |
328 help='A text file containing an SVN revision or GIT hash.') | 363 help='A text file containing an SVN revision or GIT hash.') |
(...skipping 72 matching lines...) | |
401 options = _ParseCommandLine() | 436 options = _ParseCommandLine() |
402 | 437 |
403 if options.dry_run: | 438 if options.dry_run: |
404 _LOGGER.debug('Performing a dry-run.') | 439 _LOGGER.debug('Performing a dry-run.') |
405 | 440 |
406 # We only care about Windows platforms, as the Syzygy binaries aren't used | 441 # We only care about Windows platforms, as the Syzygy binaries aren't used |
407 # elsewhere. However, there was a short period of time where this script | 442 # elsewhere. However, there was a short period of time where this script |
408 # wasn't gated on OS types, and those OSes downloaded and installed binaries. | 443 # wasn't gated on OS types, and those OSes downloaded and installed binaries. |
409 # This will clean up orphaned files on those operating systems. | 444 # This will clean up orphaned files on those operating systems. |
410 if sys.platform not in ('win32', 'cygwin'): | 445 if sys.platform not in ('win32', 'cygwin'): |
411 return _RemoveOrphanedFiles(options) | 446 if options.no_cleanup: |
447 _LOGGER.debug('Skipping usual cleanup for non-Windows platforms.') | |
448 else: | |
449 return _RemoveOrphanedFiles(options) | |
412 | 450 |
413 # Load the current installation state, and validate it against the | 451 # Load the current installation state, and validate it against the |
414 # requested installation. | 452 # requested installation. |
415 state, is_consistent = _GetCurrentState(options.revision, options.output_dir) | 453 state, is_consistent = _GetCurrentState(options.revision, options.output_dir) |
416 | 454 |
417 # Decide whether or not an install is necessary. | 455 # Decide whether or not an install is necessary. |
418 if options.force: | 456 if options.force: |
419 _LOGGER.debug('Forcing reinstall of binaries.') | 457 _LOGGER.debug('Forcing reinstall of binaries.') |
420 elif is_consistent: | 458 elif is_consistent: |
421 # Avoid doing any work if the contents of the directory are consistent. | 459 # Avoid doing any work if the contents of the directory are consistent. |
(...skipping 20 matching lines...) | |
442 # Install the new binaries. In a dry-run this will actually download the | 480 # Install the new binaries. In a dry-run this will actually download the |
443 # archives, but it won't write anything to disk. | 481 # archives, but it won't write anything to disk. |
444 state = _InstallBinaries(options, deleted) | 482 state = _InstallBinaries(options, deleted) |
445 | 483 |
446 # Build and save the state for the directory. | 484 # Build and save the state for the directory. |
447 _SaveState(options.output_dir, state, options.dry_run) | 485 _SaveState(options.output_dir, state, options.dry_run) |
448 | 486 |
449 | 487 |
450 if __name__ == '__main__': | 488 if __name__ == '__main__': |
451 main() | 489 main() |