| OLD | NEW |
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright 2013 The LUCI Authors. All rights reserved. | 2 # Copyright 2013 The LUCI Authors. All rights reserved. |
| 3 # Use of this source code is governed under the Apache License, Version 2.0 | 3 # Use of this source code is governed under the Apache License, Version 2.0 |
| 4 # that can be found in the LICENSE file. | 4 # that can be found in the LICENSE file. |
| 5 | 5 |
| 6 """Archives a set of files or directories to an Isolate Server.""" | 6 """Archives a set of files or directories to an Isolate Server.""" |
| 7 | 7 |
| 8 __version__ = '0.8.0' | 8 __version__ = '0.8.0' |
| 9 | 9 |
| 10 import errno | 10 import errno |
| (...skipping 346 matching lines...) |
| 357 os.path.basename(path), e) | 357 os.path.basename(path), e) |
| 358 return False | 358 return False |
| 359 if size != actual_size: | 359 if size != actual_size: |
| 360 logging.warning( | 360 logging.warning( |
| 361 'Found invalid item %s; %d != %d', | 361 'Found invalid item %s; %d != %d', |
| 362 os.path.basename(path), actual_size, size) | 362 os.path.basename(path), actual_size, size) |
| 363 return False | 363 return False |
| 364 return True | 364 return True |
| 365 | 365 |
| 366 | 366 |
| 367 def is_cached(ifile, cache): |
| 368   """Returns True if all the isolated file's contents are in the given |
| 369   LocalCache. |
| 370   """ |
| 371   files = ifile.data.get(u'files', {}) |
| 372   for f, props in files.iteritems(): |
| 373     # iteritems() yields (path, properties) pairs directly; iterating the |
| 374     # dict or its keys() would only yield the path strings. |
| 375     # Guard against a malformed entry with no properties at all. |
| 376     if not props: |
| 377       logging.warning('Problem getting info for %s', f) |
| 378       return False |
| 379     digest = props.get('h', None) |
| 380     if not digest: |
| 381       logging.warning('Hash can\'t be empty for %s', f) |
| 382       return False |
| 383     if digest not in cache: |
| 384       logging.info('File with digest %s is missing', digest) |
| 385       return False |
| 386   return True |
| 387 |
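To make the expected input concrete, here is a minimal sketch of the shape is_cached() consumes. FakeIsolatedFile and FakeCache are stand-ins invented for this illustration; the real objects are the parsed .isolated wrapper and a LocalCache, which the helper only probes via the `digest in cache` membership test.

# Stand-ins invented for illustration; only the attributes and operations
# that is_cached() actually touches are modeled here.
class FakeIsolatedFile(object):
  def __init__(self):
    # 'files' maps a relative path to its properties: 'h' is the content
    # digest, 's' the size, 'm' an optional POSIX file mode.
    self.data = {
      u'files': {
        u'bin/run_tests': {'h': 'deadbeef', 's': 4096, 'm': 0755},
      },
    }

class FakeCache(object):
  """Models only the `digest in cache` test used by is_cached()."""
  def __init__(self, digests):
    self._digests = set(digests)
  def __contains__(self, digest):
    return digest in self._digests

assert is_cached(FakeIsolatedFile(), FakeCache(['deadbeef']))
assert not is_cached(FakeIsolatedFile(), FakeCache([]))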
| 388 |
| 389 def extract(ifile, target_dir, cache): |
| 390   """Extracts the isolated file's contents to target_dir. |
| 391   |
| 392   Returns False as soon as any file is missing from the cache. |
| 393   """ |
| 394   files = ifile.data.get(u'files', {}) |
| 395   for f, props in files.iteritems(): |
| 396     # As in is_cached() above, iteritems() yields (path, properties) pairs |
| 397     # directly; iterating the dict would only yield the path strings. |
| 398     # Guard against a malformed entry with no properties at all. |
| 399     if not props: |
| 400       logging.warning('Problem getting info for %s', f) |
| 401       return False |
| 402     file_mode = props.get('m', None) |
| 403     if file_mode: |
| 404       # Ignore all bits apart from the user's permission bits. |
| 405       file_mode &= 0700 |
| 406 |
| 407     dstpath = os.path.join(target_dir, f) |
| 408     file_path.ensure_tree(os.path.dirname(dstpath)) |
| 409     digest = props.get('h', None) |
| 410     if not digest: |
| 411       logging.warning('Hash can\'t be empty for %s', f) |
| 412       return False |
| 413     if digest not in cache: |
| 414       logging.info('File with digest %s is missing', digest) |
| 415       return False |
| 416     srcpath = cache.getfileobj(digest).name |
| 417 |
| 418     file_path.link_file(unicode(dstpath), unicode(srcpath), |
| 419                         file_path.HARDLINK_WITH_FALLBACK) |
| 420 |
| 421     if file_mode is not None: |
| 422       fs.chmod(dstpath, file_mode) |
| 423   return True |
| 424 |
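A hedged usage sketch for extract(): beyond the membership test, it needs cache.getfileobj(digest) to return an object whose .name is the blob's on-disk path, so the fake below (building on FakeCache from the sketch above) models exactly that and nothing more; it is not the real LocalCache API surface.

import collections

# getfileobj() must return an object with a .name path; a namedtuple does.
CachedFile = collections.namedtuple('CachedFile', ['name'])

class FakeCacheWithFiles(FakeCache):
  """Extends the illustrative FakeCache with a getfileobj() lookup."""
  def __init__(self, digest_to_path):
    super(FakeCacheWithFiles, self).__init__(digest_to_path)
    self._paths = digest_to_path

  def getfileobj(self, digest):
    return CachedFile(name=self._paths[digest])

# Hypothetical invocation; the source blob must exist on disk for the
# hardlink (or its copy fallback) to succeed. On success every file is
# linked under target_dir with its stored user-mode bits reapplied.
ok = extract(
    FakeIsolatedFile(), u'/tmp/extract_out',
    FakeCacheWithFiles({'deadbeef': u'/tmp/cache/deadbeef'}))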
| 425 |
| 367 class FileItem(Item): | 426 class FileItem(Item): |
| 368 """A file to push to Storage. | 427 """A file to push to Storage. |
| 369 | 428 |
| 370 Its digest and size may be provided in advance, if known. Otherwise they will | 429 Its digest and size may be provided in advance, if known. Otherwise they will |
| 371 be derived from the file content. | 430 be derived from the file content. |
| 372 """ | 431 """ |
| 373 | 432 |
| 374 def __init__(self, path, digest=None, size=None, high_priority=False): | 433 def __init__(self, path, digest=None, size=None, high_priority=False): |
| 375 super(FileItem, self).__init__( | 434 super(FileItem, self).__init__( |
| 376 digest, | 435 digest, |
| (...skipping 1335 matching lines...) |
| 1712 logging.info(msg) | 1771 logging.info(msg) |
| 1713 last_update = time.time() | 1772 last_update = time.time() |
| 1714 | 1773 |
| 1715 # Cache could evict some items we just tried to fetch; that's a fatal error. | 1774 # Cache could evict some items we just tried to fetch; that's a fatal error. |
| 1716 if not fetch_queue.verify_all_cached(): | 1775 if not fetch_queue.verify_all_cached(): |
| 1717 raise isolated_format.MappingError( | 1776 raise isolated_format.MappingError( |
| 1718 'Cache is too small to hold all requested files') | 1777 'Cache is too small to hold all requested files') |
| 1719 return bundle | 1778 return bundle |
| 1720 | 1779 |
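For context, a heavily hedged sketch of the caller side of the hunk above: judging from the fetch_queue usage and the `return bundle`, this is the tail of this module's fetch function, and the MappingError it raises is something a caller would treat as fatal. The function name and argument list below are assumptions about code elided from this diff, not confirmed by it.

# Assumed call shape, for illustration only; the enclosing function and its
# exact signature are elided from this diff.
try:
  bundle = fetch_isolated(isolated_hash, storage, cache, outdir, False)
except isolated_format.MappingError as e:
  sys.stderr.write('%s\n' % e)
  sys.exit(1)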
| 1721 | 1780 |
| 1722 def directory_to_metadata(root, algo, blacklist): | 1781 def directory_to_metadata(root, algo, blacklist, collapse_symlinks): |
| 1723 """Returns the FileItem list and .isolated metadata for a directory.""" | 1782 """Returns the FileItem list and .isolated metadata for a directory.""" |
| 1724 root = file_path.get_native_path_case(root) | 1783 root = file_path.get_native_path_case(root) |
| 1725 paths = isolated_format.expand_directory_and_symlink( | 1784 paths = isolated_format.expand_directory_and_symlink( |
| 1726 root, '.' + os.path.sep, blacklist, sys.platform != 'win32') | 1785 root, '.' + os.path.sep, blacklist, sys.platform != 'win32') |
| 1727 metadata = { | 1786 metadata = { |
| 1728 relpath: isolated_format.file_to_metadata( | 1787 relpath: isolated_format.file_to_metadata( |
| 1729 os.path.join(root, relpath), {}, 0, algo, False) | 1788 os.path.join(root, relpath), {}, 0, algo, collapse_symlinks) |
| 1730 for relpath in paths | 1789 for relpath in paths |
| 1731 } | 1790 } |
| 1732 for v in metadata.itervalues(): | 1791 for v in metadata.itervalues(): |
| 1733 v.pop('t') | 1792 v.pop('t') |
| 1734 items = [ | 1793 items = [ |
| 1735 FileItem( | 1794 FileItem( |
| 1736 path=os.path.join(root, relpath), | 1795 path=os.path.join(root, relpath), |
| 1737 digest=meta['h'], | 1796 digest=meta['h'], |
| 1738 size=meta['s'], | 1797 size=meta['s'], |
| 1739 high_priority=relpath.endswith('.isolated')) | 1798 high_priority=relpath.endswith('.isolated')) |
| (...skipping 25 matching lines...) |
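A hedged usage sketch of directory_to_metadata() with the new collapse_symlinks flag. The call shape follows the signature in this hunk; the directory path and the no-op blacklist callable are made up for illustration, and the meaning of collapse_symlinks (record a symlink's target content rather than the link itself) is inferred from the parameter name, with archive_files_to_storage below passing False to keep the old behavior.

import hashlib

# Hypothetical call; the path and blacklist are made up. The blacklist is
# assumed to be a callable returning True for paths to skip.
items, metadata = directory_to_metadata(
    u'/tmp/build_out', hashlib.sha1, lambda relpath: False, False)
for item in items:
  # Each FileItem carries the digest and size taken from the metadata.
  print item.digest, item.size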
| 1765 tempdir = None | 1824 tempdir = None |
| 1766 try: | 1825 try: |
| 1767 # TODO(maruel): Yield the files to a worker thread. | 1826 # TODO(maruel): Yield the files to a worker thread. |
| 1768 items_to_upload = [] | 1827 items_to_upload = [] |
| 1769 for f in files: | 1828 for f in files: |
| 1770 try: | 1829 try: |
| 1771 filepath = os.path.abspath(f) | 1830 filepath = os.path.abspath(f) |
| 1772 if fs.isdir(filepath): | 1831 if fs.isdir(filepath): |
| 1773 # Uploading a whole directory. | 1832 # Uploading a whole directory. |
| 1774 items, metadata = directory_to_metadata( | 1833 items, metadata = directory_to_metadata( |
| 1775 filepath, storage.hash_algo, blacklist) | 1834 filepath, storage.hash_algo, blacklist, False) |
| 1776 | 1835 |
| 1777 # Create the .isolated file. | 1836 # Create the .isolated file. |
| 1778 if not tempdir: | 1837 if not tempdir: |
| 1779 tempdir = tempfile.mkdtemp(prefix=u'isolateserver') | 1838 tempdir = tempfile.mkdtemp(prefix=u'isolateserver') |
| 1780 handle, isolated = tempfile.mkstemp(dir=tempdir, suffix=u'.isolated') | 1839 handle, isolated = tempfile.mkstemp(dir=tempdir, suffix=u'.isolated') |
| 1781 os.close(handle) | 1840 os.close(handle) |
| 1782 data = { | 1841 data = { |
| 1783 'algo': | 1842 'algo': |
| 1784 isolated_format.SUPPORTED_ALGOS_REVERSE[storage.hash_algo], | 1843 isolated_format.SUPPORTED_ALGOS_REVERSE[storage.hash_algo], |
| 1785 'files': metadata, | 1844 'files': metadata, |
| (...skipping 260 matching lines...) |
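For reference, a hedged sketch of the .isolated payload as far as this hunk shows it: an 'algo' name plus the 'files' metadata map. The diff is cut off mid-dict, so any further keys are deliberately not guessed at; the 'sha-1' string assumes SUPPORTED_ALGOS_REVERSE maps hashlib.sha1 back to that name.

import json

data = {
  'algo': 'sha-1',  # assumed result of SUPPORTED_ALGOS_REVERSE[hashlib.sha1]
  'files': {
    u'hello.txt': {'h': 'deadbeef', 's': 12},  # made-up metadata entry
  },
}
print json.dumps(data, indent=2, sort_keys=True)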
| 2046 return dispatcher.execute(OptionParserIsolateServer(), args) | 2105 return dispatcher.execute(OptionParserIsolateServer(), args) |
| 2047 | 2106 |
| 2048 | 2107 |
| 2049 if __name__ == '__main__': | 2108 if __name__ == '__main__': |
| 2050 subprocess42.inhibit_os_error_reporting() | 2109 subprocess42.inhibit_os_error_reporting() |
| 2051 fix_encoding.fix_encoding() | 2110 fix_encoding.fix_encoding() |
| 2052 tools.disable_buffering() | 2111 tools.disable_buffering() |
| 2053 colorama.init() | 2112 colorama.init() |
| 2054 file_path.enable_symlink() | 2113 file_path.enable_symlink() |
| 2055 sys.exit(main(sys.argv[1:])) | 2114 sys.exit(main(sys.argv[1:])) |
| OLD | NEW |