Chromium Code Reviews

Index: client/isolateserver.py
diff --git a/client/isolateserver.py b/client/isolateserver.py
index c8feb0961a2e1e3785cf265c122b827c88771537..6d7ca1910699294b81f8fbcc678c3f0646c26134 100755
--- a/client/isolateserver.py
+++ b/client/isolateserver.py
@@ -364,6 +364,65 @@ def is_valid_file(path, size):
   return True
 
 
+def is_cached(ifile, cache):
+  """Determines if all the isolated file's contents are in the given LocalCache.
+  """
+  files = ifile.data.get(u'files', {})
+  for f in files.keys():
|
Vadim Sh.  2017/05/11 03:09:09
nit: .keys() is not necessary. Enumerating a dict enumerates its keys.
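A small illustration of the nit, separate from the review thread; the dict literal below is hypothetical. In Python 2, which this file targets, iterating a dict directly yields its keys, and iteritems() yields key/value pairs, which also removes the need for the separate files.get(f) lookup used below to dodge "too many values to unpack".

    files = {u'a.txt': {u'h': u'deadbeef', u's': 3}}

    # Iterating the dict directly yields its keys; no .keys() call needed.
    for f in files:
      assert f in files

    # iteritems() yields (key, value) pairs, so props does not have to be
    # looked up separately.
    for f, props in files.iteritems():
      digest = props.get(u'h')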
+    # Can't do "for f, props in files": iterating a dict yields only the keys,
+    # so unpacking fails with "too many values to unpack".
+    props = files.get(f, None)
+    if not props:
+      logging.warning('Problem getting info for %s', f)
+      return False
+    digest = props.get('h', None)
+    if not digest:
+      logging.warning('Hash can\'t be empty %s', f)
+      return False
+    if digest not in cache:
+      logging.info('File with digest %s is missing', digest)
+      return False
+  return True
+
+
+def extract(ifile, target_dir, cache):
+  """Extracts the isolated file's contents to the target directory.
+
+  Stops and returns False if any file's content couldn't be found in the cache.
+  """
+  files = ifile.data.get(u'files', {})
+  for f in files.keys():
+    # Can't do "for f, props in files": iterating a dict yields only the keys.
+    props = files.get(f, None)
+    if not props:
+      logging.warning('Problem getting info for %s', f)
+      return False
+    file_mode = props.get('m', None)
+    if file_mode:
+      # Ignore all bits apart from the user's.
+      file_mode &= 0700
+
+    dstpath = os.path.join(target_dir, f)
+    file_path.ensure_tree(os.path.dirname(dstpath))
+    digest = props.get('h', None)
+    if not digest:
+      logging.warning('Hash can\'t be empty %s', f)
+      return False
+    if digest not in cache:
+      logging.info('File with digest %s is missing', digest)
+      return False
+    srcpath = cache.getfileobj(digest).name
+
+    file_path.link_file(unicode(dstpath), unicode(srcpath),
+                        file_path.HARDLINK_WITH_FALLBACK)
+
+    if file_mode is not None:
+      fs.chmod(dstpath, file_mode)
+  return True
+
+
 class FileItem(Item):
   """A file to push to Storage.
@@ -1719,14 +1778,14 @@ def fetch_isolated(isolated_hash, storage, cache, outdir, use_symlinks):
   return bundle
 
 
-def directory_to_metadata(root, algo, blacklist):
+def directory_to_metadata(root, algo, blacklist, collapse_symlinks):
   """Returns the FileItem list and .isolated metadata for a directory."""
   root = file_path.get_native_path_case(root)
   paths = isolated_format.expand_directory_and_symlink(
       root, '.' + os.path.sep, blacklist, sys.platform != 'win32')
   metadata = {
     relpath: isolated_format.file_to_metadata(
-        os.path.join(root, relpath), {}, 0, algo, False)
+        os.path.join(root, relpath), {}, 0, algo, collapse_symlinks)
     for relpath in paths
   }
   for v in metadata.itervalues():
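The new collapse_symlinks argument is threaded straight through to isolated_format.file_to_metadata(). The call below is a hypothetical opt-in caller; the existing archive_files_to_storage() call in the next hunk passes False, matching the value that was previously hard-coded.

    # Hypothetical caller that asks for symlinks to be collapsed; the other
    # arguments follow the signature shown in this hunk.
    items, metadata = directory_to_metadata(
        filepath, storage.hash_algo, blacklist, collapse_symlinks=True)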
@@ -1772,7 +1831,7 @@ def archive_files_to_storage(storage, files, blacklist):
       if fs.isdir(filepath):
         # Uploading a whole directory.
         items, metadata = directory_to_metadata(
-            filepath, storage.hash_algo, blacklist)
+            filepath, storage.hash_algo, blacklist, False)
 
         # Create the .isolated file.
         if not tempdir: