Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 import posixpath | 5 import posixpath |
| 6 import sys | 6 import sys |
| 7 | 7 |
| 8 from file_system import FileSystem, StatInfo, FileNotFoundError | 8 from file_system import FileSystem, StatInfo, FileNotFoundError |
| 9 from future import All, Future | 9 from future import All, Future |
| 10 from path_util import AssertIsDirectory, IsDirectory, ToDirectory | 10 from path_util import AssertIsDirectory, IsDirectory, ToDirectory |
| 11 from third_party.json_schema_compiler.memoize import memoize | 11 from third_party.json_schema_compiler.memoize import memoize |
| 12 | 12 |
| 13 | 13 |
| 14 class CachingFileSystem(FileSystem): | 14 class CachingFileSystem(FileSystem): |
| 15 '''FileSystem which implements a caching layer on top of |file_system|. It's | 15 '''FileSystem which implements a caching layer on top of |file_system|. It's |
| 16 smart, using Stat() to decide whether to skip Read()ing from |file_system|, | 16 smart, using Stat() to decide whether to skip Read()ing from |file_system|, |
| 17 and only Stat()ing directories, never files. | 17 and only Stat()ing directories, never files. If |fail_on_miss| is True, then |
| 18 stat cache misses are treated like FileNotFound, rather than falling back onto | |
| 19 the underlying filesystem. | |
| 18 ''' | 20 ''' |
| 19 def __init__(self, file_system, object_store_creator): | 21 def __init__(self, file_system, object_store_creator, fail_on_miss=False): |
| 20 self._file_system = file_system | 22 self._file_system = file_system |
| 23 self._fail_on_miss = fail_on_miss | |
| 21 def create_object_store(category, **optargs): | 24 def create_object_store(category, **optargs): |
| 22 return object_store_creator.Create( | 25 return object_store_creator.Create( |
| 23 CachingFileSystem, | 26 CachingFileSystem, |
| 24 category='%s/%s' % (file_system.GetIdentity(), category), | 27 category='%s/%s' % (file_system.GetIdentity(), category), |
| 25 **optargs) | 28 **optargs) |
| 26 self._stat_cache = create_object_store('stat') | 29 self._stat_cache = create_object_store('stat', start_empty=False) |
| 27 # The read caches can start populated (start_empty=False) because file | 30 # The read caches can start populated (start_empty=False) because file |
| 28 # updates are picked up by the stat, so it doesn't need the force-refresh | 31 # updates are picked up by the stat, so it doesn't need the force-refresh |
| 29 # which starting empty is designed for. Without this optimisation, cron | 32 # which starting empty is designed for. Without this optimisation, cron |
| 30 # runs are extra slow. | 33 # runs are extra slow. |
| 31 self._read_cache = create_object_store('read', start_empty=False) | 34 self._read_cache = create_object_store('read', start_empty=False) |
| 32 self._walk_cache = create_object_store('walk', start_empty=False) | 35 self._walk_cache = create_object_store('walk', start_empty=False) |
| 33 | 36 |
| 34 def Refresh(self): | 37 def Refresh(self): |
| 35 return self._file_system.Refresh() | 38 return self._file_system.Refresh() |
| 36 | 39 |
| (...skipping 12 matching lines...) Expand all Loading... | |
| 49 ''' | 52 ''' |
| 50 if path == dir_path: | 53 if path == dir_path: |
| 51 return dir_stat | 54 return dir_stat |
| 52 # Was a file stat. Extract that file. | 55 # Was a file stat. Extract that file. |
| 53 file_version = dir_stat.child_versions.get(file_path) | 56 file_version = dir_stat.child_versions.get(file_path) |
| 54 if file_version is None: | 57 if file_version is None: |
| 55 raise FileNotFoundError('No stat found for %s in %s (found %s)' % | 58 raise FileNotFoundError('No stat found for %s in %s (found %s)' % |
| 56 (path, dir_path, dir_stat.child_versions)) | 59 (path, dir_path, dir_stat.child_versions)) |
| 57 return StatInfo(file_version) | 60 return StatInfo(file_version) |
| 58 | 61 |
| 62 def raise_not_found(path): | |
| 63 raise FileNotFoundError('Got 404 when stat\'ing %s' % path) | |
|
not at google - send to devlin
2014/10/20 21:06:57
It might complicate things for callers - but can w
Ken Rockot(use gerrit already)
2014/10/22 03:19:54
Will do, but it will take longer to sort that out.
| |
| 64 | |
| 59 dir_stat = self._stat_cache.Get(dir_path).Get() | 65 dir_stat = self._stat_cache.Get(dir_path).Get() |
| 60 if dir_stat is not None: | 66 if dir_stat is not None: |
| 61 return Future(callback=lambda: make_stat_info(dir_stat)) | 67 return Future(callback=lambda: make_stat_info(dir_stat)) |
| 62 | 68 |
| 69 if self._fail_on_miss: | |
| 70 return Future(callback=lambda: raise_not_found(dir_path)) | |
| 71 | |
| 63 def next(dir_stat): | 72 def next(dir_stat): |
| 64 assert dir_stat is not None # should have raised a FileNotFoundError | 73 assert dir_stat is not None # should have raised a FileNotFoundError |
| 65 # We only ever need to cache the dir stat. | 74 # We only ever need to cache the dir stat. |
| 66 self._stat_cache.Set(dir_path, dir_stat) | 75 self._stat_cache.Set(dir_path, dir_stat) |
| 67 return make_stat_info(dir_stat) | 76 return make_stat_info(dir_stat) |
| 68 return self._MemoizedStatAsyncFromFileSystem(dir_path).Then(next) | 77 return self._MemoizedStatAsyncFromFileSystem(dir_path).Then(next) |
| 69 | 78 |
| 70 @memoize | 79 @memoize |
| 71 def _MemoizedStatAsyncFromFileSystem(self, dir_path): | 80 def _MemoizedStatAsyncFromFileSystem(self, dir_path): |
| 72 '''This is a simple wrapper to memoize Futures to directory stats, since | 81 '''This is a simple wrapper to memoize Futures to directory stats, since |
| (...skipping 89 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 162 for f in self.ReadSingle(root).Get(): | 171 for f in self.ReadSingle(root).Get(): |
| 163 if IsDirectory(f): | 172 if IsDirectory(f): |
| 164 dirs.append(f) | 173 dirs.append(f) |
| 165 else: | 174 else: |
| 166 files.append(f) | 175 files.append(f) |
| 167 # Update the cache. This is a root -> (dirs, files, version) mapping. | 176 # Update the cache. This is a root -> (dirs, files, version) mapping. |
| 168 self._walk_cache.Set(root, (dirs, files, root_stat.version)) | 177 self._walk_cache.Set(root, (dirs, files, root_stat.version)) |
| 169 return dirs, files | 178 return dirs, files |
| 170 return self._file_system.Walk(root, depth=depth, file_lister=file_lister) | 179 return self._file_system.Walk(root, depth=depth, file_lister=file_lister) |
| 171 | 180 |
| 172 def GetCommitID(self): | |
| 173 return self._file_system.GetCommitID() | |
| 174 | |
| 175 def GetPreviousCommitID(self): | |
| 176 return self._file_system.GetPreviousCommitID() | |
| 177 | |
| 178 def GetIdentity(self): | 181 def GetIdentity(self): |
| 179 return self._file_system.GetIdentity() | 182 return self._file_system.GetIdentity() |
| 180 | 183 |
| 181 def __repr__(self): | 184 def __repr__(self): |
| 182 return '%s of <%s>' % (type(self).__name__, repr(self._file_system)) | 185 return '%s of <%s>' % (type(self).__name__, repr(self._file_system)) |
| OLD | NEW |