| Index: tools/android/loading/sandwich_prefetch.py
|
| diff --git a/tools/android/loading/sandwich_prefetch.py b/tools/android/loading/sandwich_prefetch.py
|
| index be8f6098136ac8f08da0cab25a1ef291f43d23c1..a76ca7d4b0fff8d6b4f5fef11e758636756f3f39 100644
|
| --- a/tools/android/loading/sandwich_prefetch.py
|
| +++ b/tools/android/loading/sandwich_prefetch.py
|
| @@ -116,7 +116,7 @@ def _FilterOutDataAndIncompleteRequests(requests):
|
| if request.protocol is None:
|
| assert not request.HasReceivedResponse()
|
| continue
|
| - if request.protocol == 'about':
|
| + if request.protocol in {'about', 'blob'}:
|
| continue
|
| if request.protocol not in {'http/0.9', 'http/1.0', 'http/1.1'}:
|
| raise RuntimeError('Unknown request protocol {}'.format(request.protocol))
|
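| For reference, a minimal sketch of the protocol filter after this hunk; the
| request objects are assumed to look as they do in the loop above, and the
| helper name is purely illustrative:
|
| _KNOWN_HTTP_PROTOCOLS = {'http/0.9', 'http/1.0', 'http/1.1'}
|
| def _IsPatchableProtocol(request):
|   """Illustrative helper, not part of the CL."""
|   if request.protocol is None:
|     # Incomplete request that never received a response.
|     return False
|   if request.protocol in {'about', 'blob'}:
|     # blob: requests are now skipped alongside about: requests.
|     return False
|   if request.protocol not in _KNOWN_HTTP_PROTOCOLS:
|     raise RuntimeError('Unknown request protocol {}'.format(request.protocol))
|   return True
|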
| @@ -138,39 +138,49 @@ def _PatchCacheArchive(cache_archive_path, loading_trace_path,
|
| archive <cache_archive_path>.
|
| cache_archive_dest_path: Archive destination's path.
|
| """
|
| + logging.info('loading trace: %s', loading_trace_path)
|
| trace = loading_trace.LoadingTrace.FromJsonFile(loading_trace_path)
|
| with common_util.TemporaryDirectory(prefix='sandwich_tmp') as tmp_path:
|
| cache_path = os.path.join(tmp_path, 'cache')
|
| chrome_cache.UnzipDirectoryContent(cache_archive_path, cache_path)
|
| - cache_backend = chrome_cache.CacheBackend(cache_path, 'simple')
|
| - cache_entries = set(cache_backend.ListKeys())
|
| - logging.info('Original cache size: %d bytes' % cache_backend.GetSize())
|
| - for request in _FilterOutDataAndIncompleteRequests(
|
| - trace.request_track.GetEvents()):
|
| - # On requests having an upload data stream such as POST requests,
|
| - # net::HttpCache::GenerateCacheKey() prefixes the cache entry's key with
|
| - # the upload data stream's session unique identifier.
|
| - #
|
| - # It is fine to not patch these requests since when reopening Chrome,
|
| - # there is no way the entry can be reused since the upload data stream's
|
| - # identifier will be different.
|
| - #
|
| - # The fact that these entries are kept in the cache after closing Chrome
|
| - # properly by closing the Chrome tab as the ChromeControler.SetSlowDeath()
|
| - # do is known chrome bug (crbug.com/610725).
|
| - if request.url not in cache_entries:
|
| - continue
|
| - # Chrome prunes Set-Cookie from response headers before storing them in
|
| - # disk cache. Also, it adds implicit "Vary: cookie" header to all redirect
|
| - # response headers. Sandwich manages the cache, but between recording the
|
| - # cache and benchmarking the cookie jar is invalidated. This leads to
|
| - # invalidation of all cacheable redirects.
|
| - raw_headers = request.GetRawResponseHeaders()
|
| - cache_backend.UpdateRawResponseHeaders(request.url, raw_headers)
|
| - # NoState-Prefetch would only fetch the resources, but not parse them.
|
| - cache_backend.DeleteStreamForKey(request.url, 2)
|
| + with chrome_cache.OnlineCacheBackend(
|
| + cache_path, chrome_cache.CacheBackendType.SIMPLE) as cache_backend:
|
| + cache_entries = set(cache_backend.ListKeys())
|
| + logging.info('Original cache size: %d bytes', cache_backend.GetSize())
|
| + for request in _FilterOutDataAndIncompleteRequests(
|
| + trace.request_track.GetEvents()):
|
| + # For requests that have an upload data stream, such as POST requests,
|
| + # net::HttpCache::GenerateCacheKey() prefixes the cache entry's key with
|
| + # the upload data stream's session-unique identifier.
|
| + #
|
| + # It is fine not to patch these requests: when reopening Chrome, the
|
| + # entry can never be reused because the upload data stream's
|
| + # identifier will be different.
|
| + #
|
| + # The fact that these entries are kept in the cache after closing Chrome
|
| + # properly by closing the Chrome tab, as
|
| + # ChromeControler.SetSlowDeath() does, is a known Chrome bug
|
| + # (crbug.com/610725).
|
| + if request.url not in cache_entries:
|
| + continue
|
| + # Chrome prunes Set-Cookie from response headers before storing them in
|
| + # disk cache. Also, it adds an implicit "Vary: cookie" header to all
|
| + # redirect response headers. Sandwich manages the cache, but between
|
| + # recording the cache and benchmarking, the cookie jar is invalidated.
|
| + # This leads to invalidation of all cacheable redirects.
|
| + raw_headers = request.GetRawResponseHeaders()
|
| + try:
|
| + cache_backend.UpdateRawResponseHeaders(request.url, raw_headers)
|
| + # NoState-Prefetch would only fetch the resources, but not parse them.
|
| + cache_backend.DeleteStreamForKey(request.url, 2)
|
| + # Sync operations to actually catch errors here.
|
| + cache_backend.Sync()
|
| + except chrome_cache.CacheBackendError as error:
|
| + # For some reason, the cachetool sometimes can't find a key's entry when
|
| + # not using the online mode.
|
| + logging.warning('cachetool error: %s', repr(error))
|
| + logging.info('Patched cache size: %d bytes', cache_backend.GetSize())
|
| chrome_cache.ZipDirectoryContent(cache_path, cache_archive_dest_path)
|
| - logging.info('Patched cache size: %d bytes' % cache_backend.GetSize())
|
|
|
|
|
| def _DiscoverRequests(dependencies_lens, subresource_discoverer):
|
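| The core of the hunk above is that the patching loop now runs inside an
| OnlineCacheBackend context manager and tolerates per-entry cachetool
| failures. Condensed, and assuming the chrome_cache API exactly as it is
| exercised in the hunk, the pattern is:
|
| import logging
|
| import chrome_cache  # API assumed as used by the hunk above.
|
| def _PatchEntries(cache_path, requests):
|   """Illustrative condensation of the loop in _PatchCacheArchive."""
|   with chrome_cache.OnlineCacheBackend(
|       cache_path, chrome_cache.CacheBackendType.SIMPLE) as cache_backend:
|     cache_entries = set(cache_backend.ListKeys())
|     for request in requests:
|       if request.url not in cache_entries:
|         continue  # e.g. POST entries keyed with an upload identifier.
|       try:
|         cache_backend.UpdateRawResponseHeaders(
|             request.url, request.GetRawResponseHeaders())
|         cache_backend.DeleteStreamForKey(request.url, 2)
|         cache_backend.Sync()  # Synchronous flush so errors surface here.
|       except chrome_cache.CacheBackendError as error:
|         logging.warning('cachetool error: %s', repr(error))
|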
| @@ -403,8 +413,8 @@ def _ValidateCacheArchiveContent(cache_build_trace_path, cache_archive_path):
|
| logging.info('lists cached urls from %s' % cache_archive_path)
|
| with common_util.TemporaryDirectory() as cache_directory:
|
| chrome_cache.UnzipDirectoryContent(cache_archive_path, cache_directory)
|
| - cache_keys = set(
|
| - chrome_cache.CacheBackend(cache_directory, 'simple').ListKeys())
|
| + cache_keys = set(chrome_cache.CacheBackend(
|
| + cache_directory, chrome_cache.CacheBackendType.SIMPLE).ListKeys())
|
| trace = loading_trace.LoadingTrace.FromJsonFile(cache_build_trace_path)
|
| effective_requests = _ListUrlRequests(trace, _RequestOutcome.All)
|
| effective_post_requests = _ListUrlRequests(trace, _RequestOutcome.Post)
|
|
|
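| The read-only listing in _ValidateCacheArchiveContent now uses the same
| CacheBackendType enum. A minimal sketch of just that step, reusing the unzip
| helper shown above (the wrapper function name is illustrative):
|
| import chrome_cache
| import common_util
|
| def _ListCachedUrls(cache_archive_path):
|   """Illustrative helper: returns the set of keys stored in the archive."""
|   with common_util.TemporaryDirectory() as cache_directory:
|     chrome_cache.UnzipDirectoryContent(cache_archive_path, cache_directory)
|     return set(chrome_cache.CacheBackend(
|         cache_directory, chrome_cache.CacheBackendType.SIMPLE).ListKeys())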