Index: tools/android/loading/chrome_cache.py
diff --git a/tools/android/loading/chrome_cache.py b/tools/android/loading/chrome_cache.py
index 24d3f75638d33f864831dab97c329a84b61519aa..49fb596f8674704abca10b93479ec6485c061647 100644
--- a/tools/android/loading/chrome_cache.py
+++ b/tools/android/loading/chrome_cache.py
@@ -6,13 +6,17 @@
 """
 
 from datetime import datetime
+import errno
+import fcntl
 import json
 import os
 import re
 import shutil
+import struct
 import subprocess
 import sys
 import tempfile
+import time
 import zipfile
 
 _SRC_DIR = os.path.abspath(os.path.join(
|
@@ -28,8 +32,12 @@ import options
 OPTIONS = options.OPTIONS
 
 
+class CacheBackendType(object):
+  SIMPLE = 'simple'
+  BLOCKFILE = 'blockfile'
+
 # Cache back-end types supported by cachetool.
-BACKEND_TYPES = {'simple', 'blockfile'}
+BACKEND_TYPES = {CacheBackendType.SIMPLE, CacheBackendType.BLOCKFILE}
 
 # Regex used to parse HTTP headers line by line.
 HEADER_PARSING_REGEX = re.compile(r'^(?P<header>\S+):(?P<value>.*)$')
|
@@ -232,9 +240,14 @@ def CopyCacheDirectory(directory_src_path, directory_dest_path):
   shutil.copytree(directory_src_path, directory_dest_path)
 
 
-class CacheBackend(object):
-  """Takes care of reading and deleting cached keys.
-  """
+class CacheBackendError(Exception):
+  def __init__(self, errors):
+    Exception.__init__(self, repr(errors))
+    self.errors = errors
+
+
+class CacheBackendBase(object):
+  """Takes care of reading and deleting cached keys."""
 
   def __init__(self, cache_directory_path, cache_backend_type):
     """Chrome cache back-end constructor.
|
@@ -248,13 +261,10 @@ class CacheBackend(object):
     assert cache_backend_type in BACKEND_TYPES
     self._cache_directory_path = cache_directory_path
     self._cache_backend_type = cache_backend_type
-    # Make sure cache_directory_path is a valid cache.
-    self._CachetoolCmd('validate')
 
   def GetSize(self):
     """Gets total size of cache entries in bytes."""
-    size = self._CachetoolCmd('get_size')
-    return int(size.strip())
+    raise NotImplementedError
 
   def ListKeys(self):
     """Lists cache's keys.
|
@@ -262,7 +272,7 @@
     Returns:
       A list of all keys stored in the cache.
     """
-    return [k.strip() for k in self._CachetoolCmd('list_keys').split('\n')[:-1]]
+    raise NotImplementedError
 
   def GetStreamForKey(self, key, index):
     """Gets a key's stream.
|
@@ -277,7 +287,7 @@
     Returns:
       String holding stream binary content.
     """
-    return self._CachetoolCmd('get_stream', [key, str(index)])
+    raise NotImplementedError
 
   def DeleteStreamForKey(self, key, index):
     """Delete a key's stream.
|
@@ -286,7 +296,7 @@
       key: The key to access the stream.
       index: The stream index
     """
-    self._CachetoolCmd('delete_stream', [key, str(index)])
+    raise NotImplementedError
 
   def DeleteKey(self, key):
     """Deletes a key from the cache.
|
@@ -294,30 +304,7 @@ class CacheBackend(object):
     Args:
       key: The key delete.
     """
-    self._CachetoolCmd('delete_key', [key])
-
-  def _CachetoolCmd(self, operation, args=None, stdin=''):
-    """Runs the cache editor tool and return the stdout.
-
-    Args:
-      operation: Cachetool operation.
-      args: Additional operation argument to append to the command line.
-      stdin: String to pipe to the Cachetool's stdin.
-
-    Returns:
-      Cachetool's stdout string.
-    """
-    editor_tool_cmd = [
-        OPTIONS.LocalBinary('cachetool'),
-        self._cache_directory_path,
-        self._cache_backend_type,
-        operation]
-    editor_tool_cmd.extend(args or [])
-    process = subprocess.Popen(
-        editor_tool_cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE)
-    stdout_data, _ = process.communicate(input=stdin)
-    assert process.returncode == 0
-    return stdout_data
+    raise NotImplementedError
 
   def UpdateRawResponseHeaders(self, key, raw_headers):
     """Updates a key's raw response headers.
|
@@ -364,6 +351,193 @@ class CacheBackend(object):
     assert process.returncode == 0
     return decoded_content
 
+  def ProcessBatch(self):
+    """No-op for compatibility with BatchCacheBackend."""
+    pass
+
+
|
+class CacheBackend(CacheBackendBase):
+  """Takes care of manipulating cache directories. Can be used interchangeably
+  with BatchCacheBackend since the inherited ProcessBatch() is a no-op.
+
+  Each method issues a command line invocation of cachetool.
+  """
+
+  def __init__(self, cache_directory_path, cache_backend_type):
+    """Chrome cache back-end constructor.
+
+    Args:
+      cache_directory_path: Same as for CacheBackendBase.__init__.
+      cache_backend_type: Same as for CacheBackendBase.__init__.
+    """
+    CacheBackendBase.__init__(self, cache_directory_path, cache_backend_type)
+    # Make sure cache_directory_path is a valid cache.
+    self._CachetoolCmd('validate')
+
|
+  def GetSize(self):
+    """Implements CacheBackendBase.GetSize()."""
+    size = self._CachetoolCmd('get_size')
+    return int(size.strip())
+
+  def ListKeys(self):
+    """Implements CacheBackendBase.ListKeys()."""
+    out_lines = self._CachetoolCmd('list_keys').split('\n')
+    # cachetool finishes the list of keys with '\n\n'.
+    assert out_lines[-2:] == ['', '']
+    return [k.strip() for k in out_lines[:-2]]
+
+  def GetStreamForKey(self, key, index):
+    """Implements CacheBackendBase.GetStreamForKey()."""
+    return self._CachetoolCmd('get_stream', [key, str(index)])
+
+  def DeleteStreamForKey(self, key, index):
+    """Implements CacheBackendBase.DeleteStreamForKey()."""
+    self._CachetoolCmd('delete_stream', [key, str(index)])
+
+  def DeleteKey(self, key):
+    """Implements CacheBackendBase.DeleteKey()."""
+    self._CachetoolCmd('delete_key', [key])
+
+  def UpdateRawResponseHeaders(self, key, raw_headers):
+    """Implements CacheBackendBase.UpdateRawResponseHeaders()."""
+    self._CachetoolCmd('update_raw_headers', [key], stdin=raw_headers)
+
|
+  def _CachetoolCmd(self, operation, args=None, stdin=''):
+    """Runs the cache editor tool and returns the stdout.
+
+    Args:
+      operation: Cachetool operation.
+      args: Additional operation arguments to append to the command line.
+      stdin: String to pipe to the Cachetool's stdin.
+
+    Returns:
+      Cachetool's stdout string.
+    """
+    args = args or []
+    editor_tool_cmd = [
+        OPTIONS.LocalBinary('cachetool'),
+        self._cache_directory_path,
+        self._cache_backend_type,
+        operation] + args
+    process = subprocess.Popen(editor_tool_cmd, stdout=subprocess.PIPE,
+                               stderr=subprocess.PIPE, stdin=subprocess.PIPE)
+    stdout_data, stderr_data = process.communicate(input=stdin)
+    if process.returncode != 0:
+      raise CacheBackendError([([operation] + args, stderr_data.strip())])
+    return stdout_data
+
+
|
+class BatchCacheBackend(CacheBackendBase):
+  """Takes care of manipulating cache directories efficiently using
+  cachetool's online (batch) mode.
+  """
+  _INST_IDS = {
+      'stop': 0,
+      'get_size': 1,
+      'list_keys': 2,
+      'get_stream_for_key': 3,
+      'delete_stream': 4,
+      'delete_key': 5,
+      'update_raw_headers': 6
+  }
+
|
+  def __init__(self, cache_directory_path, cache_backend_type):
+    """Chrome cache back-end constructor.
+
+    Args:
+      cache_directory_path: Same as for CacheBackendBase.__init__.
+      cache_backend_type: Same as for CacheBackendBase.__init__.
+    """
+    CacheBackendBase.__init__(self, cache_directory_path, cache_backend_type)
+    self._in_flight_insts = []
+    self._enqueued_compiled_insts = b''
+    self._compiled_results = b''
+    self._compiled_result_cursor = 0
+
|
+  def GetSize(self):
+    """Implements CacheBackendBase.GetSize()."""
+    self._PushInst('get_size')
+    self.ProcessBatch()
+    return self._UnpackResult('i')[0]
+
|
+  def ListKeys(self):
+    """Implements CacheBackendBase.ListKeys()."""
+    self._PushInst('list_keys')
+    self.ProcessBatch()
+    keys = []
+    while True:
+      key_size = self._UnpackResult('i')[0]
+      if key_size == 0:
+        break
+      keys.append(self._UnpackResult('{}s'.format(key_size))[0])
+    return keys
+
|
+  def GetStreamForKey(self, key, index):
+    """Implements CacheBackendBase.GetStreamForKey()."""
+    self._PushInst('get_stream_for_key', str(key), index)
+    self.ProcessBatch()
+    stream_size = self._UnpackResult('i')[0]
+    return self._UnpackResult('{}s'.format(stream_size))[0]
+
|
+  def DeleteStreamForKey(self, key, index):
+    """Implements CacheBackendBase.DeleteStreamForKey()."""
+    self._PushInst('delete_stream', str(key), index)
+
+  def DeleteKey(self, key):
+    """Implements CacheBackendBase.DeleteKey()."""
+    self._PushInst('delete_key', str(key))
+
+  def UpdateRawResponseHeaders(self, key, raw_headers):
+    """Implements CacheBackendBase.UpdateRawResponseHeaders()."""
+    self._PushInst('update_raw_headers', str(key), raw_headers)
+
|
+  def ProcessBatch(self):
+    """Overrides CacheBackendBase.ProcessBatch()."""
+    cache_tool_cmd = [
+        OPTIONS.LocalBinary('cachetool'),
+        self._cache_directory_path,
+        self._cache_backend_type,
+        'batch']
+    cachetool_process = subprocess.Popen(
+        cache_tool_cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE)
+    self._PushInst('stop')
+    self._compiled_result_cursor = 0
+    self._compiled_results, _ = cachetool_process.communicate(
+        input=self._enqueued_compiled_insts)
+    errors = []
+    # Skip the trailing 'stop' instruction, which reports no status.
+    for inst in self._in_flight_insts[:-1]:
+      status_len = self._UnpackResult('i')[0]
+      if status_len == 0:
+        continue
+      status = self._UnpackResult('{}s'.format(status_len))[0]
+      errors.append((inst, status))
+    del self._in_flight_insts[:]
+    self._enqueued_compiled_insts = b''
+    if errors:
+      raise CacheBackendError(errors)
+
|
+  def _PushInst(self, inst_name, *args):
+    """Queues an instruction: a one-byte opcode, then each parameter packed
+    either as a native int32 or as an int32 length followed by raw bytes.
+    """
+    inst_id = self._INST_IDS[inst_name]
+    inst_code = struct.pack('b', inst_id)
+    for param in args:
+      if type(param) == int:
+        inst_code += struct.pack('i', param)
+      elif type(param) == str:
+        inst_code += struct.pack('i{}s'.format(len(param)), len(param), param)
+      else:
+        assert False, 'Couldn\'t pass down parameter: {}'.format(repr(param))
+    self._enqueued_compiled_insts += inst_code
+    self._in_flight_insts.append([inst_name] + list(args))
+
|
+  def _UnpackResult(self, fmt):
+    """Unpacks the next result values from the batch output buffer."""
+    buf_size = struct.calcsize(fmt)
+    assert (
+        self._compiled_result_cursor + buf_size <= len(self._compiled_results))
+    buf = self._compiled_results[
+        self._compiled_result_cursor:self._compiled_result_cursor + buf_size]
+    self._compiled_result_cursor += buf_size
+    return struct.unpack(fmt, buf)
+
 
 def ApplyUrlWhitelistToCacheArchive(cache_archive_path,
                                     whitelisted_urls,
|
@@ -379,13 +553,15 @@ def ApplyUrlWhitelistToCacheArchive(cache_archive_path,
   cache_temp_directory = tempfile.mkdtemp(suffix='.cache')
   try:
     UnzipDirectoryContent(cache_archive_path, cache_temp_directory)
-    backend = CacheBackend(cache_temp_directory, 'simple')
+    backend = BatchCacheBackend(
+        cache_temp_directory, CacheBackendType.SIMPLE)
     cached_urls = backend.ListKeys()
     for cached_url in cached_urls:
       if cached_url not in whitelisted_urls:
         backend.DeleteKey(cached_url)
     for cached_url in backend.ListKeys():
       assert cached_url in whitelisted_urls
+    backend.ProcessBatch()
     ZipDirectoryContent(cache_temp_directory, output_cache_archive_path)
   finally:
     shutil.rmtree(cache_temp_directory)
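
Usage sketch (not part of the patch): the batched back-end queues mutating instructions and executes them in a single cachetool invocation. A minimal illustration, assuming tools/android/loading is on sys.path, OPTIONS has been initialized so that OPTIONS.LocalBinary('cachetool') resolves to a built binary, and '/tmp/example_cache' is a hypothetical simple-cache directory:

  from chrome_cache import (BatchCacheBackend, CacheBackendError,
                            CacheBackendType)

  backend = BatchCacheBackend('/tmp/example_cache', CacheBackendType.SIMPLE)
  try:
    size_before = backend.GetSize()  # Runs its own single-instruction batch.
    for key in backend.ListKeys():
      if key.endswith('.png'):
        backend.DeleteKey(key)  # Queued only; nothing has executed yet.
    backend.ProcessBatch()  # One cachetool invocation runs all queued deletes.
    print 'Reclaimed %d bytes.' % (size_before - backend.GetSize())
  except CacheBackendError as e:
    print e.errors  # List of (instruction, status string) pairs.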
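
For reference, the wire encoding that _PushInst produces can be reproduced standalone. This sketch (again not part of the patch; the key is hypothetical) encodes a delete_key instruction the same way the code above does: a signed-byte opcode, then each string parameter as a native int32 length followed by the raw bytes:

  import struct

  key = 'http://example.com/favicon.ico'  # Hypothetical cache key.
  opcode = 5  # Matches _INST_IDS['delete_key'] in the patch.
  encoded = struct.pack('b', opcode) + struct.pack(
      'i{}s'.format(len(key)), len(key), key)
  assert len(encoded) == 1 + 4 + len(key)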
|