Chromium Code Reviews

Side by Side Diff: tools/android/loading/chrome_cache.py

Issue 1737103002: sandwich: Implements filter-cache sub-command. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@i10
Patch Set: Created 4 years, 10 months ago
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

5 """Takes care of manipulating the chrome's HTTP cache. 5 """Takes care of manipulating the chrome's HTTP cache.
6 """ 6 """

from datetime import datetime
import json
import os
import shutil
import subprocess
import sys
import tempfile
import zipfile

_SRC_DIR = os.path.abspath(os.path.join(
    os.path.dirname(__file__), '..', '..', '..'))

sys.path.append(os.path.join(_SRC_DIR, 'build', 'android'))
from pylib import constants
(...skipping 113 matching lines...)


def ZipDirectoryContent(root_directory_path, archive_dest_path):
  """Zip a directory's content recursively with all the directories'
  timestamps preserved.

  Args:
    root_directory_path: The directory's path to archive.
    archive_dest_path: Archive destination's path.
  """
  if os.path.isfile(archive_dest_path):
mattcary 2016/02/26 09:33:08 This doesn't seem to be necessary---zipfile will simply overwrite an existing archive when it is opened in 'w' mode.
pasko 2016/02/26 17:10:06 is it really necessary to remove before writing?
gabadie 2016/03/01 10:40:48 Done.
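A quick sketch (not part of the patch) supporting mattcary's point: zipfile opened in 'w' mode truncates any existing archive, so the explicit removal below is redundant at most. The paths here are illustrative:

import zipfile

with zipfile.ZipFile('/tmp/demo.zip', 'w') as zip_output:
  zip_output.writestr('a.txt', 'first')
# Reopening in 'w' mode rewrites the archive from scratch.
with zipfile.ZipFile('/tmp/demo.zip', 'w') as zip_output:
  zip_output.writestr('b.txt', 'second')
with zipfile.ZipFile('/tmp/demo.zip') as zip_input:
  assert zip_input.namelist() == ['b.txt']  # 'a.txt' is gone.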
    os.remove(archive_dest_path)
  with zipfile.ZipFile(archive_dest_path, 'w') as zip_output:
    timestamps = {}
    root_directory_stats = os.stat(root_directory_path)
    timestamps['.'] = {
        'atime': root_directory_stats.st_atime,
        'mtime': root_directory_stats.st_mtime}
    for directory_path, dirnames, filenames in os.walk(root_directory_path):
      for dirname in dirnames:
        subdirectory_path = os.path.join(directory_path, dirname)
        subdirectory_relative_path = os.path.relpath(subdirectory_path,
(...skipping 117 matching lines...)
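For context on the timestamps dict built above: the skipped lines are not shown here, but the apparent design is to record each directory's atime/mtime keyed by its path relative to the root, and to reapply them after extraction. A hedged sketch of that restore step, assuming the dict is serialized into the archive as a JSON member (the member name 'timestamps.json' and the helper name are guesses, not the patch's actual code):

import json
import os
import zipfile

def RestoreDirectoryTimestamps(archive_path, extracted_root_path):
  # Hypothetical helper: read the recorded timestamps back out of the
  # archive and reapply them with os.utime().
  with zipfile.ZipFile(archive_path) as zip_input:
    timestamps = json.loads(zip_input.read('timestamps.json'))
  for relative_path, stamps in timestamps.iteritems():
    os.utime(os.path.join(extracted_root_path, relative_path),
             (stamps['atime'], stamps['mtime']))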
        self._cache_directory_path,
        self._cache_backend_type,
        operation]
    editor_tool_cmd.extend(args)
    process = subprocess.Popen(editor_tool_cmd, stdout=subprocess.PIPE)
    stdout_data, _ = process.communicate()
    assert process.returncode == 0
    return stdout_data


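An aside on the pattern above: the explicit assert on process.returncode can also be expressed with subprocess.check_output, which captures stdout and raises CalledProcessError on a nonzero exit. A sketch, not the patch author's code (the helper name is made up):

import subprocess

def _RunEditorTool(editor_tool_cmd):
  # check_output raises subprocess.CalledProcessError if the tool exits
  # with a nonzero code, replacing the explicit returncode assert.
  return subprocess.check_output(editor_tool_cmd)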
def ApplyUrlWhitelistToCacheArchive(cache_archive_path,
                                    whitelisted_urls,
                                    output_cache_archive_path):
  """Generates a new cache archive containing only the whitelisted URLs.

  Args:
    cache_archive_path: Path of the cache archive to apply the whitelist to.
    whitelisted_urls: Set of URLs to keep in the cache.
    output_cache_archive_path: Destination path of the cache archive containing
        only whitelisted URLs.
  """
  cache_temp_directory = tempfile.mkdtemp(suffix='.cache')
  UnzipDirectoryContent(cache_archive_path, cache_temp_directory)
  backend = CacheBackend(cache_temp_directory, 'simple')
  cached_urls = backend.ListKeys()
  for cached_url in cached_urls:
    if cached_url not in whitelisted_urls:
      backend.DeleteKey(cached_url)
  for cached_url in backend.ListKeys():
    assert cached_url in whitelisted_urls
  ZipDirectoryContent(cache_temp_directory, output_cache_archive_path)
  shutil.rmtree(cache_temp_directory)
pasko 2016/02/26 17:10:06 the try..finally would avoid polluting the filesystem.
gabadie 2016/03/01 10:40:48 Good point. I think we should just do a util like…
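A possible shape for the utility gabadie alludes to, assuming a context manager wrapping tempfile.mkdtemp; the name TemporaryDirectory is hypothetical here (the stdlib only gained tempfile.TemporaryDirectory in Python 3.2, and this is Python 2 code):

import contextlib
import shutil
import tempfile

@contextlib.contextmanager
def TemporaryDirectory(suffix=''):
  # Yields a fresh temporary directory and guarantees its removal, even if
  # the body of the with-statement raises.
  temp_directory_path = tempfile.mkdtemp(suffix=suffix)
  try:
    yield temp_directory_path
  finally:
    shutil.rmtree(temp_directory_path)

With that in place, the body of ApplyUrlWhitelistToCacheArchive would sit inside a `with TemporaryDirectory(suffix='.cache') as cache_temp_directory:` block and the trailing shutil.rmtree call would disappear.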


if __name__ == '__main__':
  import argparse
pasko 2016/02/26 17:10:06 why import here?
gabadie 2016/03/01 10:40:48 Because this is a lib manual test (in the meantime…)
pasko 2016/03/01 16:01:11 isn't this against the style guide? To avoid the p…
gabadie 2016/03/01 16:49:09 According to Benoit, that is a common practice. ht…
pasko 2016/03/01 17:55:53 I disagree with Benoit here. In the Chromium codebase…
  parser = argparse.ArgumentParser(description='Tests cache back-end.')
pasko 2016/02/26 17:10:06 nit: backend in one word
gabadie 2016/03/01 10:40:48 Looks like the two are correct. Keeping it to be consistent…
pasko 2016/03/01 16:01:11 They are both correct, but we should prefer using one word.
gabadie 2016/03/01 16:49:09 Acknowledged.
  parser.add_argument('cache_path', type=str)
  parser.add_argument('backend_type', type=str, choices=BACKEND_TYPES)
  command_line_args = parser.parse_args()

  cache_backend = CacheBackend(
      cache_directory_path=command_line_args.cache_path,
      cache_backend_type=command_line_args.backend_type)
  keys = cache_backend.ListKeys()
  print '{}\'s HTTP response header:'.format(keys[0])
  print cache_backend.GetStreamForKey(keys[0], 0)
  cache_backend.DeleteKey(keys[1])
  assert keys[1] not in cache_backend.ListKeys()
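For illustration, the new filter-cache path through this module would be exercised roughly as follows; the archive file names here are hypothetical (in the actual change the caller lives in tools/android/loading/sandwich.py):

whitelisted_urls = set([
    'https://example.com/',
    'https://example.com/style.css'])
ApplyUrlWhitelistToCacheArchive(
    cache_archive_path='original_cache.zip',
    whitelisted_urls=whitelisted_urls,
    output_cache_archive_path='filtered_cache.zip')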