Chromium Code Reviews

Unified Diff: tools/android/loading/run_sandwich.py

Issue 1692873003: sandwich: Pushes locally saved HTTP cache to the device. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@d03
Patch Set: Addresses lizeb's nits (created 4 years, 10 months ago)
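This patch set replaces the old --save-cache flag with a --cache-op argument selecting one of three cache operations: clear, save, or push. A typical two-phase use, sketched below with placeholder job and output names (my_job.json and out are illustrative, only the flag names come from the diff), first saves the device's HTTP cache into <output>/cache.zip and then pushes it back to the device before every subsequent page load:

    # First run: load the job's pages, then pull and archive the browser cache.
    tools/android/loading/run_sandwich.py --job my_job.json --output out --cache-op save
    # Later runs: restore the saved cache on the device before each page load.
    tools/android/loading/run_sandwich.py --job my_job.json --output out --cache-op push --repeat 3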
 #! /usr/bin/env python
 # Copyright 2016 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 """Instructs Chrome to load series of web pages and reports results.
 
 When running Chrome is sandwiched between preprocessed disk caches and
 WepPageReplay serving all connections.
 
 TODO(pasko): implement cache preparation and WPR.
 """
 
 import argparse
+from datetime import datetime
 import json
 import logging
 import os
 import shutil
+import subprocess
 import sys
 import tempfile
 import time
 import zipfile
 
 _SRC_DIR = os.path.abspath(os.path.join(
     os.path.dirname(__file__), '..', '..', '..'))
 
 sys.path.append(os.path.join(_SRC_DIR, 'third_party', 'catapult', 'devil'))
 from devil.android import device_utils
(...skipping 23 matching lines...)
 _REAL_INDEX_FILE_NAME = 'the-real-index'
 
 # Name of the chrome package.
 _CHROME_PACKAGE = (
     constants.PACKAGE_INFO[device_setup.DEFAULT_CHROME_PACKAGE].package)
 
 # An estimate of time to wait for the device to become idle after expensive
 # operations, such as opening the launcher activity.
 _TIME_TO_DEVICE_IDLE_SECONDS = 2
 
+# Cache directory's path on the device.
+_REMOTE_CACHE_DIRECTORY = '/data/data/' + _CHROME_PACKAGE + '/cache/Cache'
+
 
 def _ReadUrlsFromJobDescription(job_name):
   """Retrieves the list of URLs associated with the job name."""
   try:
     # Extra sugar: attempt to load from a relative path.
     json_file_name = os.path.join(os.path.dirname(__file__), _JOB_SEARCH_PATH,
                                   job_name)
     with open(json_file_name) as f:
       json_data = json.load(f)
   except IOError:
(...skipping 25 matching lines...)
       json.dump({'traceEvents': events['events'], 'metadata': {}}, f, indent=2)
   except IOError:
     logging.warning('Could not save a trace: %s' % filename)
     # Swallow the exception.
 
 
 def _UpdateTimestampFromAdbStat(filename, stat):
   os.utime(filename, (stat.st_time, stat.st_time))
 
 
+def _AdbShell(adb, cmd):
+  adb.Shell(subprocess.list2cmdline(cmd))
+
+
+def _AdbUtime(adb, filename, timestamp):
+  """Adb equivalent of os.utime(filename, (timestamp, timestamp))
+  """
+  touch_stamp = datetime.fromtimestamp(timestamp).strftime('%Y%m%d.%H%M%S')
+  _AdbShell(adb, ['touch', '-t', touch_stamp, filename])
+
+
 def _PullBrowserCache(device):
   """Pulls the browser cache from the device and saves it locally.
 
   Cache is saved with the same file structure as on the device. Timestamps are
   important to preserve because indexing and eviction depends on them.
 
   Returns:
     Temporary directory containing all the browser cache.
   """
   save_target = tempfile.mkdtemp(suffix='.cache')
-  cache_directory = '/data/data/' + _CHROME_PACKAGE + '/cache/Cache'
-  for filename, stat in device.adb.Ls(cache_directory):
+  for filename, stat in device.adb.Ls(_REMOTE_CACHE_DIRECTORY):
     if filename == '..':
       continue
     if filename == '.':
       cache_directory_stat = stat
       continue
-    original_file = os.path.join(cache_directory, filename)
+    original_file = os.path.join(_REMOTE_CACHE_DIRECTORY, filename)
     saved_file = os.path.join(save_target, filename)
     device.adb.Pull(original_file, saved_file)
     _UpdateTimestampFromAdbStat(saved_file, stat)
     if filename == _INDEX_DIRECTORY_NAME:
       # The directory containing the index was pulled recursively, update the
       # timestamps for known files. They are ignored by cache backend, but may
       # be useful for debugging.
       index_dir_stat = stat
       saved_index_dir = os.path.join(save_target, _INDEX_DIRECTORY_NAME)
       saved_index_file = os.path.join(saved_index_dir, _REAL_INDEX_FILE_NAME)
       for sub_file, sub_stat in device.adb.Ls(original_file):
         if sub_file == _REAL_INDEX_FILE_NAME:
           _UpdateTimestampFromAdbStat(saved_index_file, sub_stat)
           break
       _UpdateTimestampFromAdbStat(saved_index_dir, index_dir_stat)
 
   # Store the cache directory modification time. It is important to update it
   # after all files in it have been written. The timestamp is compared with
   # the contents of the index file when freshness is determined.
   _UpdateTimestampFromAdbStat(save_target, cache_directory_stat)
   return save_target
 
 
+def _PushBrowserCache(device, local_cache_path):
+  """Pushes the browser cache saved locally to the device.
+
+  Args:
+    device: Android device.
+    local_cache_path: The directory's path containing the cache locally.
+  """
+  # Clear previous cache.
+  _AdbShell(device.adb, ['rm', '-rf', _REMOTE_CACHE_DIRECTORY])
+  _AdbShell(device.adb, ['mkdir', _REMOTE_CACHE_DIRECTORY])
+
+  # Push cache content.
+  device.adb.Push(local_cache_path, _REMOTE_CACHE_DIRECTORY)
+
+  # Walk through the local cache to update mtime on the device.
+  def MirrorMtime(local_path):
+    cache_relative_path = os.path.relpath(local_path, start=local_cache_path)
+    remote_path = os.path.join(_REMOTE_CACHE_DIRECTORY, cache_relative_path)
+    _AdbUtime(device.adb, remote_path, os.stat(local_path).st_mtime)
+
+  for local_directory_path, dirnames, filenames in os.walk(
+      local_cache_path, topdown=False):
+    for filename in filenames:
+      MirrorMtime(os.path.join(local_directory_path, filename))
+    for dirname in dirnames:
+      MirrorMtime(os.path.join(local_directory_path, dirname))
+  MirrorMtime(local_cache_path)
+
+
 def _ZipDirectoryContent(root_directory_path, archive_dest_path):
   """Zip a directory's content recursively with all the directories'
   timestamps preserved.
 
   Args:
     root_directory_path: The directory's path to archive.
     archive_dest_path: Archive destination's path.
   """
   with zipfile.ZipFile(archive_dest_path, 'w') as zip_output:
     timestamps = {}
+    root_directory_stats = os.stat(root_directory_path)
+    timestamps['.'] = {
+        'atime': root_directory_stats.st_atime,
+        'mtime': root_directory_stats.st_mtime}
     for directory_path, dirnames, filenames in os.walk(root_directory_path):
       for dirname in dirnames:
         subdirectory_path = os.path.join(directory_path, dirname)
         subdirectory_relative_path = os.path.relpath(subdirectory_path,
                                                      root_directory_path)
         subdirectory_stats = os.stat(subdirectory_path)
         timestamps[subdirectory_relative_path] = {
             'atime': subdirectory_stats.st_atime,
             'mtime': subdirectory_stats.st_mtime}
       for filename in filenames:
(...skipping 35 matching lines...)
           f.write(zip_input.read(file_archive_name))
 
   assert timestamps
   for relative_path, stats in timestamps.iteritems():
     output_path = os.path.join(directory_dest_path, relative_path)
     if not os.path.exists(output_path):
       os.makedirs(output_path)
     os.utime(output_path, (stats['atime'], stats['mtime']))
 
 
+def _CleanPreviousTraces(output_directories_path):
+  """Cleans previous traces from the output directory.
+
+  Args:
+    output_directories_path: The output directory path where to clean the
+        previous traces.
+  """
+  for dirname in os.listdir(output_directories_path):
+    directory_path = os.path.join(output_directories_path, dirname)
+    if not os.path.isdir(directory_path):
+      continue
+    try:
+      int(dirname)
+    except ValueError:
+      continue
+    shutil.rmtree(directory_path)
+
+
 def main():
   logging.basicConfig(level=logging.INFO)
   devil_chromium.Initialize()
 
   parser = argparse.ArgumentParser()
   parser.add_argument('--job', required=True,
                       help='JSON file with job description.')
   parser.add_argument('--output', required=True,
                       help='Name of output directory to create.')
   parser.add_argument('--repeat', default=1, type=int,
                       help='How many times to run the job')
-  parser.add_argument('--save-cache', default=False,
-                      action='store_true',
-                      help='Clear HTTP cache before start,' +
-                      'save cache before exit.')
+  parser.add_argument('--cache-op',
+                      choices=['clear', 'save', 'push'],
+                      default='clear',
+                      help='Configures cache operation to do before launching '
+                      +'Chrome. (Default is clear).')
   parser.add_argument('--wpr-archive', default=None, type=str,
                       help='Web page replay archive to load job\'s urls from.')
   parser.add_argument('--wpr-record', default=False, action='store_true',
                       help='Record web page replay archive.')
   args = parser.parse_args()
 
-  try:
-    os.makedirs(args.output)
-  except OSError:
-    logging.error('Cannot create directory for results: %s' % args.output)
-    raise
+  if not os.path.isdir(args.output):
+    try:
+      os.makedirs(args.output)
+    except OSError:
+      logging.error('Cannot create directory for results: %s' % args.output)
+      raise
+  else:
+    _CleanPreviousTraces(args.output)
 
   job_urls = _ReadUrlsFromJobDescription(args.job)
   device = device_utils.DeviceUtils.HealthyDevices()[0]
+  local_cache_archive_path = os.path.join(args.output, 'cache.zip')
+  local_cache_directory_path = None
+
+  if args.cache_op == 'push':
+    assert os.path.isfile(local_cache_archive_path)
+    local_cache_directory_path = tempfile.mkdtemp(suffix='.cache')
+    _UnzipDirectoryContent(local_cache_archive_path, local_cache_directory_path)
 
   with device_setup.WprHost(device,
                             args.wpr_archive,
                             args.wpr_record) as additional_flags:
     pages_loaded = 0
-    for iteration in xrange(args.repeat):
+    for _ in xrange(args.repeat):
       for url in job_urls:
+        if args.cache_op == 'push':
+          device.KillAll(_CHROME_PACKAGE, quiet=True)
+          _PushBrowserCache(device, local_cache_directory_path)
         with device_setup.DeviceConnection(
             device=device,
             additional_flags=additional_flags) as connection:
-          if iteration == 0 and pages_loaded == 0 and args.save_cache:
+          if (pages_loaded == 0 and args.cache_op == 'save' or
+              args.cache_op == 'clear'):
             connection.ClearCache()
           page_track.PageTrack(connection)
           tracing_track = tracing.TracingTrack(connection,
               categories=pull_sandwich_metrics.CATEGORIES)
           connection.SetUpMonitoring()
           connection.SendAndIgnoreResponse('Page.navigate', {'url': url})
           connection.StartMonitoring()
           pages_loaded += 1
           _SaveChromeTrace(tracing_track.ToJsonDict(), args.output,
                            str(pages_loaded))
 
-  if args.save_cache:
+  if local_cache_directory_path:
+    shutil.rmtree(local_cache_directory_path)
+
+  if args.cache_op == 'save':
     # Move Chrome to background to allow it to flush the index.
     device.adb.Shell('am start com.google.android.launcher')
     time.sleep(_TIME_TO_DEVICE_IDLE_SECONDS)
     device.KillAll(_CHROME_PACKAGE, quiet=True)
     time.sleep(_TIME_TO_DEVICE_IDLE_SECONDS)
 
     cache_directory_path = _PullBrowserCache(device)
-    _ZipDirectoryContent(cache_directory_path,
-                         os.path.join(args.output, 'cache.zip'))
+    _ZipDirectoryContent(cache_directory_path, local_cache_archive_path)
     shutil.rmtree(cache_directory_path)
 
 
 if __name__ == '__main__':
   sys.exit(main())
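For debugging cache pushes, one might want to confirm that _PushBrowserCache actually mirrored the local mtimes onto the device. Below is a minimal sketch of such a check, reusing only calls already present in this file (device.adb.Ls and the st_time field of its stat results); the helper name _VerifyCacheMtimes and the one-second tolerance are illustrative assumptions, not part of this change:

    def _VerifyCacheMtimes(device, local_cache_path):
      """Returns top-level cache entries whose device mtime differs locally."""
      mismatches = []
      for filename, stat in device.adb.Ls(_REMOTE_CACHE_DIRECTORY):
        if filename in ('.', '..'):
          continue
        local_path = os.path.join(local_cache_path, filename)
        if not os.path.exists(local_path):
          mismatches.append(filename)
          continue
        # 'touch -t' only has one-second resolution, so allow a small delta.
        if abs(os.stat(local_path).st_mtime - stat.st_time) > 1:
          mismatches.append(filename)
      return mismatches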