Chromium Code Reviews

Side by Side Diff: tools/android/loading/run_sandwich.py

Issue 1701973002: sandwich: Implements reload cache operation to compare with push. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@d06
Patch Set: Addresses matt's nit. Created 4 years, 10 months ago.
 #! /usr/bin/env python
 # Copyright 2016 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 """Instructs Chrome to load a series of web pages and reports results.
 
 When running, Chrome is sandwiched between preprocessed disk caches and
 WebPageReplay serving all connections.
 
(...skipping 68 matching lines...)
     json_data = json.load(f)
 
   key = 'urls'
   if json_data and key in json_data:
     url_list = json_data[key]
     if isinstance(url_list, list) and len(url_list) > 0:
       return url_list
   raise Exception('Job description does not define a list named "urls"')
 
 
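For reference, the check above only requires that the job description be a JSON object carrying a non-empty "urls" list. A minimal sketch of producing such a file (the file name and URLs are illustrative, not from the patch):

    import json

    # Hypothetical job description that passes the validation above:
    # a JSON object with a non-empty "urls" list.
    with open('job.json', 'w') as f:
      json.dump({'urls': ['http://example.com/', 'http://example.org/']}, f,
                indent=2)
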
-def _SaveChromeTrace(events, directory, subdirectory):
+def _SaveChromeTrace(events, target_directory):
   """Saves the trace events, ignores IO errors.
 
   Args:
     events: a dict as returned by TracingTrack.ToJsonDict()
-    directory: directory name containing all traces
-    subdirectory: directory name to create this particular trace in
+    target_directory: Directory path where the trace is created.
   """
-  target_directory = os.path.join(directory, subdirectory)
   filename = os.path.join(target_directory, 'trace.json')
   try:
     os.makedirs(target_directory)
     with open(filename, 'w') as f:
       json.dump({'traceEvents': events['events'], 'metadata': {}}, f, indent=2)
   except IOError:
     logging.warning('Could not save a trace: %s' % filename)
     # Swallow the exception.
 
 
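The refactored _SaveChromeTrace no longer joins (directory, subdirectory) itself; callers compose the target path. A minimal sketch of the new call pattern, assuming an events dict shaped like TracingTrack.ToJsonDict() output (the event payload and the 'out' path are illustrative):

    import os

    # Hypothetical trace events; _SaveChromeTrace only reads events['events'].
    events = {'events': [{'name': 'navigationStart', 'ph': 'I', 'ts': 0}]}

    # Callers now build the per-trace directory themselves, e.g. one
    # numbered subdirectory per navigation under the output root.
    _SaveChromeTrace(events, os.path.join('out', str(0)))
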
(...skipping 173 matching lines...)
   devil_chromium.Initialize()
 
   parser = argparse.ArgumentParser()
   parser.add_argument('--job', required=True,
                       help='JSON file with job description.')
   parser.add_argument('--output', required=True,
                       help='Name of output directory to create.')
   parser.add_argument('--repeat', default=1, type=int,
                       help='How many times to run the job')
   parser.add_argument('--cache-op',
-                      choices=['clear', 'save', 'push'],
+                      choices=['clear', 'save', 'push', 'reload'],
                       default='clear',
                       help='Configures cache operation to do before launching '
                            + 'Chrome. (Default is clear).')
   parser.add_argument('--wpr-archive', default=None, type=str,
                       help='Web page replay archive to load job\'s urls from.')
   parser.add_argument('--wpr-record', default=False, action='store_true',
                       help='Record web page replay archive.')
   parser.add_argument('--disable-wpr-script-injection', default=False,
                       action='store_true',
                       help='Disable WPR default script injection such as ' +
(...skipping 20 matching lines...)
   local_cache_archive_path = os.path.join(args.output, 'cache.zip')
   local_cache_directory_path = None
 
   if args.cache_op == 'push':
     assert os.path.isfile(local_cache_archive_path)
     local_cache_directory_path = tempfile.mkdtemp(suffix='.cache')
     _UnzipDirectoryContent(local_cache_archive_path, local_cache_directory_path)
 
   with device_setup.WprHost(device, args.wpr_archive, args.wpr_record,
                             args.disable_wpr_script_injection) as additional_flags:
+    def _RunNavigation(url, clear_cache, trace_id):
+      with device_setup.DeviceConnection(
+          device=device,
+          additional_flags=additional_flags) as connection:
+        if clear_cache:
+          connection.ClearCache()
+        page_track.PageTrack(connection)
+        tracing_track = tracing.TracingTrack(connection,
+            categories=pull_sandwich_metrics.CATEGORIES)
+        connection.SetUpMonitoring()
+        connection.SendAndIgnoreResponse('Page.navigate', {'url': url})
+        connection.StartMonitoring()
+        if trace_id is not None:
+          trace_target_directory = os.path.join(args.output, str(trace_id))
+          _SaveChromeTrace(tracing_track.ToJsonDict(), trace_target_directory)
+
     for _ in xrange(args.repeat):
       for url in job_urls:
-        if args.cache_op == 'push':
+        clear_cache = False
+        if args.cache_op == 'clear':
+          clear_cache = True
+        elif args.cache_op == 'push':
           device.KillAll(_CHROME_PACKAGE, quiet=True)
           _PushBrowserCache(device, local_cache_directory_path)
-        with device_setup.DeviceConnection(
-            device=device,
-            additional_flags=additional_flags) as connection:
-          if (not run_infos['urls'] and args.cache_op == 'save' or
-              args.cache_op == 'clear'):
-            connection.ClearCache()
-          page_track.PageTrack(connection)
-          tracing_track = tracing.TracingTrack(connection,
-              categories=pull_sandwich_metrics.CATEGORIES)
-          connection.SetUpMonitoring()
-          connection.SendAndIgnoreResponse('Page.navigate', {'url': url})
-          connection.StartMonitoring()
-          _SaveChromeTrace(tracing_track.ToJsonDict(), args.output,
-                           str(len(run_infos['urls'])))
-          run_infos['urls'].append(url)
+        elif args.cache_op == 'reload':
+          _RunNavigation(url, clear_cache=True, trace_id=None)
+        elif args.cache_op == 'save':
+          clear_cache = not run_infos['urls']
+        _RunNavigation(url, clear_cache=clear_cache,
+                       trace_id=len(run_infos['urls']))
+        run_infos['urls'].append(url)
 
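Taken together, the dispatch above gives each --cache-op value a distinct warm-up behavior: 'clear' navigates cold every time; 'push' kills Chrome and reinstalls the unzipped cache archive before a warm navigation; the new 'reload' first performs an untraced cold navigation of the same URL so the traced navigation runs against a cache Chrome populated itself (the comparison point for push); 'save' navigates cold only for the first URL so subsequent ones accumulate into the cache that is archived afterwards. A standalone paraphrase of the clear_cache decision (the helper name is hypothetical, not part of the patch):

    def _ShouldClearCache(cache_op, urls_seen_so_far):
      """Paraphrases the clear_cache decision made in the loop above."""
      if cache_op == 'clear':
        return True   # always navigate cold
      if cache_op == 'save':
        return urls_seen_so_far == 0  # only the first navigation starts cold
      return False    # 'push' and 'reload' trace a warm navigation
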
   if local_cache_directory_path:
     shutil.rmtree(local_cache_directory_path)
 
   if args.cache_op == 'save':
     # Move Chrome to background to allow it to flush the index.
     device.adb.Shell('am start com.google.android.launcher')
     time.sleep(_TIME_TO_DEVICE_IDLE_SECONDS)
     device.KillAll(_CHROME_PACKAGE, quiet=True)
     time.sleep(_TIME_TO_DEVICE_IDLE_SECONDS)
 
     cache_directory_path = _PullBrowserCache(device)
     _ZipDirectoryContent(cache_directory_path, local_cache_archive_path)
     shutil.rmtree(cache_directory_path)
 
   with open(os.path.join(args.output, 'run_infos.json'), 'w') as file_output:
     json.dump(run_infos, file_output, indent=2)
 
 
 if __name__ == '__main__':
   sys.exit(main())
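After a run, the --output directory holds run_infos.json plus one numbered subdirectory per traced navigation, each containing trace.json (unless the save hit a swallowed IOError). A minimal sketch of walking that layout ('out' is an illustrative --output value):

    import json
    import os

    output_dir = 'out'  # hypothetical --output value
    with open(os.path.join(output_dir, 'run_infos.json')) as f:
      run_infos = json.load(f)

    # Trace ids were assigned as len(run_infos['urls']) at save time, so
    # enumerate() recovers each traced URL's directory.
    for trace_id, url in enumerate(run_infos['urls']):
      print('%s: %s' % (url, os.path.join(output_dir, str(trace_id),
                                          'trace.json')))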