Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(119)

Side by Side Diff: tools/android/loading/run_sandwich.py

Issue 1707363002: sandwich: Implements network condition on WPR server and browser. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@i00
Patch Set: Rebase Created 4 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 #! /usr/bin/env python 1 #! /usr/bin/env python
2 # Copyright 2016 The Chromium Authors. All rights reserved. 2 # Copyright 2016 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be 3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file. 4 # found in the LICENSE file.
5 5
6 """Instructs Chrome to load series of web pages and reports results. 6 """Instructs Chrome to load series of web pages and reports results.
7 7
8 When running Chrome is sandwiched between preprocessed disk caches and 8 When running Chrome is sandwiched between preprocessed disk caches and
9 WebPageReplay serving all connections. 9 WebPageReplay serving all connections.
10 10
(...skipping 15 matching lines...) Expand all
26 _SRC_DIR = os.path.abspath(os.path.join( 26 _SRC_DIR = os.path.abspath(os.path.join(
27 os.path.dirname(__file__), '..', '..', '..')) 27 os.path.dirname(__file__), '..', '..', '..'))
28 28
29 sys.path.append(os.path.join(_SRC_DIR, 'third_party', 'catapult', 'devil')) 29 sys.path.append(os.path.join(_SRC_DIR, 'third_party', 'catapult', 'devil'))
30 from devil.android import device_utils 30 from devil.android import device_utils
31 31
32 sys.path.append(os.path.join(_SRC_DIR, 'build', 'android')) 32 sys.path.append(os.path.join(_SRC_DIR, 'build', 'android'))
33 from pylib import constants 33 from pylib import constants
34 import devil_chromium 34 import devil_chromium
35 35
36 import chrome_setup
36 import device_setup 37 import device_setup
37 import devtools_monitor 38 import devtools_monitor
38 import options 39 import options
39 import page_track 40 import page_track
40 import pull_sandwich_metrics 41 import pull_sandwich_metrics
41 import trace_recorder 42 import trace_recorder
42 import tracing 43 import tracing
43 44
44 45
45 # Use options layer to access constants. 46 # Use options layer to access constants.
(...skipping 12 matching lines...) Expand all
58 _REAL_INDEX_FILE_NAME = 'the-real-index' 59 _REAL_INDEX_FILE_NAME = 'the-real-index'
59 60
60 # An estimate of time to wait for the device to become idle after expensive 61 # An estimate of time to wait for the device to become idle after expensive
61 # operations, such as opening the launcher activity. 62 # operations, such as opening the launcher activity.
62 _TIME_TO_DEVICE_IDLE_SECONDS = 2 63 _TIME_TO_DEVICE_IDLE_SECONDS = 2
63 64
64 65
def _RemoteCacheDirectory():
  """Returns the Chrome cache directory path on the remote device."""
  return '/data/data/%s/cache/Cache' % OPTIONS.chrome_package_name
67 68
69 # Devtools timeout of 1 minute to avoid websocket timeout on slow
70 # network condition.
71 _DEVTOOLS_TIMEOUT = 60
72
68 73
69 def _ReadUrlsFromJobDescription(job_name): 74 def _ReadUrlsFromJobDescription(job_name):
70 """Retrieves the list of URLs associated with the job name.""" 75 """Retrieves the list of URLs associated with the job name."""
71 try: 76 try:
72 # Extra sugar: attempt to load from a relative path. 77 # Extra sugar: attempt to load from a relative path.
73 json_file_name = os.path.join(os.path.dirname(__file__), _JOB_SEARCH_PATH, 78 json_file_name = os.path.join(os.path.dirname(__file__), _JOB_SEARCH_PATH,
74 job_name) 79 job_name)
75 with open(json_file_name) as f: 80 with open(json_file_name) as f:
76 json_data = json.load(f) 81 json_data = json.load(f)
77 except IOError: 82 except IOError:
(...skipping 173 matching lines...) Expand 10 before | Expand all | Expand 10 after
251 directory_path = os.path.join(output_directories_path, dirname) 256 directory_path = os.path.join(output_directories_path, dirname)
252 if not os.path.isdir(directory_path): 257 if not os.path.isdir(directory_path):
253 continue 258 continue
254 try: 259 try:
255 int(dirname) 260 int(dirname)
256 except ValueError: 261 except ValueError:
257 continue 262 continue
258 shutil.rmtree(directory_path) 263 shutil.rmtree(directory_path)
259 264
260 265
def _ArgumentParser():
  """Creates and returns the command-line argument parser for this tool."""
  arg_parser = argparse.ArgumentParser()
  arg_parser.add_argument(
      '--job', required=True, help='JSON file with job description.')
  arg_parser.add_argument(
      '--output', required=True, help='Name of output directory to create.')
  arg_parser.add_argument(
      '--repeat', default=1, type=int, help='How many times to run the job')
  arg_parser.add_argument(
      '--cache-op',
      choices=['clear', 'save', 'push', 'reload'],
      default='clear',
      help=('Configures cache operation to do before launching Chrome. '
            '(Default is clear).'))
  arg_parser.add_argument(
      '--wpr-archive', default=None, type=str,
      help="Web page replay archive to load job's urls from.")
  arg_parser.add_argument(
      '--wpr-record', default=False, action='store_true',
      help='Record web page replay archive.')
  arg_parser.add_argument(
      '--disable-wpr-script-injection', default=False, action='store_true',
      help=('Disable WPR default script injection such as overriding '
            "javascript's Math.random() and Date() with deterministic "
            'implementations.'))
  arg_parser.add_argument(
      '--network-condition', default=None,
      choices=sorted(chrome_setup.NETWORK_CONDITIONS.keys()),
      help='Set a network profile.')
  arg_parser.add_argument(
      '--network-emulator', default='browser',
      choices=['browser', 'wpr'],
      help=('Set which component is emulating the network condition. '
            '(Default to browser)'))
  return arg_parser
299
300 def main():
301 logging.basicConfig(level=logging.INFO)
302 devil_chromium.Initialize()
303
304 # Don't give the argument yet. All we are interested in for now is accessing
305 # the default values of OPTIONS.
306 OPTIONS.ParseArgs([])
307
308 args = _ArgumentParser().parse_args()
291 309
292 if not os.path.isdir(args.output): 310 if not os.path.isdir(args.output):
293 try: 311 try:
294 os.makedirs(args.output) 312 os.makedirs(args.output)
295 except OSError: 313 except OSError:
296 logging.error('Cannot create directory for results: %s' % args.output) 314 logging.error('Cannot create directory for results: %s' % args.output)
297 raise 315 raise
298 else: 316 else:
299 _CleanPreviousTraces(args.output) 317 _CleanPreviousTraces(args.output)
300 318
301 run_infos = { 319 run_infos = {
302 'cache-op': args.cache_op, 320 'cache-op': args.cache_op,
303 'job': args.job, 321 'job': args.job,
304 'urls': [] 322 'urls': []
305 } 323 }
306 job_urls = _ReadUrlsFromJobDescription(args.job) 324 job_urls = _ReadUrlsFromJobDescription(args.job)
307 device = device_utils.DeviceUtils.HealthyDevices()[0] 325 device = device_utils.DeviceUtils.HealthyDevices()[0]
308 local_cache_archive_path = os.path.join(args.output, 'cache.zip') 326 local_cache_archive_path = os.path.join(args.output, 'cache.zip')
309 local_cache_directory_path = None 327 local_cache_directory_path = None
328 wpr_network_condition_name = None
329 browser_network_condition_name = None
330 if args.network_emulator == 'wpr':
331 wpr_network_condition_name = args.network_condition
332 elif args.network_emulator == 'browser':
333 browser_network_condition_name = args.network_condition
334 else:
335 assert False
310 336
311 if args.cache_op == 'push': 337 if args.cache_op == 'push':
312 assert os.path.isfile(local_cache_archive_path) 338 assert os.path.isfile(local_cache_archive_path)
313 local_cache_directory_path = tempfile.mkdtemp(suffix='.cache') 339 local_cache_directory_path = tempfile.mkdtemp(suffix='.cache')
314 _UnzipDirectoryContent(local_cache_archive_path, local_cache_directory_path) 340 _UnzipDirectoryContent(local_cache_archive_path, local_cache_directory_path)
315 341
316 with device_setup.WprHost(device, args.wpr_archive, args.wpr_record, 342 with device_setup.WprHost(device, args.wpr_archive,
317 args.disable_wpr_script_injection) as additional_flags: 343 record=args.wpr_record,
344 network_condition_name=wpr_network_condition_name,
345 disable_script_injection=args.disable_wpr_script_injection
346 ) as additional_flags:
318 def _RunNavigation(url, clear_cache, trace_id): 347 def _RunNavigation(url, clear_cache, trace_id):
319 with device_setup.DeviceConnection( 348 with device_setup.DeviceConnection(
320 device=device, 349 device=device,
321 additional_flags=additional_flags) as connection: 350 additional_flags=additional_flags) as connection:
351 additional_metadata = {}
352 if browser_network_condition_name:
353 additional_metadata = chrome_setup.SetUpEmulationAndReturnMetadata(
354 connection=connection,
355 emulated_device_name=None,
356 emulated_network_name=browser_network_condition_name)
322 loading_trace = trace_recorder.MonitorUrl( 357 loading_trace = trace_recorder.MonitorUrl(
323 connection, url, 358 connection, url,
324 clear_cache=clear_cache, 359 clear_cache=clear_cache,
325 categories=pull_sandwich_metrics.CATEGORIES) 360 categories=pull_sandwich_metrics.CATEGORIES,
361 timeout=_DEVTOOLS_TIMEOUT)
362 loading_trace.metadata.update(additional_metadata)
326 if trace_id != None: 363 if trace_id != None:
327 loading_trace_path = os.path.join( 364 loading_trace_path = os.path.join(
328 args.output, str(trace_id), 'trace.json') 365 args.output, str(trace_id), 'trace.json')
329 os.makedirs(os.path.dirname(loading_trace_path)) 366 os.makedirs(os.path.dirname(loading_trace_path))
330 loading_trace.ToJsonFile(loading_trace_path) 367 loading_trace.ToJsonFile(loading_trace_path)
331 368
332 for _ in xrange(args.repeat): 369 for _ in xrange(args.repeat):
333 for url in job_urls: 370 for url in job_urls:
334 clear_cache = False 371 clear_cache = False
335 if args.cache_op == 'clear': 372 if args.cache_op == 'clear':
(...skipping 22 matching lines...) Expand all
358 cache_directory_path = _PullBrowserCache(device) 395 cache_directory_path = _PullBrowserCache(device)
359 _ZipDirectoryContent(cache_directory_path, local_cache_archive_path) 396 _ZipDirectoryContent(cache_directory_path, local_cache_archive_path)
360 shutil.rmtree(cache_directory_path) 397 shutil.rmtree(cache_directory_path)
361 398
362 with open(os.path.join(args.output, 'run_infos.json'), 'w') as file_output: 399 with open(os.path.join(args.output, 'run_infos.json'), 'w') as file_output:
363 json.dump(run_infos, file_output, indent=2) 400 json.dump(run_infos, file_output, indent=2)
364 401
365 402
366 if __name__ == '__main__': 403 if __name__ == '__main__':
367 sys.exit(main()) 404 sys.exit(main())
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698