OLD | NEW |
---|---|
1 #! /usr/bin/env python | 1 #! /usr/bin/env python |
2 # Copyright 2016 The Chromium Authors. All rights reserved. | 2 # Copyright 2016 The Chromium Authors. All rights reserved. |
3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
5 | 5 |
6 """Instructs Chrome to load series of web pages and reports results. | 6 """Instructs Chrome to load series of web pages and reports results. |
7 | 7 |
8 When running Chrome is sandwiched between preprocessed disk caches and | 8 When running Chrome is sandwiched between preprocessed disk caches and |
9 WebPageReplay serving all connections. | 9 WebPageReplay serving all connections. |
10 | 10 |
(...skipping 15 matching lines...) Expand all Loading... | |
26 _SRC_DIR = os.path.abspath(os.path.join( | 26 _SRC_DIR = os.path.abspath(os.path.join( |
27 os.path.dirname(__file__), '..', '..', '..')) | 27 os.path.dirname(__file__), '..', '..', '..')) |
28 | 28 |
29 sys.path.append(os.path.join(_SRC_DIR, 'third_party', 'catapult', 'devil')) | 29 sys.path.append(os.path.join(_SRC_DIR, 'third_party', 'catapult', 'devil')) |
30 from devil.android import device_utils | 30 from devil.android import device_utils |
31 | 31 |
32 sys.path.append(os.path.join(_SRC_DIR, 'build', 'android')) | 32 sys.path.append(os.path.join(_SRC_DIR, 'build', 'android')) |
33 from pylib import constants | 33 from pylib import constants |
34 import devil_chromium | 34 import devil_chromium |
35 | 35 |
36 import chrome_setup | |
36 import device_setup | 37 import device_setup |
37 import devtools_monitor | 38 import devtools_monitor |
38 import options | 39 import options |
39 import page_track | 40 import page_track |
40 import pull_sandwich_metrics | 41 import pull_sandwich_metrics |
41 import trace_recorder | 42 import trace_recorder |
42 import tracing | 43 import tracing |
43 | 44 |
44 | 45 |
45 _JOB_SEARCH_PATH = 'sandwich_jobs' | 46 _JOB_SEARCH_PATH = 'sandwich_jobs' |
(...skipping 11 matching lines...) Expand all Loading... | |
57 # Name of the chrome package. | 58 # Name of the chrome package. |
58 _CHROME_PACKAGE = constants.PACKAGE_INFO['chrome'].package | 59 _CHROME_PACKAGE = constants.PACKAGE_INFO['chrome'].package |
59 | 60 |
60 # An estimate of time to wait for the device to become idle after expensive | 61 # An estimate of time to wait for the device to become idle after expensive |
61 # operations, such as opening the launcher activity. | 62 # operations, such as opening the launcher activity. |
62 _TIME_TO_DEVICE_IDLE_SECONDS = 2 | 63 _TIME_TO_DEVICE_IDLE_SECONDS = 2 |
63 | 64 |
64 # Cache directory's path on the device. | 65 # Cache directory's path on the device. |
65 _REMOTE_CACHE_DIRECTORY = '/data/data/' + _CHROME_PACKAGE + '/cache/Cache' | 66 _REMOTE_CACHE_DIRECTORY = '/data/data/' + _CHROME_PACKAGE + '/cache/Cache' |
66 | 67 |
68 # Devtools timeout of 1 minute to avoid websocket timeout on slow | |
69 # network condition. | |
70 _DEVTOOLS_TIMEOUT = 60 | |
71 | |
67 | 72 |
68 def _ReadUrlsFromJobDescription(job_name): | 73 def _ReadUrlsFromJobDescription(job_name): |
69 """Retrieves the list of URLs associated with the job name.""" | 74 """Retrieves the list of URLs associated with the job name.""" |
70 try: | 75 try: |
71 # Extra sugar: attempt to load from a relative path. | 76 # Extra sugar: attempt to load from a relative path. |
72 json_file_name = os.path.join(os.path.dirname(__file__), _JOB_SEARCH_PATH, | 77 json_file_name = os.path.join(os.path.dirname(__file__), _JOB_SEARCH_PATH, |
73 job_name) | 78 job_name) |
74 with open(json_file_name) as f: | 79 with open(json_file_name) as f: |
75 json_data = json.load(f) | 80 json_data = json.load(f) |
76 except IOError: | 81 except IOError: |
(...skipping 173 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
250 directory_path = os.path.join(output_directories_path, dirname) | 255 directory_path = os.path.join(output_directories_path, dirname) |
251 if not os.path.isdir(directory_path): | 256 if not os.path.isdir(directory_path): |
252 continue | 257 continue |
253 try: | 258 try: |
254 int(dirname) | 259 int(dirname) |
255 except ValueError: | 260 except ValueError: |
256 continue | 261 continue |
257 shutil.rmtree(directory_path) | 262 shutil.rmtree(directory_path) |
258 | 263 |
259 | 264 |
def _ArgumentParser():
  """Builds and returns the command-line argument parser for this script."""
  parser = argparse.ArgumentParser()
  # Shorthand so every option declaration reads the same way.
  add = parser.add_argument
  add('--job', required=True,
      help='JSON file with job description.')
  add('--output', required=True,
      help='Name of output directory to create.')
  add('--repeat', default=1, type=int,
      help='How many times to run the job')
  add('--cache-op',
      choices=['clear', 'save', 'push', 'reload'],
      default='clear',
      help=('Configures cache operation to do before launching '
            'Chrome. (Default is clear).'))
  add('--wpr-archive', default=None, type=str,
      help="Web page replay archive to load job's urls from.")
  add('--wpr-record', default=False, action='store_true',
      help='Record web page replay archive.')
  add('--disable-wpr-script-injection', default=False, action='store_true',
      help=('Disable WPR default script injection such as '
            "overriding javascript's Math.random() and Date() "
            'with deterministic implementations.'))
  add('--network-condition', default=None,
      choices=sorted(chrome_setup.NETWORK_CONDITIONS.keys()),
      help='Set a network profile.')
  add('--network-emulator', default='browser',
      choices=['browser', 'wpr'],
      help=('Set which component is emulating the network condition.'
            ' (Default to browser)'))
  return parser
297 | |
298 | |
299 def main(): | |
300 logging.basicConfig(level=logging.INFO) | |
301 devil_chromium.Initialize() | |
302 options.OPTIONS.ParseArgs([]) | |
mattcary
2016/02/19 15:40:17
As mentioned on other cl, integrate with OPTIONS s
gabadie
2016/02/22 10:05:00
Acknowledged.
| |
303 | |
304 args = _ArgumentParser().parse_args() | |
287 | 305 |
288 if not os.path.isdir(args.output): | 306 if not os.path.isdir(args.output): |
289 try: | 307 try: |
290 os.makedirs(args.output) | 308 os.makedirs(args.output) |
291 except OSError: | 309 except OSError: |
292 logging.error('Cannot create directory for results: %s' % args.output) | 310 logging.error('Cannot create directory for results: %s' % args.output) |
293 raise | 311 raise |
294 else: | 312 else: |
295 _CleanPreviousTraces(args.output) | 313 _CleanPreviousTraces(args.output) |
296 | 314 |
297 run_infos = { | 315 run_infos = { |
298 'cache-op': args.cache_op, | 316 'cache-op': args.cache_op, |
299 'job': args.job, | 317 'job': args.job, |
300 'urls': [] | 318 'urls': [] |
301 } | 319 } |
302 job_urls = _ReadUrlsFromJobDescription(args.job) | 320 job_urls = _ReadUrlsFromJobDescription(args.job) |
303 device = device_utils.DeviceUtils.HealthyDevices()[0] | 321 device = device_utils.DeviceUtils.HealthyDevices()[0] |
304 local_cache_archive_path = os.path.join(args.output, 'cache.zip') | 322 local_cache_archive_path = os.path.join(args.output, 'cache.zip') |
305 local_cache_directory_path = None | 323 local_cache_directory_path = None |
324 wpr_network_condition_name = None | |
325 browser_network_condition_name = None | |
326 if args.network_emulator == 'wpr': | |
327 wpr_network_condition_name = args.network_condition | |
328 elif args.network_emulator == 'browser': | |
329 browser_network_condition_name = args.network_condition | |
330 else: | |
331 assert False | |
306 | 332 |
307 if args.cache_op == 'push': | 333 if args.cache_op == 'push': |
308 assert os.path.isfile(local_cache_archive_path) | 334 assert os.path.isfile(local_cache_archive_path) |
309 local_cache_directory_path = tempfile.mkdtemp(suffix='.cache') | 335 local_cache_directory_path = tempfile.mkdtemp(suffix='.cache') |
310 _UnzipDirectoryContent(local_cache_archive_path, local_cache_directory_path) | 336 _UnzipDirectoryContent(local_cache_archive_path, local_cache_directory_path) |
311 | 337 |
312 with device_setup.WprHost(device, args.wpr_archive, args.wpr_record, | 338 with device_setup.WprHost(device, args.wpr_archive, |
313 args.disable_wpr_script_injection) as additional_flags: | 339 record=args.wpr_record, |
340 network_condition_name=wpr_network_condition_name, | |
341 disable_script_injection=args.disable_wpr_script_injection | |
342 ) as additional_flags: | |
314 def _RunNavigation(url, clear_cache, trace_id): | 343 def _RunNavigation(url, clear_cache, trace_id): |
315 with device_setup.DeviceConnection( | 344 with device_setup.DeviceConnection( |
316 device=device, | 345 device=device, |
mattcary
2016/02/19 15:40:17
This would be the better place to put the timeout
gabadie
2016/02/22 10:05:00
Please see the other comment about this.
| |
317 additional_flags=additional_flags) as connection: | 346 additional_flags=additional_flags) as connection: |
347 additional_metadata = {} | |
348 if browser_network_condition_name: | |
349 additional_metadata = chrome_setup.SetUpEmulationAndReturnMetadata( | |
350 connection=connection, | |
351 emulated_device_name=None, | |
352 emulated_network_name=browser_network_condition_name) | |
318 loading_trace = trace_recorder.MonitorUrl( | 353 loading_trace = trace_recorder.MonitorUrl( |
319 connection, url, | 354 connection, url, |
320 clear_cache=clear_cache, | 355 clear_cache=clear_cache, |
321 categories=pull_sandwich_metrics.CATEGORIES) | 356 categories=pull_sandwich_metrics.CATEGORIES, |
357 timeout=_DEVTOOLS_TIMEOUT) | |
358 loading_trace.metadata.update(additional_metadata) | |
322 if trace_id != None: | 359 if trace_id != None: |
323 loading_trace_path = os.path.join( | 360 loading_trace_path = os.path.join( |
324 args.output, str(trace_id), 'trace.json') | 361 args.output, str(trace_id), 'trace.json') |
325 os.makedirs(os.path.dirname(loading_trace_path)) | 362 os.makedirs(os.path.dirname(loading_trace_path)) |
326 loading_trace.SaveToJsonFile(loading_trace_path) | 363 loading_trace.SaveToJsonFile(loading_trace_path) |
327 | 364 |
328 for _ in xrange(args.repeat): | 365 for _ in xrange(args.repeat): |
329 for url in job_urls: | 366 for url in job_urls: |
330 clear_cache = False | 367 clear_cache = False |
331 if args.cache_op == 'clear': | 368 if args.cache_op == 'clear': |
(...skipping 22 matching lines...) Expand all Loading... | |
354 cache_directory_path = _PullBrowserCache(device) | 391 cache_directory_path = _PullBrowserCache(device) |
355 _ZipDirectoryContent(cache_directory_path, local_cache_archive_path) | 392 _ZipDirectoryContent(cache_directory_path, local_cache_archive_path) |
356 shutil.rmtree(cache_directory_path) | 393 shutil.rmtree(cache_directory_path) |
357 | 394 |
358 with open(os.path.join(args.output, 'run_infos.json'), 'w') as file_output: | 395 with open(os.path.join(args.output, 'run_infos.json'), 'w') as file_output: |
359 json.dump(run_infos, file_output, indent=2) | 396 json.dump(run_infos, file_output, indent=2) |
360 | 397 |
361 | 398 |
# Script entry point: propagate main()'s return value as the process exit code.
if __name__ == '__main__':
  sys.exit(main())
OLD | NEW |