| OLD | NEW |
| 1 #! /usr/bin/env python | 1 #! /usr/bin/env python |
| 2 # Copyright 2016 The Chromium Authors. All rights reserved. | 2 # Copyright 2016 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 """Instructs Chrome to load series of web pages and reports results. | 6 """Instructs Chrome to load series of web pages and reports results. |
| 7 | 7 |
| 8 When running, Chrome is sandwiched between preprocessed disk caches and | 8 When running, Chrome is sandwiched between preprocessed disk caches and |
| 9 WebPageReplay serving all connections. | 9 WebPageReplay serving all connections. |
| 10 | 10 |
| (...skipping 17 matching lines...) |
| 28 from devil.android import device_utils | 28 from devil.android import device_utils |
| 29 | 29 |
| 30 sys.path.append(os.path.join(_SRC_DIR, 'build', 'android')) | 30 sys.path.append(os.path.join(_SRC_DIR, 'build', 'android')) |
| 31 from pylib import constants | 31 from pylib import constants |
| 32 import devil_chromium | 32 import devil_chromium |
| 33 | 33 |
| 34 import chrome_cache | 34 import chrome_cache |
| 35 import chrome_setup | 35 import chrome_setup |
| 36 import device_setup | 36 import device_setup |
| 37 import devtools_monitor | 37 import devtools_monitor |
| 38 import frame_load_lens |
| 39 import loading_trace |
| 38 import options | 40 import options |
| 39 import page_track | 41 import page_track |
| 40 import pull_sandwich_metrics | 42 import pull_sandwich_metrics |
| 43 import request_dependencies_lens |
| 41 import trace_recorder | 44 import trace_recorder |
| 42 import tracing | 45 import tracing |
| 43 import wpr_backend | 46 import wpr_backend |
| 44 | 47 |
| 45 | 48 |
| 46 # Use options layer to access constants. | 49 # Use options layer to access constants. |
| 47 OPTIONS = options.OPTIONS | 50 OPTIONS = options.OPTIONS |
| 48 | 51 |
| 49 _JOB_SEARCH_PATH = 'sandwich_jobs' | 52 _JOB_SEARCH_PATH = 'sandwich_jobs' |
| 50 | 53 |
| (...skipping 148 matching lines...) |
| 199 def _RunNavigation(self, url, clear_cache, trace_id=None): | 202 def _RunNavigation(self, url, clear_cache, trace_id=None): |
| 200 with device_setup.DeviceConnection( | 203 with device_setup.DeviceConnection( |
| 201 device=self._device, | 204 device=self._device, |
| 202 additional_flags=self._chrome_additional_flags) as connection: | 205 additional_flags=self._chrome_additional_flags) as connection: |
| 203 additional_metadata = {} | 206 additional_metadata = {} |
| 204 if self._GetEmulatorNetworkCondition('browser'): | 207 if self._GetEmulatorNetworkCondition('browser'): |
| 205 additional_metadata = chrome_setup.SetUpEmulationAndReturnMetadata( | 208 additional_metadata = chrome_setup.SetUpEmulationAndReturnMetadata( |
| 206 connection=connection, | 209 connection=connection, |
| 207 emulated_device_name=None, | 210 emulated_device_name=None, |
| 208 emulated_network_name=self._GetEmulatorNetworkCondition('browser')) | 211 emulated_network_name=self._GetEmulatorNetworkCondition('browser')) |
| 209 loading_trace = trace_recorder.MonitorUrl( | 212 trace = trace_recorder.MonitorUrl( |
| 210 connection, url, | 213 connection, url, |
| 211 clear_cache=clear_cache, | 214 clear_cache=clear_cache, |
| 212 categories=pull_sandwich_metrics.CATEGORIES, | 215 categories=pull_sandwich_metrics.CATEGORIES, |
| 213 timeout=_DEVTOOLS_TIMEOUT) | 216 timeout=_DEVTOOLS_TIMEOUT) |
| 214 loading_trace.metadata.update(additional_metadata) | 217 trace.metadata.update(additional_metadata) |
| 215 if trace_id is not None and self.trace_output_directory: | 218 if trace_id is not None and self.trace_output_directory: |
| 216 loading_trace_path = os.path.join( | 219 trace_path = os.path.join( |
| 217 self.trace_output_directory, str(trace_id), 'trace.json') | 220 self.trace_output_directory, str(trace_id), 'trace.json') |
| 218 os.makedirs(os.path.dirname(loading_trace_path)) | 221 os.makedirs(os.path.dirname(trace_path)) |
| 219 loading_trace.ToJsonFile(loading_trace_path) | 222 trace.ToJsonFile(trace_path) |
| 220 | 223 |
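Note: the rename from the local `loading_trace` to `trace` in this hunk is more than cosmetic; the new code imports the `loading_trace` module (new line 39), which a same-named local would shadow inside this method. A minimal, self-contained sketch of that shadowing hazard, using a stand-in module in place of the real one:

    import types

    # Stand-in for the imported 'loading_trace' module.
    loading_trace = types.ModuleType('loading_trace')

    def run_navigation():
      loading_trace = 'recorded trace'  # local rebinding shadows the module
      # From here on, loading_trace.LoadingTrace would raise AttributeError.
      return loading_trace

    print(run_navigation())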
| 221 def _RunUrl(self, url, trace_id=0): | 224 def _RunUrl(self, url, trace_id=0): |
| 222 clear_cache = False | 225 clear_cache = False |
| 223 if self.cache_operation == 'clear': | 226 if self.cache_operation == 'clear': |
| 224 clear_cache = True | 227 clear_cache = True |
| 225 elif self.cache_operation == 'push': | 228 elif self.cache_operation == 'push': |
| 226 self._device.KillAll(OPTIONS.chrome_package_name, quiet=True) | 229 self._device.KillAll(OPTIONS.chrome_package_name, quiet=True) |
| 227 chrome_cache.PushBrowserCache(self._device, | 230 chrome_cache.PushBrowserCache(self._device, |
| 228 self._local_cache_directory_path) | 231 self._local_cache_directory_path) |
| 229 elif self.cache_operation == 'reload': | 232 elif self.cache_operation == 'reload': |
| (...skipping 132 matching lines...) |
| 362 create_cache_parser = subparsers.add_parser('extract-metrics', | 365 create_cache_parser = subparsers.add_parser('extract-metrics', |
| 363 help='Extracts metrics from a loading trace and saves as CSV.') | 366 help='Extracts metrics from a loading trace and saves as CSV.') |
| 364 create_cache_parser.add_argument('--trace-directory', required=True, | 367 create_cache_parser.add_argument('--trace-directory', required=True, |
| 365 dest='trace_output_directory', type=str, | 368 dest='trace_output_directory', type=str, |
| 366 help='Path of loading traces directory.') | 369 help='Path of loading traces directory.') |
| 367 create_cache_parser.add_argument('--out-metrics', default=None, type=str, | 370 create_cache_parser.add_argument('--out-metrics', default=None, type=str, |
| 368 dest='metrics_csv_path', | 371 dest='metrics_csv_path', |
| 369 help='Path where to save the metrics ' + | 372 help='Path where to save the metrics ' + |
| 370 'CSV.') | 373 'CSV.') |
| 371 | 374 |
| 375 # Filter cache subcommand. |
| 376 filter_cache_parser = subparsers.add_parser('filter-cache', |
| 377 help='Filters a cache archive, keeping only resources discoverable ' + |
| 378 'by the HTML document parser.') |
| 379 filter_cache_parser.add_argument('--cache-archive', type=str, required=True, |
| 380 dest='cache_archive_path', |
| 381 help='Path of the cache archive to filter.') |
| 382 filter_cache_parser.add_argument('--output', type=str, required=True, |
| 383 dest='output_cache_archive_path', |
| 384 help='Path of filtered cache archive.') |
| 385 filter_cache_parser.add_argument('loading_trace_paths', type=str, nargs='+', |
| 386 metavar='LOADING_TRACE', |
| 387 help='A list of loading traces generated by a sandwich run for a ' + |
| 388 'given url. These are used to build a resource dependency graph ' + |
| 389 'from which the resources discoverable by the HTML pre-scanner ' + |
| 390 'for that url are white-listed.') |
| 391 |
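For reference, a hypothetical invocation of the new subcommand, driving main() with an argv list the same way the __main__ block does at the bottom of the file. The module name and every path below are assumptions for illustration only:

    # Assumed module name 'sandwich'; all paths are illustrative.
    import sandwich

    sandwich.main([
        'filter-cache',
        '--cache-archive', '/tmp/original-cache.zip',
        '--output', '/tmp/filtered-cache.zip',
        '/tmp/traces/0/trace.json',  # LOADING_TRACE positional argument(s)
    ])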
| 372 return parser | 392 return parser |
| 373 | 393 |
| 374 | 394 |
| 375 def _RecordWprMain(args): | 395 def _RecordWprMain(args): |
| 376 sandwich_runner = SandwichRunner(args.job) | 396 sandwich_runner = SandwichRunner(args.job) |
| 377 sandwich_runner.PullConfigFromArgs(args) | 397 sandwich_runner.PullConfigFromArgs(args) |
| 378 sandwich_runner.wpr_record = True | 398 sandwich_runner.wpr_record = True |
| 379 sandwich_runner.PrintConfig() | 399 sandwich_runner.PrintConfig() |
| 380 if not os.path.isdir(os.path.dirname(args.wpr_archive_path)): | 400 if not os.path.isdir(os.path.dirname(args.wpr_archive_path)): |
| 381 os.makedirs(os.path.dirname(args.wpr_archive_path)) | 401 os.makedirs(os.path.dirname(args.wpr_archive_path)) |
| (...skipping 56 matching lines...) |
| 438 trace_metrics_list.sort(key=lambda e: e['id']) | 458 trace_metrics_list.sort(key=lambda e: e['id']) |
| 439 with open(args.metrics_csv_path, 'w') as csv_file: | 459 with open(args.metrics_csv_path, 'w') as csv_file: |
| 440 writer = csv.DictWriter(csv_file, | 460 writer = csv.DictWriter(csv_file, |
| 441 fieldnames=pull_sandwich_metrics.CSV_FIELD_NAMES) | 461 fieldnames=pull_sandwich_metrics.CSV_FIELD_NAMES) |
| 442 writer.writeheader() | 462 writer.writeheader() |
| 443 for trace_metrics in trace_metrics_list: | 463 for trace_metrics in trace_metrics_list: |
| 444 writer.writerow(trace_metrics) | 464 writer.writerow(trace_metrics) |
| 445 return 0 | 465 return 0 |
| 446 | 466 |
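The code above sorts the per-trace metric dicts by their 'id' key, then emits them with csv.DictWriter. A self-contained illustration of the same pattern, with hypothetical field names standing in for pull_sandwich_metrics.CSV_FIELD_NAMES:

    import csv

    # Hypothetical field names; the real list comes from
    # pull_sandwich_metrics.CSV_FIELD_NAMES.
    field_names = ['id', 'load_time_ms']
    rows = [{'id': 1, 'load_time_ms': 1200}, {'id': 0, 'load_time_ms': 980}]
    rows.sort(key=lambda e: e['id'])

    with open('/tmp/metrics.csv', 'w') as csv_file:
      writer = csv.DictWriter(csv_file, fieldnames=field_names)
      writer.writeheader()
      for row in rows:
        writer.writerow(row)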
| 447 | 467 |
| 468 def _FilterCacheMain(args): |
| 469 whitelisted_urls = set() |
| 470 for loading_trace_path in args.loading_trace_paths: |
| 471 logging.info('loading %s', loading_trace_path) |
| 472 trace = loading_trace.LoadingTrace.FromJsonFile(loading_trace_path) |
| 473 requests_lens = request_dependencies_lens.RequestDependencyLens(trace) |
| 474 deps = requests_lens.GetRequestDependencies() |
| 475 |
| 476 main_resource_request = deps[0][0] |
| 477 logging.info('white-listing %s', main_resource_request.url) |
| 478 whitelisted_urls.add(main_resource_request.url) |
| 479 for (first, second, reason) in deps: |
| 480 # Ignore non-http(s) requests (e.g. data: URIs). |
| 481 if not second.protocol.startswith('http'): |
| 482 continue |
| 483 if (first.request_id == main_resource_request.request_id and |
| 484 reason == 'parser' and second.url not in whitelisted_urls): |
| 485 logging.info('white-listing %s', second.url) |
| 486 whitelisted_urls.add(second.url) |
| 487 |
| 488 if not os.path.isdir(os.path.dirname(args.output_cache_archive_path)): |
| 489 os.makedirs(os.path.dirname(args.output_cache_archive_path)) |
| 490 chrome_cache.ApplyUrlWhitelistToCacheArchive(args.cache_archive_path, |
| 491 whitelisted_urls, |
| 492 args.output_cache_archive_path) |
| 493 return 0 |
| 494 |
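The loop in _FilterCacheMain white-lists a resource only when it is an http(s) request fetched by the parser directly from the main document's request. A compact restatement of that rule as a predicate, assuming the same (parent, child, reason) tuple shape the function unpacks from GetRequestDependencies():

    def _IsParserDiscoverable(main_request, parent, child, reason):
      """Sketch of the white-listing rule; mirrors _FilterCacheMain."""
      # Ignore non-http(s) children such as data: URIs.
      if not child.protocol.startswith('http'):
        return False
      return (parent.request_id == main_request.request_id and
              reason == 'parser')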
| 495 |
| 448 def main(command_line_args): | 496 def main(command_line_args): |
| 449 logging.basicConfig(level=logging.INFO) | 497 logging.basicConfig(level=logging.INFO) |
| 450 devil_chromium.Initialize() | 498 devil_chromium.Initialize() |
| 451 | 499 |
| 452 # Don't pass the real arguments yet. All we are interested in for now is | 500 # Don't pass the real arguments yet. All we are interested in for now is |
| 453 # accessing the default values of OPTIONS. | 501 # accessing the default values of OPTIONS. |
| 454 OPTIONS.ParseArgs([]) | 502 OPTIONS.ParseArgs([]) |
| 455 | 503 |
| 456 args = _ArgumentParser().parse_args(command_line_args) | 504 args = _ArgumentParser().parse_args(command_line_args) |
| 457 | 505 |
| 458 if args.subcommand == 'record-wpr': | 506 if args.subcommand == 'record-wpr': |
| 459 return _RecordWprMain(args) | 507 return _RecordWprMain(args) |
| 460 if args.subcommand == 'patch-wpr': | 508 if args.subcommand == 'patch-wpr': |
| 461 return _PatchWprMain(args) | 509 return _PatchWprMain(args) |
| 462 if args.subcommand == 'create-cache': | 510 if args.subcommand == 'create-cache': |
| 463 return _CreateCacheMain(args) | 511 return _CreateCacheMain(args) |
| 464 if args.subcommand == 'run': | 512 if args.subcommand == 'run': |
| 465 return _RunJobMain(args) | 513 return _RunJobMain(args) |
| 466 if args.subcommand == 'extract-metrics': | 514 if args.subcommand == 'extract-metrics': |
| 467 return _ExtractMetricsMain(args) | 515 return _ExtractMetricsMain(args) |
| 516 if args.subcommand == 'filter-cache': |
| 517 return _FilterCacheMain(args) |
| 468 assert False | 518 assert False |
| 469 | 519 |
| 470 | 520 |
| 471 if __name__ == '__main__': | 521 if __name__ == '__main__': |
| 472 sys.exit(main(sys.argv[1:])) | 522 sys.exit(main(sys.argv[1:])) |
| OLD | NEW |