Chromium Code Reviews

Index: tools/android/loading/sandwich.py
diff --git a/tools/android/loading/sandwich.py b/tools/android/loading/sandwich.py
index a76c82b5b8e44c96dd57d47f4097b980a371ce0c..6ba06a5d9f1692c19dec7272fe6581a9fde01618 100755
--- a/tools/android/loading/sandwich.py
+++ b/tools/android/loading/sandwich.py
@@ -35,9 +35,12 @@ import chrome_cache
 import chrome_setup
 import device_setup
 import devtools_monitor
+import frame_load_lens
+import loading_trace
 import options
 import page_track
 import pull_sandwich_metrics
+import request_dependencies_lens
 import trace_recorder
 import tracing
 import wpr_backend
@@ -206,17 +209,17 @@ class SandwichRunner(object):
           connection=connection,
           emulated_device_name=None,
           emulated_network_name=self._GetEmulatorNetworkCondition('browser'))
-      loading_trace = trace_recorder.MonitorUrl(
+      trace = trace_recorder.MonitorUrl(
           connection, url,
           clear_cache=clear_cache,
           categories=pull_sandwich_metrics.CATEGORIES,
           timeout=_DEVTOOLS_TIMEOUT)
-      loading_trace.metadata.update(additional_metadata)
+      trace.metadata.update(additional_metadata)
       if trace_id != None and self.trace_output_directory:
-        loading_trace_path = os.path.join(
+        trace_path = os.path.join(
             self.trace_output_directory, str(trace_id), 'trace.json')
-        os.makedirs(os.path.dirname(loading_trace_path))
-        loading_trace.ToJsonFile(loading_trace_path)
+        os.makedirs(os.path.dirname(trace_path))
+        trace.ToJsonFile(trace_path)

   def _RunUrl(self, url, trace_id=0):
     clear_cache = False
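A note on the rename above: the CL adds "import loading_trace" at module
scope, so keeping a local variable named loading_trace would shadow the module
for the rest of the method. A minimal standalone sketch of the hazard, using
the stdlib json module as an analogue (nothing below is Chromium code):

    import json

    def broken(path):
      # Rebinding the module name makes 'json' local to this scope...
      json = open(path).read()
      # ...so this raises AttributeError: 'str' object has no attribute 'loads'.
      return json.loads(json)

    def fixed(path):
      text = open(path).read()  # distinct name, as in the rename to 'trace'
      return json.loads(text)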
@@ -369,6 +372,22 @@ def _ArgumentParser():
                                      help='Path where to save the metrics\'s '+
                                           'CSV.')

+  # Filter cache subcommand.
+  filter_cache_parser = subparsers.add_parser('filter-cache',
+      help='Cache filtering that keeps only resources discoverable by the HTML'+
+           ' document parser.')
+  filter_cache_parser.add_argument('--cache-archive', type=str, required=True,
+                                   dest='cache_archive_path',
+                                   help='Path of the cache archive to filter.')
+  filter_cache_parser.add_argument('--output', type=str, required=True,
+                                   dest='output_cache_archive_path',
+                                   help='Path of filtered cache archive.')
+  filter_cache_parser.add_argument('loading_trace_paths', type=str, nargs='+',
pasko
2016/03/01 16:01:11
nit: loading-trace-paths for consistency

gabadie
2016/03/01 16:40:00
This change would be pointless because this is a positional argument.

pasko
2016/03/01 17:55:53
Yes, we discussed it in another review. Good to go.
+                                   metavar='LOADING_TRACE',
+      help='A loading trace path generated by a sandwich run for a given url.' +
pasko
2016/03/01 16:01:11
I would say: "A list of extended traces generated

gabadie
2016/03/01 16:40:00
Done.
+           ' This is used to have a resource dependency graph to white-list ' +
+           'the ones discoverable by the HTML pre-scanner for that given url.')
+
   return parser
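For reference, a sketch of how the new flags land on args through the dest=
overrides (paths invented; assumes sandwich.py's _ArgumentParser is in scope):

    parser = _ArgumentParser()
    args = parser.parse_args([
        'filter-cache',
        '--cache-archive', '/tmp/cache.zip',
        '--output', '/tmp/cache.filtered.zip',
        '/tmp/run_0/trace.json', '/tmp/run_1/trace.json'])
    assert args.subcommand == 'filter-cache'
    assert args.cache_archive_path == '/tmp/cache.zip'
    assert args.output_cache_archive_path == '/tmp/cache.filtered.zip'
    assert args.loading_trace_paths == [
        '/tmp/run_0/trace.json', '/tmp/run_1/trace.json']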
@@ -445,6 +464,34 @@ def _ExtractMetricsMain(args):
   return 0


+def _FilterCacheMain(args):
+  whitelisted_urls = set()
+  for loading_trace_path in args.loading_trace_paths:
+    logging.info('loading %s' % loading_trace_path)
+    trace = loading_trace.LoadingTrace.FromJsonFile(loading_trace_path)
+    requests_lens = request_dependencies_lens.RequestDependencyLens(trace)
+    deps = requests_lens.GetRequestDependencies()
+
+    main_resource_request = deps[0][0]
+    logging.info('white-listing %s' % main_resource_request.url)
+    whitelisted_urls.add(main_resource_request.url)
+    for (first, second, reason) in deps:
+      # Ignore data protocols.
+      if not second.protocol.startswith('http'):
+        continue
+      if (first.request_id == main_resource_request.request_id and
+          reason == 'parser' and second.url not in whitelisted_urls):
+        logging.info('white-listing %s' % second.url)
+        whitelisted_urls.add(second.url)
+
+  if not os.path.isdir(os.path.dirname(args.output_cache_archive_path)):
+    os.makedirs(os.path.dirname(args.output_cache_archive_path))
+  chrome_cache.ApplyUrlWhitelistToCacheArchive(args.cache_archive_path,
+                                               whitelisted_urls,
+                                               args.output_cache_archive_path)
+  return 0
+
+
 def main(command_line_args):
   logging.basicConfig(level=logging.INFO)
   devil_chromium.Initialize()
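The heart of _FilterCacheMain is the whitelist construction: keep the main
resource plus every http(s) resource the main document discovered through the
parser. A self-contained sketch with fake data (the namedtuple stands in for
the real request objects, and the (first, second, reason) tuple shape mirrors
GetRequestDependencies() as used above):

    import collections

    # Only the fields _FilterCacheMain reads are modeled here.
    Request = collections.namedtuple('Request', 'request_id url protocol')

    main_rq = Request('1', 'https://a.test/', 'https')
    css = Request('2', 'https://a.test/style.css', 'https')
    data_uri = Request('3', 'data:image/png;base64,xyz', 'data')
    xhr = Request('4', 'https://a.test/api', 'https')

    deps = [
        (main_rq, css, 'parser'),       # parser-discovered -> white-listed
        (main_rq, data_uri, 'parser'),  # non-http protocol -> skipped
        (main_rq, xhr, 'script'),       # script-initiated -> skipped
    ]

    whitelisted_urls = {deps[0][0].url}  # the main resource, as above
    for first, second, reason in deps:
      if not second.protocol.startswith('http'):
        continue  # ignore data:/blob: requests
      if first.request_id == deps[0][0].request_id and reason == 'parser':
        whitelisted_urls.add(second.url)

    print(sorted(whitelisted_urls))
    # ['https://a.test/', 'https://a.test/style.css']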
@@ -465,6 +512,8 @@ def main(command_line_args):
   if args.subcommand == 'run-job':
     return _RunJobMain(args)
   if args.subcommand == 'extract-metrics':
     return _ExtractMetricsMain(args)
+  if args.subcommand == 'filter-cache':
+    return _FilterCacheMain(args)
   assert False
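End to end, the subcommand is reached through main()'s dispatch. A hedged
driver example (the module name and all paths are assumptions, not from the
CL):

    import sys
    import sandwich  # assumes tools/android/loading is on sys.path

    sys.exit(sandwich.main([
        'filter-cache',
        '--cache-archive', '/tmp/original_cache.zip',
        '--output', '/tmp/parser_discoverable_cache.zip',
        '/tmp/trace_0/trace.json']))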