Chromium Code Reviews

Unified Diff: tools/android/loading/sandwich.py

Issue 1737103002: sandwich: Implements filter-cache sub-command. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@i10
Patch Set: Created 4 years, 10 months ago
Index: tools/android/loading/sandwich.py
diff --git a/tools/android/loading/sandwich.py b/tools/android/loading/sandwich.py
index a9b3d1ad11f03f829ff50ab06f9c63b33983f368..c7878f038043edcacd7f5a60570e450b4bc94784 100755
--- a/tools/android/loading/sandwich.py
+++ b/tools/android/loading/sandwich.py
@@ -35,9 +35,12 @@ import chrome_cache
import chrome_setup
import device_setup
import devtools_monitor
+import frame_load_lens
+import loading_trace
import options
import page_track
import pull_sandwich_metrics
+import request_dependencies_lens
import trace_recorder
import tracing
import wpr_backend
@@ -203,17 +206,17 @@ class SandwichRunner(object):
connection=connection,
emulated_device_name=None,
emulated_network_name=self._GetEmulatorNetworkCondition('browser'))
- loading_trace = trace_recorder.MonitorUrl(
+ trace = trace_recorder.MonitorUrl(
connection, url,
clear_cache=clear_cache,
categories=pull_sandwich_metrics.CATEGORIES,
timeout=_DEVTOOLS_TIMEOUT)
- loading_trace.metadata.update(additional_metadata)
+ trace.metadata.update(additional_metadata)
if trace_id != None and self.trace_output_directory:
- loading_trace_path = os.path.join(
+ trace_path = os.path.join(
self.trace_output_directory, str(trace_id), 'trace.json')
- os.makedirs(os.path.dirname(loading_trace_path))
- loading_trace.ToJsonFile(loading_trace_path)
+ os.makedirs(os.path.dirname(trace_path))
+ trace.ToJsonFile(trace_path)
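
Illustration (not part of the patch): with the code above, each traced run is written to <trace_output_directory>/<trace_id>/trace.json, and the filter-cache sub-command added further down reloads those files. A minimal sketch of that round trip, assuming a hypothetical directory used as SandwichRunner.trace_output_directory:

import glob
import logging
import os

import loading_trace  # tools/android/loading/loading_trace.py

trace_dir = '/tmp/sandwich-traces'  # hypothetical trace output directory
for path in sorted(glob.glob(os.path.join(trace_dir, '*', 'trace.json'))):
  # Mirrors the ToJsonFile()/FromJsonFile() pair used by this patch.
  trace = loading_trace.LoadingTrace.FromJsonFile(path)
  logging.info('loaded %s (metadata keys: %s)', path, sorted(trace.metadata))
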
def _RunUrl(self, url, trace_id=0):
clear_cache = False
@@ -367,6 +370,21 @@ def _ArgumentParser():
help='Path where to save the metrics\'s '+
'CSV.')
+ # Filter cache subcommand.
+ filter_cache_parser = subparsers.add_parser('filter-cache',
+ help='Cache filtering that keeps only resources discoverable by the HTML'+
+ ' document parser.')
+ filter_cache_parser.add_argument('--cache-archive', type=str, required=True,
+ dest='cache_archive_path',
+ help='Path of the cache archive to filter.')
+ filter_cache_parser.add_argument('--output', type=str, required=True,
+ dest='output_cache_archive_path',
+ help='Path of filtered cache archive.')
+ filter_cache_parser.add_argument('loading_trace_paths', type=str, nargs='+',
pasko 2016/02/26 17:10:06 underscores -> dashes, please
gabadie 2016/03/01 10:40:48 Nope. Because non optional argument. I would have
pasko 2016/03/01 17:55:53 ah, I see, thanks, maybe move the definition of th
+ metavar='LOADING_TRACE',
+ help='A loading trace path to generate the' +
pasko 2016/02/26 17:10:06 please document more details on how they are gener
gabadie 2016/03/01 10:40:48 Done.
pasko 2016/03/01 17:55:53 No, I was not mixing them this time. We can name t
+ ' urls white-list from.')
+
return parser
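
Illustration (not part of the patch): because of the dest= overrides above, the parsed namespace exposes cache_archive_path, output_cache_archive_path and loading_trace_paths rather than the flag names. A self-contained sketch that re-creates just the filter-cache sub-parser to show this, with hypothetical paths:

import argparse

parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(dest='subcommand')
filter_cache_parser = subparsers.add_parser('filter-cache')
filter_cache_parser.add_argument('--cache-archive', required=True,
                                 dest='cache_archive_path')
filter_cache_parser.add_argument('--output', required=True,
                                 dest='output_cache_archive_path')
filter_cache_parser.add_argument('loading_trace_paths', nargs='+',
                                 metavar='LOADING_TRACE')

# Equivalent command line:
#   sandwich.py filter-cache --cache-archive /tmp/cache.zip \
#       --output /tmp/filtered-cache.zip /tmp/sandwich-traces/0/trace.json
args = parser.parse_args([
    'filter-cache',
    '--cache-archive', '/tmp/cache.zip',
    '--output', '/tmp/filtered-cache.zip',
    '/tmp/sandwich-traces/0/trace.json'])
assert args.cache_archive_path == '/tmp/cache.zip'
assert args.output_cache_archive_path == '/tmp/filtered-cache.zip'
assert args.loading_trace_paths == ['/tmp/sandwich-traces/0/trace.json']
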
@@ -442,6 +460,34 @@ def _PullMetricsMain(args):
return 0
+def _FilterCacheMain(args):
+ whitelisted_urls = set()
+ for loading_trace_path in args.loading_trace_paths:
+ logging.info('loading %s' % loading_trace_path)
+ trace = loading_trace.LoadingTrace.FromJsonFile(loading_trace_path)
+ requests_lens = request_dependencies_lens.RequestDependencyLens(trace)
+ deps = requests_lens.GetRequestDependencies()
+
+ main_resource_request = deps[0][0]
+ logging.info('white-listing %s' % main_resource_request.url)
+ whitelisted_urls.add(main_resource_request.url)
+ for (first, second, reason) in deps:
+ # Ignore data protocols.
+ if not second.protocol.startswith('http'):
+ continue
+ if (first.request_id == main_resource_request.request_id and
+ reason == 'parser' and second.url not in whitelisted_urls):
+ logging.info('white-listing %s' % second.url)
+ whitelisted_urls.add(second.url)
+
+ if not os.path.isdir(os.path.dirname(args.output_cache_archive_path)):
+ os.makedirs(os.path.dirname(args.output_cache_archive_path))
+ chrome_cache.ApplyUrlWhitelistToCacheArchive(args.cache_archive_path,
+ whitelisted_urls,
+ args.output_cache_archive_path)
+ return 0
+
+
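
Illustration (not part of the patch): the rule above keeps the main resource plus every http(s) resource that the main resource requested with reason 'parser'. A self-contained sketch of that rule, using hypothetical stand-in objects instead of real RequestDependencyLens output:

import collections

FakeRequest = collections.namedtuple('FakeRequest',
                                     ['request_id', 'url', 'protocol'])

main_doc = FakeRequest('1', 'http://a.com/', 'http/1.1')
css      = FakeRequest('2', 'http://a.com/style.css', 'http/1.1')
xhr      = FakeRequest('3', 'http://a.com/api', 'http/1.1')
data_uri = FakeRequest('4', 'data:image/png;base64,abcd', 'data')

# Shaped like RequestDependencyLens.GetRequestDependencies():
# (initiating request, dependent request, reason).
deps = [(main_doc, css, 'parser'),       # kept: parser-discovered, http
        (main_doc, xhr, 'script'),       # dropped: not parser-discovered
        (main_doc, data_uri, 'parser')]  # dropped: not an http(s) resource

whitelisted_urls = {deps[0][0].url}
for first, second, reason in deps:
  if not second.protocol.startswith('http'):
    continue
  if first.request_id == deps[0][0].request_id and reason == 'parser':
    whitelisted_urls.add(second.url)

assert whitelisted_urls == set([main_doc.url, css.url])
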
def main(command_line_args):
logging.basicConfig(level=logging.INFO)
devil_chromium.Initialize()
@@ -462,6 +508,8 @@ def main(command_line_args):
return _RunJobMain(args)
if args.subcommand == 'pull-metrics':
return _PullMetricsMain(args)
+ if args.subcommand == 'filter-cache':
+ return _FilterCacheMain(args)
assert False