Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(6)

Side by Side Diff: tools/android/loading/pull_sandwich_metrics.py

Issue 1726403005: sandwich: Makes pull_sandwich_metrics.py a sub-command. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@i09
Patch Set: Created 4 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « no previous file | tools/android/loading/sandwich.py » ('j') | tools/android/loading/sandwich.py » ('J')
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show/Hide Comments ('s')
OLDNEW
1 #! /usr/bin/env python
2 # Copyright 2016 The Chromium Authors. All rights reserved. 1 # Copyright 2016 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be 2 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file. 3 # found in the LICENSE file.
5 4
6 """Pull a sandwich run's output directory's metrics from traces into a CSV. 5 """Pull a sandwich run's output directory's metrics from traces into a CSV.
7 6
8 python pull_sandwich_metrics.py -h 7 python pull_sandwich_metrics.py -h
9 """ 8 """
10 9
11 import argparse 10 import argparse
Benoit L 2016/02/25 16:02:12 unnecessary import.
gabadie 2016/02/25 16:55:06 My bad. Done.
12 import csv
13 import json 11 import json
14 import logging 12 import logging
15 import os 13 import os
16 import sys 14 import sys
17 15
18 import loading_trace as loading_trace_module 16 import loading_trace as loading_trace_module
19 import tracing 17 import tracing
20 18
21 19
22 CATEGORIES = ['blink.user_timing', 'disabled-by-default-memory-infra'] 20 CATEGORIES = ['blink.user_timing', 'disabled-by-default-memory-infra']
23 21
24 _CSV_FIELD_NAMES = [ 22 CSV_FIELD_NAMES = [
25 'id', 23 'id',
26 'url', 24 'url',
27 'total_load', 25 'total_load',
28 'onload', 26 'onload',
29 'browser_malloc_avg', 27 'browser_malloc_avg',
30 'browser_malloc_max'] 28 'browser_malloc_max']
31 29
32 _TRACKED_EVENT_NAMES = set(['requestStart', 'loadEventStart', 'loadEventEnd']) 30 _TRACKED_EVENT_NAMES = set(['requestStart', 'loadEventStart', 'loadEventEnd'])
33 31
34 32
(...skipping 72 matching lines...) Expand 10 before | Expand all | Expand 10 after
107 return tracked_events 105 return tracked_events
108 106
109 107
def _PullMetricsFromLoadingTrace(loading_trace):
  """Pulls all the metrics from a given trace.

  Args:
    loading_trace: loading_trace_module.LoadingTrace.

  Returns:
    Dictionary with all CSV_FIELD_NAMES's fields set, except 'id' and 'url'
    (those are filled in by the caller).
  """
  browser_dump_events = _GetBrowserDumpEvents(loading_trace.tracing_track)
  web_page_tracked_events = _GetWebPageTrackedEvents(
      loading_trace.tracing_track)

  browser_malloc_sum = 0
  browser_malloc_max = 0
  for dump_event in browser_dump_events:
    attr = dump_event.args['dumps']['allocators']['malloc']['attrs']['size']
    assert attr['units'] == 'bytes'
    # Memory-infra dump sizes are encoded as hexadecimal strings.
    size = int(attr['value'], 16)
    browser_malloc_sum += size
    browser_malloc_max = max(browser_malloc_max, size)

  # Guard against traces that contain no browser memory dumps to avoid a
  # ZeroDivisionError; report 0 for the average in that case (the max is
  # already 0).
  if browser_dump_events:
    browser_malloc_avg = browser_malloc_sum / float(len(browser_dump_events))
  else:
    browser_malloc_avg = 0

  return {
      'total_load': (web_page_tracked_events['loadEventEnd'].start_msec -
                     web_page_tracked_events['requestStart'].start_msec),
      'onload': (web_page_tracked_events['loadEventEnd'].start_msec -
                 web_page_tracked_events['loadEventStart'].start_msec),
      'browser_malloc_avg': browser_malloc_avg,
      'browser_malloc_max': browser_malloc_max
  }
140 138
141 139
def PullMetricsFromOutputDirectory(output_directory_path):
  """Pulls all the metrics from all the traces of a sandwich run directory.

  Args:
    output_directory_path: The sandwich run's output directory to pull the
      metrics from.

  Returns:
    List of dictionaries with all CSV_FIELD_NAMES's fields set.
  """
  assert os.path.isdir(output_directory_path)
  with open(os.path.join(output_directory_path, 'run_infos.json')) as f:
    run_infos = json.load(f)
  assert run_infos
  metrics = []
  for node_name in os.listdir(output_directory_path):
    if not os.path.isdir(os.path.join(output_directory_path, node_name)):
      continue
    # Per-page sub-directories are named after the page's integer id; skip
    # any other directory.
    try:
      page_id = int(node_name)
    except ValueError:
      continue
    trace_path = os.path.join(output_directory_path, node_name, 'trace.json')
    if not os.path.isfile(trace_path):
      continue
    # Lazy %-style arguments: the message is only formatted if INFO is
    # enabled (the original used eager % interpolation).
    logging.info('processing \'%s\'', trace_path)
    loading_trace = loading_trace_module.LoadingTrace.FromJsonFile(trace_path)
    trace_metrics = _PullMetricsFromLoadingTrace(loading_trace)
    trace_metrics['id'] = page_id
    trace_metrics['url'] = run_infos['urls'][page_id]
    metrics.append(trace_metrics)
  assert len(metrics) > 0, ('Looks like \'{}\' was not a sandwich ' +
                            'run directory.').format(output_directory_path)
  return metrics
177
178
def main():
  """Command-line entry point: pulls run metrics into trace_analysis.csv."""
  logging.basicConfig(level=logging.INFO)

  argument_parser = argparse.ArgumentParser()
  argument_parser.add_argument('output', type=str,
      help='Output directory of run_sandwich.py command.')
  command_line_args = argument_parser.parse_args()

  # Gather one metrics row per page trace, ordered by page id.
  metrics_rows = sorted(
      _PullMetricsFromOutputDirectory(command_line_args.output),
      key=lambda row: row['id'])
  csv_file_path = os.path.join(command_line_args.output, 'trace_analysis.csv')
  with open(csv_file_path, 'w') as output_file:
    csv_writer = csv.DictWriter(output_file, fieldnames=_CSV_FIELD_NAMES)
    csv_writer.writeheader()
    for row in metrics_rows:
      csv_writer.writerow(row)
  return 0


if __name__ == '__main__':
  sys.exit(main())
OLDNEW
« no previous file with comments | « no previous file | tools/android/loading/sandwich.py » ('j') | tools/android/loading/sandwich.py » ('J')

Powered by Google App Engine
This is Rietveld 408576698