Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(368)

Side by Side Diff: tools/callstats.py

Issue 2040823003: [tools] Update callstats.[py|html] (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 4 years, 6 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « tools/callstats.html ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # Copyright 2016 the V8 project authors. All rights reserved. 2 # Copyright 2016 the V8 project authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be 3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file. 4 # found in the LICENSE file.
5 ''' 5 '''
6 Usage: runtime-call-stats.py [-h] <command> ... 6 Usage: runtime-call-stats.py [-h] <command> ...
7 7
8 Optional arguments: 8 Optional arguments:
9 -h, --help show this help message and exit 9 -h, --help show this help message and exit
10 10
11 Commands: 11 Commands:
12 run run chrome with --runtime-call-stats and generate logs 12 run run chrome with --runtime-call-stats and generate logs
13 stats process logs and print statistics 13 stats process logs and print statistics
14 json process logs from several versions and generate JSON 14 json process logs from several versions and generate JSON
15 help help information 15 help help information
16 16
17 For each command, you can try ./runtime-call-stats.py help command. 17 For each command, you can try ./runtime-call-stats.py help command.
18 ''' 18 '''
19 19
20 import argparse 20 import argparse
21 import json 21 import json
22 import os 22 import os
23 import re 23 import re
24 import shutil 24 import shutil
25 import subprocess 25 import subprocess
26 import sys 26 import sys
27 import tempfile 27 import tempfile
28 import operator
28 29
29 import numpy 30 import numpy
30 import scipy 31 import scipy
31 import scipy.stats 32 import scipy.stats
32 from math import sqrt 33 from math import sqrt
33 34
34 35
35 # Run benchmarks. 36 # Run benchmarks.
36 37
def print_command(cmd_args):
  """Echo a command line to stdout, shell-quoting arguments for display.

  A `--flag=value` argument gets its value quoted when the value contains
  a space or starts with a dash; any other argument containing a space is
  quoted as a whole.
  """
  def quote_arg(arg):
    flag_match = re.match(r'^--([^=]+)=(.*)$', arg)
    if flag_match and (' ' in flag_match.group(2) or
                       flag_match.group(2).startswith('-')):
      return "--{}='{}'".format(flag_match.group(1), flag_match.group(2))
    if ' ' in arg:
      return "'{}'".format(arg)
    return arg
  print(" ".join(quote_arg(arg) for arg in cmd_args))
46 47
47 48
def start_replay_server(args, sites):
  """Start a web-page-replay server for the given sites.

  Writes a temporary JS injection file, then launches args.replay_bin
  serving args.replay_wpr on ports offset by args.port_offset.

  Returns:
    dict with 'process' (the server's subprocess.Popen) and 'injection'
    (path of the temporary injection script), for stop_replay_server().
  """
  with tempfile.NamedTemporaryFile(prefix='callstats-inject-', suffix='.js',
                                   mode='wt', delete=False) as injection_file:
    injection = injection_file.name
    generate_injection(injection_file, sites, args.refresh)
  http_port = 4080 + args.port_offset
  https_port = 4443 + args.port_offset
  cmd_args = [
      args.replay_bin,
      "--port=%s" % http_port,
      "--ssl_port=%s" % https_port,
      "--no-dns_forwarding",
      "--use_closest_match",
      "--no-diff_unknown_requests",
      "--inject_scripts=deterministic.js,{}".format(injection),
      args.replay_wpr,
  ]
  print("=" * 80)
  print_command(cmd_args)
  # The replay server is chatty; silence both of its output streams.
  with open(os.devnull, 'w') as devnull:
    server_process = subprocess.Popen(cmd_args, stdout=devnull, stderr=devnull)
  print("RUNNING REPLAY SERVER: %s with PID=%s" % (args.replay_bin,
                                                   server_process.pid))
  print("=" * 80)
  return {'process': server_process, 'injection': injection}
70 73
71 74
def stop_replay_server(server):
  """Shut down the replay server started by start_replay_server().

  Args:
    server: dict with 'process' (subprocess.Popen of the server) and
        'injection' (path of the temporary injection script).
  """
  print("SHUTTING DOWN REPLAY SERVER %s" % server['process'].pid)
  server['process'].terminate()
  # Reap the child so it does not linger as a zombie until this script exits.
  server['process'].wait()
  os.remove(server['injection'])
76 79
77 80
def generate_injection(f, sites, refreshes=0):
  """Write the JS snippet that web-page-replay injects into every page.

  The script matches the current URL against `sites`, schedules a
  %GetAndResetRuntimeCallStats dump after a per-site timeout, and reloads
  the page up to `refreshes` times (tracked via sessionStorage).
  """
  prologue = """\
(function() {
  var s = window.sessionStorage.getItem("refreshCounter");
  var refreshTotal = """
  middle = """;
  var refreshCounter = s ? parseInt(s) : refreshTotal;
  var refreshId = refreshTotal - refreshCounter;
  if (refreshCounter > 0) {
    window.sessionStorage.setItem("refreshCounter", refreshCounter-1);
  }
  function match(url, item) {
    if ('regexp' in item) { return url.match(item.regexp) !== null };
    var url_wanted = item.url;
    /* Allow automatic redirections from http to https. */
    if (url_wanted.startsWith("http://") && url.startsWith("https://")) {
      url_wanted = "https://" + url_wanted.substr(7);
    }
    return url.startsWith(url_wanted);
  };
  function onLoad(url) {
    for (var item of sites) {
      if (!match(url, item)) continue;
      var timeout = 'timeline' in item ? 2000 * item.timeline
                  : 'timeout' in item ? 1000 * (item.timeout - 3)
                  : 10000;
      console.log("Setting time out of " + timeout + " for: " + url);
      window.setTimeout(function() {
        console.log("Time is out for: " + url);
        var msg = "STATS: (" + refreshId + ") " + url;
        %GetAndResetRuntimeCallStats(1, msg);
        if (refreshCounter > 0) {
          console.log(
              "Refresh counter is " + refreshCounter + ", refreshing: " + url);
          window.location.reload();
        }
      }, timeout);
      return;
    }
    console.log("Ignoring: " + url);
  };
  var sites =
    """
  epilogue = """;
  onLoad(window.location.href);
})();"""
  # Equivalent to Python 2's `print >> f, a, b, c`: the str() of each piece
  # joined by single spaces, followed by a newline.
  pieces = [prologue, str(refreshes), middle, json.dumps(sites), epilogue]
  f.write(" ".join(pieces) + "\n")
127 125
128 126
def run_site(site, domain, args, timeout=None):
  """Run chrome against one site, capturing runtime-call-stats output.

  Writes results to "<domain>.txt" (or "<domain>#<count>.txt" when
  args.repeat is set) and retries, with growing timeouts, until a
  non-empty stats file is produced or args.retries is exhausted.
  """
  print("=" * 80)
  print("RUNNING DOMAIN %s" % domain)
  print("=" * 80)
  result_template = "{domain}#{count}.txt" if args.repeat else "{domain}.txt"
  if timeout is None:
    timeout = args.timeout
  if args.replay_wpr:
    # A replayed run includes args.refresh page reloads, so scale up.
    timeout *= 1 + args.refresh
    timeout += 1
  retries_since_good_run = 0
  count = 0
  while count == 0 or args.repeat is not None and count < args.repeat:
    count += 1
    result = result_template.format(domain=domain, count=count)
    retries = 0
    while args.retries is None or retries < args.retries:
      retries += 1
      try:
        if args.user_data_dir:
          user_data_dir = args.user_data_dir
        else:
          user_data_dir = tempfile.mkdtemp(prefix="chr_")
        js_flags = "--runtime-call-stats"
        if args.replay_wpr:
          js_flags += " --allow-natives-syntax"
        if args.js_flags:
          js_flags += " " + args.js_flags
        chrome_flags = [
            "--no-default-browser-check",
            "--no-sandbox",
            "--disable-translate",
            "--js-flags={}".format(js_flags),
            "--no-first-run",
            "--user-data-dir={}".format(user_data_dir),
        ]
        if args.replay_wpr:
          # Route all traffic to the local replay server's offset ports.
          http_port = 4080 + args.port_offset
          https_port = 4443 + args.port_offset
          chrome_flags += [
              "--host-resolver-rules=MAP *:80 localhost:%s, " \
              "MAP *:443 localhost:%s, " \
              "EXCLUDE localhost" % (http_port, https_port),
              "--ignore-certificate-errors",
              "--disable-seccomp-sandbox",
              "--disable-web-security",
              "--reduce-security-for-testing",
              "--allow-insecure-localhost",
          ]
        else:
          chrome_flags += ["--single-process"]
        if args.chrome_flags:
          chrome_flags += args.chrome_flags.split()
        cmd_args = [
            "timeout", str(timeout),
            args.with_chrome
        ] + chrome_flags + [site]
        print("- " * 40)
        print_command(cmd_args)
        print("- " * 40)
        with open(result, "wt") as out:
          with open(args.log_stderr or os.devnull, 'at') as err:
            status = subprocess.call(cmd_args, stdout=out, stderr=err)
        # 124 means timeout killed chrome, 0 means the user was bored first!
        # If none of these two happened, then chrome apparently crashed, so
        # it must be called again.
        if status != 124 and status != 0:
          print("CHROME CRASHED, REPEATING RUN")
          continue
        # If the stats file is empty, chrome must be called again.
        if os.path.isfile(result) and os.path.getsize(result) > 0:
          if args.print_url:
            with open(result, "at") as out:
              out.write("\nURL: {}\n".format(site))
          retries_since_good_run = 0
          break
        # Grow the timeout exponentially (capped) before the next attempt.
        if retries_since_good_run < 6:
          timeout += 2 ** retries_since_good_run
        retries_since_good_run += 1
        print("EMPTY RESULT, REPEATING RUN ({})".format(retries_since_good_run))
      finally:
        if not args.user_data_dir:
          shutil.rmtree(user_data_dir)
204 212
205 213
def read_sites_file(args):
  """Load the list of benchmark sites from args.sites_file.

  The file may be a JSON array of site dicts ('url' plus optional
  'timeout'/'timeline') or, as a fallback, a plain text file with one URL
  per line (blank lines and '#' comments ignored).

  Returns:
    list of site dicts, each with a 'timeout' capped at args.timeout.
  Exits with an error via args.error() if the file cannot be read.
  """
  try:
    sites = []
    try:
      with open(args.sites_file, "rt") as f:
        for item in json.load(f):
          if 'timeout' not in item:
            # This is more-or-less arbitrary.
            item['timeout'] = int(1.5 * item['timeline'] + 7)
          item['timeout'] = min(item['timeout'], args.timeout)
          sites.append(item)
    except ValueError:
      # Not valid JSON: treat the file as a plain list of URLs.
      with open(args.sites_file, "rt") as f:
        for line in f:
          line = line.strip()
          if not line or line.startswith('#'):
            continue
          sites.append({'url': line, 'timeout': args.timeout})
    return sites
  except IOError as e:
    args.error("Cannot read from {}. {}.".format(args.sites_file, e.strerror))
    sys.exit(1)
227 235
228 236
229 def do_run(args): 237 def do_run(args):
230 # Determine the websites to benchmark. 238 # Determine the websites to benchmark.
231 if args.sites_file: 239 if args.sites_file:
232 sites = read_sites_file(args) 240 sites = read_sites_file(args)
233 else: 241 else:
234 sites = [{'url': site, 'timeout': args.timeout} for site in args.sites] 242 sites = [{'url': site, 'timeout': args.timeout} for site in args.sites]
235 # Disambiguate domains, if needed. 243 # Disambiguate domains, if needed.
236 L = [] 244 L = []
237 domains = {} 245 domains = {}
238 for item in sites: 246 for item in sites:
239 site = item['url'] 247 site = item['url']
240 m = re.match(r'^(https?://)?([^/]+)(/.*)?$', site) 248 domain = None
241 if not m: 249 if args.domain:
242 args.error("Invalid URL {}.".format(site)) 250 domain = args.domain
243 continue 251 elif 'domain' in item:
244 domain = m.group(2) 252 domain = item['domain']
253 else:
254 m = re.match(r'^(https?://)?([^/]+)(/.*)?$', site)
255 if not m:
256 args.error("Invalid URL {}.".format(site))
257 continue
258 domain = m.group(2)
245 entry = [site, domain, None, item['timeout']] 259 entry = [site, domain, None, item['timeout']]
246 if domain not in domains: 260 if domain not in domains:
247 domains[domain] = entry 261 domains[domain] = entry
248 else: 262 else:
249 if not isinstance(domains[domain], int): 263 if not isinstance(domains[domain], int):
250 domains[domain][2] = 1 264 domains[domain][2] = 1
251 domains[domain] = 1 265 domains[domain] = 1
252 domains[domain] += 1 266 domains[domain] += 1
253 entry[2] = domains[domain] 267 entry[2] = domains[domain]
254 L.append(entry) 268 L.append(entry)
(...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after
289 stddev = 0 303 stddev = 0
290 ci = { 'abs': 0, 'low': average, 'high': average } 304 ci = { 'abs': 0, 'low': average, 'high': average }
291 if abs(stddev) > 0.0001 and abs(average) > 0.0001: 305 if abs(stddev) > 0.0001 and abs(average) > 0.0001:
292 ci['perc'] = t_bounds[1] * stddev / sqrt(N) / average * 100 306 ci['perc'] = t_bounds[1] * stddev / sqrt(N) / average * 100
293 else: 307 else:
294 ci['perc'] = 0 308 ci['perc'] = 0
295 return { 'samples': N, 'average': average, 'median': median, 309 return { 'samples': N, 'average': average, 'median': median,
296 'stddev': stddev, 'min': low, 'max': high, 'ci': ci } 310 'stddev': stddev, 'min': low, 'max': high, 'ci': ci }
297 311
298 312
def read_stats(path, domain, args):
  """Parse one runtime-call-stats log file into the `domain` accumulator.

  `domain` maps each stats key to {'time_list': [...], 'count_list': [...]};
  this call appends one element per list (the file's summed totals). A
  synthetic 'Sum' key totals all keys except 'Total'. With args.aggregate,
  Group-* buckets are also accumulated; the first group whose regexp
  matches a key wins (Group-Runtime is the catch-all).
  """
  groups = []
  if args.aggregate:
    groups = [
        ('Group-IC', re.compile(".*IC.*")),
        ('Group-Optimize',
         re.compile("StackGuard|.*Optimize.*|.*Deoptimize.*|Recompile.*")),
        ('Group-Compile', re.compile("Compile.*")),
        ('Group-Parse', re.compile("Parse.*")),
        ('Group-Callback', re.compile("Callback$")),
        ('Group-API', re.compile("API.*")),
        ('Group-GC', re.compile("GC|AllocateInTargetSpace")),
        ('Group-JavaScript', re.compile("JS_Execution")),
        ('Group-Runtime', re.compile(".*"))]
  with open(path, "rt") as f:
    # Process the whole file, summing repeated entries per key.
    entries = {'Sum': {'time': 0, 'count': 0}}
    for group_name, _ in groups:
      entries[group_name] = {'time': 0, 'count': 0}
    for line in f:
      line = line.strip()
      # Discard blank lines, headers, footers and annotations.
      if not line:
        continue
      if line.startswith(("Runtime Function", "====", "----", "URL:",
                          "STATS:")):
        continue
      # A regular stats line: "<name> <time>ms <time%> <count> <count%>".
      fields = line.split()
      key = fields[0]
      time = float(fields[1].replace("ms", ""))
      count = int(fields[3])
      bucket = entries.setdefault(key, {'time': 0, 'count': 0})
      bucket['time'] += time
      bucket['count'] += count
      # Fold into the sum and groups, unless it's the "Total" line.
      if key != "Total":
        entries['Sum']['time'] += time
        entries['Sum']['count'] += count
        for group_name, regexp in groups:
          if regexp.match(key):
            entries[group_name]['time'] += time
            entries[group_name]['count'] += count
            break
    # Append this file's totals as single samples to the domain lists.
    for key, value in entries.items():
      lists = domain.setdefault(key, {'time_list': [], 'count_list': []})
      lists['time_list'].append(value['time'])
      lists['count_list'].append(value['count'])
329 363
330 364
331 def print_stats(S, args): 365 def print_stats(S, args):
332 # Sort by ascending/descending time average, then by ascending/descending 366 # Sort by ascending/descending time average, then by ascending/descending
333 # count average, then by ascending name. 367 # count average, then by ascending name.
334 def sort_asc_func(item): 368 def sort_asc_func(item):
335 return (item[1]['time_stat']['average'], 369 return (item[1]['time_stat']['average'],
336 item[1]['count_stat']['average'], 370 item[1]['count_stat']['average'],
337 item[0]) 371 item[0])
338 def sort_desc_func(item): 372 def sort_desc_func(item):
(...skipping 18 matching lines...) Expand all
357 conf = "{:0.1f}({:0.2f}%)".format(s['ci']['abs'], s['ci']['perc']) 391 conf = "{:0.1f}({:0.2f}%)".format(s['ci']['abs'], s['ci']['perc'])
358 return "{:8.1f}{} +/- {:15s}".format(s['average'], units, conf) 392 return "{:8.1f}{} +/- {:15s}".format(s['average'], units, conf)
359 print "{:>50s} {} {}".format( 393 print "{:>50s} {} {}".format(
360 key, 394 key,
361 stats(value['time_stat'], units="ms"), 395 stats(value['time_stat'], units="ms"),
362 stats(value['count_stat']) 396 stats(value['count_stat'])
363 ) 397 )
364 # Print and calculate partial sums, if necessary. 398 # Print and calculate partial sums, if necessary.
365 for i in range(low, high): 399 for i in range(low, high):
366 print_entry(*L[i]) 400 print_entry(*L[i])
367 if args.totals and args.limit != 0: 401 if args.totals and args.limit != 0 and not args.aggregate:
368 if i == low: 402 if i == low:
369 partial = { 'time_list': [0] * len(L[i][1]['time_list']), 403 partial = { 'time_list': [0] * len(L[i][1]['time_list']),
370 'count_list': [0] * len(L[i][1]['count_list']) } 404 'count_list': [0] * len(L[i][1]['count_list']) }
371 assert len(partial['time_list']) == len(L[i][1]['time_list']) 405 assert len(partial['time_list']) == len(L[i][1]['time_list'])
372 assert len(partial['count_list']) == len(L[i][1]['count_list']) 406 assert len(partial['count_list']) == len(L[i][1]['count_list'])
373 for j, v in enumerate(L[i][1]['time_list']): 407 for j, v in enumerate(L[i][1]['time_list']):
374 partial['time_list'][j] += v 408 partial['time_list'][j] += v
375 for j, v in enumerate(L[i][1]['count_list']): 409 for j, v in enumerate(L[i][1]['count_list']):
376 partial['count_list'][j] += v 410 partial['count_list'][j] += v
377 # Print totals, if necessary. 411 # Print totals, if necessary.
378 if args.totals: 412 if args.totals:
379 print '-' * 80 413 print '-' * 80
380 if args.limit != 0: 414 if args.limit != 0 and not args.aggregate:
381 partial['time_stat'] = statistics(partial['time_list']) 415 partial['time_stat'] = statistics(partial['time_list'])
382 partial['count_stat'] = statistics(partial['count_list']) 416 partial['count_stat'] = statistics(partial['count_list'])
383 print_entry("Partial", partial) 417 print_entry("Partial", partial)
384 print_entry("Sum", S["Sum"]) 418 print_entry("Sum", S["Sum"])
385 print_entry("Total", S["Total"]) 419 print_entry("Total", S["Total"])
386 420
387 421
def do_stats(args):
  """Aggregate statistics from args.logfiles per domain and print them."""
  domains = {}
  for path in args.logfiles:
    # Log files are named "<domain>.txt" or "<domain>#<count>.txt".
    base = os.path.basename(path)
    domain = re.match(r'^([^#]+)(#.*)?$', base).group(1)
    domain_data = domains.setdefault(domain, {})
    read_stats(path, domain_data, args)
  if args.aggregate:
    create_total_page_stats(domains, args)
  for i, domain in enumerate(sorted(domains)):
    if len(domains) > 1:
      if i > 0:
        print("")
      print("{}:".format(domain))
      print('=' * 80)
    domain_stats = domains[domain]
    for key in domain_stats:
      entry = domain_stats[key]
      entry['time_stat'] = statistics(entry['time_list'])
      entry['count_stat'] = statistics(entry['count_list'])
    print_stats(domain_stats, args)
444
445
# Create a Total page with all entries summed up.
def create_total_page_stats(domains, args):
  """Add a synthetic 'Total' page to `domains`, summing every metric
  element-wise across all pages."""
  total = {}

  def accumulate(target, key, source):
    # Element-wise add source[key] into target[key]; grow the target list
    # as needed since pages may have different numbers of samples.
    acc = target[key]
    for i, value in enumerate(source[key]):
      if i >= len(acc):
        acc.extend([0] * (i - len(acc) + 1))
      if value is not None:
        acc[i] += value

  # Sum up all the entries/metrics from all domains.
  for domain, entries in domains.items():
    for key, domain_stats in entries.items():
      if key not in total:
        total[key] = {'time_list': list(domain_stats['time_list']),
                      'count_list': list(domain_stats['count_list'])}
      else:
        accumulate(total[key], 'time_list', domain_stats)
        accumulate(total[key], 'count_list', domain_stats)
  # Register the summed-up metrics as a new "Total" page.
  domains['Total'] = total
406 468
407 469
408 # Generate JSON file. 470 # Generate JSON file.
409 471
def do_json(args):
  """Collect stats from per-version log directories and dump them as JSON.

  Each entry of args.logdirs is walked; every directory level becomes a
  version keyed by its basename, containing "<domain>[#<count>].txt" logs.
  The printed JSON maps version -> domain -> list of
  [name, time_avg, time_ci_abs, time_ci_perc,
   count_avg, count_ci_abs, count_ci_perc] entries.
  """
  versions = {}
  for path in args.logdirs:
    if not os.path.isdir(path):
      continue
    for root, dirs, files in os.walk(path):
      version = os.path.basename(root)
      version_data = versions.setdefault(version, {})
      for filename in files:
        if not filename.endswith(".txt"):
          continue
        domain = re.match(r'^([^#]+)(#.*)?\.txt$', filename).group(1)
        domain_data = version_data.setdefault(domain, {})
        read_stats(os.path.join(root, filename), domain_data, args)
  for version, domains in versions.items():
    if args.aggregate:
      create_total_page_stats(domains, args)
    for domain, entries in domains.items():
      stats = []
      for name, value in entries.items():
        # We don't want the calculated sum in the JSON file.
        if name == "Sum":
          continue
        entry = [name]
        for list_name in ['time_list', 'count_list']:
          s = statistics(entries[name][list_name])
          entry.append(round(s['average'], 1))
          entry.append(round(s['ci']['abs'], 1))
          entry.append(round(s['ci']['perc'], 2))
        stats.append(entry)
      domains[domain] = stats
  print(json.dumps(versions, separators=(',', ':')))
438 503
439 504
440 # Help. 505 # Help.
441 506
442 def do_help(parser, subparsers, args): 507 def do_help(parser, subparsers, args):
443 if args.help_cmd: 508 if args.help_cmd:
444 if args.help_cmd in subparsers: 509 if args.help_cmd in subparsers:
445 subparsers[args.help_cmd].print_help() 510 subparsers[args.help_cmd].print_help()
446 else: 511 else:
447 args.error("Unknown command '{}'".format(args.help_cmd)) 512 args.error("Unknown command '{}'".format(args.help_cmd))
(...skipping 17 matching lines...) Expand all
465 "run", help="run --help") 530 "run", help="run --help")
466 subparsers["run"].set_defaults( 531 subparsers["run"].set_defaults(
467 func=do_run, error=subparsers["run"].error) 532 func=do_run, error=subparsers["run"].error)
468 subparsers["run"].add_argument( 533 subparsers["run"].add_argument(
469 "--chrome-flags", type=str, default="", 534 "--chrome-flags", type=str, default="",
470 help="specify additional chrome flags") 535 help="specify additional chrome flags")
471 subparsers["run"].add_argument( 536 subparsers["run"].add_argument(
472 "--js-flags", type=str, default="", 537 "--js-flags", type=str, default="",
473 help="specify additional V8 flags") 538 help="specify additional V8 flags")
474 subparsers["run"].add_argument( 539 subparsers["run"].add_argument(
540 "--domain", type=str, default="",
541 help="specify the output file domain name")
542 subparsers["run"].add_argument(
475 "--no-url", dest="print_url", action="store_false", default=True, 543 "--no-url", dest="print_url", action="store_false", default=True,
476 help="do not include url in statistics file") 544 help="do not include url in statistics file")
477 subparsers["run"].add_argument( 545 subparsers["run"].add_argument(
478 "-n", "--repeat", type=int, metavar="<num>", 546 "-n", "--repeat", type=int, metavar="<num>",
479 help="specify iterations for each website (default: once)") 547 help="specify iterations for each website (default: once)")
480 subparsers["run"].add_argument( 548 subparsers["run"].add_argument(
481 "-k", "--refresh", type=int, metavar="<num>", default=0, 549 "-k", "--refresh", type=int, metavar="<num>", default=0,
482 help="specify refreshes for each iteration (default: 0)") 550 help="specify refreshes for each iteration (default: 0)")
483 subparsers["run"].add_argument( 551 subparsers["run"].add_argument(
484 "--replay-wpr", type=str, metavar="<path>", 552 "--replay-wpr", type=str, metavar="<path>",
485 help="use the specified web page replay (.wpr) archive") 553 help="use the specified web page replay (.wpr) archive")
486 subparsers["run"].add_argument( 554 subparsers["run"].add_argument(
487 "--replay-bin", type=str, metavar="<path>", 555 "--replay-bin", type=str, metavar="<path>",
488 help="specify the replay.py script typically located in " \ 556 help="specify the replay.py script typically located in " \
489 "$CHROMIUM/src/third_party/webpagereplay/replay.py") 557 "$CHROMIUM/src/third_party/webpagereplay/replay.py")
490 subparsers["run"].add_argument( 558 subparsers["run"].add_argument(
491 "-r", "--retries", type=int, metavar="<num>", 559 "-r", "--retries", type=int, metavar="<num>",
492 help="specify retries if website is down (default: forever)") 560 help="specify retries if website is down (default: forever)")
493 subparsers["run"].add_argument( 561 subparsers["run"].add_argument(
494 "-f", "--sites-file", type=str, metavar="<path>", 562 "-f", "--sites-file", type=str, metavar="<path>",
495 help="specify file containing benchmark websites") 563 help="specify file containing benchmark websites")
496 subparsers["run"].add_argument( 564 subparsers["run"].add_argument(
497 "-t", "--timeout", type=int, metavar="<seconds>", default=60, 565 "-t", "--timeout", type=int, metavar="<seconds>", default=60,
498 help="specify seconds before chrome is killed") 566 help="specify seconds before chrome is killed")
499 subparsers["run"].add_argument( 567 subparsers["run"].add_argument(
568 "-p", "--port-offset", type=int, metavar="<offset>", default=0,
569 help="specify the offset for the replay server's default ports")
570 subparsers["run"].add_argument(
500 "-u", "--user-data-dir", type=str, metavar="<path>", 571 "-u", "--user-data-dir", type=str, metavar="<path>",
501 help="specify user data dir (default is temporary)") 572 help="specify user data dir (default is temporary)")
502 subparsers["run"].add_argument( 573 subparsers["run"].add_argument(
503 "-c", "--with-chrome", type=str, metavar="<path>", 574 "-c", "--with-chrome", type=str, metavar="<path>",
504 default="/usr/bin/google-chrome", 575 default="/usr/bin/google-chrome",
505 help="specify chrome executable to use") 576 help="specify chrome executable to use")
506 subparsers["run"].add_argument( 577 subparsers["run"].add_argument(
578 "-l", "--log-stderr", type=str, metavar="<path>",
579 help="specify where chrome's stderr should go (default: /dev/null)")
580 subparsers["run"].add_argument(
507 "sites", type=str, metavar="<URL>", nargs="*", 581 "sites", type=str, metavar="<URL>", nargs="*",
508 help="specify benchmark website") 582 help="specify benchmark website")
509 # Command: stats. 583 # Command: stats.
510 subparsers["stats"] = subparser_adder.add_parser( 584 subparsers["stats"] = subparser_adder.add_parser(
511 "stats", help="stats --help") 585 "stats", help="stats --help")
512 subparsers["stats"].set_defaults( 586 subparsers["stats"].set_defaults(
513 func=do_stats, error=subparsers["stats"].error) 587 func=do_stats, error=subparsers["stats"].error)
514 subparsers["stats"].add_argument( 588 subparsers["stats"].add_argument(
515 "-l", "--limit", type=int, metavar="<num>", default=0, 589 "-l", "--limit", type=int, metavar="<num>", default=0,
516 help="limit how many items to print (default: none)") 590 help="limit how many items to print (default: none)")
517 subparsers["stats"].add_argument( 591 subparsers["stats"].add_argument(
518 "-s", "--sort", choices=["asc", "desc"], default="asc", 592 "-s", "--sort", choices=["asc", "desc"], default="asc",
519 help="specify sorting order (default: ascending)") 593 help="specify sorting order (default: ascending)")
520 subparsers["stats"].add_argument( 594 subparsers["stats"].add_argument(
521 "-n", "--no-total", dest="totals", action="store_false", default=True, 595 "-n", "--no-total", dest="totals", action="store_false", default=True,
522 help="do not print totals") 596 help="do not print totals")
523 subparsers["stats"].add_argument( 597 subparsers["stats"].add_argument(
524 "logfiles", type=str, metavar="<logfile>", nargs="*", 598 "logfiles", type=str, metavar="<logfile>", nargs="*",
525 help="specify log files to parse") 599 help="specify log files to parse")
600 subparsers["stats"].add_argument(
601 "--aggregate", dest="aggregate", action="store_true", default=False,
602 help="Create aggregated entries. Adds Group-* entries at the toplevel. " +
603 "Additionally creates a Total page with all entries.")
526 # Command: json. 604 # Command: json.
527 subparsers["json"] = subparser_adder.add_parser( 605 subparsers["json"] = subparser_adder.add_parser(
528 "json", help="json --help") 606 "json", help="json --help")
529 subparsers["json"].set_defaults( 607 subparsers["json"].set_defaults(
530 func=do_json, error=subparsers["json"].error) 608 func=do_json, error=subparsers["json"].error)
531 subparsers["json"].add_argument( 609 subparsers["json"].add_argument(
532 "logdirs", type=str, metavar="<logdir>", nargs="*", 610 "logdirs", type=str, metavar="<logdir>", nargs="*",
533 help="specify directories with log files to parse") 611 help="specify directories with log files to parse")
612 subparsers["json"].add_argument(
613 "--aggregate", dest="aggregate", action="store_true", default=False,
614 help="Create aggregated entries. Adds Group-* entries at the toplevel. " +
615 "Additionally creates a Total page with all entries.")
534 # Command: help. 616 # Command: help.
535 subparsers["help"] = subparser_adder.add_parser( 617 subparsers["help"] = subparser_adder.add_parser(
536 "help", help="help information") 618 "help", help="help information")
537 subparsers["help"].set_defaults( 619 subparsers["help"].set_defaults(
538 func=lambda args: do_help(parser, subparsers, args), 620 func=lambda args: do_help(parser, subparsers, args),
539 error=subparsers["help"].error) 621 error=subparsers["help"].error)
540 subparsers["help"].add_argument( 622 subparsers["help"].add_argument(
541 "help_cmd", type=str, metavar="<command>", nargs="?", 623 "help_cmd", type=str, metavar="<command>", nargs="?",
542 help="command for which to display help") 624 help="command for which to display help")
543 # Execute the command. 625 # Execute the command.
544 args = parser.parse_args() 626 args = parser.parse_args()
545 setattr(args, 'script_path', os.path.dirname(sys.argv[0])) 627 setattr(args, 'script_path', os.path.dirname(sys.argv[0]))
546 if args.command == "run" and coexist(args.sites_file, args.sites): 628 if args.command == "run" and coexist(args.sites_file, args.sites):
547 args.error("use either option --sites-file or site URLs") 629 args.error("use either option --sites-file or site URLs")
548 sys.exit(1) 630 sys.exit(1)
549 elif args.command == "run" and not coexist(args.replay_wpr, args.replay_bin): 631 elif args.command == "run" and not coexist(args.replay_wpr, args.replay_bin):
550 args.error("options --replay-wpr and --replay-bin must be used together") 632 args.error("options --replay-wpr and --replay-bin must be used together")
551 sys.exit(1) 633 sys.exit(1)
552 else: 634 else:
553 args.func(args) 635 args.func(args)
554 636
555 if __name__ == "__main__": 637 if __name__ == "__main__":
556 sys.exit(main()) 638 sys.exit(main())
OLDNEW
« no previous file with comments | « tools/callstats.html ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698