Chromium Code Reviews

Index: tools/perf/measurements/loading_measurement_analyzer.py
diff --git a/tools/perf/measurements/loading_measurement_analyzer.py b/tools/perf/measurements/loading_measurement_analyzer.py
index a5abe11ac685340ccca200c0a57dfb381d34a46b..395b61d2558dfdc819dface785fe73b4c13164ac 100755
--- a/tools/perf/measurements/loading_measurement_analyzer.py
+++ b/tools/perf/measurements/loading_measurement_analyzer.py
@@ -29,6 +29,9 @@ class LoadingMeasurementAnalyzer(object):
     self.totals = collections.defaultdict(list)
     self.maxes = collections.defaultdict(list)
     self.avgs = collections.defaultdict(list)
+    self.load_times = []
+    self.cpu_times = []
+    self.network_percents = []
     self.num_rows_parsed = 0
     self.num_slowest_urls = options.num_slowest_urls
     if options.rank_csv_file:
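For reference: like totals, maxes, and avgs above, each of the three new lists accumulates (value, url) tuples, one per parsed CSV row. A minimal sketch of that shape, with invented measurements:

# Hypothetical illustration of the (value, url) tuple shape used by this
# analyzer; the numbers below are made up for demonstration.
load_times = [(1200.0, 'http://a.example'), (340.0, 'http://b.example')]
# Tuples compare by their first element, so sorting orders pages by the
# measurement itself; reverse=True puts the slowest page first.
for value, url in sorted(load_times, reverse=True):
  print '%dms\t%s' % (value, url)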
@@ -42,17 +45,30 @@ class LoadingMeasurementAnalyzer(object):
       if (options.rank_limit and
           self._GetRank(row['url']) > options.rank_limit):
         continue
+      cpu_time = 0
+      load_time = float(row['load_time (ms)'])
+      if load_time < 0:
+        print 'Skipping %s due to negative load time' % row['url']
+        continue
       for key, value in row.iteritems():
-        if key in ('url', 'dom_content_loaded_time (ms)', 'load_time (ms)'):
+        if key in ('url', 'load_time (ms)', 'dom_content_loaded_time (ms)'):
           continue
         if not value or value == '-':
           continue
+        value = float(value)
         if '_avg' in key:
-          self.avgs[key].append((float(value), row['url']))
+          self.avgs[key].append((value, row['url']))
         elif '_max' in key:
-          self.maxes[key].append((float(value), row['url']))
+          self.maxes[key].append((value, row['url']))
         else:
-          self.totals[key].append((float(value), row['url']))
+          self.totals[key].append((value, row['url']))
|
nduca (2013/07/22 23:37:00):
so you define network time as the unaccounted-for
+          cpu_time += value
+      self.load_times.append((load_time, row['url']))
+      self.cpu_times.append((cpu_time, row['url']))
+      if options.show_network:
+        network_time = load_time - cpu_time
+        self.totals['Network (ms)'].append((network_time, row['url']))
+        self.network_percents.append((network_time / load_time, row['url']))
       self.num_rows_parsed += 1
       if options.max_rows and self.num_rows_parsed == int(options.max_rows):
         break
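As nduca's inline comment notes, this hunk defines network time as the unaccounted-for remainder of the page load: load_time minus the sum of the per-category CPU columns. A standalone sketch of that derivation, with hypothetical column names and values (the real columns come from the loading_measurement CSV):

# Sketch: derive network time as the part of load time not covered by any
# measured CPU category. All values here are invented.
row = {'url': 'http://a.example',
       'load_time (ms)': '1000',
       'V8.Execute (ms)': '250',       # hypothetical CPU category column
       'ResourceDecode (ms)': '150'}   # hypothetical CPU category column

load_time = float(row['load_time (ms)'])
cpu_time = sum(float(v) for k, v in row.iteritems()
               if k not in ('url', 'load_time (ms)'))
network_time = load_time - cpu_time       # 600.0ms unaccounted for
if load_time > 0:                         # sketch's guard, see note below
  network_percent = network_time / load_time   # 0.6

One note: the CL skips rows with negative load times, but a row whose load_time is exactly zero would still make network_time / load_time raise ZeroDivisionError; the guard above is this sketch's addition, not part of the CL.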
@@ -73,17 +89,23 @@ class LoadingMeasurementAnalyzer(object):
     sum_totals = {}
     for key, values in self.totals.iteritems():
       sum_totals[key] = sum([v[0] for v in values])
-    total_time = sum(sum_totals.values())
+    total_cpu_time = sum([v[0] for v in self.cpu_times])
+    total_page_load_time = sum([v[0] for v in self.load_times])
     print 'Total URLs: ', self.num_rows_parsed
-    print 'Total time: %ds' % int(round(total_time / 1000))
+    print 'Total CPU time: %ds' % int(round(total_cpu_time / 1000))
+    print 'Total page load time: %ds' % int(round(total_page_load_time / 1000))
+    print 'Average CPU time: %dms' % int(round(
+        total_cpu_time / self.num_rows_parsed))
+    print 'Average page load time: %dms' % int(round(
+        total_page_load_time / self.num_rows_parsed))
     for key, value in sorted(sum_totals.iteritems(), reverse=True,
                              key=lambda i: i[1]):
       output_key = '%30s: ' % key.replace(' (ms)', '')
       output_value = '%10ds ' % (value / 1000)
-      output_percent = '%.1f%%' % (100 * value / total_time)
+      output_percent = '%.1f%%' % (100 * value / total_page_load_time)
       print output_key, output_value, output_percent
     if not self.num_slowest_urls:
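With this change the per-category percentages are computed against total page load time rather than the old sum of category totals, so the printed percentages no longer sum to 100%: the gap is the time the CPU categories do not explain, surfaced as the Network row when --show-network is passed. A worked sketch of the summary arithmetic with invented totals:

# Sketch of the summary math; values in ms, invented for illustration.
cpu_times = [(400.0, 'http://a.example'), (600.0, 'http://b.example')]
load_times = [(1000.0, 'http://a.example'), (1500.0, 'http://b.example')]

total_cpu_time = sum(v for v, _ in cpu_times)         # 1000.0
total_page_load_time = sum(v for v, _ in load_times)  # 2500.0
num_rows_parsed = len(load_times)

print 'Average CPU time: %dms' % int(round(total_cpu_time / num_rows_parsed))
print 'Average page load time: %dms' % int(round(
    total_page_load_time / num_rows_parsed))
# -> Average CPU time: 500ms / Average page load time: 1250ms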
@@ -98,6 +120,15 @@ class LoadingMeasurementAnalyzer(object):
       for value, url in slowest:
         print '\t', '%dms\t' % value, url, '(#%s)' % self._GetRank(url)
+    if self.network_percents:
+      print 'Top %d highest network to CPU time ratios:' % self.num_slowest_urls
+      for percent, url in sorted(
+          self.network_percents, reverse=True)[:self.num_slowest_urls]:
+        percent *= 100
+        print '\t', '%.1f%%' % percent, url, '(#%s)' % self._GetRank(url)
+

 def main(argv):
   prog_desc = 'Parses CSV output from the loading_measurement'
   parser = optparse.OptionParser(usage=('%prog [options]' + '\n\n' + prog_desc))
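One subtlety in the block above: network_percents stores network_time / load_time, so despite the "network to CPU time ratios" heading, the printed figures are network time as a fraction of total page load time. A sketch of the selection logic with invented data:

# Sketch: pick the pages with the highest network share of load time.
# The fractions and URLs below are invented.
network_percents = [(0.72, 'http://a.example'), (0.35, 'http://b.example'),
                    (0.90, 'http://c.example')]
num_slowest_urls = 2

for percent, url in sorted(network_percents, reverse=True)[:num_slowest_urls]:
  print '\t%.1f%%\t%s' % (percent * 100, url)
# -> 90.0% http://c.example, then 72.0% http://a.example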
@@ -109,6 +140,8 @@ def main(argv):
   parser.add_option('--rank-csv-file', help='A CSV file of <rank,url>')
   parser.add_option('--rank-limit', type='int',
                     help='Only process pages higher than this rank')
+  parser.add_option('--show-network', action='store_true',
+                    help='Whether to display Network as a category')
   options, args = parser.parse_args(argv[1:])
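For completeness, a sketch of how the new flag behaves once wired into optparse (the flag name comes from the CL; everything else here is illustrative): with action='store_true', options.show_network defaults to None (falsy) and becomes True only when the flag is passed.

import optparse

parser = optparse.OptionParser()
parser.add_option('--show-network', action='store_true',
                  help='Whether to display Network as a category')

options, _ = parser.parse_args(['--show-network'])
print options.show_network   # True
options, _ = parser.parse_args([])
print options.show_network   # None (falsy)

An invocation would then look something like loading_measurement_analyzer.py --show-network <results.csv>, assuming the positional arguments are the CSV files produced by the loading_measurement.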