OLD | NEW |
---|---|
(Empty) | |
1 # Copyright 2014 The Chromium Authors. All rights reserved. | |
tonyg
2014/02/22 14:55:19
The term "meta" gave me the wrong initial expectat
| |
2 # Use of this source code is governed by a BSD-style license that can be | |
3 # found in the LICENSE file. | |
4 | |
5 import re | |
6 | |
7 from telemetry.page import page_measurement | |
8 from metrics import timeline as timeline_module | |
9 | |
10 | |
# Metric-type token pages embed in 'MetricRequest.<type>/<name>' event names
# to ask for smoothness metrics over a time span.
SMOOTHNESS_METRIC_TYPE = 'smoothness'
12 | |
13 | |
def IsMetricRequest(event_name):
  """Returns True if event_name marks a page-issued metric request."""
  prefix = 'MetricRequest.'
  return event_name[:len(prefix)] == prefix
16 | |
17 | |
def ParseMetricRequest(event_name):
  """Splits a metric-request event name into its parts.

  Args:
    event_name: String of the form 'MetricRequest.<metric_type>/<logical_name>'.

  Returns:
    A (metric_type, logical_name) tuple.

  Raises:
    ValueError: If event_name is not a well-formed metric request name.
  """
  # Raw string so '\.' is a clean regex escape (the old non-raw pattern also
  # contained the needless '\/' escape). '/' needs no escaping in a regex.
  m = re.match(r'MetricRequest\.(.+?)/(.+)', event_name)
  if not m:
    # Raise a real exception rather than assert: asserts are stripped
    # under 'python -O', which would silently let bad names through.
    raise ValueError('Malformed metric request name: %s' % event_name)
  return (m.group(1), m.group(2))
22 | |
23 | |
class MetricRequest(object):
  """One page-issued request for metrics over a span of the timeline.

  Built from an async trace event whose name encodes the requested metric
  type and a logical name for the span.
  """

  def __init__(self, event):
    self.metric_type, self.logical_name = ParseMetricRequest(event.name)
    self.start = event.start
    self.end = event.end

  def GetResultNameFor(self, result_name):
    """Returns result_name prefixed with this request's logical name."""
    return "%s/%s" % (self.logical_name, result_name)
34 | |
class _MetaMetrics(object):
  """Pairs page-issued metric requests with the timeline metrics that
  can satisfy them, and produces results for each pairing."""

  def __init__(self, model, renderer_thread, detailed_mode):
    self._model = model
    self._renderer_thread = renderer_thread
    self._detailed_mode = detailed_mode

  def FindMetricRequests(self):
    """Returns a MetricRequest for every request event on the renderer thread."""
    # TODO(nduca): Add support for page-load metric request.
    requests = []
    for event in self._renderer_thread.IterAllAsyncSlices():
      if IsMetricRequest(event.name):
        requests.append(MetricRequest(event))
    return requests

  def CreateMetricsForMetricRequest(self, request):
    """Returns the metric objects that can answer a single request."""
    if request.metric_type != SMOOTHNESS_METRIC_TYPE:
      raise Exception('Unrecognized metric type: %s' % request.metric_type)
    return []  # TODO(nduca): Hook up a real metric.

  def AddResults(self, results):
    """Finds all requests on the page and adds their metric results."""
    requests = self.FindMetricRequests()
    if not requests:
      raise Exception('Expected at least one request from the page')
    for request in requests:
      for metric in self.CreateMetricsForMetricRequest(request):
        metric.AddResults(self._model, self._renderer_thread,
                          request, results, self._detailed_mode)
61 | |
62 | |
class MetaMeasurement(page_measurement.PageMeasurement):
  """Measurement whose metrics are chosen by the page under test.

  Instead of collecting a fixed set of values, the page being tested issues
  standard user timing API calls that mark time spans of interest and name
  the kind of metric wanted for each span. This measurement records a trace,
  collects those requests, and generates result values by matching each
  request with the appropriate timeline metric.
  """

  def __init__(self):
    super(MetaMeasurement, self).__init__('smoothness')

  def AddCommandLineOptions(self, parser):
    parser.add_option('--detailed-mode', '-d', action='store_true',
                      help='Report detailed results.')

  def CanRunForPage(self, page):
    # Only pages that declare a smoothness action can be measured.
    return hasattr(page, 'smoothness')

  def WillNavigateToPage(self, page, tab):
    browser = tab.browser
    if not browser.supports_tracing:
      raise Exception('Not supported')
    browser.StartTracing(timeline_module.MINIMAL_TRACE_CATEGORIES)

  def MeasurePage(self, page, tab, results):
    """Stops tracing and turns the page's metric requests into results."""
    trace = tab.browser.StopTracing()
    timeline_model = trace.AsTimelineModel()
    thread = timeline_model.GetRendererThreadFromTab(tab)
    _MetaMetrics(timeline_model, thread,
                 self.options.detailed_mode).AddResults(results)
OLD | NEW |