OLD | NEW |
| (Empty) |
1 # Copyright 2014 The Chromium Authors. All rights reserved. | |
2 # Use of this source code is governed by a BSD-style license that can be | |
3 # found in the LICENSE file. | |
4 | |
5 import unittest | |
6 | |
7 from telemetry.internal.results import page_test_results | |
8 from telemetry.page import page as page_module | |
9 from telemetry.web_perf.metrics import rendering_stats | |
10 from telemetry.web_perf.metrics import smoothness | |
11 | |
12 | |
13 class _MockRenderingStats(object): | |
14 | |
15 stats = ['refresh_period', 'frame_timestamps', 'frame_times', 'paint_times', | |
16 'painted_pixel_counts', 'record_times', | |
17 'recorded_pixel_counts', 'approximated_pixel_percentages', | |
18 'checkerboarded_pixel_percentages', 'input_event_latency', | |
19 'frame_queueing_durations', 'main_thread_scroll_latency', | |
20 'gesture_scroll_update_latency'] | |
21 | |
22 def __init__(self, **kwargs): | |
23 self.errors = {} | |
24 for stat in self.stats: | |
25 value = kwargs[stat] if stat in kwargs else None | |
26 setattr(self, stat, value) | |
27 | |
28 | |
#pylint: disable=protected-access
class SmoothnessMetricUnitTest(unittest.TestCase):
  """Unit tests for the private computation helpers of SmoothnessMetric.

  Each test feeds a _MockRenderingStats with hand-built frame data into one
  helper and checks the resulting value objects, including the None-valued
  results produced when there are not enough frames or no data.
  """

  def setUp(self):
    self.metric = smoothness.SmoothnessMetric()
    self.page = page_module.Page('file://blank.html')
    # Every sub-list has at least two frames, so all metrics are computable.
    self.good_timestamps = [[10, 20], [30, 40, 50]]
    # The first sub-list has only a single frame, which is not enough.
    self.not_enough_frames_timestamps = [[10], [20, 30, 40]]

  def testPopulateResultsFromStats(self):
    stats = _MockRenderingStats()
    for stat in _MockRenderingStats.stats:
      # Just set fake data for all of the relevant arrays of stats typically
      # found in a RenderingStats object.
      setattr(stats, stat, [[10, 20], [30, 40, 50]])
    results = page_test_results.PageTestResults()
    results.WillRunPage(self.page)
    self.metric._PopulateResultsFromStats(results, stats, False)
    current_page_run = results.current_page_run
    self.assertTrue(current_page_run.ok)
    # One value per metric the smoothness computation emits.
    expected_values_count = 12
    self.assertEqual(expected_values_count, len(current_page_run.values))

  def testHasEnoughFrames(self):
    # This list will pass since every sub-array has at least 2 frames.
    has_enough_frames = self.metric._HasEnoughFrames(self.good_timestamps)
    self.assertTrue(has_enough_frames)

  def testHasEnoughFramesWithNotEnoughFrames(self):
    # This list will fail since the first sub-array only has a single frame.
    has_enough_frames = self.metric._HasEnoughFrames(
        self.not_enough_frames_timestamps)
    self.assertFalse(has_enough_frames)

  def testComputeSurfaceFlingerMetricNoJank(self):
    stats = _MockRenderingStats(refresh_period=10,
                                frame_timestamps=[[10, 20], [130, 140, 150]],
                                frame_times=[[10], [10, 10]])
    avg_surface_fps, jank_count, max_frame_delay, frame_lengths = (
        self.metric._ComputeSurfaceFlingerMetric(self.page, stats))
    self.assertEqual([1, 1, 1], frame_lengths.values)
    self.assertEqual(1, max_frame_delay.value)
    self.assertEqual(0, jank_count.value)
    self.assertEqual(100, avg_surface_fps.value)

  def testComputeSurfaceFlingerMetricJank(self):
    stats = _MockRenderingStats(
        refresh_period=10,
        frame_timestamps=[[10, 20, 50], [130, 140, 150, 170, 180]],
        frame_times=[[10, 30], [10, 10, 20, 10]])
    avg_surface_fps, jank_count, max_frame_delay, frame_lengths = (
        self.metric._ComputeSurfaceFlingerMetric(self.page, stats))
    self.assertEqual([1, 3, 1, 1, 2, 1], frame_lengths.values)
    self.assertEqual(3, max_frame_delay.value)
    self.assertEqual(2, jank_count.value)
    self.assertEqual(67, avg_surface_fps.value)

  # Renamed from testComputeFrameTimeMetricWithNotEnoughFrames: this test
  # exercises _ComputeSurfaceFlingerMetric, not _ComputeFrameTimeMetric.
  def testComputeSurfaceFlingerMetricWithNotEnoughFrames(self):
    stats = _MockRenderingStats(
        refresh_period=10,
        frame_timestamps=self.not_enough_frames_timestamps,
        frame_times=[[10, 20], [30, 40, 50]])
    avg_surface_fps, jank_count, max_frame_delay, frame_lengths = (
        self.metric._ComputeSurfaceFlingerMetric(self.page, stats))
    self.assertEqual(None, avg_surface_fps.value)
    self.assertEqual(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
                     avg_surface_fps.none_value_reason)
    self.assertEqual(None, jank_count.value)
    self.assertEqual(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
                     jank_count.none_value_reason)
    self.assertEqual(None, max_frame_delay.value)
    self.assertEqual(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
                     max_frame_delay.none_value_reason)
    self.assertEqual(None, frame_lengths.values)
    self.assertEqual(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
                     frame_lengths.none_value_reason)

  def testComputeLatencyMetric(self):
    stats = _MockRenderingStats(frame_timestamps=self.good_timestamps,
                                input_event_latency=[[10, 20], [30, 40, 50]])
    # pylint: disable=unbalanced-tuple-unpacking
    mean_value, discrepancy_value = self.metric._ComputeLatencyMetric(
        self.page, stats, 'input_event_latency', stats.input_event_latency)
    self.assertEqual(30, mean_value.value)
    self.assertEqual(60, discrepancy_value.value)

  def testComputeLatencyMetricWithMissingData(self):
    stats = _MockRenderingStats(frame_timestamps=self.good_timestamps,
                                input_event_latency=[[], []])
    # With no latency samples at all, the helper returns an empty tuple.
    value = self.metric._ComputeLatencyMetric(
        self.page, stats, 'input_event_latency', stats.input_event_latency)
    self.assertEqual((), value)

  def testComputeLatencyMetricWithNotEnoughFrames(self):
    stats = _MockRenderingStats(
        frame_timestamps=self.not_enough_frames_timestamps,
        input_event_latency=[[], []])
    # pylint: disable=unbalanced-tuple-unpacking
    mean_value, discrepancy_value = self.metric._ComputeLatencyMetric(
        self.page, stats, 'input_event_latency', stats.input_event_latency)
    self.assertEqual(None, mean_value.value)
    self.assertEqual(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
                     mean_value.none_value_reason)
    self.assertEqual(None, discrepancy_value.value)
    self.assertEqual(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
                     discrepancy_value.none_value_reason)

  def testComputeGestureScrollUpdateLatencies(self):
    stats = _MockRenderingStats(
        frame_timestamps=self.good_timestamps,
        gesture_scroll_update_latency=[[10, 20], [30, 40, 50]])
    # Only the first latency of each sub-list is reported.
    gesture_value = self.metric._ComputeFirstGestureScrollUpdateLatencies(
        self.page, stats)
    self.assertEqual([10, 30], gesture_value.values)

  def testComputeGestureScrollUpdateLatenciesWithMissingData(self):
    stats = _MockRenderingStats(
        frame_timestamps=self.good_timestamps,
        gesture_scroll_update_latency=[[], []])
    value = self.metric._ComputeFirstGestureScrollUpdateLatencies(
        self.page, stats)
    self.assertEqual(None, value.values)

  def testComputeGestureScrollUpdateLatenciesWithNotEnoughFrames(self):
    stats = _MockRenderingStats(
        frame_timestamps=self.not_enough_frames_timestamps,
        gesture_scroll_update_latency=[[10, 20], [30, 40, 50]])
    gesture_value = self.metric._ComputeFirstGestureScrollUpdateLatencies(
        self.page, stats)
    self.assertEqual(None, gesture_value.values)
    self.assertEqual(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
                     gesture_value.none_value_reason)

  def testComputeQueueingDuration(self):
    stats = _MockRenderingStats(frame_timestamps=self.good_timestamps,
                                frame_queueing_durations=[[10, 20], [30, 40]])
    list_of_scalar_values = self.metric._ComputeQueueingDuration(self.page,
                                                                 stats)
    self.assertEqual([10, 20, 30, 40], list_of_scalar_values.values)

  def testComputeQueueingDurationWithMissingData(self):
    stats = _MockRenderingStats(frame_timestamps=self.good_timestamps,
                                frame_queueing_durations=[[], []])
    list_of_scalar_values = self.metric._ComputeQueueingDuration(
        self.page, stats)
    self.assertEqual(None, list_of_scalar_values.values)
    self.assertEqual('No frame queueing durations recorded.',
                     list_of_scalar_values.none_value_reason)

  def testComputeQueueingDurationWithMissingDataAndErrorValue(self):
    stats = _MockRenderingStats(frame_timestamps=self.good_timestamps,
                                frame_queueing_durations=[[], []])
    # A recorded error takes precedence as the none_value_reason.
    stats.errors['frame_queueing_durations'] = (
        'Current chrome version does not support the queueing delay metric.')
    list_of_scalar_values = self.metric._ComputeQueueingDuration(
        self.page, stats)
    self.assertEqual(None, list_of_scalar_values.values)
    self.assertEqual(
        'Current chrome version does not support the queueing delay metric.',
        list_of_scalar_values.none_value_reason)

  def testComputeQueueingDurationWithNotEnoughFrames(self):
    stats = _MockRenderingStats(
        frame_timestamps=self.not_enough_frames_timestamps,
        frame_queueing_durations=[[10, 20], [30, 40, 50]])
    list_of_scalar_values = self.metric._ComputeQueueingDuration(self.page,
                                                                 stats)
    self.assertEqual(None, list_of_scalar_values.values)
    self.assertEqual(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
                     list_of_scalar_values.none_value_reason)

  def testComputeFrameTimeMetric(self):
    stats = _MockRenderingStats(frame_timestamps=self.good_timestamps,
                                frame_times=[[10, 20], [30, 40, 50]])
    frame_times_value, mean_frame_time_value, percentage_smooth_value = (
        self.metric._ComputeFrameTimeMetric(self.page, stats))
    self.assertEqual([10, 20, 30, 40, 50], frame_times_value.values)
    self.assertEqual(30, mean_frame_time_value.value)
    self.assertEqual(20, percentage_smooth_value.value)

  # Renamed from testComputeFrameTimeMetricWithNotEnoughFrames2: the "2"
  # suffix only existed because a surface-flinger test had taken this name.
  def testComputeFrameTimeMetricWithNotEnoughFrames(self):
    stats = _MockRenderingStats(
        frame_timestamps=self.not_enough_frames_timestamps,
        frame_times=[[10, 20], [30, 40, 50]])
    frame_times_value, mean_frame_time_value, percentage_smooth_value = (
        self.metric._ComputeFrameTimeMetric(self.page, stats))
    self.assertEqual(None, frame_times_value.values)
    self.assertEqual(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
                     frame_times_value.none_value_reason)
    self.assertEqual(None, mean_frame_time_value.value)
    self.assertEqual(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
                     mean_frame_time_value.none_value_reason)
    self.assertEqual(None, percentage_smooth_value.value)
    self.assertEqual(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
                     percentage_smooth_value.none_value_reason)

  def testComputeFrameTimeDiscrepancy(self):
    stats = _MockRenderingStats(frame_timestamps=self.good_timestamps)
    frame_time_discrepancy_value = self.metric._ComputeFrameTimeDiscrepancy(
        self.page, stats)
    self.assertEqual(10, frame_time_discrepancy_value.value)

  def testComputeFrameTimeDiscrepancyWithNotEnoughFrames(self):
    stats = _MockRenderingStats(
        frame_timestamps=self.not_enough_frames_timestamps)
    frame_time_discrepancy_value = self.metric._ComputeFrameTimeDiscrepancy(
        self.page, stats)
    self.assertEqual(None, frame_time_discrepancy_value.value)
    self.assertEqual(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
                     frame_time_discrepancy_value.none_value_reason)

  def testComputeMeanPixelsApproximated(self):
    stats = _MockRenderingStats(
        frame_timestamps=self.good_timestamps,
        approximated_pixel_percentages=[[10, 20], [30, 40, 50]])
    mean_pixels_value = self.metric._ComputeMeanPixelsApproximated(
        self.page, stats)
    self.assertEqual(30, mean_pixels_value.value)

  def testComputeMeanPixelsApproximatedWithNotEnoughFrames(self):
    stats = _MockRenderingStats(
        frame_timestamps=self.not_enough_frames_timestamps,
        approximated_pixel_percentages=[[10, 20], [30, 40, 50]])
    mean_pixels_value = self.metric._ComputeMeanPixelsApproximated(
        self.page, stats)
    self.assertEqual(None, mean_pixels_value.value)
    self.assertEqual(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
                     mean_pixels_value.none_value_reason)

  def testComputeMeanPixelsCheckerboarded(self):
    stats = _MockRenderingStats(
        frame_timestamps=self.good_timestamps,
        checkerboarded_pixel_percentages=[[10, 20], [30, 40, 50]])
    mean_pixels_value = self.metric._ComputeMeanPixelsCheckerboarded(
        self.page, stats)
    self.assertEqual(30, mean_pixels_value.value)

  def testComputeMeanPixelsCheckerboardedWithNotEnoughFrames(self):
    stats = _MockRenderingStats(
        frame_timestamps=self.not_enough_frames_timestamps,
        checkerboarded_pixel_percentages=[[10, 20], [30, 40, 50]])
    mean_pixels_value = self.metric._ComputeMeanPixelsCheckerboarded(
        self.page, stats)
    self.assertEqual(None, mean_pixels_value.value)
    self.assertEqual(smoothness.NOT_ENOUGH_FRAMES_MESSAGE,
                     mean_pixels_value.none_value_reason)

  def testComputeMeanPixelsCheckerboardedWithNoData(self):
    stats = _MockRenderingStats(
        frame_timestamps=self.good_timestamps,
        checkerboarded_pixel_percentages=None)
    # A recorded checkerboarding error becomes the none_value_reason.
    stats.errors[rendering_stats.CHECKERBOARDED_PIXEL_ERROR] = 'test error'
    mean_pixels_value = self.metric._ComputeMeanPixelsCheckerboarded(
        self.page, stats)
    self.assertEqual(None, mean_pixels_value.value)
    self.assertEqual('test error',
                     mean_pixels_value.none_value_reason)
OLD | NEW |