Index: tools/telemetry/telemetry/web_perf/metrics/smoothness_unittest.py
diff --git a/tools/telemetry/telemetry/web_perf/metrics/smoothness_unittest.py b/tools/telemetry/telemetry/web_perf/metrics/smoothness_unittest.py
new file mode 100644
index 0000000000000000000000000000000000000000..a71ce768cfc870b8094bc6221cb44461b3fd0ad4
--- /dev/null
+++ b/tools/telemetry/telemetry/web_perf/metrics/smoothness_unittest.py
@@ -0,0 +1,176 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from collections import namedtuple
+import unittest
+
+from telemetry.page import page as page_module
+from telemetry.results import page_test_results
+from telemetry.web_perf.metrics import smoothness
+
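+# None-value reason the metric reports when a trace has too few frames.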
+NOT_ENOUGH_FRAMES_MESSAGE = 'Not enough frames for smoothness metrics.'
+
+
+class SmoothnessMetricUnitTest(unittest.TestCase):
+
+  def setUp(self):
+    self.metric = smoothness.SmoothnessMetric()
+    self.page = page_module.Page('file://blank.html')
+
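+    # Minimal stand-in for the rendering stats object that SmoothnessMetric
+    # consumes; only the fields the metric is expected to read are modeled.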
+    MockRenderingStats = namedtuple('MockRenderingStats', [
+        'frame_timestamps', 'frame_times', 'paint_times',
+        'painted_pixel_counts', 'record_times', 'recorded_pixel_counts',
+        'rasterize_times', 'rasterized_pixel_counts',
+        'approximated_pixel_percentages', 'input_event_latency',
+        'frame_queueing_durations', 'scroll_update_latency'])
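+    # Deltas between consecutive frame_timestamps below equal frame_times,
+    # e.g. 816.7 - 800 == 16.7 and 917.3 - 900 == 17.3.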
+    self.stats = MockRenderingStats(
+        frame_timestamps=[[800, 816.7, 848, 860.9, 883.0],
+                          [900, 917.3, 934.1, 952.7]],
+        frame_times=[[16.7, 31.3, 12.9, 22.1], [17.3, 16.8, 18.6]],
+        paint_times=[[53.355, 0.58, 0.213, 2.84], [4.54, 0.0, 6.1]],
+        painted_pixel_counts=[[7565696, 3256, 325632], [0, 921464, 4138543]],
+        record_times=[[10, 20], [30, 40, 50]],
+        recorded_pixel_counts=[[10, 20], [30, 40, 50]],
+        rasterize_times=[[10, 20], [30, 40, 50]],
+        rasterized_pixel_counts=[[10, 20], [30, 40, 50]],
+        approximated_pixel_percentages=[[10, 20], [30, 40, 50]],
+        input_event_latency=[[10, 20], [30, 40, 50]],
+        frame_queueing_durations=[[10, 20], [30, 40, 50]],
+        scroll_update_latency=[[10, 20], [30, 40, 50]]
+    )
+
+  def testPopulateResultsFromStats(self):
+    results = page_test_results.PageTestResults()
+    results.WillRunPage(self.page)
+    self.metric.PopulateResultsFromStats(results, self.stats)
+    current_page_run = results.current_page_run
+    self.assertTrue(current_page_run.ok)
+    self.assertEquals(10, len(current_page_run.values))
+
+  def testGetNoneValueReason(self):
+    # This list passes since every sub-list has at least two frames.
+    none_value_reason = self.metric.GetNoneValueReason(
+        self.stats.frame_timestamps)
+    self.assertIsNone(none_value_reason)
+
+  def testGetNoneValueReasonNotEnoughFrames(self):
+    # This list fails since the first sub-list has only a single frame.
+    list_of_frame_timestamp_lists = [[10], [30, 40, 50]]
+    none_value_reason = self.metric.GetNoneValueReason(
+        list_of_frame_timestamp_lists)
+    self.assertEquals(NOT_ENOUGH_FRAMES_MESSAGE, none_value_reason)
+
+  def testGetLatencyMetricValues(self):
+    mean_value, discrepancy_value = self.metric.GetLatencyMetricValues(
+        self.page, '', self.stats.input_event_latency, None)
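+    # 30 is the mean of the flattened latency samples [10, 20, 30, 40, 50].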
+    self.assertEquals(30, mean_value.value)
+    self.assertIsNone(mean_value.none_value_reason)
+    self.assertEquals(60, discrepancy_value.value)
+    self.assertIsNone(discrepancy_value.none_value_reason)
+
+  def testGetLatencyMetricValuesMissingData(self):
+    list_of_latency_lists = [[], []]
+    mean_value, discrepancy_value = self.metric.GetLatencyMetricValues(
+        self.page, '', list_of_latency_lists, None)
+    self.assertIsNone(mean_value.value)
+    self.assertEquals('No latency values recorded.',
+                      mean_value.none_value_reason)
+    self.assertIsNone(discrepancy_value.value)
+    self.assertEquals('No latency values recorded.',
+                      discrepancy_value.none_value_reason)
+
+  def testGetLatencyMetricValuesNotEnoughFrames(self):
+    mean_value, discrepancy_value = self.metric.GetLatencyMetricValues(
+        self.page, '', self.stats.input_event_latency,
+        NOT_ENOUGH_FRAMES_MESSAGE)
+    self.assertIsNone(mean_value.value)
+    self.assertEquals(NOT_ENOUGH_FRAMES_MESSAGE,
+                      mean_value.none_value_reason)
+    self.assertIsNone(discrepancy_value.value)
+    self.assertEquals(NOT_ENOUGH_FRAMES_MESSAGE,
+                      discrepancy_value.none_value_reason)
+
+  def testGetQueueingDurationValue(self):
+    list_of_scalar_values = self.metric.GetQueueingDurationValue(
+        self.page, self.stats.frame_queueing_durations, None)
+    self.assertEquals([10, 20, 30, 40, 50], list_of_scalar_values.values)
+    self.assertIsNone(list_of_scalar_values.none_value_reason)
+
+  def testGetQueueingDurationValueMissingData(self):
+    list_of_queueing_durations_lists = [[], []]
+    list_of_scalar_values = self.metric.GetQueueingDurationValue(
+        self.page, list_of_queueing_durations_lists, None)
+    self.assertIsNone(list_of_scalar_values.values)
+    self.assertEquals('Queueing delay metric is unsupported.',
+                      list_of_scalar_values.none_value_reason)
+
+  def testGetQueueingDurationValueWithNotEnoughFrames(self):
+    list_of_scalar_values = self.metric.GetQueueingDurationValue(
+        self.page, self.stats.frame_queueing_durations,
+        NOT_ENOUGH_FRAMES_MESSAGE)
+    self.assertIsNone(list_of_scalar_values.values)
+    self.assertEquals(NOT_ENOUGH_FRAMES_MESSAGE,
+                      list_of_scalar_values.none_value_reason)
+
+  def testGetFrameTimeMetricValues(self):
+    frame_times_value, mean_frame_time_value, mostly_smooth_value = (
+        self.metric.GetFrameTimeMetricValues(
+            self.page, self.stats.frame_times, None))
+    self.assertEquals([16.7, 31.3, 12.9, 22.1, 17.3, 16.8, 18.6],
+                      frame_times_value.values)
+    self.assertIsNone(frame_times_value.none_value_reason)
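+    # 19.386 is the mean of the seven frame times (135.7 / 7, rounded).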
+    self.assertEquals(19.386, mean_frame_time_value.value)
+    self.assertIsNone(mean_frame_time_value.none_value_reason)
+    self.assertEquals(0, mostly_smooth_value.value)
+    self.assertIsNone(mostly_smooth_value.none_value_reason)
+
+  def testGetFrameTimeMetricValuesWithNotEnoughFrames(self):
+    frame_times_value, mean_frame_time_value, mostly_smooth_value = (
+        self.metric.GetFrameTimeMetricValues(
+            self.page, self.stats.frame_times,
+            NOT_ENOUGH_FRAMES_MESSAGE))
+    self.assertIsNone(frame_times_value.values)
+    self.assertEquals(NOT_ENOUGH_FRAMES_MESSAGE,
+                      frame_times_value.none_value_reason)
+    self.assertIsNone(mean_frame_time_value.value)
+    self.assertEquals(NOT_ENOUGH_FRAMES_MESSAGE,
+                      mean_frame_time_value.none_value_reason)
+    self.assertIsNone(mostly_smooth_value.value)
+    self.assertEquals(NOT_ENOUGH_FRAMES_MESSAGE,
+                      mostly_smooth_value.none_value_reason)
+
+  def testGetFrameTimeDiscrepancyValue(self):
+    jank_value = self.metric.GetFrameTimeDiscrepancyValue(
+        self.page, self.stats.frame_timestamps, None)
+    self.assertEquals(31.3, jank_value.value)
+    self.assertIsNone(jank_value.none_value_reason)
+
+  def testGetFrameTimeDiscrepancyValueWithNotEnoughFrames(self):
+    jank_value = self.metric.GetFrameTimeDiscrepancyValue(
+        self.page, self.stats.frame_timestamps, NOT_ENOUGH_FRAMES_MESSAGE)
+    self.assertIsNone(jank_value.value)
+    self.assertEquals(NOT_ENOUGH_FRAMES_MESSAGE,
+                      jank_value.none_value_reason)
+
+  def testGetMeanPixelsApproximatedValue(self):
+    mean_pixels_value = self.metric.GetMeanPixelsApproximatedValue(
+        self.page, self.stats.approximated_pixel_percentages, None)
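+    # 30 is the mean of the flattened percentages [10, 20, 30, 40, 50].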
+    self.assertEquals(30, mean_pixels_value.value)
+    self.assertIsNone(mean_pixels_value.none_value_reason)
+
+  def testGetMeanPixelsApproximatedValueWithNotEnoughFrames(self):
+    mean_pixels_value = self.metric.GetMeanPixelsApproximatedValue(
+        self.page, self.stats.approximated_pixel_percentages,
+        NOT_ENOUGH_FRAMES_MESSAGE)
+    self.assertIsNone(mean_pixels_value.value)
+    self.assertEquals(NOT_ENOUGH_FRAMES_MESSAGE,
+                      mean_pixels_value.none_value_reason)