| Index: tools/telemetry/telemetry/web_perf/metrics/startup_unittest.py
|
| diff --git a/tools/telemetry/telemetry/web_perf/metrics/startup_unittest.py b/tools/telemetry/telemetry/web_perf/metrics/startup_unittest.py
|
| new file mode 100644
|
| index 0000000000000000000000000000000000000000..a6013340d3846fa8497cf0f882646cca6640768c
|
| --- /dev/null
|
| +++ b/tools/telemetry/telemetry/web_perf/metrics/startup_unittest.py
|
| @@ -0,0 +1,94 @@
|
| +# Copyright 2015 The Chromium Authors. All rights reserved.
|
| +# Use of this source code is governed by a BSD-style license that can be
|
| +# found in the LICENSE file.
|
| +
|
| +import unittest
|
| +
|
| +import telemetry.timeline.event as timeline_event
|
| +from telemetry.testing import test_page_test_results
|
| +from telemetry.web_perf.metrics import startup
|
| +
|
| +
|
| +class StartupTimelineMetricTest(unittest.TestCase):
|
| +
|
| + def setUp(self):
|
| + self.events = []
|
| +
|
| + def AddEvent(self, event_name, start, duration=None):
|
| + event = timeline_event.TimelineEvent('my_category', event_name,
|
| + start, duration)
|
| + self.events.append(event)
|
| +
|
| + # Attributes defined outside __init__
|
| + # pylint: disable=W0201
|
| + def ComputeStartupMetrics(self):
|
| + results = test_page_test_results.TestPageTestResults(self)
|
| +
|
| + # Create a mock model usable by
|
| + # StartupTimelineMetric.AddWholeTraceResults().
|
| + def IterateEvents(event_predicate):
|
| + for event in self.events:
|
| + if event_predicate(event):
|
| + yield event
|
| + class MockClass(object):
|
| + pass
|
| + model = MockClass()
|
| + model.browser_process = MockClass()
|
| + model.browser_process.parent = MockClass()
|
| + model.browser_process.parent.IterAllEvents = IterateEvents
|
| +
|
| + startup.StartupTimelineMetric().AddWholeTraceResults(model, results)
|
| + return results
|
| +
|
| + def testUntrackedEvents(self):
|
| + # Code coverage for untracked events
|
| + self.AddEvent('unknown_event_0', 0)
|
| + self.AddEvent('unknown_event_1', 1)
|
| + self.ComputeStartupMetrics()
|
| +
|
| + def testInstantEventsBasedValue(self):
|
| + # Test case with instant events to measure the duration between the first
|
| + # occurrences of two distinct events.
|
| + START0 = 7
|
| + START1 = 8
|
| + DURATION0 = 17
|
| + DURATION1 = 18
|
| +
|
| + # Generate duplicated events to make sure we consider only the first one.
|
| + self.AddEvent(startup._MAIN_ENTRY_POINT, START0)
|
| + self.AddEvent(startup._MAIN_ENTRY_POINT, START1)
|
| + self.AddEvent('loadEventEnd', START0 + DURATION0)
|
| + self.AddEvent('loadEventEnd', START1 + DURATION1)
|
| +
|
| + results = self.ComputeStartupMetrics()
|
| + results.AssertHasPageSpecificScalarValue('foreground_tab_load_complete',
|
| + 'ms', DURATION0)
|
| +
|
| + def testBeginEndEventsBasedValue(self):
|
| + # Test case to get the duration of the first occurrence of a duration event.
|
| + i = 1
|
| + for event_names in startup._METRICS.values():
|
| + if len(event_names) != 1:
|
| + continue
|
| +
|
| + duration = 13 * i
|
| + i += 1
|
| +
|
| + # Generate duplicated events to make sure only the first event is
|
| + # considered.
|
| + self.AddEvent(event_names[0], 5, duration)
|
| + self.AddEvent(event_names[0], 6, duration + 2)
|
| +
|
| + self.assertTrue(i > 1)
|
| +
|
| + results = self.ComputeStartupMetrics()
|
| +
|
| + i = 1
|
| + for display_name, event_names in startup._METRICS.iteritems():
|
| + if len(event_names) != 1:
|
| + continue
|
| +
|
| + duration = 13 * i
|
| + i += 1
|
| +
|
| + results.AssertHasPageSpecificScalarValue(display_name, 'ms', duration)
|
|
|