| OLD | NEW |
| 1 # Copyright 2016 The Chromium Authors. All rights reserved. | 1 # Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 import json | 5 import json |
| 6 import logging | |
| 7 import mock | |
| 8 import os | 6 import os |
| 7 import shutil |
| 9 import tempfile | 8 import tempfile |
| 10 import unittest | 9 import unittest |
| 11 | 10 |
| 12 from telemetry.testing import fakes | |
| 13 from telemetry.testing import browser_test_runner | 11 from telemetry.testing import browser_test_runner |
| 14 | 12 |
| 15 import gpu_project_config | 13 from gpu_tests import path_util |
| 16 | 14 |
| 17 from gpu_tests import gpu_integration_test | 15 path_util.AddDirToPathIfNeeded(path_util.GetChromiumSrcDir(), 'tools', 'perf') |
| 18 from gpu_tests import gpu_test_expectations | 16 from chrome_telemetry_build import chromium_config |
| 19 | |
| 20 class SimpleIntegrationUnittest(gpu_integration_test.GpuIntegrationTest): | |
| 21 # Must be class-scoped since instances aren't reused across runs. | |
| 22 _num_flaky_runs_to_fail = 2 | |
| 23 | |
| 24 _num_browser_starts = 0 | |
| 25 | |
| 26 @classmethod | |
| 27 def Name(cls): | |
| 28 return 'simple_integration_unittest' | |
| 29 | |
| 30 def setUp(self): | |
| 31 super(SimpleIntegrationUnittest, self).setUp() | |
| 32 | |
| 33 @classmethod | |
| 34 def setUpClass(cls): | |
| 35 finder_options = fakes.CreateBrowserFinderOptions() | |
| 36 finder_options.browser_options.platform = fakes.FakeLinuxPlatform() | |
| 37 finder_options.output_formats = ['none'] | |
| 38 finder_options.suppress_gtest_report = True | |
| 39 finder_options.output_dir = None | |
| 40 finder_options.upload_bucket = 'public' | |
| 41 finder_options.upload_results = False | |
| 42 cls._finder_options = finder_options | |
| 43 cls.platform = None | |
| 44 cls.browser = None | |
| 45 cls.SetBrowserOptions(cls._finder_options) | |
| 46 cls.StartBrowser() | |
| 47 | |
| 48 @classmethod | |
| 49 def GenerateGpuTests(cls, options): | |
| 50 yield ('expected_failure', 'failure.html', ()) | |
| 51 yield ('expected_flaky', 'flaky.html', ()) | |
| 52 yield ('expected_skip', 'failure.html', ()) | |
| 53 yield ('unexpected_failure', 'failure.html', ()) | |
| 54 yield ('unexpected_error', 'error.html', ()) | |
| 55 | |
| 56 @classmethod | |
| 57 def _CreateExpectations(cls): | |
| 58 expectations = gpu_test_expectations.GpuTestExpectations() | |
| 59 expectations.Fail('expected_failure') | |
| 60 expectations.Flaky('expected_flaky', max_num_retries=3) | |
| 61 expectations.Skip('expected_skip') | |
| 62 return expectations | |
| 63 | |
| 64 @classmethod | |
| 65 def StartBrowser(cls): | |
| 66 super(SimpleIntegrationUnittest, cls).StartBrowser() | |
| 67 cls._num_browser_starts += 1 | |
| 68 | |
| 69 def RunActualGpuTest(self, file_path, *args): | |
| 70 logging.warn('Running ' + file_path) | |
| 71 if file_path == 'failure.html': | |
| 72 self.fail('Expected failure') | |
| 73 elif file_path == 'flaky.html': | |
| 74 if self.__class__._num_flaky_runs_to_fail > 0: | |
| 75 self.__class__._num_flaky_runs_to_fail -= 1 | |
| 76 self.fail('Expected flaky failure') | |
| 77 elif file_path == 'error.html': | |
| 78 raise Exception('Expected exception') | |
| 79 | |
| 80 | |
| 81 class BrowserStartFailureIntegrationUnittest( | |
| 82 gpu_integration_test.GpuIntegrationTest): | |
| 83 | |
| 84 _num_browser_crashes = 0 | |
| 85 _num_browser_starts = 0 | |
| 86 | |
| 87 @classmethod | |
| 88 def setUpClass(cls): | |
| 89 cls._fake_browser_options = \ | |
| 90 fakes.CreateBrowserFinderOptions(execute_on_startup=cls.CrashOnStart) | |
| 91 cls._fake_browser_options.browser_options.platform = \ | |
| 92 fakes.FakeLinuxPlatform() | |
| 93 cls._fake_browser_options.output_formats = ['none'] | |
| 94 cls._fake_browser_options.suppress_gtest_report = True | |
| 95 cls._fake_browser_options.output_dir = None | |
| 96 cls._fake_browser_options .upload_bucket = 'public' | |
| 97 cls._fake_browser_options .upload_results = False | |
| 98 cls._finder_options = cls._fake_browser_options | |
| 99 cls.platform = None | |
| 100 cls.browser = None | |
| 101 cls.SetBrowserOptions(cls._finder_options) | |
| 102 cls.StartBrowser() | |
| 103 | |
| 104 @classmethod | |
| 105 def _CreateExpectations(cls): | |
| 106 return gpu_test_expectations.GpuTestExpectations() | |
| 107 | |
| 108 @classmethod | |
| 109 def CrashOnStart(cls): | |
| 110 cls._num_browser_starts += 1 | |
| 111 if cls._num_browser_crashes < 2: | |
| 112 cls._num_browser_crashes += 1 | |
| 113 raise | |
| 114 | |
| 115 @classmethod | |
| 116 def Name(cls): | |
| 117 return 'browser_start_failure_integration_unittest' | |
| 118 | |
| 119 @classmethod | |
| 120 def GenerateGpuTests(cls, options): | |
| 121 # This test causes the browser to try and restart the browser 3 times. | |
| 122 yield ('restart', 'restart.html', ()) | |
| 123 | |
| 124 def RunActualGpuTest(self, file_path, *args): | |
| 125 # The logic of this test is run when the browser starts, it fails twice | |
| 126 # and then succeeds on the third time so we are just testing that this | |
| 127 # is successful based on the parameters. | |
| 128 pass | |
| 129 | |
| 130 | |
| 131 class BrowserCrashAfterStartIntegrationUnittest( | |
| 132 gpu_integration_test.GpuIntegrationTest): | |
| 133 | |
| 134 _num_browser_crashes = 0 | |
| 135 _num_browser_starts = 0 | |
| 136 | |
| 137 @classmethod | |
| 138 def setUpClass(cls): | |
| 139 cls._fake_browser_options = fakes.CreateBrowserFinderOptions( | |
| 140 execute_after_browser_creation=cls.CrashAfterStart) | |
| 141 cls._fake_browser_options.browser_options.platform = \ | |
| 142 fakes.FakeLinuxPlatform() | |
| 143 cls._fake_browser_options.output_formats = ['none'] | |
| 144 cls._fake_browser_options.suppress_gtest_report = True | |
| 145 cls._fake_browser_options.output_dir = None | |
| 146 cls._fake_browser_options .upload_bucket = 'public' | |
| 147 cls._fake_browser_options .upload_results = False | |
| 148 cls._finder_options = cls._fake_browser_options | |
| 149 cls.platform = None | |
| 150 cls.browser = None | |
| 151 cls.SetBrowserOptions(cls._finder_options) | |
| 152 cls.StartBrowser() | |
| 153 | |
| 154 @classmethod | |
| 155 def _CreateExpectations(cls): | |
| 156 return gpu_test_expectations.GpuTestExpectations() | |
| 157 | |
| 158 @classmethod | |
| 159 def CrashAfterStart(cls, browser): | |
| 160 cls._num_browser_starts += 1 | |
| 161 if cls._num_browser_crashes < 2: | |
| 162 cls._num_browser_crashes += 1 | |
| 163 # This simulates the first tab's renderer process crashing upon | |
| 164 # startup. The try/catch forces the GpuIntegrationTest's first | |
| 165 # fetch of this tab to fail. crbug.com/682819 | |
| 166 try: | |
| 167 browser.tabs[0].Navigate('chrome://crash') | |
| 168 except Exception: | |
| 169 pass | |
| 170 | |
| 171 @classmethod | |
| 172 def Name(cls): | |
| 173 return 'browser_crash_after_start_integration_unittest' | |
| 174 | |
| 175 @classmethod | |
| 176 def GenerateGpuTests(cls, options): | |
| 177 # This test causes the browser to try and restart the browser 3 times. | |
| 178 yield ('restart', 'restart.html', ()) | |
| 179 | |
| 180 def RunActualGpuTest(self, file_path, *args): | |
| 181 # The logic of this test is run when the browser starts, it fails twice | |
| 182 # and then succeeds on the third time so we are just testing that this | |
| 183 # is successful based on the parameters. | |
| 184 pass | |
| 185 | 17 |
| 186 | 18 |
| 187 class GpuIntegrationTestUnittest(unittest.TestCase): | 19 class GpuIntegrationTestUnittest(unittest.TestCase): |
| 188 @mock.patch('telemetry.internal.util.binary_manager.InitDependencyManager') | 20 def setUp(self): |
| 189 def testSimpleIntegrationUnittest(self, mockInitDependencyManager): | 21 self._test_state = {} |
| 22 |
| 23 def testSimpleIntegrationTest(self): |
| 190 self._RunIntegrationTest( | 24 self._RunIntegrationTest( |
| 191 'simple_integration_unittest', [ | 25 'simple_integration_unittest', |
| 192 'unexpected_error', | 26 ['unittest_data.integration_tests.SimpleTest.unexpected_error', |
| 193 'unexpected_failure' | 27 'unittest_data.integration_tests.SimpleTest.unexpected_failure'], |
| 194 ], [ | 28 ['unittest_data.integration_tests.SimpleTest.expected_flaky', |
| 195 'expected_failure', | 29 'unittest_data.integration_tests.SimpleTest.expected_failure'], |
| 196 'expected_flaky', | 30 ['unittest_data.integration_tests.SimpleTest.expected_skip']) |
| 197 ]) | |
| 198 # It might be nice to be more precise about the order of operations | 31 # It might be nice to be more precise about the order of operations |
| 199 # with these browser restarts, but this is at least a start. | 32 # with these browser restarts, but this is at least a start. |
| 200 self.assertEquals(SimpleIntegrationUnittest._num_browser_starts, 6) | 33 self.assertEquals(self._test_state['num_browser_starts'], 6) |
| 201 | 34 |
| 202 @mock.patch('telemetry.internal.util.binary_manager.InitDependencyManager') | 35 def testIntegrationTestWithBrowserFailure(self): |
| 203 def testIntegrationUnittestWithBrowserFailure( | |
| 204 self, mockInitDependencyManager): | |
| 205 self._RunIntegrationTest( | 36 self._RunIntegrationTest( |
| 206 'browser_start_failure_integration_unittest', [], ['restart']) | 37 'browser_start_failure_integration_unittest', [], |
| 207 self.assertEquals( \ | 38 ['unittest_data.integration_tests.BrowserStartFailureTest.restart'], |
| 208 BrowserStartFailureIntegrationUnittest._num_browser_crashes, 2) | 39 []) |
| 209 self.assertEquals( \ | 40 self.assertEquals(self._test_state['num_browser_crashes'], 2) |
| 210 BrowserStartFailureIntegrationUnittest._num_browser_starts, 3) | 41 self.assertEquals(self._test_state['num_browser_starts'], 3) |
| 211 | 42 |
| 212 @mock.patch('telemetry.internal.util.binary_manager.InitDependencyManager') | 43 def testIntegrationTestWithBrowserCrashUponStart(self): |
| 213 def testIntegrationUnittestWithBrowserCrashUponStart( | |
| 214 self, mockInitDependencyManager): | |
| 215 self._RunIntegrationTest( | 44 self._RunIntegrationTest( |
| 216 'browser_crash_after_start_integration_unittest', [], ['restart']) | 45 'browser_crash_after_start_integration_unittest', [], |
| 217 self.assertEquals( \ | 46 [('unittest_data.integration_tests.BrowserCrashAfterStartTest.restart')], |
| 218 BrowserCrashAfterStartIntegrationUnittest._num_browser_crashes, 2) | 47 []) |
| 219 self.assertEquals( \ | 48 self.assertEquals(self._test_state['num_browser_crashes'], 2) |
| 220 BrowserCrashAfterStartIntegrationUnittest._num_browser_starts, 3) | 49 self.assertEquals(self._test_state['num_browser_starts'], 3) |
| 221 | 50 |
| 222 def _RunIntegrationTest(self, test_name, failures, successes): | 51 def _RunIntegrationTest(self, test_name, failures, successes, skips): |
| 223 options = browser_test_runner.TestRunOptions() | 52 config = chromium_config.ChromiumConfig( |
| 224 # Suppress printing out information for passing tests. | 53 top_level_dir=path_util.GetGpuTestDir(), |
| 225 options.verbosity = 0 | 54 benchmark_dirs=[ |
| 226 config = gpu_project_config.CONFIG | 55 os.path.join(path_util.GetGpuTestDir(), 'unittest_data')]) |
| 227 temp_file = tempfile.NamedTemporaryFile(delete=False) | 56 temp_dir = tempfile.mkdtemp() |
| 228 temp_file.close() | 57 test_results_path = os.path.join(temp_dir, 'test_results.json') |
| 229 temp_file_name = temp_file.name | 58 test_state_path = os.path.join(temp_dir, 'test_state.json') |
| 230 try: | 59 try: |
| 231 browser_test_runner.Run( | 60 browser_test_runner.Run( |
| 232 config, options, | 61 config, |
| 233 [test_name, | 62 [test_name, |
| 234 '--write-abbreviated-json-results-to=%s' % temp_file_name]) | 63 '--write-full-results-to=%s' % test_results_path, |
| 235 with open(temp_file_name) as f: | 64 '--test-state-json-path=%s' % test_state_path]) |
| 65 with open(test_results_path) as f: |
| 236 test_result = json.load(f) | 66 test_result = json.load(f) |
| 237 self.assertEquals(test_result['failures'], failures) | 67 with open(test_state_path) as f: |
| 238 self.assertEquals(test_result['successes'], successes) | 68 self._test_state = json.load(f) |
| 239 self.assertEquals(test_result['valid'], True) | 69 actual_successes, actual_failures, actual_skips = ( |
| | 70 self._ExtractTestResults(test_result)) |
| | 71 self.assertEquals(actual_failures, failures) |
| | 72 self.assertEquals(actual_successes, successes) |
| | 73 self.assertEquals(actual_skips, skips) |
| | 74 finally: |
| | 75 shutil.rmtree(temp_dir) |
| 240 | 76 |
| 241 finally: | 77 def _ExtractTestResults(self, test_result): |
| 242 os.remove(temp_file_name) | 78 delimiter = test_result['path_delimiter'] |
| 79 failures = [] |
| 80 successes = [] |
| 81 skips = [] |
| 82 def _IsLeafNode(node): |
| 83 test_dict = node[1] |
| 84 return ('expected' in test_dict and |
| 85 isinstance(test_dict['expected'], basestring)) |
| 86 node_queues = [] |
| 87 for t in test_result['tests']: |
| 88 node_queues.append((t, test_result['tests'][t])) |
| 89 while node_queues: |
| 90 node = node_queues.pop() |
| 91 full_test_name, test_dict = node |
| 92 if _IsLeafNode(node): |
| 93 if all(res not in test_dict['expected'].split() for res in |
| 94 test_dict['actual'].split()): |
| 95 failures.append(full_test_name) |
| 96 elif test_dict['expected'] == test_dict['actual'] == 'SKIP': |
| 97 skips.append(full_test_name) |
| 98 else: |
| 99 successes.append(full_test_name) |
| 100 else: |
| 101 for k in test_dict: |
| 102 node_queues.append( |
| 103 ('%s%s%s' % (full_test_name, delimiter, k), |
| 104 test_dict[k])) |
| 105 return successes, failures, skips |
| 106 |
| OLD | NEW |