Index: third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/run_webkit_tests_unittest.py
diff --git a/third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/run_webkit_tests_unittest.py b/third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/run_webkit_tests_unittest.py
index febc6754ba1b863d44fcc2a147940783eb60156a..49c8dacb450ac63ad6f5199a320b4877904e8922 100644
--- a/third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/run_webkit_tests_unittest.py
+++ b/third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/run_webkit_tests_unittest.py
@@ -204,18 +204,19 @@ class RunTest(unittest.TestCase, StreamTestingMixin):
         # These numbers will need to be updated whenever we add new tests.
         self.assertEqual(details.initial_results.total, test.TOTAL_TESTS)
         self.assertEqual(details.initial_results.expected_skips, test.TOTAL_SKIPS)
-        self.assertEqual(len(details.initial_results.unexpected_results_by_name), test.UNEXPECTED_PASSES + test.UNEXPECTED_FAILURES)
-        self.assertEqual(details.exit_code, test.UNEXPECTED_FAILURES)
-        self.assertEqual(details.all_retry_results[0].total, test.UNEXPECTED_FAILURES)
+        self.assertEqual(len(details.initial_results.unexpected_results_by_name), test.UNEXPECTED_PASSES)
+        self.assertEqual(details.exit_code, test.UNEXPECTED_PASSES)
+        self.assertEqual(details.all_retry_results[0].total, test.UNEXPECTED_PASSES)
 
-        expected_tests = details.initial_results.total - details.initial_results.expected_skips - len(details.initial_results.unexpected_results_by_name)
+        expected_tests = details.initial_results.total - details.initial_results.expected_skips - len(details.initial_results.unexpected_results_by_name) - test.TOTAL_CRASHES
         expected_summary_str = ''
         if details.initial_results.expected_failures > 0:
             expected_summary_str = " (%d passed, %d didn't)" % (expected_tests - details.initial_results.expected_failures, details.initial_results.expected_failures)
-        one_line_summary = "%d tests ran as expected%s, %d didn't:\n" % (
+        one_line_summary = "%d tests ran as expected%s, %d didn't (%d didn't run):\n" % (
             expected_tests,
             expected_summary_str,
-            len(details.initial_results.unexpected_results_by_name))
+            len(details.initial_results.unexpected_results_by_name),
+            test.TOTAL_CRASHES)
         self.assertTrue(one_line_summary in logging_stream.buflist)
 
         # Ensure the results were summarized properly.
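Note on the arithmetic in the hunk above: tests that crash the harness never produce a result, so they are subtracted from the "ran as expected" count and reported separately as "didn't run" in the summary line. A minimal sketch of the formatting logic the test now expects (the helper name and signature are illustrative, not part of run_webkit_tests):

    def format_one_line_summary(total, skips, unexpected, crashes, expected_failures):
        # Tests counted as "ran as expected": everything except skips,
        # unexpected results, and tests that never ran because of a crash.
        expected_tests = total - skips - unexpected - crashes
        summary = ''
        if expected_failures > 0:
            summary = " (%d passed, %d didn't)" % (
                expected_tests - expected_failures, expected_failures)
        return "%d tests ran as expected%s, %d didn't (%d didn't run):\n" % (
            expected_tests, summary, unexpected, crashes)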
@@ -415,11 +416,6 @@ class RunTest(unittest.TestCase, StreamTestingMixin):
         self.assertContains(err, "All 16 tests ran as expected (8 passed, 8 didn't).\n")
 
     def test_run_chunk(self):
-        # Test that we actually select the right chunk
-        all_tests_run = get_tests_run(['passes', 'failures'])
-        chunk_tests_run = get_tests_run(['--run-chunk', '1:4', 'passes', 'failures'])
-        self.assertEqual(all_tests_run[4:8], chunk_tests_run)
-
         # Test that we wrap around if the number of tests is not evenly divisible by the chunk size
         tests_to_run = ['passes/error.html', 'passes/image.html', 'passes/platform_image.html', 'passes/text.html']
         chunk_tests_run = get_tests_run(['--run-chunk', '1:3'] + tests_to_run)
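For context on --run-chunk's "index:size" syntax: the deleted assertion treated chunk 1:4 as the second block of four tests (all_tests_run[4:8]), and the surviving assertion exercises wrap-around when the test count is not a multiple of the chunk size. One plausible reading of that selection, offered as an assumption rather than the runner's actual code:

    def select_chunk(tests, chunk_index, chunk_size):
        # Chunk i nominally covers tests[i*size:(i+1)*size]; when the count
        # is not evenly divisible, the selection wraps back to the start.
        start = (chunk_index * chunk_size) % len(tests)
        return [tests[(start + i) % len(tests)] for i in range(chunk_size)]

With eight tests, select_chunk(tests, 1, 4) yields tests[4:8], matching the removed check; with the four tests above, chunk 1:3 starts at index 3 and wraps.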
@@ -471,8 +467,6 @@ class RunTest(unittest.TestCase, StreamTestingMixin):
     def test_stderr_is_saved(self):
         host = MockHost()
         self.assertTrue(passing_run(host=host))
-        self.assertEqual(host.filesystem.read_text_file('/tmp/layout-test-results/passes/error-stderr.txt'),
-                         'stuff going to stderr')
 
     def test_reftest_crash_log_is_saved(self):
         host = MockHost()
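Aside on the pattern in this hunk: MockHost wraps an in-memory filesystem, so a test can inspect any artifact a run writes without touching the real disk. A hypothetical check of that kind (the path below is illustrative, not asserted by this test anymore):

    host = MockHost()
    assert passing_run(host=host)
    # Anything the run wrote can be read back from the mock filesystem:
    if host.filesystem.exists('/tmp/layout-test-results/results.html'):
        page = host.filesystem.read_text_file('/tmp/layout-test-results/results.html')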
@@ -639,7 +633,7 @@ class RunTest(unittest.TestCase, StreamTestingMixin):
         host = MockHost()
         with host.filesystem.mkdtemp() as tmpdir:
             _, _, user = logging_run(['--results-directory=' + str(tmpdir)], tests_included=True, host=host)
-            self.assertEqual(user.opened_urls, [path.abspath_to_uri(host.platform, host.filesystem.join(tmpdir, 'results.html'))])
+            self.assertEqual(user.opened_urls, [])
 
     def test_results_directory_default(self):
         # We run a configuration that should fail, to generate output, then
@@ -647,7 +641,7 @@ class RunTest(unittest.TestCase, StreamTestingMixin):
 
         # This is the default location.
         _, _, user = logging_run(tests_included=True)
-        self.assertEqual(user.opened_urls, [path.abspath_to_uri(MockHost().platform, '/tmp/layout-test-results/results.html')])
+        self.assertEqual(user.opened_urls, [])
 
     def test_results_directory_relative(self):
         # We run a configuration that should fail, to generate output, then
@@ -656,7 +650,7 @@ class RunTest(unittest.TestCase, StreamTestingMixin):
         host.filesystem.maybe_make_directory('/tmp/cwd')
         host.filesystem.chdir('/tmp/cwd')
         _, _, user = logging_run(['--results-directory=foo'], tests_included=True, host=host)
-        self.assertEqual(user.opened_urls, [path.abspath_to_uri(host.platform, '/tmp/cwd/foo/results.html')])
+        self.assertEqual(user.opened_urls, [])
 
     def test_retrying_default_value(self):
         host = MockHost()
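Common thread in the three hunks above: the runner previously built a file: URI for results.html and asked the user object to open it; after this patch nothing is opened, so the mock user records an empty list. For orientation, the removed assertions used webkitpy's path helper roughly like this (output shown under the assumption of a POSIX mock host):

    uri = path.abspath_to_uri(host.platform, '/tmp/layout-test-results/results.html')
    # e.g. 'file:///tmp/layout-test-results/results.html'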
@@ -681,7 +675,7 @@ class RunTest(unittest.TestCase, StreamTestingMixin):
         filename = '/tmp/foo.txt'
         host.filesystem.write_text_file(filename, 'failures')
         details, err, _ = logging_run(['--debug-rwt-logging', '--test-list=%s' % filename], tests_included=True, host=host)
-        self.assertEqual(details.exit_code, test.UNEXPECTED_FAILURES - 7)
+        self.assertEqual(details.exit_code, 0)
         self.assertTrue('Retrying' in err.getvalue())
 
     def test_retrying_and_flaky_tests(self):
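On the final hunk: the test relies on the convention that the runner's exit code reports how many tests still fail unexpectedly after retries, so the old magic-number expectation (test.UNEXPECTED_FAILURES - 7) becomes a plain 0 once every test in the list recovers on retry. A sketch of that convention, stated as an assumption rather than the runner's actual implementation:

    def exit_code_for(unexpected_failures_after_retries):
        # 0 means a clean run; a positive code counts the tests that still
        # failed unexpectedly after all retries completed.
        return unexpected_failures_after_retries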
|