Index: third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/models/test_run_results_unittest.py
diff --git a/third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/models/test_run_results_unittest.py b/third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/models/test_run_results_unittest.py
index 9826b457d4e13beac9ae7178e31ec09765f55bb7..97348d168706567e0852151d3b01bd75e79b0362 100644
--- a/third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/models/test_run_results_unittest.py
+++ b/third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/models/test_run_results_unittest.py
@@ -53,8 +53,9 @@ def get_result(test_name, result_type=test_expectations.PASS, run_time=0):
 
 
 def run_results(port, extra_skipped_tests=[]):
-    tests = ['passes/text.html', 'failures/expected/timeout.html', 'failures/expected/crash.html', 'failures/expected/leak.html', 'failures/expected/keyboard.html',
-             'failures/expected/audio.html', 'failures/expected/text.html', 'passes/skipped/skip.html']
+    tests = ['passes/text.html', 'failures/expected/timeout.html', 'failures/expected/crash.html', 'failures/expected/leak.html',
+             'failures/expected/keyboard.html', 'failures/expected/audio.html', 'failures/expected/text.html',
+             'passes/skipped/skip.html']
     expectations = test_expectations.TestExpectations(port, tests)
     if extra_skipped_tests:
         expectations.add_extra_skipped_tests(extra_skipped_tests)
@@ -84,8 +85,18 @@ def summarized_results(port, expected, passing, flaky, only_include_failing=Fals
         initial_results.add(get_result('failures/expected/leak.html'), expected, test_is_slow)
     else:
         initial_results.add(get_result('passes/text.html', test_expectations.TIMEOUT, run_time=1), expected, test_is_slow)
-        initial_results.add(get_result('failures/expected/audio.html', test_expectations.CRASH, run_time=0.049), expected, test_is_slow)
-        initial_results.add(get_result('failures/expected/timeout.html', test_expectations.TEXT, run_time=0.05), expected, test_is_slow)
+        initial_results.add(
+            get_result('failures/expected/audio.html',
+                       test_expectations.CRASH,
+                       run_time=0.049),
+            expected,
+            test_is_slow)
+        initial_results.add(
+            get_result('failures/expected/timeout.html',
+                       test_expectations.TEXT,
+                       run_time=0.05),
+            expected,
+            test_is_slow)
         initial_results.add(get_result('failures/expected/crash.html', test_expectations.TIMEOUT), expected, test_is_slow)
         initial_results.add(get_result('failures/expected/leak.html', test_expectations.TIMEOUT), expected, test_is_slow)
 
@@ -94,8 +105,7 @@ def summarized_results(port, expected, passing, flaky, only_include_failing=Fals
 
     initial_results.add(get_result('failures/expected/text.html', test_expectations.IMAGE), expected, test_is_slow)
 
-    all_retry_results = [run_results(port, extra_skipped_tests),
-                         run_results(port, extra_skipped_tests),
+    all_retry_results = [run_results(port, extra_skipped_tests), run_results(port, extra_skipped_tests),
                          run_results(port, extra_skipped_tests)]
 
     def add_result_to_all_retries(new_result, expected):
@@ -104,51 +114,32 @@ def summarized_results(port, expected, passing, flaky, only_include_failing=Fals
 
     if flaky:
         add_result_to_all_retries(get_result('passes/text.html', test_expectations.PASS), True)
-        add_result_to_all_retries(
-            get_result('failures/expected/audio.html', test_expectations.AUDIO), True)
-        add_result_to_all_retries(
-            get_result('failures/expected/leak.html', test_expectations.LEAK), True)
-        add_result_to_all_retries(
-            get_result('failures/expected/timeout.html', test_expectations.AUDIO), True)
-
-        all_retry_results[0].add(
-            get_result('failures/expected/crash.html', test_expectations.AUDIO),
-            False, test_is_slow)
-        all_retry_results[1].add(
-            get_result('failures/expected/crash.html', test_expectations.CRASH),
-            True, test_is_slow)
-        all_retry_results[2].add(
-            get_result('failures/expected/crash.html', test_expectations.LEAK),
-            False, test_is_slow)
-
-        all_retry_results[0].add(
-            get_result('failures/expected/text.html', test_expectations.TEXT),
-            True, test_is_slow)
+        add_result_to_all_retries(get_result('failures/expected/audio.html', test_expectations.AUDIO), True)
+        add_result_to_all_retries(get_result('failures/expected/leak.html', test_expectations.LEAK), True)
+        add_result_to_all_retries(get_result('failures/expected/timeout.html', test_expectations.AUDIO), True)
+
+        all_retry_results[0].add(get_result('failures/expected/crash.html', test_expectations.AUDIO), False, test_is_slow)
+        all_retry_results[1].add(get_result('failures/expected/crash.html', test_expectations.CRASH), True, test_is_slow)
+        all_retry_results[2].add(get_result('failures/expected/crash.html', test_expectations.LEAK), False, test_is_slow)
+
+        all_retry_results[0].add(get_result('failures/expected/text.html', test_expectations.TEXT), True, test_is_slow)
 
     else:
-        add_result_to_all_retries(
-            get_result('passes/text.html', test_expectations.TIMEOUT), False)
-        add_result_to_all_retries(
-            get_result('failures/expected/audio.html', test_expectations.LEAK), False)
-        add_result_to_all_retries(
-            get_result('failures/expected/crash.html', test_expectations.TIMEOUT), False)
-        add_result_to_all_retries(
-            get_result('failures/expected/leak.html', test_expectations.TIMEOUT), False)
-
-        all_retry_results[0].add(
-            get_result('failures/expected/timeout.html', test_expectations.AUDIO),
-            False, test_is_slow)
-        all_retry_results[1].add(
-            get_result('failures/expected/timeout.html', test_expectations.CRASH),
-            False, test_is_slow)
-        all_retry_results[2].add(
-            get_result('failures/expected/timeout.html', test_expectations.LEAK),
-            False, test_is_slow)
-
-    return test_run_results.summarize_results(
-        port, initial_results.expectations, initial_results, all_retry_results,
-        enabled_pixel_tests_in_retry=False,
-        only_include_failing=only_include_failing)
+        add_result_to_all_retries(get_result('passes/text.html', test_expectations.TIMEOUT), False)
+        add_result_to_all_retries(get_result('failures/expected/audio.html', test_expectations.LEAK), False)
+        add_result_to_all_retries(get_result('failures/expected/crash.html', test_expectations.TIMEOUT), False)
+        add_result_to_all_retries(get_result('failures/expected/leak.html', test_expectations.TIMEOUT), False)
+
+        all_retry_results[0].add(get_result('failures/expected/timeout.html', test_expectations.AUDIO), False, test_is_slow)
+        all_retry_results[1].add(get_result('failures/expected/timeout.html', test_expectations.CRASH), False, test_is_slow)
+        all_retry_results[2].add(get_result('failures/expected/timeout.html', test_expectations.LEAK), False, test_is_slow)
+
+    return test_run_results.summarize_results(port,
+                                              initial_results.expectations,
+                                              initial_results,
+                                              all_retry_results,
+                                              enabled_pixel_tests_in_retry=False,
+                                              only_include_failing=only_include_failing)
 
 
 class InterpretTestFailuresTest(unittest.TestCase):
@@ -157,7 +148,8 @@ class InterpretTestFailuresTest(unittest.TestCase):
         self.port = host.port_factory.get(port_name='test')
 
     def test_interpret_test_failures(self):
-        test_dict = test_run_results._interpret_test_failures([test_failures.FailureReftestMismatchDidNotOccur(self.port.abspath_for_test('foo/reftest-expected-mismatch.html'))])
+        test_dict = test_run_results._interpret_test_failures([test_failures.FailureReftestMismatchDidNotOccur(
+            self.port.abspath_for_test('foo/reftest-expected-mismatch.html'))])
         self.assertEqual(len(test_dict), 0)
 
         test_dict = test_run_results._interpret_test_failures([test_failures.FailureMissingAudio()])
@@ -184,13 +176,58 @@ class SummarizedResultsTest(unittest.TestCase):
 
     def test_num_failures_by_type(self):
         summary = summarized_results(self.port, expected=False, passing=False, flaky=False)
-        self.assertEquals(summary['num_failures_by_type'], {'CRASH': 1, 'MISSING': 0, 'TEXT': 1, 'IMAGE': 1, 'NEEDSREBASELINE': 0, 'NEEDSMANUALREBASELINE': 0, 'PASS': 1, 'REBASELINE': 0, 'SKIP': 0, 'SLOW': 0, 'TIMEOUT': 3, 'IMAGE+TEXT': 0, 'LEAK': 0, 'FAIL': 0, 'AUDIO': 0, 'WONTFIX': 0})
+        self.assertEquals(summary['num_failures_by_type'], {'CRASH': 1,
+                                                            'MISSING': 0,
+                                                            'TEXT': 1,
+                                                            'IMAGE': 1,
+                                                            'NEEDSREBASELINE': 0,
+                                                            'NEEDSMANUALREBASELINE': 0,
+                                                            'PASS': 1,
+                                                            'REBASELINE': 0,
+                                                            'SKIP': 0,
+                                                            'SLOW': 0,
+                                                            'TIMEOUT': 3,
+                                                            'IMAGE+TEXT': 0,
+                                                            'LEAK': 0,
+                                                            'FAIL': 0,
+                                                            'AUDIO': 0,
+                                                            'WONTFIX': 0})
 
         summary = summarized_results(self.port, expected=True, passing=False, flaky=False)
-        self.assertEquals(summary['num_failures_by_type'], {'CRASH': 1, 'MISSING': 0, 'TEXT': 0, 'IMAGE': 0, 'NEEDSREBASELINE': 0, 'NEEDSMANUALREBASELINE': 0, 'PASS': 1, 'REBASELINE': 0, 'SKIP': 0, 'SLOW': 0, 'TIMEOUT': 1, 'IMAGE+TEXT': 0, 'LEAK': 1, 'FAIL': 0, 'AUDIO': 1, 'WONTFIX': 0})
+        self.assertEquals(summary['num_failures_by_type'], {'CRASH': 1,
+                                                            'MISSING': 0,
+                                                            'TEXT': 0,
+                                                            'IMAGE': 0,
+                                                            'NEEDSREBASELINE': 0,
+                                                            'NEEDSMANUALREBASELINE': 0,
+                                                            'PASS': 1,
+                                                            'REBASELINE': 0,
+                                                            'SKIP': 0,
+                                                            'SLOW': 0,
+                                                            'TIMEOUT': 1,
+                                                            'IMAGE+TEXT': 0,
+                                                            'LEAK': 1,
+                                                            'FAIL': 0,
+                                                            'AUDIO': 1,
+                                                            'WONTFIX': 0})
 
         summary = summarized_results(self.port, expected=False, passing=True, flaky=False)
-        self.assertEquals(summary['num_failures_by_type'], {'CRASH': 0, 'MISSING': 0, 'TEXT': 0, 'IMAGE': 0, 'NEEDSREBASELINE': 0, 'NEEDSMANUALREBASELINE': 0, 'PASS': 5, 'REBASELINE': 0, 'SKIP': 1, 'SLOW': 0, 'TIMEOUT': 0, 'IMAGE+TEXT': 0, 'LEAK': 0, 'FAIL': 0, 'AUDIO': 0, 'WONTFIX': 0})
+        self.assertEquals(summary['num_failures_by_type'], {'CRASH': 0,
+                                                            'MISSING': 0,
+                                                            'TEXT': 0,
+                                                            'IMAGE': 0,
+                                                            'NEEDSREBASELINE': 0,
+                                                            'NEEDSMANUALREBASELINE': 0,
+                                                            'PASS': 5,
+                                                            'REBASELINE': 0,
+                                                            'SKIP': 1,
+                                                            'SLOW': 0,
+                                                            'TIMEOUT': 0,
+                                                            'IMAGE+TEXT': 0,
+                                                            'LEAK': 0,
+                                                            'FAIL': 0,
+                                                            'AUDIO': 0,
+                                                            'WONTFIX': 0})
 
     def test_chromium_revision(self):
         self.port._options.builder_name = 'dummy builder'
@@ -263,15 +300,16 @@ class SummarizedResultsTest(unittest.TestCase):
         expectations = test_expectations.TestExpectations(self.port, [test_name])
         initial_results = test_run_results.TestRunResults(expectations, 1)
         initial_results.add(get_result(test_name, test_expectations.TIMEOUT, run_time=1), False, False)
-        all_retry_results = [test_run_results.TestRunResults(expectations, 1),
-                             test_run_results.TestRunResults(expectations, 1),
+        all_retry_results = [test_run_results.TestRunResults(expectations, 1), test_run_results.TestRunResults(expectations, 1),
                              test_run_results.TestRunResults(expectations, 1)]
         all_retry_results[0].add(get_result(test_name, test_expectations.LEAK, run_time=0.1), False, False)
         all_retry_results[1].add(get_result(test_name, test_expectations.PASS, run_time=0.1), False, False)
         all_retry_results[2].add(get_result(test_name, test_expectations.PASS, run_time=0.1), False, False)
-        summary = test_run_results.summarize_results(
-            self.port, expectations, initial_results, all_retry_results,
-            enabled_pixel_tests_in_retry=True)
+        summary = test_run_results.summarize_results(self.port,
+                                                     expectations,
+                                                     initial_results,
+                                                     all_retry_results,
+                                                     enabled_pixel_tests_in_retry=True)
         self.assertTrue('is_unexpected' in summary['tests']['failures']['expected']['text.html'])
         self.assertEquals(summary['tests']['failures']['expected']['text.html']['expected'], 'FAIL')
         self.assertEquals(summary['tests']['failures']['expected']['text.html']['actual'], 'TIMEOUT LEAK PASS PASS')
@@ -314,15 +352,16 @@ class SummarizedResultsTest(unittest.TestCase):
         expectations = test_expectations.TestExpectations(self.port, [test_name])
         initial_results = test_run_results.TestRunResults(expectations, 1)
         initial_results.add(get_result(test_name, test_expectations.CRASH), False, False)
-        all_retry_results = [test_run_results.TestRunResults(expectations, 1),
-                             test_run_results.TestRunResults(expectations, 1),
+        all_retry_results = [test_run_results.TestRunResults(expectations, 1), test_run_results.TestRunResults(expectations, 1),
                              test_run_results.TestRunResults(expectations, 1)]
         all_retry_results[0].add(get_result(test_name, test_expectations.TIMEOUT), False, False)
         all_retry_results[1].add(get_result(test_name, test_expectations.PASS), True, False)
         all_retry_results[2].add(get_result(test_name, test_expectations.PASS), True, False)
-        summary = test_run_results.summarize_results(
-            self.port, expectations, initial_results, all_retry_results,
-            enabled_pixel_tests_in_retry=True)
+        summary = test_run_results.summarize_results(self.port,
+                                                     expectations,
+                                                     initial_results,
+                                                     all_retry_results,
+                                                     enabled_pixel_tests_in_retry=True)
         self.assertTrue('is_unexpected' not in summary['tests']['passes']['text.html'])
         self.assertEquals(summary['tests']['passes']['text.html']['expected'], 'PASS')
         self.assertEquals(summary['tests']['passes']['text.html']['actual'], 'CRASH TIMEOUT PASS PASS')
@@ -331,8 +370,7 @@ class SummarizedResultsTest(unittest.TestCase):
         self.assertEquals(summary['num_regressions'], 0)
 
     def test_summarized_results_regression(self):
-        summary = summarized_results(self.port, expected=False, passing=False,
-                                     flaky=False, fail_on_retry=True)
+        summary = summarized_results(self.port, expected=False, passing=False, flaky=False, fail_on_retry=True)
 
         self.assertTrue(summary['tests']['failures']['expected']['timeout.html']['is_unexpected'])
         self.assertEquals(summary['tests']['failures']['expected']['timeout.html']['expected'], 'TIMEOUT')
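
Note on the pattern these tests exercise: each case feeds summarize_results one
initial run plus three retry runs, and the resulting summary reports a single
space-joined "actual" string per test (e.g. the 'CRASH TIMEOUT PASS PASS'
asserted above). The snippet below is a minimal, self-contained sketch of that
join step only, under the assumption that the aggregation is a plain
concatenation of per-run outcomes; collapse_actual_results is a hypothetical
stand-in, not part of the webkitpy API.

    # Hypothetical sketch; collapse_actual_results is NOT webkitpy code.
    def collapse_actual_results(initial_result, retry_results):
        """Join an initial outcome and its retries into one 'actual' string,
        e.g. ('CRASH', ['TIMEOUT', 'PASS', 'PASS']) -> 'CRASH TIMEOUT PASS PASS'.
        """
        return ' '.join([initial_result] + list(retry_results))

    if __name__ == '__main__':
        # Mirrors the expectation asserted in the hunk just above.
        assert collapse_actual_results('CRASH', ['TIMEOUT', 'PASS', 'PASS']) == \
            'CRASH TIMEOUT PASS PASS'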
|