OLD | NEW |
1 # Copyright (C) 2013 Google Inc. All rights reserved. | 1 # Copyright (C) 2013 Google Inc. All rights reserved. |
2 # | 2 # |
3 # Redistribution and use in source and binary forms, with or without | 3 # Redistribution and use in source and binary forms, with or without |
4 # modification, are permitted provided that the following conditions are | 4 # modification, are permitted provided that the following conditions are |
5 # met: | 5 # met: |
6 # | 6 # |
7 # * Redistributions of source code must retain the above copyright | 7 # * Redistributions of source code must retain the above copyright |
8 # notice, this list of conditions and the following disclaimer. | 8 # notice, this list of conditions and the following disclaimer. |
9 # * Redistributions in binary form must reproduce the above | 9 # * Redistributions in binary form must reproduce the above |
10 # copyright notice, this list of conditions and the following disclaimer | 10 # copyright notice, this list of conditions and the following disclaimer |
(...skipping 24 matching lines...) |
35 class BotTestExpectationsTest(unittest.TestCase): | 35 class BotTestExpectationsTest(unittest.TestCase): |
36 # FIXME: Find a way to import this map from Tools/TestResultServer/model/jsonresults.py. | 36 # FIXME: Find a way to import this map from Tools/TestResultServer/model/jsonresults.py. |
37 FAILURE_MAP = {"A": "AUDIO", "C": "CRASH", "F": "TEXT", "I": "IMAGE", "O": "MISSING", | 37 FAILURE_MAP = {"A": "AUDIO", "C": "CRASH", "F": "TEXT", "I": "IMAGE", "O": "MISSING", |
38 "N": "NO DATA", "P": "PASS", "T": "TIMEOUT", "Y": "NOTRUN", "X": "SKIP", "Z": "IMAGE+TEXT", "K": "LEAK"} | 38 "N": "NO DATA", "P": "PASS", "T": "TIMEOUT", "Y": "NOTRUN", "X": "SKIP", "Z": "IMAGE+TEXT", "K": "LEAK"} |
39 | 39 |
40 # All result_string's in this file expect newest result | 40 # All result_string's in this file expect newest result |
41 # on left: "PFF", means it just passed after 2 failures. | 41 # on left: "PFF", means it just passed after 2 failures. |
42 | 42 |
43 def _assert_is_flaky(self, results_string, should_be_flaky): | 43 def _assert_is_flaky(self, results_string, should_be_flaky): |
44 results_json = self._results_json_from_test_data({}) | 44 results_json = self._results_json_from_test_data({}) |
45 expectations = bot_test_expectations.BotTestExpectations(results_json) | 45 expectations = bot_test_expectations.BotTestExpectations(results_json, set('test')) |
46 length_encoded = self._results_from_string(results_string)['results'] | 46 length_encoded = self._results_from_string(results_string)['results'] |
47 num_actual_results = len(expectations._flaky_types_in_results(length_encoded, only_ignore_very_flaky=True)) | 47 num_actual_results = len(expectations._flaky_types_in_results(length_encoded, only_ignore_very_flaky=True)) |
48 if should_be_flaky: | 48 if should_be_flaky: |
49 self.assertGreater(num_actual_results, 1) | 49 self.assertGreater(num_actual_results, 1) |
50 else: | 50 else: |
51 self.assertEqual(num_actual_results, 1) | 51 self.assertEqual(num_actual_results, 1) |
52 | 52 |
53 def test_basic_flaky(self): | 53 def test_basic_flaky(self): |
54 self._assert_is_flaky('PFF', False) # Used to fail, but now passes. | 54 self._assert_is_flaky('PFF', False) # Used to fail, but now passes. |
55 self._assert_is_flaky('FFP', False) # Just started failing. | 55 self._assert_is_flaky('FFP', False) # Just started failing. |
(...skipping 17 matching lines...) |
73 last_char = None | 73 last_char = None |
74 for char in results_string: | 74 for char in results_string: |
75 if char != last_char: | 75 if char != last_char: |
76 results_list.insert(0, [1, char]) | 76 results_list.insert(0, [1, char]) |
77 else: | 77 else: |
78 results_list[0][0] += 1 | 78 results_list[0][0] += 1 |
79 return {'results': results_list} | 79 return {'results': results_list} |
80 | 80 |
81 def _assert_expectations(self, test_data, expectations_string, only_ignore_very_flaky): | 81 def _assert_expectations(self, test_data, expectations_string, only_ignore_very_flaky): |
82 results_json = self._results_json_from_test_data(test_data) | 82 results_json = self._results_json_from_test_data(test_data) |
83 expectations = bot_test_expectations.BotTestExpectations(results_json) | 83 expectations = bot_test_expectations.BotTestExpectations(results_json, set('test')) |
84 self.assertEqual(expectations.flakes_by_path(only_ignore_very_flaky), expectations_string) | 84 self.assertEqual(expectations.flakes_by_path(only_ignore_very_flaky), expectations_string) |
85 | 85 |
86 def _assert_unexpected_results(self, test_data, expectations_string): | 86 def _assert_unexpected_results(self, test_data, expectations_string): |
87 results_json = self._results_json_from_test_data(test_data) | 87 results_json = self._results_json_from_test_data(test_data) |
88 expectations = bot_test_expectations.BotTestExpectations(results_json) | 88 expectations = bot_test_expectations.BotTestExpectations(results_json, set('test')) |
89 self.assertEqual(expectations.unexpected_results_by_path(), expectations_string) | 89 self.assertEqual(expectations.unexpected_results_by_path(), expectations_string) |
90 | 90 |
91 def test_basic(self): | 91 def test_basic(self): |
92 test_data = { | 92 test_data = { |
93 'tests': { | 93 'tests': { |
94 'foo': { | 94 'foo': { |
95 'veryflaky.html': self._results_from_string('FPFP'), | 95 'veryflaky.html': self._results_from_string('FPFP'), |
96 'maybeflaky.html': self._results_from_string('PPFP'), | 96 'maybeflaky.html': self._results_from_string('PPFP'), |
97 'notflakypass.html': self._results_from_string('PPPP'), | 97 'notflakypass.html': self._results_from_string('PPPP'), |
98 'notflakyfail.html': self._results_from_string('FFFF'), | 98 'notflakyfail.html': self._results_from_string('FFFF'), |
(...skipping 56 matching lines...) |
155 self._assert_unexpected_results(test_data, { | 155 self._assert_unexpected_results(test_data, { |
156 'foo/pass1.html': sorted(["FAIL", "PASS"]), | 156 'foo/pass1.html': sorted(["FAIL", "PASS"]), |
157 'foo/pass2.html': sorted(["IMAGE", "PASS"]), | 157 'foo/pass2.html': sorted(["IMAGE", "PASS"]), |
158 'foo/fail.html': sorted(["TEXT", "PASS"]), | 158 'foo/fail.html': sorted(["TEXT", "PASS"]), |
159 'foo/f_p.html': sorted(["TEXT", "PASS"]), | 159 'foo/f_p.html': sorted(["TEXT", "PASS"]), |
160 'foo/crash.html': sorted(["WONTFIX", "CRASH", "TEXT"]), | 160 'foo/crash.html': sorted(["WONTFIX", "CRASH", "TEXT"]), |
161 'foo/image.html': sorted(["CRASH", "FAIL", "IMAGE"]), | 161 'foo/image.html': sorted(["CRASH", "FAIL", "IMAGE"]), |
162 'foo/i_f.html': sorted(["PASS", "IMAGE", "TEXT"]), | 162 'foo/i_f.html': sorted(["PASS", "IMAGE", "TEXT"]), |
163 'foo/all.html': sorted(["TEXT", "PASS", "IMAGE+TEXT", "TIMEOUT", "CRASH", "IMAGE", "MISSING", "LEAK"]), | 163 'foo/all.html': sorted(["TEXT", "PASS", "IMAGE+TEXT", "TIMEOUT", "CRASH", "IMAGE", "MISSING", "LEAK"]), |
164 }) | 164 }) |
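Note on the encoding the tests above exercise (not part of the CL): _results_from_string turns a result string such as 'PFF' into a list of [count, result_code] pairs stored under the 'results' key, and the single-character codes are the keys of FAILURE_MAP. The sketch below shows how such a length-encoded list reads back as human-readable result types; decode_results is a hypothetical helper written only for this note, not an API of bot_test_expectations.

# Hypothetical helper: expand a length-encoded results list such as
# [[2, 'F'], [1, 'P']] into individual result codes and translate each
# code through FAILURE_MAP (copied from line 37 of the test above).
FAILURE_MAP = {"A": "AUDIO", "C": "CRASH", "F": "TEXT", "I": "IMAGE", "O": "MISSING",
               "N": "NO DATA", "P": "PASS", "T": "TIMEOUT", "Y": "NOTRUN", "X": "SKIP",
               "Z": "IMAGE+TEXT", "K": "LEAK"}

def decode_results(length_encoded):
    codes = ''.join(char * count for count, char in length_encoded)
    return [FAILURE_MAP[code] for code in codes]

print(decode_results([[2, 'F'], [1, 'P']]))  # ['TEXT', 'TEXT', 'PASS']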