Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(582)

Side by Side Diff: appengine/test_results/test/jsonresults_unittest.py

Issue 499103002: Make test_results deployable again. (Closed) Base URL: https://chromium.googlesource.com/infra/infra.git@master
Patch Set: merge to ToT Created 6 years, 3 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
(Empty)
1 # Copyright (C) 2010 Google Inc. All rights reserved.
2 #
3 # Redistribution and use in source and binary forms, with or without
4 # modification, are permitted provided that the following conditions are
5 # met:
6 #
7 # * Redistributions of source code must retain the above copyright
8 # notice, this list of conditions and the following disclaimer.
9 # * Redistributions in binary form must reproduce the above
10 # copyright notice, this list of conditions and the following disclaimer
11 # in the documentation and/or other materials provided with the
12 # distribution.
13 # * Neither the name of Google Inc. nor the names of its
14 # contributors may be used to endorse or promote products derived from
15 # this software without specific prior written permission.
16 #
17 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
18 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
19 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
20 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
21 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
22 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
23 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
24 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
25 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28
29 # Allow this unittest to access _members.
30 # pylint: disable=W0212
31
32 import json
33 import logging
34 import unittest
35
36 from appengine.test_results.model import jsonresults
37 from appengine.test_results.model.jsonresults import (
38 JsonResults,
39 TEXT,
40 FAIL,
41 LEAK,
42 PASS,
43 TIMEOUT,
44 IMAGE,
45 NO_DATA,
46 IMAGE_PLUS_TEXT,
47 CRASH,
48 NOTRUN,
49 TestFile,
50 )
51 from appengine.test_results.handlers import master_config
52
53 from google.appengine.ext import testbed
54
# Canonical example of the JSON a builder uploads after a run, in the
# "full results" format, wrapped in the ADD_RESULTS(...) JSONP decoration
# that JsonResults._strip_prefix_suffix removes before parsing.
FULL_RESULT_EXAMPLE = """ADD_RESULTS({
    "seconds_since_epoch": 1368146629,
    "tests": {
        "media": {
            "encrypted-media": {
                "encrypted-media-v2-events.html": {
                    "bugs": ["crbug.com/1234"],
                    "expected": "TIMEOUT",
                    "actual": "TIMEOUT",
                    "time": 6.0
                },
                "encrypted-media-v2-syntax.html": {
                    "expected": "TIMEOUT",
                    "actual": "TIMEOUT"
                }
            },
            "progress-events-generated-correctly.html": {
                "expected": "PASS FAIL IMAGE TIMEOUT CRASH MISSING",
                "actual": "TIMEOUT",
                "time": 6.0
            },
            "W3C": {
                "audio": {
                    "src": {
                        "src_removal_does_not_trigger_loadstart.html": {
                            "expected": "PASS",
                            "actual": "PASS",
                            "time": 3.5
                        }
                    }
                },
                "video": {
                    "src": {
                        "src_removal_does_not_trigger_loadstart.html": {
                            "expected": "PASS",
                            "actual": "PASS",
                            "time": 1.1
                        },
                        "notrun.html": {
                            "expected": "NOTRUN",
                            "actual": "SKIP",
                            "time": 1.1
                        }
                    }
                }
            },
            "unexpected-skip.html": {
                "expected": "PASS",
                "actual": "SKIP"
            },
            "unexpected-fail.html": {
                "expected": "PASS",
                "actual": "FAIL"
            },
            "flaky-failed.html": {
                "expected": "PASS FAIL",
                "actual": "FAIL"
            },
            "media-document-audio-repaint.html": {
                "expected": "IMAGE",
                "actual": "IMAGE",
                "time": 0.1
            },
            "unexpected-leak.html": {
                "expected": "PASS",
                "actual": "LEAK"
            }
        }
    },
    "skipped": 2,
    "num_regressions": 0,
    "build_number": "3",
    "interrupted": false,
    "layout_tests_dir": "\/tmp\/cr\/src\/third_party\/WebKit\/LayoutTests",
    "version": 3,
    "builder_name": "Webkit",
    "num_passes": 10,
    "pixel_tests_enabled": true,
    "blink_revision": "1234",
    "has_pretty_patch": true,
    "fixable": 25,
    "num_flaky": 0,
    "num_failures_by_type": {
        "CRASH": 3,
        "MISSING": 0,
        "TEXT": 3,
        "IMAGE": 1,
        "PASS": 10,
        "SKIP": 2,
        "TIMEOUT": 16,
        "IMAGE+TEXT": 0,
        "FAIL": 2,
        "AUDIO": 0,
        "LEAK": 1
    },
    "has_wdiff": true,
    "chromium_revision": "5678"
});"""

# Aggregated-results template in the old (pre-num_failures_by_type) format,
# carrying the allFixableCount/fixableCount/fixableCounts columns.  The
# [...] placeholders are filled in by JsonResultsTest._make_test_json.
JSON_RESULTS_OLD_TEMPLATE = (
    '{"[BUILDER_NAME]":{'
    '"allFixableCount":[[TESTDATA_COUNT]],'
    '"blinkRevision":[[TESTDATA_WEBKITREVISION]],'
    '"buildNumbers":[[TESTDATA_BUILDNUMBERS]],'
    '"chromeRevision":[[TESTDATA_CHROMEREVISION]],'
    '"failure_map": %s,'
    '"fixableCount":[[TESTDATA_COUNT]],'
    '"fixableCounts":[[TESTDATA_COUNTS]],'
    '"secondsSinceEpoch":[[TESTDATA_TIMES]],'
    '"tests":{[TESTDATA_TESTS]}'
    '},'
    '"version":[VERSION]'
    '}') % json.dumps(jsonresults.CHAR_TO_FAILURE)

# num_failures_by_type fragment: one [[TESTDATA_COUNT]] placeholder per
# failure-type *name* (the values of CHAR_TO_FAILURE).
JSON_RESULTS_COUNTS = ('{"' + '":[[TESTDATA_COUNT]],"'.join(
    [char for char in jsonresults.CHAR_TO_FAILURE.values()])
    + '":[[TESTDATA_COUNT]]}')

# Aggregated-results template in the current format (num_failures_by_type
# instead of the fixable* columns).
JSON_RESULTS_TEMPLATE = (
    '{"[BUILDER_NAME]":{'
    '"blinkRevision":[[TESTDATA_WEBKITREVISION]],'
    '"buildNumbers":[[TESTDATA_BUILDNUMBERS]],'
    '"chromeRevision":[[TESTDATA_CHROMEREVISION]],'
    '"failure_map": %s,'
    '"num_failures_by_type":%s,'
    '"secondsSinceEpoch":[[TESTDATA_TIMES]],'
    '"tests":{[TESTDATA_TESTS]}'
    '},'
    '"version":[VERSION]'
    '}') % (json.dumps(jsonresults.CHAR_TO_FAILURE), JSON_RESULTS_COUNTS)

# fixableCounts fragment: keyed by failure *character* (the keys of
# CHAR_TO_FAILURE), one [TESTDATA] placeholder per character.
JSON_RESULTS_COUNTS_TEMPLATE = '{"' + '":[TESTDATA],"'.join(
    [char for char in jsonresults.CHAR_TO_FAILURE]) + '":[TESTDATA]}'

# Shape of the reduced JSON returned by JsonResults.get_test_list.
JSON_RESULTS_TEST_LIST_TEMPLATE = '{"Webkit":{"tests":{[TESTDATA_TESTS]}}}'
190
191
class MockFile(object):
  """In-memory stand-in for a stored results file.

  Carries the identifying metadata (master / builder / test type / build
  number) plus the file name and contents; save() just records the data.
  """

  def __init__(self, name='results.json', data=''):
    self.master = 'MockMasterName'
    self.builder = 'MockBuilderName'
    self.test_type = 'MockTestType'
    self.build_number = 0
    self.name = name
    self.data = data

  @property
  def file_information(self):
    """Human-readable description of which file this mock represents."""
    return ("master: %s, builder: %s, test_type: %s, build_number: %r, "
            "name: %s." % (self.master, self.builder, self.test_type,
                           self.build_number, self.name))

  def save(self, data):
    """Replace the stored contents; always reports success."""
    self.data = data
    return True
212
213 class JsonResultsTest(unittest.TestCase):
214
  def setUp(self):
    # Most tests merge under the "Webkit" builder unless they override this.
    self._builder = "Webkit"
    # Silence log noise from the code under test; tearDown restores the level.
    self.old_log_level = logging.root.level
    logging.root.setLevel(logging.ERROR)
219
  def tearDown(self):
    # Restore the root log level recorded in setUp.
    logging.root.setLevel(self.old_log_level)
222
223 # Use this to get better error messages than just string compare gives.
224 def assert_json_equal(self, a, b):
225 self.maxDiff = None
226 a = json.loads(a) if isinstance(a, str) else a
227 b = json.loads(b) if isinstance(b, str) else b
228 self.assertEqual(a, b)
229
230 def test_strip_prefix_suffix(self):
231 json_string = "['contents']"
232 stripped = jsonresults.JsonResults._strip_prefix_suffix(
233 "ADD_RESULTS(" + json_string + ");")
234 self.assertEqual(stripped, json_string)
235 self.assertEqual(JsonResults._strip_prefix_suffix(json_string), json_string)
236
237 @staticmethod
238 def _make_test_json(test_data, json_string=JSON_RESULTS_TEMPLATE,
239 builder_name="Webkit"):
240 if not test_data:
241 return ""
242
243 builds = test_data["builds"]
244 tests = test_data["tests"]
245 if not builds or not tests:
246 return ""
247
248 counts = []
249 build_numbers = []
250 webkit_revision = []
251 chrome_revision = []
252 times = []
253 for build in builds:
254 counts.append(JSON_RESULTS_COUNTS_TEMPLATE.replace("[TESTDATA]", build))
255 build_numbers.append("1000%s" % build)
256 webkit_revision.append("2000%s" % build)
257 chrome_revision.append("3000%s" % build)
258 times.append("100000%s000" % build)
259
260 json_string = json_string.replace("[BUILDER_NAME]", builder_name)
261 json_string = json_string.replace("[TESTDATA_COUNTS]", ",".join(counts))
262 json_string = json_string.replace("[TESTDATA_COUNT]", ",".join(builds))
263 json_string = json_string.replace(
264 "[TESTDATA_BUILDNUMBERS]", ",".join(build_numbers))
265 json_string = json_string.replace(
266 "[TESTDATA_WEBKITREVISION]", ",".join(webkit_revision))
267 json_string = json_string.replace(
268 "[TESTDATA_CHROMEREVISION]", ",".join(chrome_revision))
269 json_string = json_string.replace("[TESTDATA_TIMES]", ",".join(times))
270
271 version = str(test_data["version"]) if "version" in test_data else "4"
272 json_string = json_string.replace("[VERSION]", version)
273 json_string = json_string.replace("{[TESTDATA_TESTS]}",
274 json.dumps(tests, separators=(',', ':'), sort_keys=True))
275 return json_string
276
277 def _test_merge(self, aggregated_data, incremental_data, expected_data,
278 max_builds=jsonresults.JSON_RESULTS_MAX_BUILDS):
279 aggregated_results = self._make_test_json(
280 aggregated_data, builder_name=self._builder)
281 incremental_results = self._make_test_json(
282 incremental_data, builder_name=self._builder)
283 # FIXME: Why is this called if we ignore the result?
284 JsonResults._get_incremental_json(self._builder,
285 JsonResults.load_json(aggregated_results),
286 is_full_results_format=False)
287 merged_results, status_code = JsonResults.merge(self._builder,
288 aggregated_results, JsonResults.load_json(incremental_results),
289 num_runs=max_builds, sort_keys=True)
290
291 if expected_data:
292 expected_results = self._make_test_json(
293 expected_data, builder_name=self._builder)
294 self.assert_json_equal(merged_results, expected_results)
295 self.assertEqual(status_code, 200)
296 else:
297 self.assertTrue(status_code != 200)
298
299 def _test_get_test_list(self, input_data, expected_data):
300 input_results = self._make_test_json(input_data)
301 expected_results = JSON_RESULTS_TEST_LIST_TEMPLATE.replace(
302 "{[TESTDATA_TESTS]}", json.dumps(expected_data, separators=(',', ':')))
303 actual_results = JsonResults.get_test_list(self._builder, input_results)
304 self.assert_json_equal(actual_results, expected_results)
305
  def test_update_files_empty_aggregate_data(self):
    """With no prior aggregate, both files end up equal to the increment."""
    small_file = MockFile(name='results-small.json')
    large_file = MockFile(name='results.json')

    incremental_data = {
        "builds": ["2", "1"],
        "tests": {
            "001.html": {
                "results": [[200, jsonresults.TEXT]],
                "times": [[200, 0]],
            }
        }
    }
    incremental_string = self._make_test_json(
        incremental_data, builder_name=small_file.builder)
    incremental_json = JsonResults.load_json(incremental_string)

    self.assertTrue(JsonResults.update_files(small_file.builder,
        incremental_json, small_file, large_file, is_full_results_format=False))
    self.assert_json_equal(small_file.data, incremental_string)
    self.assert_json_equal(large_file.data, incremental_string)
327
  def test_update_files_null_incremental_data(self):
    """An empty-string increment is rejected (403) and files are untouched."""
    small_file = MockFile(name='results-small.json')
    large_file = MockFile(name='results.json')

    aggregated_data = {
        "builds": ["2", "1"],
        "tests": {
            "001.html": {
                "results": [[200, jsonresults.TEXT]],
                "times": [[200, 0]],
            }
        }
    }
    aggregated_string = self._make_test_json(
        aggregated_data, builder_name=small_file.builder)

    small_file.data = large_file.data = aggregated_string

    incremental_string = ""

    results_tuple = JsonResults.update_files(small_file.builder,
        incremental_string, small_file, large_file,
        is_full_results_format=False)
    self.assertEqual(results_tuple, ('No incremental JSON data to merge.', 403))
    # Both stored files must still hold the original aggregate.
    self.assert_json_equal(small_file.data, aggregated_string)
    self.assert_json_equal(large_file.data, aggregated_string)
354
  def test_update_files_empty_incremental_data(self):
    """An increment with no builds/tests renders to "" and is rejected."""
    small_file = MockFile(name='results-small.json')
    large_file = MockFile(name='results.json')

    aggregated_data = {
        "builds": ["2", "1"],
        "tests": {
            "001.html": {
                "results": [[200, jsonresults.TEXT]],
                "times": [[200, 0]],
            }
        }
    }
    aggregated_string = self._make_test_json(
        aggregated_data, builder_name=small_file.builder)

    small_file.data = large_file.data = aggregated_string

    # _make_test_json returns "" for empty builds/tests, so this exercises
    # the same rejection path as a null increment.
    incremental_data = {
        "builds": [],
        "tests": {}
    }
    incremental_string = self._make_test_json(
        incremental_data, builder_name=small_file.builder)

    results_tuple = JsonResults.update_files(small_file.builder,
        incremental_string, small_file, large_file,
        is_full_results_format=False)
    self.assertEqual(results_tuple, ('No incremental JSON data to merge.', 403))
    self.assert_json_equal(small_file.data, aggregated_string)
    self.assert_json_equal(large_file.data, aggregated_string)
386
  def test_merge_with_empty_aggregated_results(self):
    """Merging into an empty aggregate yields the incremental results."""
    incremental_data = {
        "builds": ["2", "1"],
        "tests": {
            "001.html": {
                "results": [[200, jsonresults.TEXT]],
                "times": [[200, 0]],
            }
        }
    }
    incremental_json = JsonResults.load_json(
        self._make_test_json(incremental_data))
    incremental_results, _ = JsonResults._get_incremental_json(
        self._builder, incremental_json, is_full_results_format=False)
    aggregated_results = ""
    merged_results, _ = JsonResults.merge(self._builder, aggregated_results,
        incremental_results, num_runs=jsonresults.JSON_RESULTS_MAX_BUILDS,
        sort_keys=True)
    self.assert_json_equal(merged_results, incremental_results)
406
  def test_failures_by_type_added(self):
    """Merging two old-format aggregates produces a new-format result.

    Both inputs use JSON_RESULTS_OLD_TEMPLATE; the expected output is
    rendered with the default (new) template, and the run-length results
    for build "3" are folded into the existing counts (100+1 TEXT,
    200+1 times).
    """
    aggregated_results = self._make_test_json({
        "builds": ["2", "1"],
        "tests": {
            "001.html": {
                "results": [[100, TEXT], [100, FAIL]],
                "times": [[200, 0]],
            }
        }
    }, json_string=JSON_RESULTS_OLD_TEMPLATE)
    incremental_results = self._make_test_json({
        "builds": ["3"],
        "tests": {
            "001.html": {
                "results": [[1, TEXT]],
                "times": [[1, 0]],
            }
        }
    }, json_string=JSON_RESULTS_OLD_TEMPLATE)
    incremental_json, _ = JsonResults._get_incremental_json(self._builder,
        JsonResults.load_json(incremental_results),
        is_full_results_format=False)
    merged_results, _ = JsonResults.merge(self._builder, aggregated_results,
        incremental_json, num_runs=201, sort_keys=True)
    self.assert_json_equal(merged_results, self._make_test_json({
        "builds": ["3", "2", "1"],
        "tests": {
            "001.html": {
                "results": [[101, TEXT], [100, FAIL]],
                "times": [[201, 0]],
            }
        }
    }))
440
  def test_merge_full_results_format(self):
    """Converting FULL_RESULT_EXAMPLE into the aggregated format.

    Per the expected data below: actual results become run-length-encoded
    [count, type] pairs, float times appear rounded to ints (3.5 -> 4),
    and the unexpected-skip / notrun entries from the input are dropped
    entirely — presumably pruned as skipped tests; confirm against
    jsonresults if this changes.
    """
    expected_incremental_results = {
        "Webkit": {
            "blinkRevision": ["1234"],
            "buildNumbers": ["3"],
            "chromeRevision": ["5678"],
            "failure_map": jsonresults.CHAR_TO_FAILURE,
            "num_failures_by_type": {
                "AUDIO": [0],
                "CRASH": [3],
                "FAIL": [2],
                "IMAGE": [1],
                "IMAGE+TEXT": [0],
                "MISSING": [0],
                "PASS": [10],
                "SKIP": [2],
                "TEXT": [3],
                "TIMEOUT": [16],
                "LEAK": [1]
            },
            "secondsSinceEpoch": [1368146629],
            "tests": {
                "media": {
                    "W3C": {
                        "audio": {
                            "src": {
                                "src_removal_does_not_trigger_loadstart.html": {
                                    "results": [[1, PASS]],
                                    "times": [[1, 4]],
                                }
                            }
                        }
                    },
                    "encrypted-media": {
                        "encrypted-media-v2-events.html": {
                            "bugs": ["crbug.com/1234"],
                            "expected": "TIMEOUT",
                            "results": [[1, TIMEOUT]],
                            "times": [[1, 6]],
                        },
                        "encrypted-media-v2-syntax.html": {
                            "expected": "TIMEOUT",
                            "results": [[1, TIMEOUT]],
                            "times": [[1, 0]],
                        }
                    },
                    "media-document-audio-repaint.html": {
                        "expected": "IMAGE",
                        "results": [[1, IMAGE]],
                        "times": [[1, 0]],
                    },
                    "progress-events-generated-correctly.html": {
                        "expected": "PASS FAIL IMAGE TIMEOUT CRASH MISSING",
                        "results": [[1, TIMEOUT]],
                        "times": [[1, 6]],
                    },
                    "flaky-failed.html": {
                        "expected": "PASS FAIL",
                        "results": [[1, FAIL]],
                        "times": [[1, 0]],
                    },
                    "unexpected-fail.html": {
                        "results": [[1, FAIL]],
                        "times": [[1, 0]],
                    },
                    "unexpected-leak.html": {
                        "results": [[1, LEAK]],
                        "times": [[1, 0]],
                    },
                }
            }
        },
        "version": 4
    }

    aggregated_results = ""
    incremental_json, _ = JsonResults._get_incremental_json(self._builder,
        JsonResults.load_json(FULL_RESULT_EXAMPLE),
        is_full_results_format=True)
    merged_results, _ = JsonResults.merge("Webkit", aggregated_results,
        incremental_json, num_runs=jsonresults.JSON_RESULTS_MAX_BUILDS,
        sort_keys=True)
    self.assert_json_equal(merged_results, expected_incremental_results)
524
525 def test_merge_empty_aggregated_results(self):
526 # No existing aggregated results.
527 # Merged results == new incremental results.
528 self._test_merge(
529 # Aggregated results
530 None,
531 # Incremental results
532 {"builds": ["2", "1"],
533 "tests": {"001.html": {
534 "results": [[200, TEXT]],
535 "times": [[200, 0]]}}},
536 # Expected result
537 {"builds": ["2", "1"],
538 "tests": {"001.html": {
539 "results": [[200, TEXT]],
540 "times": [[200, 0]]}}})
541
542 def test_merge_duplicate_build_number(self):
543 self._test_merge(
544 # Aggregated results
545 {"builds": ["2", "1"],
546 "tests": {"001.html": {
547 "results": [[100, TEXT]],
548 "times": [[100, 0]]}}},
549 # Incremental results
550 {"builds": ["2"],
551 "tests": {"001.html": {
552 "results": [[1, TEXT]],
553 "times": [[1, 0]]}}},
554 # Expected results
555 None)
556
  def test_merge_incremental_single_test_single_run_same_result(self):
    """A newer build with the same result extends the existing run."""
    # Incremental results has the latest build and same test results for
    # that run.
    # Insert the incremental results at the first place and sum number
    # of runs for TEXT (200 + 1) to get merged results.
    self._test_merge(
        # Aggregated results
        {"builds": ["2", "1"],
         "tests": {"001.html": {
             "results": [[200, TEXT]],
             "times": [[200, 0]]}}},
        # Incremental results
        {"builds": ["3"],
         "tests": {"001.html": {
             "results": [[1, TEXT]],
             "times": [[1, 0]]}}},
        # Expected results
        {"builds": ["3", "2", "1"],
         "tests": {"001.html": {
             "results": [[201, TEXT]],
             "times": [[201, 0]]}}})
578
  def test_merge_single_test_single_run_different_result(self):
    """A newer build with a different result starts a new run entry."""
    # Incremental results has the latest build but different test results
    # for that run.
    # Insert the incremental results at the first place.
    self._test_merge(
        # Aggregated results
        {"builds": ["2", "1"],
         "tests": {"001.html": {
             "results": [[200, TEXT]],
             "times": [[200, 0]]}}},
        # Incremental results
        {"builds": ["3"],
         "tests": {"001.html": {
             "results": [[1, IMAGE]],
             "times": [[1, 1]]}}},
        # Expected results
        {"builds": ["3", "2", "1"],
         "tests": {"001.html": {
             "results": [[1, IMAGE], [200, TEXT]],
             "times": [[1, 1], [200, 0]]}}})
599
  def test_merge_single_test_single_run_result_changed(self):
    """A result matching an older (non-head) run still starts a new entry."""
    # Incremental results has the latest build but results which differ from
    # the latest result (but are the same as an older result).
    self._test_merge(
        # Aggregated results
        {"builds": ["2", "1"],
         "tests": {"001.html": {
             "results": [[200, TEXT], [10, IMAGE]],
             "times": [[200, 0], [10, 1]]}}},
        # Incremental results
        {"builds": ["3"],
         "tests": {"001.html": {
             "results": [[1, IMAGE]],
             "times": [[1, 1]]}}},
        # Expected results
        {"builds": ["3", "2", "1"],
         "tests": {"001.html": {
             "results": [[1, IMAGE], [200, TEXT], [10, IMAGE]],
             "times": [[1, 1], [200, 0], [10, 1]]}}})
619
  def test_merge_multiple_tests_single_run(self):
    """Each test's head run is extended when its result repeats."""
    # All tests have incremental updates.
    self._test_merge(
        # Aggregated results
        {"builds": ["2", "1"],
         "tests": {"001.html": {
             "results": [[200, TEXT]],
             "times": [[200, 0]]},
             "002.html": {
                 "results": [[100, IMAGE]],
                 "times": [[100, 1]]}}},
        # Incremental results
        {"builds": ["3"],
         "tests": {"001.html": {
             "results": [[1, TEXT]],
             "times": [[1, 0]]},
             "002.html": {
                 "results": [[1, IMAGE]],
                 "times": [[1, 1]]}}},
        # Expected results
        {"builds": ["3", "2", "1"],
         "tests": {"001.html": {
             "results": [[201, TEXT]],
             "times": [[201, 0]]},
             "002.html": {
                 "results": [[101, IMAGE]],
                 "times": [[101, 1]]}}})
647
  def test_merge_multiple_tests_single_run_one_no_result(self):
    """A test absent from the increment gets a NO_DATA entry for that build."""
    self._test_merge(
        # Aggregated results
        {"builds": ["2", "1"],
         "tests": {"001.html": {
             "results": [[200, TEXT]],
             "times": [[200, 0]]},
             "002.html": {
                 "results": [[100, IMAGE]],
                 "times": [[100, 1]]}}},
        # Incremental results: only 002.html ran.
        {"builds": ["3"],
         "tests": {"002.html": {
             "results": [[1, IMAGE]],
             "times": [[1, 1]]}}},
        # Expected results
        {"builds": ["3", "2", "1"],
         "tests": {"001.html": {
             "results": [[1, NO_DATA], [200, TEXT]],
             "times": [[201, 0]]},
             "002.html": {
                 "results": [[101, IMAGE]],
                 "times": [[101, 1]]}}})
671
  def test_merge_single_test_multiple_runs(self):
    """An increment covering two builds is prepended run by run."""
    self._test_merge(
        # Aggregated results
        {"builds": ["2", "1"],
         "tests": {"001.html": {
             "results": [[200, TEXT]],
             "times": [[200, 0]]}}},
        # Incremental results for builds 4 and 3.
        {"builds": ["4", "3"],
         "tests": {"001.html": {
             "results": [[2, IMAGE], [1, FAIL]],
             "times": [[3, 2]]}}},
        # Expected results
        {"builds": ["4", "3", "2", "1"],
         "tests": {"001.html": {
             "results": [[1, FAIL], [2, IMAGE], [200, TEXT]],
             "times": [[3, 2], [200, 0]]}}})
689
  def test_merge_multiple_tests_multiple_runs(self):
    """Multiple tests, multiple incremental builds, differing results."""
    self._test_merge(
        # Aggregated results
        {"builds": ["2", "1"],
         "tests": {"001.html": {
             "results": [[200, TEXT]],
             "times": [[200, 0]]},
             "002.html": {
                 "results": [[10, IMAGE_PLUS_TEXT]],
                 "times": [[10, 0]]}}},
        # Incremental results
        {"builds": ["4", "3"],
         "tests": {"001.html": {
             "results": [[2, IMAGE]],
             "times": [[2, 2]]},
             "002.html": {
                 "results": [[1, CRASH]],
                 "times": [[1, 1]]}}},
        # Expected results
        {"builds": ["4", "3", "2", "1"],
         "tests": {"001.html": {
             "results": [[2, IMAGE], [200, TEXT]],
             "times": [[2, 2], [200, 0]]},
             "002.html": {
                 "results": [[1, CRASH], [10, IMAGE_PLUS_TEXT]],
                 "times": [[1, 1], [10, 0]]}}})
716
  def test_merge_incremental_result_older_build(self):
    """An out-of-order (older) build is still merged in."""
    # Test the build in incremental results is older than the most recent
    # build in aggregated results.
    self._test_merge(
        # Aggregated results
        {"builds": ["3", "1"],
         "tests": {"001.html": {
             "results": [[5, TEXT]],
             "times": [[5, 0]]}}},
        # Incremental results
        {"builds": ["2"],
         "tests": {"001.html": {
             "results": [[1, TEXT]],
             "times": [[1, 0]]}}},
        # Expected results: despite being older, build 2 is prepended and
        # its counts folded in (5 + 1).
        {"builds": ["2", "3", "1"],
         "tests": {"001.html": {
             "results": [[6, TEXT]],
             "times": [[6, 0]]}}})
736
  def test_merge_incremental_result_same_build(self):
    """A non-head duplicate build number is accepted and duplicated."""
    # Test the build in incremental results is same as the build in
    # aggregated results.
    self._test_merge(
        # Aggregated results
        {"builds": ["2", "1"],
         "tests": {"001.html": {
             "results": [[5, TEXT]],
             "times": [[5, 0]]}}},
        # Incremental results
        {"builds": ["3", "2"],
         "tests": {"001.html": {
             "results": [[2, TEXT]],
             "times": [[2, 0]]}}},
        # Expected results: the increment is prepended wholesale, so build
        # "2" appears twice and the counts sum (5 + 2).
        {"builds": ["3", "2", "2", "1"],
         "tests": {"001.html": {
             "results": [[7, TEXT]],
             "times": [[7, 0]]}}})
756
  def test_merge_remove_new_test(self):
    """New tests whose only results are PASS/NOTRUN/NO_DATA are not added."""
    self._test_merge(
        # Aggregated results
        {"builds": ["2", "1"],
         "tests": {"001.html": {
             "results": [[199, TEXT]],
             "times": [[199, 0]]},
         }},
        # Incremental results: 002/notrun/003 are new but uninteresting.
        {"builds": ["3"],
         "tests": {"001.html": {
             "results": [[1, TEXT]],
             "times": [[1, 0]]},
             "002.html": {
                 "results": [[1, PASS]],
                 "times": [[1, 0]]},
             "notrun.html": {
                 "results": [[1, NOTRUN]],
                 "times": [[1, 0]]},
             "003.html": {
                 "results": [[1, NO_DATA]],
                 "times": [[1, 0]]},
         }},
        # Expected results: only the pre-existing failing test remains.
        {"builds": ["3", "2", "1"],
         "tests": {"001.html": {
             "results": [[200, TEXT]],
             "times": [[200, 0]]},
         }},
        max_builds=200)
787
  def test_merge_remove_test(self):
    """Tests that become all-pass within the max_builds window are pruned.

    001.html (and its now-empty directories) and 003.html — whose lone old
    TEXT failure falls outside the 200-build window after the merge — are
    removed; 002.html keeps its recent TEXT failures and survives.
    """
    self._test_merge(
        # Aggregated results
        {
            "builds": ["2", "1"],
            "tests": {
                "directory": {
                    "directory": {
                        "001.html": {
                            "results": [[200, PASS]],
                            "times": [[200, 0]]
                        }
                    }
                },
                "002.html": {
                    "results": [[10, TEXT]],
                    "times": [[10, 0]]
                },
                "003.html": {
                    "results": [[190, PASS], [9, NO_DATA], [1, TEXT]],
                    "times": [[200, 0]]
                },
            }
        },
        # Incremental results
        {
            "builds": ["3"],
            "tests": {
                "directory": {
                    "directory": {
                        "001.html": {
                            "results": [[1, PASS]],
                            "times": [[1, 0]]
                        }
                    }
                },
                "002.html": {
                    "results": [[1, PASS]],
                    "times": [[1, 0]]
                },
                "003.html": {
                    "results": [[1, PASS]],
                    "times": [[1, 0]]
                },
            }
        },
        # Expected results
        {
            "builds": ["3", "2", "1"],
            "tests": {
                "002.html": {
                    "results": [[1, PASS], [10, TEXT]],
                    "times": [[11, 0]]
                }
            }
        },
        max_builds=200)
845
  def test_merge_updates_expected(self):
    """The increment's expected/bugs annotations replace the aggregate's.

    Per the expected data: 002.html drops its stale "expected"/"bugs"
    (increment says plain PASS), 003.html's expectation is updated to
    TIMEOUT, 004.html gains the increment's bugs entry, and the now
    all-passing 001.html directory tree is pruned.
    """
    self._test_merge(
        # Aggregated results
        {
            "builds": ["2", "1"],
            "tests": {
                "directory": {
                    "directory": {
                        "001.html": {
                            "expected": "FAIL",
                            "results": [[200, PASS]],
                            "times": [[200, 0]]
                        }
                    }
                },
                "002.html": {
                    "bugs": ["crbug.com/1234"],
                    "expected": "FAIL",
                    "results": [[10, TEXT]],
                    "times": [[10, 0]]
                },
                "003.html": {
                    "expected": "FAIL",
                    "results": [[190, PASS], [9, NO_DATA], [1, TEXT]],
                    "times": [[200, 0]]
                },
                "004.html": {
                    "results": [[199, PASS], [1, TEXT]],
                    "times": [[200, 0]]
                },
            }
        },
        # Incremental results
        {
            "builds": ["3"],
            "tests": {
                "002.html": {
                    "expected": "PASS",
                    "results": [[1, PASS]],
                    "times": [[1, 0]]
                },
                "003.html": {
                    "expected": "TIMEOUT",
                    "results": [[1, PASS]],
                    "times": [[1, 0]]
                },
                "004.html": {
                    "bugs": ["crbug.com/1234"],
                    "results": [[1, PASS]],
                    "times": [[1, 0]]
                },
            }
        },
        # Expected results
        {
            "builds": ["3", "2", "1"],
            "tests": {
                "002.html": {
                    "results": [[1, PASS], [10, TEXT]],
                    "times": [[11, 0]]
                },
                "003.html": {
                    "expected": "TIMEOUT",
                    "results": [[191, PASS], [9, NO_DATA]],
                    "times": [[200, 0]]
                },
                "004.html": {
                    "bugs": ["crbug.com/1234"],
                    "results": [[200, PASS]],
                    "times": [[200, 0]]
                },
            }
        },
        max_builds=200)
920
  def test_merge_keep_test_with_all_pass_but_slow_time(self):
    """An all-pass test is kept if any run meets JSON_RESULTS_MIN_TIME."""
    self._test_merge(
        # Aggregated results: 001.html passes but is slow.
        {"builds": ["2", "1"],
         "tests": {"001.html": {
             "results": [[200, PASS]],
             "times": [[200, jsonresults.JSON_RESULTS_MIN_TIME]]},
             "002.html": {
                 "results": [[10, TEXT]],
                 "times": [[10, 0]]}}},
        # Incremental results
        {"builds": ["3"],
         "tests": {"001.html": {
             "results": [[1, PASS]],
             "times": [[1, 1]]},
             "002.html": {
                 "results": [[1, PASS]],
                 "times": [[1, 0]]}}},
        # Expected results: 001.html survives pruning because of its times.
        {"builds": ["3", "2", "1"],
         "tests": {"001.html": {
             "results": [[201, PASS]],
             "times": [[1, 1], [200, jsonresults.JSON_RESULTS_MIN_TIME]]},
             "002.html": {
                 "results": [[1, PASS], [10, TEXT]],
                 "times": [[11, 0]]}}})
947
  def test_merge_pruning_slow_tests_for_debug_builders(self):
    """Debug builders use a higher slow-test threshold.

    With a "(dbg)" builder, an all-pass test at 1x JSON_RESULTS_MIN_TIME
    (003.html) is pruned, while one at 3x the threshold (001.html) is kept.
    """
    self._builder = "MockBuilder(dbg)"
    self._test_merge(
        # Aggregated results
        {"builds": ["2", "1"],
         "tests": {"001.html": {
             "results": [[200, PASS]],
             "times": [[200, 3 * jsonresults.JSON_RESULTS_MIN_TIME]]},
             "002.html": {
                 "results": [[10, TEXT]],
                 "times": [[10, 0]]}}},
        # Incremental results
        {"builds": ["3"],
         "tests": {"001.html": {
             "results": [[1, PASS]],
             "times": [[1, 1]]},
             "002.html": {
                 "results": [[1, PASS]],
                 "times": [[1, 0]]},
             "003.html": {
                 "results": [[1, PASS]],
                 "times": [[1, jsonresults.JSON_RESULTS_MIN_TIME]]}}},
        # Expected results: 003.html does not appear.
        {"builds": ["3", "2", "1"],
         "tests": {"001.html": {
             "results": [[201, PASS]],
             "times": [[1, 1], [200, 3 * jsonresults.JSON_RESULTS_MIN_TIME]]},
             "002.html": {
                 "results": [[1, PASS], [10, TEXT]],
                 "times": [[11, 0]]}}})
978
  def test_merge_prune_extra_results(self):
    """Entries beyond JSON_RESULTS_MAX_BUILDS fall off the end."""
    # Remove items from test results and times that exceed the max number
    # of builds to track.
    max_builds = jsonresults.JSON_RESULTS_MAX_BUILDS
    self._test_merge(
        # Aggregated results
        {"builds": ["2", "1"],
         "tests": {"001.html": {
             "results": [[max_builds, TEXT], [1, IMAGE]],
             "times": [[max_builds, 0], [1, 1]]}}},
        # Incremental results
        {"builds": ["3"],
         "tests": {"001.html": {
             "results": [[1, TIMEOUT]],
             "times": [[1, 1]]}}},
        # Expected results: the trailing IMAGE run is dropped.
        {"builds": ["3", "2", "1"],
         "tests": {"001.html": {
             "results": [[1, TIMEOUT], [max_builds, TEXT]],
             "times": [[1, 1], [max_builds, 0]]}}})
999
  def test_merge_prune_extra_results_small(self):
    """Same pruning behavior with the smaller JSON_RESULTS_MAX_BUILDS_SMALL."""
    # Remove items from test results and times that exceed the max number
    # of builds to track, using smaller threshold.
    max_builds = jsonresults.JSON_RESULTS_MAX_BUILDS_SMALL
    self._test_merge(
        # Aggregated results
        {"builds": ["2", "1"],
         "tests": {"001.html": {
             "results": [[max_builds, TEXT], [1, IMAGE]],
             "times": [[max_builds, 0], [1, 1]]}}},
        # Incremental results
        {"builds": ["3"],
         "tests": {"001.html": {
             "results": [[1, TIMEOUT]],
             "times": [[1, 1]]}}},
        # Expected results
        {"builds": ["3", "2", "1"],
         "tests": {"001.html": {
             "results": [[1, TIMEOUT], [max_builds, TEXT]],
             "times": [[1, 1], [max_builds, 0]]}}},
        int(max_builds))
1021
  def test_merge_prune_extra_results_with_new_result_of_same_type(self):
    """A merged-in run of the head type pushes the oldest entries out."""
    # Test that merging in a new result of the same type as the last result
    # causes old results to fall off.
    max_builds = jsonresults.JSON_RESULTS_MAX_BUILDS_SMALL
    self._test_merge(
        # Aggregated results
        {"builds": ["2", "1"],
         "tests": {"001.html": {
             "results": [[max_builds, TEXT], [1, NO_DATA]],
             "times": [[max_builds, 0], [1, 1]]}}},
        # Incremental results
        {"builds": ["3"],
         "tests": {"001.html": {
             "results": [[1, TEXT]],
             "times": [[1, 0]]}}},
        # Expected results: still max_builds TEXT entries; NO_DATA dropped.
        {"builds": ["3", "2", "1"],
         "tests": {"001.html": {
             "results": [[max_builds, TEXT]],
             "times": [[max_builds, 0]]}}},
        int(max_builds))
1043
  def test_merge_build_directory_hierarchy(self):
    """Merging recurses into nested directory dicts and adds new subtrees."""
    self._test_merge(
        # Aggregated results
        {"builds": ["2", "1"],
         "tests": {"bar": {"baz": {
             "003.html": {
                 "results": [[25, TEXT]],
                 "times": [[25, 0]]}}},
             "foo": {
                 "001.html": {
                     "results": [[50, TEXT]],
                     "times": [[50, 0]]},
                 "002.html": {
                     "results": [[100, IMAGE]],
                     "times": [[100, 0]]}}},
         "version": 4},
        # Incremental results: a new top-level "baz" directory appears.
        {"builds": ["3"],
         "tests": {"baz": {
             "004.html": {
                 "results": [[1, IMAGE]],
                 "times": [[1, 0]]}},
             "foo": {
                 "001.html": {
                     "results": [[1, TEXT]],
                     "times": [[1, 0]]},
                 "002.html": {
                     "results": [[1, IMAGE]],
                     "times": [[1, 0]]}}},
         "version": 4},
        # Expected results: bar/baz/003.html gains a NO_DATA run for the
        # build it missed; baz/004.html is added as-is.
        {"builds": ["3", "2", "1"],
         "tests": {"bar": {"baz": {
             "003.html": {
                 "results": [[1, NO_DATA], [25, TEXT]],
                 "times": [[26, 0]]}}},
             "baz": {
                 "004.html": {
                     "results": [[1, IMAGE]],
                     "times": [[1, 0]]}},
             "foo": {
                 "001.html": {
                     "results": [[51, TEXT]],
                     "times": [[51, 0]]},
                 "002.html": {
                     "results": [[101, IMAGE]],
                     "times": [[101, 0]]}}},
         "version": 4})
1092
1093 # FIXME(aboxhall): Add some tests for xhtml/svg test results.
1094
def test_get_test_name_list(self):
    """Extract only the test-name hierarchy, dropping result details.

    FIXME: This also covers a temporary bug in the data where a
    directory-level entry carries "results"/"times" values of its own.
    Once that bug is fixed, remove this case and assert it never occurs.
    """
    input_results = {
        "builds": ["3", "2", "1"],
        "tests": {
            "foo": {
                "001.html": {
                    "results": [[200, PASS]],
                    "times": [[200, 0]],
                },
                # Deliberate bad data: results/times at directory level.
                "results": [[1, NO_DATA]],
                "times": [[1, 0]],
            },
            # Note: a sibling of "foo", not nested under it.
            "002.html": {
                "results": [[10, TEXT]],
                "times": [[10, 0]],
            },
        },
    }
    expected = {"foo": {"001.html": {}}, "002.html": {}}
    self._test_get_test_list(input_results, expected)
1115
def test_gtest(self):
    """Merge flat dotted-name gtest results; the aggregated data is
    version 3 while the incremental data is version 4, so this also
    exercises the version upgrade path."""
    aggregated = {
        "builds": ["2", "1"],
        "tests": {
            "foo.bar": {
                "results": [[50, TEXT]],
                "times": [[50, 0]],
            },
            "foo.bar2": {
                "results": [[100, IMAGE]],
                "times": [[100, 0]],
            },
            "test.failed": {
                "results": [[5, FAIL]],
                "times": [[5, 0]],
            },
        },
        "version": 3,
    }
    incremental = {
        "builds": ["3"],
        "tests": {
            "foo.bar2": {
                "results": [[1, IMAGE]],
                "times": [[1, 0]],
            },
            "foo.bar3": {
                "results": [[1, TEXT]],
                "times": [[1, 0]],
            },
            "test.failed": {
                "results": [[5, FAIL]],
                "times": [[5, 0]],
            },
        },
        "version": 4,
    }
    # "foo.bar" is missing from the incremental payload, so it gains a
    # NO_DATA entry for the new build; matching names accumulate.
    expected = {
        "builds": ["3", "2", "1"],
        "tests": {
            "foo.bar": {
                "results": [[1, NO_DATA], [50, TEXT]],
                "times": [[51, 0]],
            },
            "foo.bar2": {
                "results": [[101, IMAGE]],
                "times": [[101, 0]],
            },
            "foo.bar3": {
                "results": [[1, TEXT]],
                "times": [[1, 0]],
            },
            "test.failed": {
                "results": [[10, FAIL]],
                "times": [[10, 0]],
            },
        },
        "version": 4,
    }
    self._test_merge(aggregated, incremental, expected)
1160
def test_deprecated_master_name(self):
    """Results uploaded under a master's deprecated name and its new
    url_name must end up merged into files keyed by the url_name.

    Fix: tb.deactivate() now runs in a finally block so a failing
    assertion cannot leak active testbed stubs into later tests.
    """
    tb = testbed.Testbed()
    tb.activate()
    try:
        tb.init_datastore_v3_stub()
        tb.init_blobstore_stub()

        master = master_config.getMaster('chromium.chromiumos')
        builder = 'test-builder'
        test_type = 'test-type'

        # Two uploads: one keyed by the old master name, one by the new.
        test_data = [
            {
                'tests': {
                    'Test1.testproc1': {
                        'expected': 'PASS',
                        'actual': 'PASS',
                        'time': 1,
                    }
                },
                'build_number': '123',
                'version': jsonresults.JSON_RESULTS_HIERARCHICAL_VERSION,
                'builder_name': builder,
                'blink_revision': '12345',
                'seconds_since_epoch': 1406123456,
                'num_failures_by_type': {
                    'FAIL': 0,
                    'SKIP': 0,
                    'PASS': 1
                },
                'chromium_revision': '67890',
            },
            {
                'tests': {
                    'Test2.testproc2': {
                        'expected': 'PASS',
                        'actual': 'FAIL',
                        'time': 2,
                    }
                },
                'build_number': '456',
                'version': jsonresults.JSON_RESULTS_HIERARCHICAL_VERSION,
                'builder_name': builder,
                'blink_revision': '54321',
                'seconds_since_epoch': 1406654321,
                'num_failures_by_type': {
                    'FAIL': 1,
                    'SKIP': 0,
                    'PASS': 0
                },
                'chromium_revision': '98765',
            },
        ]

        # Seed results files using the old name.
        JsonResults.update(
            master['name'], builder, test_type, test_data[0], None, True)
        # Update results files using the new name.
        JsonResults.update(master['url_name'], builder, test_type,
                           test_data[1], master['name'], True)
        # Verify that the file keyed by url_name contains both sets of
        # results.
        files = TestFile.get_files(
            master['url_name'], builder, test_type, None, None, limit=3)
        self.assertEqual(len(files), 2)
        for f in files:
            j = json.loads(f.data)
            self.assertItemsEqual(j[builder]['blinkRevision'],
                                  ['12345', '54321'])
    finally:
        # Always tear down the testbed, even when an assertion fails.
        tb.deactivate()
1231
@staticmethod
def test_normalize_results_with_top_level_results_key_does_not_crash():
    """_normalize_results must tolerate a top-level directory that is
    literally named "results" without raising."""
    payload = {
        'Linux Tests': {
            'results': {
                'foo': {
                    'results': [(1, 'P')],
                    'times': [(1, 1)],
                },
            },
        }
    }
    # Passes if no exception is raised.
    JsonResults._normalize_results(payload, 1, 2)
1241
1242
# Allow running this test file directly (e.g. `python jsonresults_unittest.py`).
if __name__ == '__main__':
    unittest.main()
OLDNEW
« no previous file with comments | « appengine/test_results/test/jsonresults_test.py ('k') | appengine/test_results/test/testfile_test.py » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698