OLD | NEW |
1 /* | 1 /* |
2 * Copyright 2011 Google Inc. | 2 * Copyright 2011 Google Inc. |
3 * | 3 * |
4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
6 */ | 6 */ |
7 | 7 |
8 /* | 8 /* |
9 * Code for the "gm" (Golden Master) rendering comparison tool. | 9 * Code for the "gm" (Golden Master) rendering comparison tool. |
10 * | 10 * |
(...skipping 254 matching lines...)
265 int renderModeCount = 0; | 265 int renderModeCount = 0; |
266     this->fRenderModesEncountered.find(renderModeDescriptor, &renderModeCount); | 266     this->fRenderModesEncountered.find(renderModeDescriptor, &renderModeCount); |
267 renderModeCount++; | 267 renderModeCount++; |
268     this->fRenderModesEncountered.set(renderModeDescriptor, renderModeCount); | 268     this->fRenderModesEncountered.set(renderModeDescriptor, renderModeCount); |
269 | 269 |
270 if (errorCombination.isEmpty()) { | 270 if (errorCombination.isEmpty()) { |
271 return; | 271 return; |
272 } | 272 } |
273 | 273 |
274 // Things to do only if there is some error condition. | 274 // Things to do only if there is some error condition. |
275 SkString fullName = make_name(name.c_str(), renderModeDescriptor); | 275 SkString fullName = name; |
| 276 fullName.append(renderModeDescriptor); |
276 for (int typeInt = 0; typeInt <= kLast_ErrorType; typeInt++) { | 277 for (int typeInt = 0; typeInt <= kLast_ErrorType; typeInt++) { |
277 ErrorType type = static_cast<ErrorType>(typeInt); | 278 ErrorType type = static_cast<ErrorType>(typeInt); |
278 if (errorCombination.includes(type)) { | 279 if (errorCombination.includes(type)) { |
279 fFailedTests[type].push_back(fullName); | 280 fFailedTests[type].push_back(fullName); |
280 } | 281 } |
281 } | 282 } |
282 } | 283 } |
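For context, the find/increment/set sequence on fRenderModesEncountered and the per-error-type failure lists above amount to a simple tally-and-classify pattern. A minimal standalone sketch of that pattern, using standard containers instead of Skia's SkTDict/SkString/SkTArray types (ResultRecorder and its members are illustrative names only, not the actual gm code):

#include <map>
#include <set>
#include <string>
#include <vector>

enum ErrorType {
    kMissingExpectations_ErrorType,
    kExpectationsMismatch_ErrorType,
    kLast_ErrorType = kExpectationsMismatch_ErrorType
};

struct ResultRecorder {
    // renderModeDescriptor -> number of times that render mode was exercised
    std::map<std::string, int> renderModesEncountered;
    // per-ErrorType list of fully qualified test names that hit that error
    std::vector<std::string> failedTests[kLast_ErrorType + 1];

    void record(const std::string& name,
                const std::string& renderModeDescriptor,
                const std::set<ErrorType>& errors) {
        ++renderModesEncountered[renderModeDescriptor];  // find/increment/set in one step
        if (errors.empty()) {
            return;                                      // nothing more to do on success
        }
        // On any error, file the full test name under each error type that was hit.
        std::string fullName = name + renderModeDescriptor;
        for (int t = 0; t <= kLast_ErrorType; ++t) {
            if (errors.count(static_cast<ErrorType>(t))) {
                failedTests[t].push_back(fullName);
            }
        }
    }
};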
283 | 284 |
284 /** | 285 /** |
285 * Return the number of significant (non-ignorable) errors we have | 286 * Return the number of significant (non-ignorable) errors we have |
(...skipping 411 matching lines...)
697 bool addToJsonSummary=false) { | 698 bool addToJsonSummary=false) { |
698 ErrorCombination errors; | 699 ErrorCombination errors; |
699 Checksum actualChecksum = SkBitmapChecksummer::Compute64(actualBitmap); | 700 Checksum actualChecksum = SkBitmapChecksummer::Compute64(actualBitmap); |
700 SkString completeNameString = baseNameString; | 701 SkString completeNameString = baseNameString; |
701 completeNameString.append(renderModeDescriptor); | 702 completeNameString.append(renderModeDescriptor); |
702 const char* completeName = completeNameString.c_str(); | 703 const char* completeName = completeNameString.c_str(); |
703 | 704 |
704 if (expectations.empty()) { | 705 if (expectations.empty()) { |
705 errors.add(kMissingExpectations_ErrorType); | 706 errors.add(kMissingExpectations_ErrorType); |
706 } else if (!expectations.match(actualChecksum)) { | 707 } else if (!expectations.match(actualChecksum)) { |
707 errors.add(kImageMismatch_ErrorType); | 708 errors.add(kExpectationsMismatch_ErrorType); |
708 | 709 |
709 // Write out the "actuals" for any mismatches, if we have | 710 // Write out the "actuals" for any mismatches, if we have |
710 // been directed to do so. | 711 // been directed to do so. |
711 if (fMismatchPath) { | 712 if (fMismatchPath) { |
712 SkString path = | 713 SkString path = |
713 make_filename(fMismatchPath, renderModeDescriptor, | 714 make_filename(fMismatchPath, renderModeDescriptor, |
714 baseNameString.c_str(), "png"); | 715 baseNameString.c_str(), "png"); |
715 write_bitmap(path, actualBitmap); | 716 write_bitmap(path, actualBitmap); |
716 } | 717 } |
717 | 718 |
(...skipping 28 matching lines...)
746 asJsonValue(actualChecksum); | 747 asJsonValue(actualChecksum); |
747 if (result.isEmpty()) { | 748 if (result.isEmpty()) { |
748 this->fJsonActualResults_Succeeded[testName] = actualResults; | 749 this->fJsonActualResults_Succeeded[testName] = actualResults; |
749 } else { | 750 } else { |
750 if (ignoreFailure) { | 751 if (ignoreFailure) { |
751 // TODO: Once we have added the ability to compare | 752 // TODO: Once we have added the ability to compare |
752 // actual results against expectations in a JSON file | 753 // actual results against expectations in a JSON file |
753 // (where we can set ignore-failure to either true or | 754 // (where we can set ignore-failure to either true or |
754 // false), add test cases that exercise ignored | 755 // false), add test cases that exercise ignored |
755 // failures (both for kMissingExpectations_ErrorType | 756 // failures (both for kMissingExpectations_ErrorType |
756 // and kImageMismatch_ErrorType). | 757 // and kExpectationsMismatch_ErrorType). |
757 this->fJsonActualResults_FailureIgnored[testName] = | 758 this->fJsonActualResults_FailureIgnored[testName] = |
758 actualResults; | 759 actualResults; |
759 } else { | 760 } else { |
760 if (result.includes(kMissingExpectations_ErrorType)) { | 761 if (result.includes(kMissingExpectations_ErrorType)) { |
761 // TODO: What about the case where there IS an | 762 // TODO: What about the case where there IS an |
762 // expected image checksum, but that gm test | 763 // expected image checksum, but that gm test |
763 // doesn't actually run? For now, those cases | 764 // doesn't actually run? For now, those cases |
764 // will always be ignored, because gm only looks | 765 // will always be ignored, because gm only looks |
765 // at expectations that correspond to gm tests | 766 // at expectations that correspond to gm tests |
766 // that were actually run. | 767 // that were actually run. |
767 // | 768 // |
768 // Once we have the ability to express | 769 // Once we have the ability to express |
769 // expectations as a JSON file, we should fix this | 770 // expectations as a JSON file, we should fix this |
770 // (and add a test case for which an expectation | 771 // (and add a test case for which an expectation |
771 // is given but the test is never run). | 772 // is given but the test is never run). |
772 this->fJsonActualResults_NoComparison[testName] = | 773 this->fJsonActualResults_NoComparison[testName] = |
773 actualResults; | 774 actualResults; |
774 } | 775 } |
775 if (result.includes(kImageMismatch_ErrorType)) { | 776 if (result.includes(kExpectationsMismatch_ErrorType)) { |
776 this->fJsonActualResults_Failed[testName] = actualResults; | 777 this->fJsonActualResults_Failed[testName] = actualResults; |
777 } | 778 } |
778 } | 779 } |
779 } | 780 } |
780 } | 781 } |
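The control flow in add_actual_results_to_json_summary() reduces to a four-way classification of each test's actual checksum. A rough sketch under simplified assumptions, with plain std::map buckets standing in for the Json::Value dictionaries and a bare uint64_t for the Checksum/Expectations types (ActualResultsSummary and classify_actual_result are hypothetical names, not the gm API):

#include <cstdint>
#include <map>
#include <string>

struct ActualResultsSummary {
    // testName -> actual checksum, split by outcome; mirrors the four
    // fJsonActualResults_* dictionaries used above.
    std::map<std::string, uint64_t> succeeded, failureIgnored, noComparison, failed;
};

// expectedChecksumKnown == false models the expectations.empty() case.
void classify_actual_result(ActualResultsSummary* summary,
                            const std::string& testName,
                            uint64_t actualChecksum,
                            bool expectedChecksumKnown,
                            uint64_t expectedChecksum,
                            bool ignoreFailure) {
    if (!expectedChecksumKnown) {
        // kMissingExpectations_ErrorType: nothing to compare against.
        (ignoreFailure ? summary->failureIgnored
                       : summary->noComparison)[testName] = actualChecksum;
    } else if (actualChecksum != expectedChecksum) {
        // kExpectationsMismatch_ErrorType: rendered output differs from expectation.
        (ignoreFailure ? summary->failureIgnored
                       : summary->failed)[testName] = actualChecksum;
    } else {
        summary->succeeded[testName] = actualChecksum;
    }
}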
781 | 782 |
782 /** | 783 /** |
783 * Add this test to the JSON collection of expected results. | 784 * Add this test to the JSON collection of expected results. |
784 */ | 785 */ |
785 void add_expected_results_to_json_summary(const char testName[], | 786 void add_expected_results_to_json_summary(const char testName[], |
(...skipping 203 matching lines...)
989 // TODO(epoger): Report this as a new ErrorType, | 990 // TODO(epoger): Report this as a new ErrorType, |
990 // something like kImageGeneration_ErrorType? | 991 // something like kImageGeneration_ErrorType? |
991 return kEmpty_ErrorCombination; | 992 return kEmpty_ErrorCombination; |
992 } | 993 } |
993 return compare_test_results_to_reference_bitmap( | 994 return compare_test_results_to_reference_bitmap( |
994 gm, gRec, "-deferred", bitmap, &referenceBitmap); | 995 gm, gRec, "-deferred", bitmap, &referenceBitmap); |
995 } | 996 } |
996 return kEmpty_ErrorCombination; | 997 return kEmpty_ErrorCombination; |
997 } | 998 } |
998 | 999 |
999 ErrorCombination test_pipe_playback(GM* gm, | 1000 ErrorCombination test_pipe_playback(GM* gm, const ConfigData& gRec, |
1000                                     const ConfigData& gRec, | 1001                                     const SkBitmap& referenceBitmap, bool simulateFailure) { |
1001                                     const SkBitmap& referenceBitmap) { | |
1002 ErrorCombination errors; | 1002 ErrorCombination errors; |
1003 for (size_t i = 0; i < SK_ARRAY_COUNT(gPipeWritingFlagCombos); ++i) { | 1003 for (size_t i = 0; i < SK_ARRAY_COUNT(gPipeWritingFlagCombos); ++i) { |
1004 SkBitmap bitmap; | 1004 SkBitmap bitmap; |
1005 SkISize size = gm->getISize(); | 1005 SkISize size = gm->getISize(); |
1006 setup_bitmap(gRec, size, &bitmap); | 1006 setup_bitmap(gRec, size, &bitmap); |
1007 SkCanvas canvas(bitmap); | 1007 SkCanvas canvas(bitmap); |
1008 installFilter(&canvas); | 1008 installFilter(&canvas); |
1009 PipeController pipeController(&canvas); | 1009 PipeController pipeController(&canvas); |
1010 SkGPipeWriter writer; | 1010 SkGPipeWriter writer; |
1011 SkCanvas* pipeCanvas = writer.startRecording( | 1011 SkCanvas* pipeCanvas = writer.startRecording( |
1012 &pipeController, gPipeWritingFlagCombos[i].flags); | 1012 &pipeController, gPipeWritingFlagCombos[i].flags); |
1013 invokeGM(gm, pipeCanvas, false, false); | 1013 if (!simulateFailure) { |
| 1014 invokeGM(gm, pipeCanvas, false, false); |
| 1015 } |
1014 complete_bitmap(&bitmap); | 1016 complete_bitmap(&bitmap); |
1015 writer.endRecording(); | 1017 writer.endRecording(); |
1016 SkString string("-pipe"); | 1018 SkString string("-pipe"); |
1017 string.append(gPipeWritingFlagCombos[i].name); | 1019 string.append(gPipeWritingFlagCombos[i].name); |
1018 errors.add(compare_test_results_to_reference_bitmap( | 1020 errors.add(compare_test_results_to_reference_bitmap( |
1019 gm, gRec, string.c_str(), bitmap, &referenceBitmap)); | 1021 gm, gRec, string.c_str(), bitmap, &referenceBitmap)); |
1020 if (!errors.isEmpty()) { | 1022 if (!errors.isEmpty()) { |
1021 break; | 1023 break; |
1022 } | 1024 } |
1023 } | 1025 } |
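Stripped of Skia types, the revised test_pipe_playback() loop does the following: for each pipe-flag combination, render through the pipe unless a failure is being simulated, compare against the reference, and stop at the first mismatch. A sketch of that shape (FlagCombo, renderViaPipe, and test_pipe_playback_model are stand-ins, not the real gm/SkGPipe API):

#include <cstdint>
#include <functional>
#include <string>
#include <vector>

struct FlagCombo { std::string name; uint32_t flags; };

bool test_pipe_playback_model(const std::vector<FlagCombo>& combos,
                              const std::function<std::string(uint32_t)>& renderViaPipe,
                              const std::string& referenceRendering,
                              bool simulateFailure) {
    for (const FlagCombo& combo : combos) {
        // When simulating failure the draw is skipped entirely, so the
        // "rendering" stays empty and is guaranteed not to match the reference.
        std::string rendering = simulateFailure ? std::string()
                                                : renderViaPipe(combo.flags);
        if (rendering != referenceRendering) {
            return false;   // report the first mismatching combo and bail out
        }
    }
    return true;
}

Because a skipped draw can never match a non-empty reference, passing simulateFailure=true forces the comparison and error-reporting path to be exercised, which appears to be the point of the new flag.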
(...skipping 146 matching lines...)
1170 DEFINE_string(modulo, "", "[--modulo <remainder> <divisor>]: only run tests for which " | 1172 DEFINE_string(modulo, "", "[--modulo <remainder> <divisor>]: only run tests for which " |
1171 "testIndex %% divisor == remainder."); | 1173 "testIndex %% divisor == remainder."); |
1172 DEFINE_bool(pdf, true, "Exercise the pdf rendering test pass."); | 1174 DEFINE_bool(pdf, true, "Exercise the pdf rendering test pass."); |
1173 DEFINE_bool(pipe, true, "Exercise the SkGPipe replay test pass."); | 1175 DEFINE_bool(pipe, true, "Exercise the SkGPipe replay test pass."); |
1174 DEFINE_string2(readPath, r, "", "Read reference images from this dir, and report " | 1176 DEFINE_string2(readPath, r, "", "Read reference images from this dir, and report " |
1175 "any differences between those and the newly generated ones."); | 1177 "any differences between those and the newly generated ones."); |
1176 DEFINE_bool(replay, true, "Exercise the SkPicture replay test pass."); | 1178 DEFINE_bool(replay, true, "Exercise the SkPicture replay test pass."); |
1177 DEFINE_string2(resourcePath, i, "", "Directory that stores image resources."); | 1179 DEFINE_string2(resourcePath, i, "", "Directory that stores image resources."); |
1178 DEFINE_bool(rtree, true, "Exercise the R-Tree variant of SkPicture test pass."); | 1180 DEFINE_bool(rtree, true, "Exercise the R-Tree variant of SkPicture test pass."); |
1179 DEFINE_bool(serialize, true, "Exercise the SkPicture serialization & deserialization test pass."); | 1181 DEFINE_bool(serialize, true, "Exercise the SkPicture serialization & deserialization test pass."); |
 | 1182 DEFINE_bool(simulatePipePlaybackFailure, false, "Simulate a rendering failure in pipe mode only."); |
1180 DEFINE_bool(tiledPipe, false, "Exercise tiled SkGPipe replay."); | 1183 DEFINE_bool(tiledPipe, false, "Exercise tiled SkGPipe replay."); |
1181 DEFINE_bool(tileGrid, true, "Exercise the tile grid variant of SkPicture."); | 1184 DEFINE_bool(tileGrid, true, "Exercise the tile grid variant of SkPicture."); |
1182 DEFINE_string(tileGridReplayScales, "", "Space separated list of floating-point scale " | 1185 DEFINE_string(tileGridReplayScales, "", "Space separated list of floating-point scale " |
1183               "factors to be used for tileGrid playback testing. Default value: 1.0"); | 1186               "factors to be used for tileGrid playback testing. Default value: 1.0"); |
1184 DEFINE_string(writeJsonSummaryPath, "", "Write a JSON-formatted result summary to this file."); | 1187 DEFINE_string(writeJsonSummaryPath, "", "Write a JSON-formatted result summary to this file."); |
1185 DEFINE_bool2(verbose, v, false, "Print diagnostics (e.g. list each config to be tested)."); | 1188 DEFINE_bool2(verbose, v, false, "Print diagnostics (e.g. list each config to be tested)."); |
1186 DEFINE_string2(writePath, w, "", "Write rendered images into this directory."); | 1189 DEFINE_string2(writePath, w, "", "Write rendered images into this directory."); |
1187 DEFINE_string2(writePicturePath, wp, "", "Write .skp files into this directory."); | 1190 DEFINE_string2(writePicturePath, wp, "", "Write .skp files into this directory."); |
1188 | 1191 |
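As the --modulo help text above says, a test runs only when testIndex % divisor == remainder, which lets several bots split one test list between them. A tiny illustrative helper showing the check (should_run_test is hypothetical, not part of gm):

bool should_run_test(int testIndex, int remainder, int divisor) {
    if (divisor <= 0) {
        return true;                       // sharding disabled: run everything
    }
    return (testIndex % divisor) == remainder;
}

// e.g. with "--modulo 1 3", a run handles test indices 1, 4, 7, ...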
1189 static int findConfig(const char config[]) { | 1192 static int findConfig(const char config[]) { |
(...skipping 262 matching lines...)
1452 gm, compareConfig, suffix.c_str(), bitmap, &comparisonBitmap)); | 1455 gm, compareConfig, suffix.c_str(), bitmap, &comparisonBitmap)); |
1453 } | 1456 } |
1454 } | 1457 } |
1455 | 1458 |
1456 // run the pipe centric GM steps | 1459 // run the pipe centric GM steps |
1457 if (!(gmFlags & GM::kSkipPipe_Flag)) { | 1460 if (!(gmFlags & GM::kSkipPipe_Flag)) { |
1458 | 1461 |
1459 ErrorCombination pipeErrors; | 1462 ErrorCombination pipeErrors; |
1460 | 1463 |
1461 if (FLAGS_pipe) { | 1464 if (FLAGS_pipe) { |
1462             pipeErrors.add(gmmain.test_pipe_playback(gm, compareConfig, comparisonBitmap)); | 1465             pipeErrors.add(gmmain.test_pipe_playback(gm, compareConfig, comparisonBitmap, |
 | 1466                                                      FLAGS_simulatePipePlaybackFailure)); |
1463 } | 1467 } |
1464 | 1468 |
1465 if ((pipeErrors.isEmpty()) && | 1469 if ((pipeErrors.isEmpty()) && |
1466 FLAGS_tiledPipe && !(gmFlags & GM::kSkipTiled_Flag)) { | 1470 FLAGS_tiledPipe && !(gmFlags & GM::kSkipTiled_Flag)) { |
1467             pipeErrors.add(gmmain.test_tiled_pipe_playback(gm, compareConfig, comparisonBitmap)); | 1471             pipeErrors.add(gmmain.test_tiled_pipe_playback(gm, compareConfig, comparisonBitmap)); |
1468 } | 1472 } |
1469 | 1473 |
1470 errorsForAllModes.add(pipeErrors); | 1474 errorsForAllModes.add(pipeErrors); |
1471 } | 1475 } |
1472 return errorsForAllModes; | 1476 return errorsForAllModes; |
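The pipe-centric block above runs tiled-pipe playback only when plain pipe playback came back clean, and folds both results into the caller's ErrorCombination. A compact model of that aggregation, with a bitmask standing in for the real ErrorCombination (all names here are illustrative, not the gm implementation):

struct ErrorCombinationModel {
    unsigned bits = 0;
    bool isEmpty() const { return bits == 0; }
    void add(const ErrorCombinationModel& other) { bits |= other.bits; }
};

ErrorCombinationModel run_pipe_modes(bool pipeEnabled, bool tiledPipeEnabled,
                                     ErrorCombinationModel (*testPipe)(bool simulateFailure),
                                     ErrorCombinationModel (*testTiledPipe)(),
                                     bool simulatePipePlaybackFailure) {
    ErrorCombinationModel pipeErrors;
    if (pipeEnabled) {
        pipeErrors.add(testPipe(simulatePipePlaybackFailure));
    }
    // Tiled-pipe playback is only worth running if plain pipe playback passed.
    if (pipeErrors.isEmpty() && tiledPipeEnabled) {
        pipeErrors.add(testTiledPipe());
    }
    return pipeErrors;   // folded into errorsForAllModes by the caller
}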
(...skipping 343 matching lines...)
1816 if (FLAGS_forceBWtext) { | 1820 if (FLAGS_forceBWtext) { |
1817 canvas->setDrawFilter(SkNEW(BWTextDrawFilter))->unref(); | 1821 canvas->setDrawFilter(SkNEW(BWTextDrawFilter))->unref(); |
1818 } | 1822 } |
1819 } | 1823 } |
1820 | 1824 |
1821 #if !defined(SK_BUILD_FOR_IOS) && !defined(SK_BUILD_FOR_NACL) | 1825 #if !defined(SK_BUILD_FOR_IOS) && !defined(SK_BUILD_FOR_NACL) |
1822 int main(int argc, char * const argv[]) { | 1826 int main(int argc, char * const argv[]) { |
1823 return tool_main(argc, (char**) argv); | 1827 return tool_main(argc, (char**) argv); |
1824 } | 1828 } |
1825 #endif | 1829 #endif |