| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright 2011 Google Inc. | 2 * Copyright 2011 Google Inc. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
| 5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
| 6 */ | 6 */ |
| 7 | 7 |
| 8 /* | 8 /* |
| 9 * Code for the "gm" (Golden Master) rendering comparison tool. | 9 * Code for the "gm" (Golden Master) rendering comparison tool. |
| 10 * | 10 * |
| (...skipping 254 matching lines...) |
| 265 int renderModeCount = 0; | 265 int renderModeCount = 0; |
| 266 this->fRenderModesEncountered.find(renderModeDescriptor, &renderModeCount); | 266 this->fRenderModesEncountered.find(renderModeDescriptor, &renderModeCount); |
| 267 renderModeCount++; | 267 renderModeCount++; |
| 268 this->fRenderModesEncountered.set(renderModeDescriptor, renderModeCount); | 268 this->fRenderModesEncountered.set(renderModeDescriptor, renderModeCount); |
| 269 | 269 |
| 270 if (errorCombination.isEmpty()) { | 270 if (errorCombination.isEmpty()) { |
| 271 return; | 271 return; |
| 272 } | 272 } |
| 273 | 273 |
| 274 // Things to do only if there is some error condition. | 274 // Things to do only if there is some error condition. |
| 275 SkString fullName = make_name(name.c_str(), renderModeDescriptor); | 275 SkString fullName = name; |
| 276 fullName.append(renderModeDescriptor); |
| 276 for (int typeInt = 0; typeInt <= kLast_ErrorType; typeInt++) { | 277 for (int typeInt = 0; typeInt <= kLast_ErrorType; typeInt++) { |
| 277 ErrorType type = static_cast<ErrorType>(typeInt); | 278 ErrorType type = static_cast<ErrorType>(typeInt); |
| 278 if (errorCombination.includes(type)) { | 279 if (errorCombination.includes(type)) { |
| 279 fFailedTests[type].push_back(fullName); | 280 fFailedTests[type].push_back(fullName); |
| 280 } | 281 } |
| 281 } | 282 } |
| 282 } | 283 } |
| 283 | 284 |
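Note: as a rough standalone illustration of the bookkeeping in this hunk (counting every render mode encountered, then filing the failing test's full name under each error type), here is a minimal sketch using std:: containers in place of Skia's SkTDict/SkTArray. The types and names below are simplified stand-ins, not the actual gm classes.

    #include <map>
    #include <set>
    #include <string>
    #include <vector>

    // Simplified stand-ins for gm's ErrorType / ErrorCombination.
    enum ErrorType { kMissingExpectations, kExpectationsMismatch, kRenderModeMismatch };

    struct FailureTracker {
        std::map<std::string, int> renderModesEncountered;          // descriptor -> count
        std::map<ErrorType, std::vector<std::string>> failedTests;  // type -> failing test names

        void addTestResult(const std::string& name,
                           const std::string& renderModeDescriptor,
                           const std::set<ErrorType>& errors) {
            // Count every render mode we see, whether the test passed or failed.
            ++renderModesEncountered[renderModeDescriptor];
            if (errors.empty()) {
                return;
            }
            // On failure, record the full name (base name + descriptor)
            // under each error type that occurred.
            std::string fullName = name + renderModeDescriptor;
            for (ErrorType type : errors) {
                failedTests[type].push_back(fullName);
            }
        }
    };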
| 284 /** | 285 /** |
| 285 * Return the number of significant (non-ignorable) errors we have | 286 * Return the number of significant (non-ignorable) errors we have |
| (...skipping 390 matching lines...) |
| 676 * (if any) that we saw along the way. | 677 * (if any) that we saw along the way. |
| 677 * | 678 * |
| 678 * If fMismatchPath has been set, and there are pixel diffs, then the | 679 * If fMismatchPath has been set, and there are pixel diffs, then the |
| 679 * actual bitmap will be written out to a file within fMismatchPath. | 680 * actual bitmap will be written out to a file within fMismatchPath. |
| 680 * | 681 * |
| 681 * @param expectations what expectations to compare actualBitmap against | 682 * @param expectations what expectations to compare actualBitmap against |
| 682 * @param actualBitmap the image we actually generated | 683 * @param actualBitmap the image we actually generated |
| 683 * @param baseNameString name of test without renderModeDescriptor added | 684 * @param baseNameString name of test without renderModeDescriptor added |
| 684 * @param renderModeDescriptor e.g., "-rtree", "-deferred" | 685 * @param renderModeDescriptor e.g., "-rtree", "-deferred" |
| 685 * @param addToJsonSummary whether to add these results (both actual and | 686 * @param addToJsonSummary whether to add these results (both actual and |
| 686 * expected) to the JSON summary | 687 * expected) to the JSON summary. Regardless of this setting, if |
| 687 * | 688 * we find an image mismatch in this test, we will write these |
| 688 * TODO: For now, addToJsonSummary is only set to true within | 689 * results to the JSON summary. (This is so that we will always |
| 689 * compare_test_results_to_stored_expectations(), so results of our | 690 * report errors across rendering modes, such as pipe vs tiled. |
| 690 * in-memory comparisons (Rtree vs regular, etc.) are not written to the | 691 * See https://codereview.chromium.org/13650002/ ) |
| 691 * JSON summary. We may wish to change that. | |
| 692 */ | 692 */ |
| 693 ErrorCombination compare_to_expectations(Expectations expectations, | 693 ErrorCombination compare_to_expectations(Expectations expectations, |
| 694 const SkBitmap& actualBitmap, | 694 const SkBitmap& actualBitmap, |
| 695 const SkString& baseNameString, | 695 const SkString& baseNameString, |
| 696 const char renderModeDescriptor[], | 696 const char renderModeDescriptor[], |
| 697 bool addToJsonSummary=false) { | 697 bool addToJsonSummary) { |
| 698 ErrorCombination errors; | 698 ErrorCombination errors; |
| 699 Checksum actualChecksum = SkBitmapChecksummer::Compute64(actualBitmap); | 699 Checksum actualChecksum = SkBitmapChecksummer::Compute64(actualBitmap); |
| 700 SkString completeNameString = baseNameString; | 700 SkString completeNameString = baseNameString; |
| 701 completeNameString.append(renderModeDescriptor); | 701 completeNameString.append(renderModeDescriptor); |
| 702 const char* completeName = completeNameString.c_str(); | 702 const char* completeName = completeNameString.c_str(); |
| 703 | 703 |
| 704 if (expectations.empty()) { | 704 if (expectations.empty()) { |
| 705 errors.add(kMissingExpectations_ErrorType); | 705 errors.add(kMissingExpectations_ErrorType); |
| 706 } else if (!expectations.match(actualChecksum)) { | 706 } else if (!expectations.match(actualChecksum)) { |
| 707 errors.add(kImageMismatch_ErrorType); | 707 addToJsonSummary = true; |
| 708 // The error mode we record depends on whether this was running |
| 709 // in a non-standard renderMode. |
| 710 if ('\0' == *renderModeDescriptor) { |
| 711 errors.add(kExpectationsMismatch_ErrorType); |
| 712 } else { |
| 713 errors.add(kRenderModeMismatch_ErrorType); |
| 714 } |
| 708 | 715 |
| 709 // Write out the "actuals" for any mismatches, if we have | 716 // Write out the "actuals" for any mismatches, if we have |
| 710 // been directed to do so. | 717 // been directed to do so. |
| 711 if (fMismatchPath) { | 718 if (fMismatchPath) { |
| 712 SkString path = | 719 SkString path = |
| 713 make_filename(fMismatchPath, renderModeDescriptor, | 720 make_filename(fMismatchPath, renderModeDescriptor, |
| 714 baseNameString.c_str(), "png"); | 721 baseNameString.c_str(), "png"); |
| 715 write_bitmap(path, actualBitmap); | 722 write_bitmap(path, actualBitmap); |
| 716 } | 723 } |
| 717 | 724 |
| (...skipping 28 matching lines...) |
| 746 asJsonValue(actualChecksum); | 753 asJsonValue(actualChecksum); |
| 747 if (result.isEmpty()) { | 754 if (result.isEmpty()) { |
| 748 this->fJsonActualResults_Succeeded[testName] = actualResults; | 755 this->fJsonActualResults_Succeeded[testName] = actualResults; |
| 749 } else { | 756 } else { |
| 750 if (ignoreFailure) { | 757 if (ignoreFailure) { |
| 751 // TODO: Once we have added the ability to compare | 758 // TODO: Once we have added the ability to compare |
| 752 // actual results against expectations in a JSON file | 759 // actual results against expectations in a JSON file |
| 753 // (where we can set ignore-failure to either true or | 760 // (where we can set ignore-failure to either true or |
| 754 // false), add test cases that exercise ignored | 761 // false), add test cases that exercise ignored |
| 755 // failures (both for kMissingExpectations_ErrorType | 762 // failures (both for kMissingExpectations_ErrorType |
| 756 // and kImageMismatch_ErrorType). | 763 // and kExpectationsMismatch_ErrorType). |
| 757 this->fJsonActualResults_FailureIgnored[testName] = | 764 this->fJsonActualResults_FailureIgnored[testName] = |
| 758 actualResults; | 765 actualResults; |
| 759 } else { | 766 } else { |
| 760 if (result.includes(kMissingExpectations_ErrorType)) { | 767 if (result.includes(kMissingExpectations_ErrorType)) { |
| 761 // TODO: What about the case where there IS an | 768 // TODO: What about the case where there IS an |
| 762 // expected image checksum, but that gm test | 769 // expected image checksum, but that gm test |
| 763 // doesn't actually run? For now, those cases | 770 // doesn't actually run? For now, those cases |
| 764 // will always be ignored, because gm only looks | 771 // will always be ignored, because gm only looks |
| 765 // at expectations that correspond to gm tests | 772 // at expectations that correspond to gm tests |
| 766 // that were actually run. | 773 // that were actually run. |
| 767 // | 774 // |
| 768 // Once we have the ability to express | 775 // Once we have the ability to express |
| 769 // expectations as a JSON file, we should fix this | 776 // expectations as a JSON file, we should fix this |
| 770 // (and add a test case for which an expectation | 777 // (and add a test case for which an expectation |
| 771 // is given but the test is never run). | 778 // is given but the test is never run). |
| 772 this->fJsonActualResults_NoComparison[testName] = | 779 this->fJsonActualResults_NoComparison[testName] = |
| 773 actualResults; | 780 actualResults; |
| 774 } | 781 } |
| 775 if (result.includes(kImageMismatch_ErrorType)) { | 782 if (result.includes(kExpectationsMismatch_ErrorType) || |
| 783 result.includes(kRenderModeMismatch_ErrorType)) { |
| 776 this->fJsonActualResults_Failed[testName] = actualResults; | 784 this->fJsonActualResults_Failed[testName] = actualResults; |
| 777 } | 785 } |
| 778 } | 786 } |
| 779 } | 787 } |
| 780 } | 788 } |
| 781 | 789 |
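Note: the four fJsonActualResults_* dictionaries end up partitioning every test by outcome (succeeded, failure-ignored, no-comparison, failed). A minimal sketch of that bucketing, assuming plain std::map dictionaries keyed by test name instead of the Json::Value objects gm actually uses:

    #include <cstdint>
    #include <map>
    #include <set>
    #include <string>

    enum ErrorType { kMissingExpectations, kExpectationsMismatch, kRenderModeMismatch };

    // Stand-in for the four fJsonActualResults_* dictionaries: each maps a test
    // name to the checksum of the image that was actually produced.
    struct JsonSummary {
        std::map<std::string, std::uint64_t> succeeded, failureIgnored, noComparison, failed;

        void addActualResult(const std::string& testName, std::uint64_t actualChecksum,
                             const std::set<ErrorType>& result, bool ignoreFailure) {
            if (result.empty()) {
                succeeded[testName] = actualChecksum;
            } else if (ignoreFailure) {
                failureIgnored[testName] = actualChecksum;
            } else {
                // A missing expectation and an image mismatch go into separate
                // buckets; the real code checks for each independently.
                if (result.count(kMissingExpectations)) {
                    noComparison[testName] = actualChecksum;
                }
                if (result.count(kExpectationsMismatch) || result.count(kRenderModeMismatch)) {
                    failed[testName] = actualChecksum;
                }
            }
        }
    };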
| 782 /** | 790 /** |
| 783 * Add this test to the JSON collection of expected results. | 791 * Add this test to the JSON collection of expected results. |
| 784 */ | 792 */ |
| 785 void add_expected_results_to_json_summary(const char testName[], | 793 void add_expected_results_to_json_summary(const char testName[], |
| (...skipping 73 matching lines...) |
| 859 * @param gm which test generated the bitmap | 867 * @param gm which test generated the bitmap |
| 860 * @param gRec | 868 * @param gRec |
| 861 * @param renderModeDescriptor | 869 * @param renderModeDescriptor |
| 862 * @param actualBitmap actual bitmap generated by this run | 870 * @param actualBitmap actual bitmap generated by this run |
| 863 * @param referenceBitmap bitmap we expected to be generated | 871 * @param referenceBitmap bitmap we expected to be generated |
| 864 */ | 872 */ |
| 865 ErrorCombination compare_test_results_to_reference_bitmap( | 873 ErrorCombination compare_test_results_to_reference_bitmap( |
| 866 GM* gm, const ConfigData& gRec, const char renderModeDescriptor [], | 874 GM* gm, const ConfigData& gRec, const char renderModeDescriptor [], |
| 867 SkBitmap& actualBitmap, const SkBitmap* referenceBitmap) { | 875 SkBitmap& actualBitmap, const SkBitmap* referenceBitmap) { |
| 868 | 876 |
| 869 // TODO(epoger): This method is run to compare results across | |
| 870 // different rendering modes (as opposed to | |
| 871 // compare_test_results_to_stored_expectations(), which | |
| 872 // compares results against expectations stored on disk). If | |
| 873 // we would like the GenerateGMs step to distinguish between | |
| 874 // those two types of mismatches, we should report image | |
| 875 // mismatches in here with a different ErrorType. | |
| 876 SkASSERT(referenceBitmap); | 877 SkASSERT(referenceBitmap); |
| 877 SkString name = make_name(gm->shortName(), gRec.fName); | 878 SkString name = make_name(gm->shortName(), gRec.fName); |
| 878 Expectations expectations(*referenceBitmap); | 879 Expectations expectations(*referenceBitmap); |
| 879 return compare_to_expectations(expectations, actualBitmap, | 880 return compare_to_expectations(expectations, actualBitmap, |
| 880 name, renderModeDescriptor); | 881 name, renderModeDescriptor, false); |
| 881 } | 882 } |
| 882 | 883 |
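Note: the classification rule introduced in this change can be summarized in isolation: a checksum mismatch against expectations loaded from disk (empty render-mode descriptor) is an expectations mismatch, while a mismatch between two in-memory renderings (e.g. "-rtree", "-pipe") is a render-mode mismatch. A small self-contained sketch of that rule, with simplified stand-in types rather than gm's Expectations/ErrorCombination:

    #include <set>

    enum ErrorType { kMissingExpectations, kExpectationsMismatch, kRenderModeMismatch };

    std::set<ErrorType> classifyMismatch(bool expectationsEmpty, bool checksumMatches,
                                         const char renderModeDescriptor[]) {
        std::set<ErrorType> errors;
        if (expectationsEmpty) {
            errors.insert(kMissingExpectations);
        } else if (!checksumMatches) {
            // Empty descriptor: we compared against stored expectations.
            // Non-empty descriptor: we compared one rendering mode against another.
            if ('\0' == *renderModeDescriptor) {
                errors.insert(kExpectationsMismatch);
            } else {
                errors.insert(kRenderModeMismatch);
            }
        }
        return errors;
    }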
| 883 static SkPicture* generate_new_picture(GM* gm, BbhType bbhType, uint32_t recordFlags, | 884 static SkPicture* generate_new_picture(GM* gm, BbhType bbhType, uint32_t recordFlags, |
| 884 SkScalar scale = SK_Scalar1) { | 885 SkScalar scale = SK_Scalar1) { |
| 885 // Pictures are refcounted so must be on heap | 886 // Pictures are refcounted so must be on heap |
| 886 SkPicture* pict; | 887 SkPicture* pict; |
| 887 int width = SkScalarCeilToInt(SkScalarMul(SkIntToScalar(gm->getISize().width()), scale)); | 888 int width = SkScalarCeilToInt(SkScalarMul(SkIntToScalar(gm->getISize().width()), scale)); |
| 888 int height = SkScalarCeilToInt(SkScalarMul(SkIntToScalar(gm->getISize().height()), scale)); | 889 int height = SkScalarCeilToInt(SkScalarMul(SkIntToScalar(gm->getISize().height()), scale)); |
| 889 | 890 |
| 890 if (kTileGrid_BbhType == bbhType) { | 891 if (kTileGrid_BbhType == bbhType) { |
| (...skipping 98 matching lines...) |
| 989 // TODO(epoger): Report this as a new ErrorType, | 990 // TODO(epoger): Report this as a new ErrorType, |
| 990 // something like kImageGeneration_ErrorType? | 991 // something like kImageGeneration_ErrorType? |
| 991 return kEmpty_ErrorCombination; | 992 return kEmpty_ErrorCombination; |
| 992 } | 993 } |
| 993 return compare_test_results_to_reference_bitmap( | 994 return compare_test_results_to_reference_bitmap( |
| 994 gm, gRec, "-deferred", bitmap, &referenceBitmap); | 995 gm, gRec, "-deferred", bitmap, &referenceBitmap); |
| 995 } | 996 } |
| 996 return kEmpty_ErrorCombination; | 997 return kEmpty_ErrorCombination; |
| 997 } | 998 } |
| 998 | 999 |
| 999 ErrorCombination test_pipe_playback(GM* gm, | 1000 ErrorCombination test_pipe_playback(GM* gm, const ConfigData& gRec, |
| 1000 const ConfigData& gRec, | 1001 const SkBitmap& referenceBitmap, bool simulateFailure) { |
| 1001 const SkBitmap& referenceBitmap) { | |
| 1002 ErrorCombination errors; | 1002 ErrorCombination errors; |
| 1003 for (size_t i = 0; i < SK_ARRAY_COUNT(gPipeWritingFlagCombos); ++i) { | 1003 for (size_t i = 0; i < SK_ARRAY_COUNT(gPipeWritingFlagCombos); ++i) { |
| 1004 SkBitmap bitmap; | 1004 SkBitmap bitmap; |
| 1005 SkISize size = gm->getISize(); | 1005 SkISize size = gm->getISize(); |
| 1006 setup_bitmap(gRec, size, &bitmap); | 1006 setup_bitmap(gRec, size, &bitmap); |
| 1007 SkCanvas canvas(bitmap); | 1007 SkCanvas canvas(bitmap); |
| 1008 installFilter(&canvas); | 1008 installFilter(&canvas); |
| 1009 PipeController pipeController(&canvas); | 1009 PipeController pipeController(&canvas); |
| 1010 SkGPipeWriter writer; | 1010 SkGPipeWriter writer; |
| 1011 SkCanvas* pipeCanvas = writer.startRecording( | 1011 SkCanvas* pipeCanvas = writer.startRecording( |
| 1012 &pipeController, gPipeWritingFlagCombos[i].flags); | 1012 &pipeController, gPipeWritingFlagCombos[i].flags); |
| 1013 invokeGM(gm, pipeCanvas, false, false); | 1013 if (!simulateFailure) { |
| 1014 invokeGM(gm, pipeCanvas, false, false); |
| 1015 } |
| 1014 complete_bitmap(&bitmap); | 1016 complete_bitmap(&bitmap); |
| 1015 writer.endRecording(); | 1017 writer.endRecording(); |
| 1016 SkString string("-pipe"); | 1018 SkString string("-pipe"); |
| 1017 string.append(gPipeWritingFlagCombos[i].name); | 1019 string.append(gPipeWritingFlagCombos[i].name); |
| 1018 errors.add(compare_test_results_to_reference_bitmap( | 1020 errors.add(compare_test_results_to_reference_bitmap( |
| 1019 gm, gRec, string.c_str(), bitmap, &referenceBitmap)); | 1021 gm, gRec, string.c_str(), bitmap, &referenceBitmap)); |
| 1020 if (!errors.isEmpty()) { | 1022 if (!errors.isEmpty()) { |
| 1021 break; | 1023 break; |
| 1022 } | 1024 } |
| 1023 } | 1025 } |
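Note: the new simulateFailure parameter simply skips the draw, so the pipe rendering stays blank and no longer matches the reference bitmap; the comparison step then reports it as a render-mode mismatch. A toy sketch of that mechanism, with a plain std::vector standing in for SkBitmap and hypothetical helper names:

    #include <cassert>
    #include <vector>

    // Toy stand-in for SkBitmap: just a flat vector of pixel values.
    using Bitmap = std::vector<unsigned>;

    Bitmap renderThroughPipe(bool simulateFailure) {
        Bitmap bitmap(64, 0x00000000u);       // setup_bitmap(): start from a blank canvas
        if (!simulateFailure) {
            // invokeGM(): the real code plays the GM through the pipe canvas here.
            for (unsigned& px : bitmap) { px = 0xFF00FF00u; }
        }
        return bitmap;                        // complete_bitmap()/endRecording() elided
    }

    int main() {
        Bitmap reference = renderThroughPipe(/*simulateFailure=*/false);
        // With the failure simulated, the pipe result no longer matches the
        // reference, which the comparison step reports as a render-mode mismatch.
        assert(renderThroughPipe(/*simulateFailure=*/true) != reference);
        return 0;
    }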
| (...skipping 146 matching lines...) |
| 1170 DEFINE_string(modulo, "", "[--modulo <remainder> <divisor>]: only run tests for which " | 1172 DEFINE_string(modulo, "", "[--modulo <remainder> <divisor>]: only run tests for which " |
| 1171 "testIndex %% divisor == remainder."); | 1173 "testIndex %% divisor == remainder."); |
| 1172 DEFINE_bool(pdf, true, "Exercise the pdf rendering test pass."); | 1174 DEFINE_bool(pdf, true, "Exercise the pdf rendering test pass."); |
| 1173 DEFINE_bool(pipe, true, "Exercise the SkGPipe replay test pass."); | 1175 DEFINE_bool(pipe, true, "Exercise the SkGPipe replay test pass."); |
| 1174 DEFINE_string2(readPath, r, "", "Read reference images from this dir, and report " | 1176 DEFINE_string2(readPath, r, "", "Read reference images from this dir, and report " |
| 1175 "any differences between those and the newly generated ones."); | 1177 "any differences between those and the newly generated ones."); |
| 1176 DEFINE_bool(replay, true, "Exercise the SkPicture replay test pass."); | 1178 DEFINE_bool(replay, true, "Exercise the SkPicture replay test pass."); |
| 1177 DEFINE_string2(resourcePath, i, "", "Directory that stores image resources."); | 1179 DEFINE_string2(resourcePath, i, "", "Directory that stores image resources."); |
| 1178 DEFINE_bool(rtree, true, "Exercise the R-Tree variant of SkPicture test pass."); | 1180 DEFINE_bool(rtree, true, "Exercise the R-Tree variant of SkPicture test pass."); |
| 1179 DEFINE_bool(serialize, true, "Exercise the SkPicture serialization & deserialization test pass."); | 1181 DEFINE_bool(serialize, true, "Exercise the SkPicture serialization & deserialization test pass."); |
| | 1182 DEFINE_bool(simulatePipePlaybackFailure, false, "Simulate a rendering failure in pipe mode only."); |
| 1180 DEFINE_bool(tiledPipe, false, "Exercise tiled SkGPipe replay."); | 1183 DEFINE_bool(tiledPipe, false, "Exercise tiled SkGPipe replay."); |
| 1181 DEFINE_bool(tileGrid, true, "Exercise the tile grid variant of SkPicture."); | 1184 DEFINE_bool(tileGrid, true, "Exercise the tile grid variant of SkPicture."); |
| 1182 DEFINE_string(tileGridReplayScales, "", "Space separated list of floating-point scale " | 1185 DEFINE_string(tileGridReplayScales, "", "Space separated list of floating-point scale " |
| 1183 "factors to be used for tileGrid playback testing. Default value:
1.0"); | 1186 "factors to be used for tileGrid playback testing. Default value:
1.0"); |
| 1184 DEFINE_string(writeJsonSummaryPath, "", "Write a JSON-formatted result summary to this file."); | 1187 DEFINE_string(writeJsonSummaryPath, "", "Write a JSON-formatted result summary to this file."); |
| 1185 DEFINE_bool2(verbose, v, false, "Print diagnostics (e.g. list each config to be tested)."); | 1188 DEFINE_bool2(verbose, v, false, "Print diagnostics (e.g. list each config to be tested)."); |
| 1186 DEFINE_string2(writePath, w, "", "Write rendered images into this directory."); | 1189 DEFINE_string2(writePath, w, "", "Write rendered images into this directory."); |
| 1187 DEFINE_string2(writePicturePath, wp, "", "Write .skp files into this directory."); | 1190 DEFINE_string2(writePicturePath, wp, "", "Write .skp files into this directory."); |
| 1188 | 1191 |
| 1189 static int findConfig(const char config[]) { | 1192 static int findConfig(const char config[]) { |
| (...skipping 214 matching lines...) |
| 1404 const char* pictureSuffix = "skp"; | 1407 const char* pictureSuffix = "skp"; |
| 1405 SkString path = make_filename(FLAGS_writePicturePath[0], "", | 1408 SkString path = make_filename(FLAGS_writePicturePath[0], "", |
| 1406 gm->shortName(), pictureSuffix); | 1409 gm->shortName(), pictureSuffix); |
| 1407 SkFILEWStream stream(path.c_str()); | 1410 SkFILEWStream stream(path.c_str()); |
| 1408 pict->serialize(&stream); | 1411 pict->serialize(&stream); |
| 1409 } | 1412 } |
| 1410 | 1413 |
| 1411 errorsForAllModes.add(pictErrors); | 1414 errorsForAllModes.add(pictErrors); |
| 1412 } | 1415 } |
| 1413 | 1416 |
| 1414 // TODO: add a test in which the RTree rendering results in a | |
| 1415 // different bitmap than the standard rendering. It should | |
| 1416 // show up as failed in the JSON summary, and should be listed | |
| 1417 // in the stdout also. | |
| 1418 if (!(gmFlags & GM::kSkipPicture_Flag) && FLAGS_rtree) { | 1417 if (!(gmFlags & GM::kSkipPicture_Flag) && FLAGS_rtree) { |
| 1419 SkPicture* pict = gmmain.generate_new_picture( | 1418 SkPicture* pict = gmmain.generate_new_picture( |
| 1420 gm, kRTree_BbhType, SkPicture::kUsePathBoundsForClip_RecordingFlag); | 1419 gm, kRTree_BbhType, SkPicture::kUsePathBoundsForClip_RecordingFlag); |
| 1421 SkAutoUnref aur(pict); | 1420 SkAutoUnref aur(pict); |
| 1422 SkBitmap bitmap; | 1421 SkBitmap bitmap; |
| 1423 gmmain.generate_image_from_picture(gm, compareConfig, pict, &bitmap); | 1422 gmmain.generate_image_from_picture(gm, compareConfig, pict, &bitmap); |
| 1424 errorsForAllModes.add(gmmain.compare_test_results_to_reference_bitmap( | 1423 errorsForAllModes.add(gmmain.compare_test_results_to_reference_bitmap( |
| 1425 gm, compareConfig, "-rtree", bitmap, &comparisonBitmap)); | 1424 gm, compareConfig, "-rtree", bitmap, &comparisonBitmap)); |
| 1426 } | 1425 } |
| 1427 | 1426 |
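Note: every alternate rendering mode in this function (picture replay, rtree, tile grid, deferred, pipe) follows the same pattern: render the GM through that mode, compare the result against the comparison bitmap from the default path, and accumulate any mismatch into errorsForAllModes. A condensed sketch of that accumulation pattern, using simplified stand-in types rather than the real GM/ConfigData/ErrorCombination classes:

    #include <functional>
    #include <set>
    #include <string>
    #include <vector>

    enum ErrorType { kRenderModeMismatch };
    using Bitmap   = std::vector<unsigned>;
    using ErrorSet = std::set<ErrorType>;

    // Each alternate mode renders the same GM its own way.
    struct RenderMode {
        std::string descriptor;            // e.g. "-rtree", "-deferred", "-pipe"
        std::function<Bitmap()> render;
    };

    ErrorSet compareToReference(const Bitmap& actual, const Bitmap& reference) {
        ErrorSet errors;
        if (actual != reference) {
            errors.insert(kRenderModeMismatch);
        }
        return errors;
    }

    // Render through every enabled mode and accumulate any mismatches against
    // the bitmap produced by the default rendering path.
    ErrorSet runAlternateModes(const std::vector<RenderMode>& modes,
                               const Bitmap& comparisonBitmap) {
        ErrorSet errorsForAllModes;
        for (const RenderMode& mode : modes) {
            ErrorSet errors = compareToReference(mode.render(), comparisonBitmap);
            errorsForAllModes.insert(errors.begin(), errors.end());
        }
        return errorsForAllModes;
    }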
| (...skipping 24 matching lines...) |
| 1452 gm, compareConfig, suffix.c_str(), bitmap, &comparisonBitmap)); | 1451 gm, compareConfig, suffix.c_str(), bitmap, &comparisonBitmap)); |
| 1453 } | 1452 } |
| 1454 } | 1453 } |
| 1455 | 1454 |
| 1456 // run the pipe centric GM steps | 1455 // run the pipe centric GM steps |
| 1457 if (!(gmFlags & GM::kSkipPipe_Flag)) { | 1456 if (!(gmFlags & GM::kSkipPipe_Flag)) { |
| 1458 | 1457 |
| 1459 ErrorCombination pipeErrors; | 1458 ErrorCombination pipeErrors; |
| 1460 | 1459 |
| 1461 if (FLAGS_pipe) { | 1460 if (FLAGS_pipe) { |
| 1462 pipeErrors.add(gmmain.test_pipe_playback(gm, compareConfig, comparisonBitmap)); | 1461 pipeErrors.add(gmmain.test_pipe_playback(gm, compareConfig, comparisonBitmap, |
| 1462 FLAGS_simulatePipePlaybackFailure)); |
| 1463 } | 1463 } |
| 1464 | 1464 |
| 1465 if ((pipeErrors.isEmpty()) && | 1465 if ((pipeErrors.isEmpty()) && |
| 1466 FLAGS_tiledPipe && !(gmFlags & GM::kSkipTiled_Flag)) { | 1466 FLAGS_tiledPipe && !(gmFlags & GM::kSkipTiled_Flag)) { |
| 1467 pipeErrors.add(gmmain.test_tiled_pipe_playback(gm, compareConfig, comparisonBitmap)); | 1467 pipeErrors.add(gmmain.test_tiled_pipe_playback(gm, compareConfig, comparisonBitmap)); |
| 1468 } | 1468 } |
| 1469 | 1469 |
| 1470 errorsForAllModes.add(pipeErrors); | 1470 errorsForAllModes.add(pipeErrors); |
| 1471 } | 1471 } |
| 1472 return errorsForAllModes; | 1472 return errorsForAllModes; |
| (...skipping 343 matching lines...) |
| 1816 if (FLAGS_forceBWtext) { | 1816 if (FLAGS_forceBWtext) { |
| 1817 canvas->setDrawFilter(SkNEW(BWTextDrawFilter))->unref(); | 1817 canvas->setDrawFilter(SkNEW(BWTextDrawFilter))->unref(); |
| 1818 } | 1818 } |
| 1819 } | 1819 } |
| 1820 | 1820 |
| 1821 #if !defined(SK_BUILD_FOR_IOS) && !defined(SK_BUILD_FOR_NACL) | 1821 #if !defined(SK_BUILD_FOR_IOS) && !defined(SK_BUILD_FOR_NACL) |
| 1822 int main(int argc, char * const argv[]) { | 1822 int main(int argc, char * const argv[]) { |
| 1823 return tool_main(argc, (char**) argv); | 1823 return tool_main(argc, (char**) argv); |
| 1824 } | 1824 } |
| 1825 #endif | 1825 #endif |