Chromium Code Reviews | OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright 2011 Google Inc. | 2 * Copyright 2011 Google Inc. |
| 3 * | 3 * |
| 4 * Use of this source code is governed by a BSD-style license that can be | 4 * Use of this source code is governed by a BSD-style license that can be |
| 5 * found in the LICENSE file. | 5 * found in the LICENSE file. |
| 6 */ | 6 */ |
| 7 | 7 |
| 8 /* | 8 /* |
| 9 * Code for the "gm" (Golden Master) rendering comparison tool. | 9 * Code for the "gm" (Golden Master) rendering comparison tool. |
| 10 * | 10 * |
| (...skipping 68 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 79 | 79 |
| 80 #ifdef SK_BUILD_FOR_MAC | 80 #ifdef SK_BUILD_FOR_MAC |
| 81 #include "SkCGUtils.h" | 81 #include "SkCGUtils.h" |
| 82 #define CAN_IMAGE_PDF 1 | 82 #define CAN_IMAGE_PDF 1 |
| 83 #else | 83 #else |
| 84 #define CAN_IMAGE_PDF 0 | 84 #define CAN_IMAGE_PDF 0 |
| 85 #endif | 85 #endif |
| 86 | 86 |
| 87 using namespace skiagm; | 87 using namespace skiagm; |
| 88 | 88 |
| 89 struct FailRec { | |
| 90 SkString fName; | |
| 91 bool fIsPixelError; | |
| 92 | |
| 93 FailRec() : fIsPixelError(false) {} | |
| 94 FailRec(const SkString& name) : fName(name), fIsPixelError(false) {} | |
| 95 }; | |
| 96 | |
| 97 class Iter { | 89 class Iter { |
| 98 public: | 90 public: |
| 99 Iter() { | 91 Iter() { |
| 100 this->reset(); | 92 this->reset(); |
| 101 } | 93 } |
| 102 | 94 |
| 103 void reset() { | 95 void reset() { |
| 104 fReg = GMRegistry::Head(); | 96 fReg = GMRegistry::Head(); |
| 105 } | 97 } |
| 106 | 98 |
| (...skipping 76 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 183 }; | 175 }; |
| 184 | 176 |
| 185 class GMMain { | 177 class GMMain { |
| 186 public: | 178 public: |
| 187 GMMain() { | 179 GMMain() { |
| 188 // Set default values of member variables, which tool_main() | 180 // Set default values of member variables, which tool_main() |
| 189 // may override. | 181 // may override. |
| 190 fUseFileHierarchy = false; | 182 fUseFileHierarchy = false; |
| 191 fIgnorableErrorCombination.add(kMissingExpectations_ErrorType); | 183 fIgnorableErrorCombination.add(kMissingExpectations_ErrorType); |
| 192 fMismatchPath = NULL; | 184 fMismatchPath = NULL; |
| 185 fTestsRun = 0; | |
| 193 } | 186 } |
| 194 | 187 |
| 195 SkString make_name(const char shortName[], const char configName[]) { | 188 SkString make_name(const char shortName[], const char configName[]) { |
| 196 SkString name; | 189 SkString name; |
| 197 if (0 == strlen(configName)) { | 190 if (0 == strlen(configName)) { |
| 198 name.append(shortName); | 191 name.append(shortName); |
| 199 } else if (fUseFileHierarchy) { | 192 } else if (fUseFileHierarchy) { |
| 200 name.appendf("%s%c%s", configName, SkPATH_SEPARATOR, shortName); | 193 name.appendf("%s%c%s", configName, SkPATH_SEPARATOR, shortName); |
| 201 } else { | 194 } else { |
| 202 name.appendf("%s_%s", shortName, configName); | 195 name.appendf("%s_%s", shortName, configName); |
| (...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 236 // TODO(epoger): Now that we have removed force_all_opaque() | 229 // TODO(epoger): Now that we have removed force_all_opaque() |
| 237 // from this method, we should be able to get rid of the | 230 // from this method, we should be able to get rid of the |
| 238 // transformation to 8888 format also. | 231 // transformation to 8888 format also. |
| 239 SkBitmap copy; | 232 SkBitmap copy; |
| 240 bitmap.copyTo(©, SkBitmap::kARGB_8888_Config); | 233 bitmap.copyTo(©, SkBitmap::kARGB_8888_Config); |
| 241 return SkImageEncoder::EncodeFile(path.c_str(), copy, | 234 return SkImageEncoder::EncodeFile(path.c_str(), copy, |
| 242 SkImageEncoder::kPNG_Type, 100); | 235 SkImageEncoder::kPNG_Type, 100); |
| 243 } | 236 } |
| 244 | 237 |
| 245 /** | 238 /** |
| 246 * Records the errors encountered in fFailedTests, except for any error | 239 * Records the results of this test in fTestsRun and fFailedTests. |
| 247 * types we want to ignore. | 240 * |
| 241 * We even record successes, and errors that we regard as | |
| 242 * "ignorable"; we can filter them out later. | |
| 248 */ | 243 */ |
| 249 void RecordError(const ErrorCombination& errorCombination, const SkString& n ame, | 244 void RecordTestResults(const ErrorCombination& errorCombination, const SkStr ing& name, |
| 250 const char renderModeDescriptor []) { | 245 const char renderModeDescriptor []) { |
| 251 // The common case: no error means nothing to record. | 246 fTestsRun++; |
| 252 if (errorCombination.isEmpty()) { | 247 if (errorCombination.isEmpty()) { |
| 253 return; | 248 return; |
| 254 } | 249 } |
| 255 | 250 SkString fullName = make_name(name.c_str(), renderModeDescriptor); |
| 256 // If only certain error type(s) were reported, we know we can ignore th em. | 251 for (int typeInt = 0; typeInt <= kLast_ErrorType; typeInt++) { |
| 257 if (errorCombination.minus(fIgnorableErrorCombination).isEmpty()) { | 252 ErrorType type = static_cast<ErrorType>(typeInt); |
| 258 return; | 253 if (errorCombination.includes(type)) { |
| 259 } | 254 fFailedTests[type].push_back(fullName); |
| 260 | |
| 261 FailRec& rec = fFailedTests.push_back(make_name(name.c_str(), renderMode Descriptor)); | |
| 262 rec.fIsPixelError = errorCombination.includes(kImageMismatch_ErrorType); | |
| 263 } | |
| 264 | |
| 265 // List contents of fFailedTests via SkDebug. | |
| 266 void ListErrors() { | |
| 267 for (int i = 0; i < fFailedTests.count(); ++i) { | |
| 268 if (fFailedTests[i].fIsPixelError) { | |
| 269 gm_fprintf(stderr, "\t\t%s pixel_error\n", fFailedTests[i].fName .c_str()); | |
| 270 } else { | |
| 271 gm_fprintf(stderr, "\t\t%s\n", fFailedTests[i].fName.c_str()); | |
| 272 } | 255 } |
| 273 } | 256 } |
| 274 } | 257 } |
| 275 | 258 |
| 259 /** | |
| 260 * Return the number of significant (non-ignorable) errors we have | |
| 261 * encountered so far. | |
| 262 */ | |
| 263 int NumSignificantErrors() { | |
| 264 int significantErrors = 0; | |
| 265 for (int typeInt = 0; typeInt <= kLast_ErrorType; typeInt++) { | |
| 266 ErrorType type = static_cast<ErrorType>(typeInt); | |
| 267 if (!fIgnorableErrorCombination.includes(type)) { | |
| 268 significantErrors += fFailedTests[type].count(); | |
| 269 } | |
| 270 } | |
| 271 return significantErrors; | |
| 272 } | |
| 273 | |
| 274 /** | |
| 275 * List contents of fFailedTests to stdout. | |
| 276 */ | |
| 277 void ListErrors() { | |
| 278 // First, print a single summary line. | |
| 279 SkString summary; | |
| 280 summary.appendf("Ran %d tests:", fTestsRun); | |
| 281 for (int typeInt = 0; typeInt <= kLast_ErrorType; typeInt++) { | |
| 282 ErrorType type = static_cast<ErrorType>(typeInt); | |
| 283 summary.appendf(" %s=%d", getErrorTypeName(type), fFailedTests[type] .count()); | |
| 284 } | |
| 285 gm_fprintf(stdout, "%s\n", summary.c_str()); | |
| 286 | |
| 287 // Now, for each failure type, list the tests that failed that way. | |
| 288 for (int typeInt = 0; typeInt <= kLast_ErrorType; typeInt++) { | |
| 289 SkString line; | |
| 290 ErrorType type = static_cast<ErrorType>(typeInt); | |
| 291 if (fIgnorableErrorCombination.includes(type)) { | |
| 292 line.append("[ ] "); | |
| 293 } else { | |
| 294 line.append("[*] "); | |
| 295 } | |
| 296 | |
| 297 SkTArray<SkString> *failedTestsOfThisType = &fFailedTests[type]; | |
| 298 int count = failedTestsOfThisType->count(); | |
| 299 line.appendf("%d %s:", count, getErrorTypeName(type)); | |
| 300 for (int i = 0; i < count; ++i) { | |
| 301 line.append(" "); | |
| 302 line.append((*failedTestsOfThisType)[i]); | |
| 303 } | |
| 304 gm_fprintf(stdout, "%s\n", line.c_str()); | |
| 305 } | |
| 306 gm_fprintf(stdout, "(results marked with [*] will cause nonzero return v alue)\n"); | |
| 307 } | |
| 308 | |
| 276 static bool write_document(const SkString& path, | 309 static bool write_document(const SkString& path, |
| 277 const SkDynamicMemoryWStream& document) { | 310 const SkDynamicMemoryWStream& document) { |
| 278 SkFILEWStream stream(path.c_str()); | 311 SkFILEWStream stream(path.c_str()); |
| 279 SkAutoDataUnref data(document.copyToData()); | 312 SkAutoDataUnref data(document.copyToData()); |
| 280 return stream.writeData(data.get()); | 313 return stream.writeData(data.get()); |
| 281 } | 314 } |
| 282 | 315 |
| 283 /** | 316 /** |
| 284 * Prepare an SkBitmap to render a GM into. | 317 * Prepare an SkBitmap to render a GM into. |
| 285 * | 318 * |
| (...skipping 217 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 503 if (kXPS_Backend == gRec.fBackend) { | 536 if (kXPS_Backend == gRec.fBackend) { |
| 504 path = make_filename(writePath, renderModeDescriptor, name.c_str(), | 537 path = make_filename(writePath, renderModeDescriptor, name.c_str(), |
| 505 "xps"); | 538 "xps"); |
| 506 success = write_document(path, *document); | 539 success = write_document(path, *document); |
| 507 } | 540 } |
| 508 if (success) { | 541 if (success) { |
| 509 return kEmpty_ErrorCombination; | 542 return kEmpty_ErrorCombination; |
| 510 } else { | 543 } else { |
| 511 gm_fprintf(stderr, "FAILED to write %s\n", path.c_str()); | 544 gm_fprintf(stderr, "FAILED to write %s\n", path.c_str()); |
| 512 ErrorCombination errors(kWritingReferenceImage_ErrorType); | 545 ErrorCombination errors(kWritingReferenceImage_ErrorType); |
| 513 RecordError(errors, name, renderModeDescriptor); | 546 // TODO(epoger): Don't call RecordTestResults() here... |
|
epoger
2013/04/02 16:00:14
This TODO indicates a relatively minor problem tha
| |
| 547 // Instead, we should make sure to call RecordTestResults | |
| 548 // exactly ONCE per test. (Otherwise, gmmain.fTestsRun | |
| 549 // will be incremented twice for this test: once in | |
| 550 // compare_test_results_to_stored_expectations() before | |
| 551 // that method calls this one, and again here.) | |
| 552 // | |
| 553 // When we make that change, we should probably add a | |
| 554 // WritingReferenceImage test to the gm self-tests.) | |
| 555 RecordTestResults(errors, name, renderModeDescriptor); | |
| 514 return errors; | 556 return errors; |
| 515 } | 557 } |
| 516 } | 558 } |
| 517 | 559 |
| 518 /** | 560 /** |
| 519 * Log more detail about the mismatch between expectedBitmap and | 561 * Log more detail about the mismatch between expectedBitmap and |
| 520 * actualBitmap. | 562 * actualBitmap. |
| 521 */ | 563 */ |
| 522 void report_bitmap_diffs(const SkBitmap& expectedBitmap, const SkBitmap& act ualBitmap, | 564 void report_bitmap_diffs(const SkBitmap& expectedBitmap, const SkBitmap& act ualBitmap, |
| 523 const char *testName) { | 565 const char *testName) { |
| (...skipping 91 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 615 write_bitmap(path, actualBitmap); | 657 write_bitmap(path, actualBitmap); |
| 616 } | 658 } |
| 617 | 659 |
| 618 // If we have access to a single expected bitmap, log more | 660 // If we have access to a single expected bitmap, log more |
| 619 // detail about the mismatch. | 661 // detail about the mismatch. |
| 620 const SkBitmap *expectedBitmapPtr = expectations.asBitmap(); | 662 const SkBitmap *expectedBitmapPtr = expectations.asBitmap(); |
| 621 if (NULL != expectedBitmapPtr) { | 663 if (NULL != expectedBitmapPtr) { |
| 622 report_bitmap_diffs(*expectedBitmapPtr, actualBitmap, completeNa me); | 664 report_bitmap_diffs(*expectedBitmapPtr, actualBitmap, completeNa me); |
| 623 } | 665 } |
| 624 } | 666 } |
| 625 RecordError(errors, baseNameString, renderModeDescriptor); | 667 RecordTestResults(errors, baseNameString, renderModeDescriptor); |
| 626 | 668 |
| 627 if (addToJsonSummary) { | 669 if (addToJsonSummary) { |
| 628 add_actual_results_to_json_summary(completeName, actualChecksum, err ors, | 670 add_actual_results_to_json_summary(completeName, actualChecksum, err ors, |
| 629 expectations.ignoreFailure()); | 671 expectations.ignoreFailure()); |
| 630 add_expected_results_to_json_summary(completeName, expectations); | 672 add_expected_results_to_json_summary(completeName, expectations); |
| 631 } | 673 } |
| 632 | 674 |
| 633 return errors; | 675 return errors; |
| 634 } | 676 } |
| 635 | 677 |
| (...skipping 95 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 731 errors.add(compare_to_expectations(expectations, actualBitmap, | 773 errors.add(compare_to_expectations(expectations, actualBitmap, |
| 732 name, "", true)); | 774 name, "", true)); |
| 733 } else { | 775 } else { |
| 734 // If we are running without expectations, we still want to | 776 // If we are running without expectations, we still want to |
| 735 // record the actual results. | 777 // record the actual results. |
| 736 Checksum actualChecksum = | 778 Checksum actualChecksum = |
| 737 SkBitmapChecksummer::Compute64(actualBitmap); | 779 SkBitmapChecksummer::Compute64(actualBitmap); |
| 738 add_actual_results_to_json_summary(name.c_str(), actualChecksum, | 780 add_actual_results_to_json_summary(name.c_str(), actualChecksum, |
| 739 ErrorCombination(kMissingExpectat ions_ErrorType), | 781 ErrorCombination(kMissingExpectat ions_ErrorType), |
| 740 false); | 782 false); |
| 783 RecordTestResults(ErrorCombination(kMissingExpectations_ErrorType), name, ""); | |
| 741 } | 784 } |
| 742 | 785 |
| 743 // TODO: Consider moving this into compare_to_expectations(), | 786 // TODO: Consider moving this into compare_to_expectations(), |
| 744 // similar to fMismatchPath... for now, we don't do that, because | 787 // similar to fMismatchPath... for now, we don't do that, because |
| 745 // we don't want to write out the actual bitmaps for all | 788 // we don't want to write out the actual bitmaps for all |
| 746 // renderModes of all tests! That would be a lot of files. | 789 // renderModes of all tests! That would be a lot of files. |
| 747 if (writePath && (gRec.fFlags & kWrite_ConfigFlag)) { | 790 if (writePath && (gRec.fFlags & kWrite_ConfigFlag)) { |
| 748 errors.add(write_reference_image(gRec, writePath, "", | 791 errors.add(write_reference_image(gRec, writePath, "", |
| 749 name, actualBitmap, pdf)); | 792 name, actualBitmap, pdf)); |
| 750 } | 793 } |
| 751 | 794 |
| 752 return errors; | 795 return errors; |
| 753 } | 796 } |
| 754 | 797 |
| 755 /** | 798 /** |
| 756 * Compare actualBitmap to referenceBitmap. | 799 * Compare actualBitmap to referenceBitmap. |
| 757 * | 800 * |
| 758 * @param gm which test generated the bitmap | 801 * @param gm which test generated the bitmap |
| 759 * @param gRec | 802 * @param gRec |
| 760 * @param renderModeDescriptor | 803 * @param renderModeDescriptor |
| 761 * @param actualBitmap actual bitmap generated by this run | 804 * @param actualBitmap actual bitmap generated by this run |
| 762 * @param referenceBitmap bitmap we expected to be generated | 805 * @param referenceBitmap bitmap we expected to be generated |
| 763 */ | 806 */ |
| 764 ErrorCombination compare_test_results_to_reference_bitmap( | 807 ErrorCombination compare_test_results_to_reference_bitmap( |
| 765 GM* gm, const ConfigData& gRec, const char renderModeDescriptor [], | 808 GM* gm, const ConfigData& gRec, const char renderModeDescriptor [], |
| 766 SkBitmap& actualBitmap, const SkBitmap* referenceBitmap) { | 809 SkBitmap& actualBitmap, const SkBitmap* referenceBitmap) { |
| 767 | 810 |
| 811 // TODO(epoger): This method is run to compare results across | |
| 812 // different rendering modes (as opposed to | |
| 813 // compare_test_results_to_stored_expectations(), which | |
| 814 // compares results against expectations stored on disk). If | |
| 815 // we would like the GenerateGMs step to distinguish between | |
| 816 // those two types of mismatches, we should report image | |
| 817 // mismatches in here with a different ErrorType. | |
|
borenet
2013/03/29 05:55:14
The number of TODOs in this CL is worrisome to me.
epoger
2013/04/02 16:00:14
I have added a review comment at each TODO...
Thi
| |
| 768 SkASSERT(referenceBitmap); | 818 SkASSERT(referenceBitmap); |
| 769 SkString name = make_name(gm->shortName(), gRec.fName); | 819 SkString name = make_name(gm->shortName(), gRec.fName); |
| 770 Expectations expectations(*referenceBitmap); | 820 Expectations expectations(*referenceBitmap); |
| 771 return compare_to_expectations(expectations, actualBitmap, | 821 return compare_to_expectations(expectations, actualBitmap, |
| 772 name, renderModeDescriptor); | 822 name, renderModeDescriptor); |
| 773 } | 823 } |
| 774 | 824 |
| 775 static SkPicture* generate_new_picture(GM* gm, BbhType bbhType, uint32_t rec ordFlags, | 825 static SkPicture* generate_new_picture(GM* gm, BbhType bbhType, uint32_t rec ordFlags, |
| 776 SkScalar scale = SK_Scalar1) { | 826 SkScalar scale = SK_Scalar1) { |
| 777 // Pictures are refcounted so must be on heap | 827 // Pictures are refcounted so must be on heap |
| (...skipping 95 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 873 // -deferred image, we exit early! We should fix this | 923 // -deferred image, we exit early! We should fix this |
| 874 // ASAP, because it is hiding -deferred errors... but for | 924 // ASAP, because it is hiding -deferred errors... but for |
| 875 // now, I'm leaving the logic as it is so that the | 925 // now, I'm leaving the logic as it is so that the |
| 876 // refactoring change | 926 // refactoring change |
| 877 // https://codereview.chromium.org/12992003/ is unblocked. | 927 // https://codereview.chromium.org/12992003/ is unblocked. |
| 878 // | 928 // |
| 879 // Filed as https://code.google.com/p/skia/issues/detail?id=1180 | 929 // Filed as https://code.google.com/p/skia/issues/detail?id=1180 |
| 880 // ('image-surface gm test is failing in "deferred" mode, | 930 // ('image-surface gm test is failing in "deferred" mode, |
| 881 // and gm is not reporting the failure') | 931 // and gm is not reporting the failure') |
| 882 if (errors.isEmpty()) { | 932 if (errors.isEmpty()) { |
| 933 // TODO(epoger): Report this as a new ErrorType, | |
| 934 // something like kImageGeneration_ErrorType? | |
|
epoger
2013/04/02 16:00:14
This TODO indicates a relatively minor problem tha
| |
| 883 return kEmpty_ErrorCombination; | 935 return kEmpty_ErrorCombination; |
| 884 } | 936 } |
| 885 return compare_test_results_to_reference_bitmap( | 937 return compare_test_results_to_reference_bitmap( |
| 886 gm, gRec, "-deferred", bitmap, &referenceBitmap); | 938 gm, gRec, "-deferred", bitmap, &referenceBitmap); |
| 887 } | 939 } |
| 888 return kEmpty_ErrorCombination; | 940 return kEmpty_ErrorCombination; |
| 889 } | 941 } |
| 890 | 942 |
| 891 ErrorCombination test_pipe_playback(GM* gm, | 943 ErrorCombination test_pipe_playback(GM* gm, |
| 892 const ConfigData& gRec, | 944 const ConfigData& gRec, |
| (...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 946 // | 998 // |
| 947 // member variables. | 999 // member variables. |
| 948 // They are public for now, to allow easier setting by tool_main(). | 1000 // They are public for now, to allow easier setting by tool_main(). |
| 949 // | 1001 // |
| 950 | 1002 |
| 951 bool fUseFileHierarchy; | 1003 bool fUseFileHierarchy; |
| 952 ErrorCombination fIgnorableErrorCombination; | 1004 ErrorCombination fIgnorableErrorCombination; |
| 953 | 1005 |
| 954 const char* fMismatchPath; | 1006 const char* fMismatchPath; |
| 955 | 1007 |
| 956 // information about all failed tests we have encountered so far | 1008 // collection of tests that have failed with each ErrorType |
| 957 SkTArray<FailRec> fFailedTests; | 1009 SkTArray<SkString> fFailedTests[kLast_ErrorType+1]; |
| 1010 int fTestsRun; | |
| 958 | 1011 |
| 959 // Where to read expectations (expected image checksums, etc.) from. | 1012 // Where to read expectations (expected image checksums, etc.) from. |
| 960 // If unset, we don't do comparisons. | 1013 // If unset, we don't do comparisons. |
| 961 SkAutoTUnref<ExpectationsSource> fExpectationsSource; | 1014 SkAutoTUnref<ExpectationsSource> fExpectationsSource; |
| 962 | 1015 |
| 963 // JSON summaries that we generate as we go (just for output). | 1016 // JSON summaries that we generate as we go (just for output). |
| 964 Json::Value fJsonExpectedResults; | 1017 Json::Value fJsonExpectedResults; |
| 965 Json::Value fJsonActualResults_Failed; | 1018 Json::Value fJsonActualResults_Failed; |
| 966 Json::Value fJsonActualResults_FailureIgnored; | 1019 Json::Value fJsonActualResults_FailureIgnored; |
| 967 Json::Value fJsonActualResults_NoComparison; | 1020 Json::Value fJsonActualResults_NoComparison; |
| (...skipping 290 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1258 */ | 1311 */ |
| 1259 ErrorCombination run_multiple_modes(GMMain &gmmain, GM *gm, const ConfigData &co mpareConfig, | 1312 ErrorCombination run_multiple_modes(GMMain &gmmain, GM *gm, const ConfigData &co mpareConfig, |
| 1260 const SkBitmap &comparisonBitmap, | 1313 const SkBitmap &comparisonBitmap, |
| 1261 const SkTDArray<SkScalar> &tileGridReplaySca les); | 1314 const SkTDArray<SkScalar> &tileGridReplaySca les); |
| 1262 ErrorCombination run_multiple_modes(GMMain &gmmain, GM *gm, const ConfigData &co mpareConfig, | 1315 ErrorCombination run_multiple_modes(GMMain &gmmain, GM *gm, const ConfigData &co mpareConfig, |
| 1263 const SkBitmap &comparisonBitmap, | 1316 const SkBitmap &comparisonBitmap, |
| 1264 const SkTDArray<SkScalar> &tileGridReplaySca les) { | 1317 const SkTDArray<SkScalar> &tileGridReplaySca les) { |
| 1265 ErrorCombination errorsForAllModes; | 1318 ErrorCombination errorsForAllModes; |
| 1266 uint32_t gmFlags = gm->getFlags(); | 1319 uint32_t gmFlags = gm->getFlags(); |
| 1267 | 1320 |
| 1268 // run the picture centric GM steps | 1321 // TODO(epoger): We should start recording any per-GM skipped |
|
epoger
2013/04/02 16:00:14
This TODO indicates a problem that already existed
| |
| 1322 // modes (i.e. those we skipped due to gmFlags) with a new | |
| 1323 // ErrorType, perhaps named kIntentionallySkipped_ErrorType. | |
| 1269 if (!(gmFlags & GM::kSkipPicture_Flag)) { | 1324 if (!(gmFlags & GM::kSkipPicture_Flag)) { |
| 1270 | 1325 |
| 1271 ErrorCombination pictErrors; | 1326 ErrorCombination pictErrors; |
| 1272 | 1327 |
| 1273 //SkAutoTUnref<SkPicture> pict(generate_new_picture(gm)); | 1328 //SkAutoTUnref<SkPicture> pict(generate_new_picture(gm)); |
| 1274 SkPicture* pict = gmmain.generate_new_picture(gm, kNone_BbhType, 0); | 1329 SkPicture* pict = gmmain.generate_new_picture(gm, kNone_BbhType, 0); |
| 1275 SkAutoUnref aur(pict); | 1330 SkAutoUnref aur(pict); |
| 1276 | 1331 |
| 1277 if (FLAGS_replay) { | 1332 if (FLAGS_replay) { |
| 1278 SkBitmap bitmap; | 1333 SkBitmap bitmap; |
| (...skipping 73 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1352 if ((pipeErrors.isEmpty()) && | 1407 if ((pipeErrors.isEmpty()) && |
| 1353 FLAGS_tiledPipe && !(gmFlags & GM::kSkipTiled_Flag)) { | 1408 FLAGS_tiledPipe && !(gmFlags & GM::kSkipTiled_Flag)) { |
| 1354 pipeErrors.add(gmmain.test_tiled_pipe_playback(gm, compareConfig, co mparisonBitmap)); | 1409 pipeErrors.add(gmmain.test_tiled_pipe_playback(gm, compareConfig, co mparisonBitmap)); |
| 1355 } | 1410 } |
| 1356 | 1411 |
| 1357 errorsForAllModes.add(pipeErrors); | 1412 errorsForAllModes.add(pipeErrors); |
| 1358 } | 1413 } |
| 1359 return errorsForAllModes; | 1414 return errorsForAllModes; |
| 1360 } | 1415 } |
| 1361 | 1416 |
| 1417 /** | |
| 1418 * Return a list of all entries in an array of strings as a single string | |
| 1419 * of this form: | |
| 1420 * "item1", "item2", "item3" | |
| 1421 */ | |
| 1422 SkString list_all(const SkTArray<SkString> &stringArray); | |
| 1423 SkString list_all(const SkTArray<SkString> &stringArray) { | |
| 1424 SkString total; | |
| 1425 for (int i = 0; i < stringArray.count(); i++) { | |
| 1426 if (i > 0) { | |
| 1427 total.append(", "); | |
| 1428 } | |
| 1429 total.append("\""); | |
| 1430 total.append(stringArray[i]); | |
| 1431 total.append("\""); | |
| 1432 } | |
| 1433 return total; | |
| 1434 } | |
| 1435 | |
| 1436 /** | |
| 1437 * Return a list of configuration names, as a single string of this form: | |
| 1438 * "item1", "item2", "item3" | |
| 1439 * | |
| 1440 * @param configs configurations, as a list of indices into gRec | |
| 1441 */ | |
| 1442 SkString list_all_config_names(const SkTDArray<size_t> &configs); | |
| 1443 SkString list_all_config_names(const SkTDArray<size_t> &configs) { | |
| 1444 SkString total; | |
| 1445 for (int i = 0; i < configs.count(); i++) { | |
| 1446 if (i > 0) { | |
| 1447 total.append(", "); | |
| 1448 } | |
| 1449 total.append("\""); | |
| 1450 total.append(gRec[configs[i]].fName); | |
| 1451 total.append("\""); | |
| 1452 } | |
| 1453 return total; | |
| 1454 } | |
| 1455 | |
| 1362 int tool_main(int argc, char** argv); | 1456 int tool_main(int argc, char** argv); |
| 1363 int tool_main(int argc, char** argv) { | 1457 int tool_main(int argc, char** argv) { |
| 1364 | 1458 |
| 1365 #if SK_ENABLE_INST_COUNT | 1459 #if SK_ENABLE_INST_COUNT |
| 1366 gPrintInstCount = true; | 1460 gPrintInstCount = true; |
| 1367 #endif | 1461 #endif |
| 1368 | 1462 |
| 1369 SkGraphics::Init(); | 1463 SkGraphics::Init(); |
| 1370 // we don't need to see this during a run | 1464 // we don't need to see this during a run |
| 1371 gSkSuppressFontCachePurgeSpew = true; | 1465 gSkSuppressFontCachePurgeSpew = true; |
| (...skipping 161 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1533 gm_fprintf(stderr, "reading resources from %s\n", FLAGS_resourcePath[0]) ; | 1627 gm_fprintf(stderr, "reading resources from %s\n", FLAGS_resourcePath[0]) ; |
| 1534 } | 1628 } |
| 1535 | 1629 |
| 1536 if (moduloDivisor <= 0) { | 1630 if (moduloDivisor <= 0) { |
| 1537 moduloRemainder = -1; | 1631 moduloRemainder = -1; |
| 1538 } | 1632 } |
| 1539 if (moduloRemainder < 0 || moduloRemainder >= moduloDivisor) { | 1633 if (moduloRemainder < 0 || moduloRemainder >= moduloDivisor) { |
| 1540 moduloRemainder = -1; | 1634 moduloRemainder = -1; |
| 1541 } | 1635 } |
| 1542 | 1636 |
| 1543 // Accumulate success of all tests. | 1637 int gmsRun = 0; |
| 1544 int testsRun = 0; | |
| 1545 int testsPassed = 0; | |
| 1546 int testsFailed = 0; | |
| 1547 int testsMissingReferenceImages = 0; | |
| 1548 | |
| 1549 int gmIndex = -1; | 1638 int gmIndex = -1; |
| 1550 SkString moduloStr; | 1639 SkString moduloStr; |
| 1551 | 1640 |
| 1552 // If we will be writing out files, prepare subdirectories. | 1641 // If we will be writing out files, prepare subdirectories. |
| 1553 if (FLAGS_writePath.count() == 1) { | 1642 if (FLAGS_writePath.count() == 1) { |
| 1554 if (!sk_mkdir(FLAGS_writePath[0])) { | 1643 if (!sk_mkdir(FLAGS_writePath[0])) { |
| 1555 return -1; | 1644 return -1; |
| 1556 } | 1645 } |
| 1557 if (gmmain.fUseFileHierarchy) { | 1646 if (gmmain.fUseFileHierarchy) { |
| 1558 for (int i = 0; i < configs.count(); i++) { | 1647 for (int i = 0; i < configs.count(); i++) { |
| (...skipping 19 matching lines...) Expand all Loading... | |
| 1578 } | 1667 } |
| 1579 moduloStr.printf("[%d.%d] ", gmIndex, moduloDivisor); | 1668 moduloStr.printf("[%d.%d] ", gmIndex, moduloDivisor); |
| 1580 } | 1669 } |
| 1581 | 1670 |
| 1582 const char* shortName = gm->shortName(); | 1671 const char* shortName = gm->shortName(); |
| 1583 if (skip_name(FLAGS_match, shortName)) { | 1672 if (skip_name(FLAGS_match, shortName)) { |
| 1584 SkDELETE(gm); | 1673 SkDELETE(gm); |
| 1585 continue; | 1674 continue; |
| 1586 } | 1675 } |
| 1587 | 1676 |
| 1677 gmsRun++; | |
| 1588 SkISize size = gm->getISize(); | 1678 SkISize size = gm->getISize(); |
| 1589 gm_fprintf(stdout, "%sdrawing... %s [%d %d]\n", moduloStr.c_str(), short Name, | 1679 gm_fprintf(stdout, "%sdrawing... %s [%d %d]\n", moduloStr.c_str(), short Name, |
| 1590 size.width(), size.height()); | 1680 size.width(), size.height()); |
| 1591 | 1681 |
| 1592 ErrorCombination testErrors; | 1682 run_multiple_configs(gmmain, gm, configs, grFactory); |
| 1593 testErrors.add(run_multiple_configs(gmmain, gm, configs, grFactory)); | |
| 1594 | 1683 |
| 1595 SkBitmap comparisonBitmap; | 1684 SkBitmap comparisonBitmap; |
| 1596 const ConfigData compareConfig = | 1685 const ConfigData compareConfig = |
| 1597 { SkBitmap::kARGB_8888_Config, kRaster_Backend, kDontCare_GLContextT ype, 0, kRW_ConfigFlag, "comparison", false }; | 1686 { SkBitmap::kARGB_8888_Config, kRaster_Backend, kDontCare_GLContextT ype, 0, kRW_ConfigFlag, "comparison", false }; |
| 1598 testErrors.add(gmmain.generate_image( | 1687 gmmain.generate_image(gm, compareConfig, NULL, NULL, &comparisonBitmap, false); |
| 1599 gm, compareConfig, NULL, NULL, &comparisonBitmap, false)); | |
| 1600 | 1688 |
| 1601 // TODO(epoger): only run this if gmmain.generate_image() succeeded? | 1689 // TODO(epoger): only run this if gmmain.generate_image() succeeded? |
| 1602 // Otherwise, what are we comparing against? | 1690 // Otherwise, what are we comparing against? |
| 1603 testErrors.add(run_multiple_modes(gmmain, gm, compareConfig, comparisonB itmap, | 1691 run_multiple_modes(gmmain, gm, compareConfig, comparisonBitmap, tileGrid ReplayScales); |
| 1604 tileGridReplayScales)); | |
| 1605 | |
| 1606 // Update overall results. | |
| 1607 // We only tabulate the particular error types that we currently | |
| 1608 // care about (e.g., missing reference images). Later on, if we | |
| 1609 // want to also tabulate other error types, we can do so. | |
| 1610 testsRun++; | |
| 1611 if (!gmmain.fExpectationsSource.get() || | |
| 1612 (testErrors.includes(kMissingExpectations_ErrorType))) { | |
| 1613 testsMissingReferenceImages++; | |
| 1614 } | |
| 1615 if (testErrors.minus(gmmain.fIgnorableErrorCombination).isEmpty()) { | |
| 1616 testsPassed++; | |
| 1617 } else { | |
| 1618 testsFailed++; | |
| 1619 } | |
| 1620 | 1692 |
| 1621 SkDELETE(gm); | 1693 SkDELETE(gm); |
| 1622 } | 1694 } |
| 1623 gm_fprintf(stdout, "Ran %d tests: %d passed, %d failed, %d missing reference images\n", | 1695 |
| 1624 testsRun, testsPassed, testsFailed, testsMissingReferenceImages); | 1696 // Assemble the list of modes we ran each test through. |
| 1697 // | |
| 1698 // TODO(epoger): Instead of assembling this list of modes here, | |
|
epoger
2013/04/02 16:00:14
This TODO indicates something suboptimal about cod
borenet
2013/04/02 16:17:56
Sounds good to me.
epoger
2013/04/03 03:56:50
Done in patchset 5. Do you think this change is p
| |
| 1699 // can/should we assemble it as we actually run the tests in | |
| 1700 // run_multiple_modes()? | |
| 1701 SkTArray<SkString> modes; | |
| 1702 if (FLAGS_replay) { | |
| 1703 modes.push_back(SkString("replay")); | |
| 1704 } | |
| 1705 if (FLAGS_serialize) { | |
| 1706 modes.push_back(SkString("serialize")); | |
| 1707 } | |
| 1708 if (FLAGS_rtree) { | |
| 1709 modes.push_back(SkString("rtree")); | |
| 1710 } | |
| 1711 if (FLAGS_tileGrid) { | |
| 1712 for (int i = 0; i < tileGridReplayScales.count(); i++) { | |
| 1713 SkString modeName("tileGrid"); | |
| 1714 modeName.appendf("%f", tileGridReplayScales[i]); | |
| 1715 modes.push_back(modeName); | |
| 1716 } | |
| 1717 } | |
| 1718 if (FLAGS_pipe) { | |
| 1719 for (size_t i = 0; i < SK_ARRAY_COUNT(gPipeWritingFlagCombos); i++) { | |
| 1720 SkString modeName("pipe"); | |
| 1721 modeName.append(gPipeWritingFlagCombos[i].name); | |
| 1722 modes.push_back(modeName); | |
| 1723 } | |
| 1724 } | |
| 1725 if (FLAGS_tiledPipe) { | |
| 1726 for (size_t i = 0; i < SK_ARRAY_COUNT(gPipeWritingFlagCombos); i++) { | |
| 1727 SkString modeName("tiledPipe"); | |
| 1728 modeName.append(gPipeWritingFlagCombos[i].name); | |
| 1729 modes.push_back(modeName); | |
| 1730 } | |
| 1731 } | |
| 1732 | |
| 1733 // Output summary to stdout. | |
| 1734 gm_fprintf(stdout, "Ran %d GMs, each with %d configs [%s] and %d modes [%s], " | |
| 1735 " so there should be a total of %d tests\n", | |
| 1736 gmsRun, configs.count(), list_all_config_names(configs).c_str(), | |
| 1737 modes.count(), list_all(modes).c_str(), gmsRun * (configs.count() + modes.count())); | |
| 1738 // TODO(epoger): Ultimately, we should signal an error if the | |
|
epoger
2013/04/02 16:00:14
This TODO indicates a problem that already existed
| |
| 1739 // expected total number of tests (displayed above) does not match | |
| 1740 // gmmain.fTestsRun. But for now, there are cases where those | |
| 1741 // numbers won't match: specifically, if some configs/modes are | |
| 1742 // skipped on a per-GM basis (due to gm->getFlags() for a specific | |
| 1743 // GM). Later on, we should record tests like that using some new | |
| 1744 // ErrorType, like kIntentionallySkipped_ErrorType. Then we could | |
| 1745 // signal an error if the totals didn't match up. | |
| 1625 gmmain.ListErrors(); | 1746 gmmain.ListErrors(); |
| 1626 | 1747 |
| 1627 if (FLAGS_writeJsonSummaryPath.count() == 1) { | 1748 if (FLAGS_writeJsonSummaryPath.count() == 1) { |
| 1628 Json::Value actualResults; | 1749 Json::Value actualResults; |
| 1629 actualResults[kJsonKey_ActualResults_Failed] = | 1750 actualResults[kJsonKey_ActualResults_Failed] = |
| 1630 gmmain.fJsonActualResults_Failed; | 1751 gmmain.fJsonActualResults_Failed; |
| 1631 actualResults[kJsonKey_ActualResults_FailureIgnored] = | 1752 actualResults[kJsonKey_ActualResults_FailureIgnored] = |
| 1632 gmmain.fJsonActualResults_FailureIgnored; | 1753 gmmain.fJsonActualResults_FailureIgnored; |
| 1633 actualResults[kJsonKey_ActualResults_NoComparison] = | 1754 actualResults[kJsonKey_ActualResults_NoComparison] = |
| 1634 gmmain.fJsonActualResults_NoComparison; | 1755 gmmain.fJsonActualResults_NoComparison; |
| (...skipping 19 matching lines...) Expand all Loading... | |
| 1654 gm_fprintf(stdout, "config: %s %x\n", config.fName, gr); | 1775 gm_fprintf(stdout, "config: %s %x\n", config.fName, gr); |
| 1655 gr->printCacheStats(); | 1776 gr->printCacheStats(); |
| 1656 } | 1777 } |
| 1657 } | 1778 } |
| 1658 #endif | 1779 #endif |
| 1659 | 1780 |
| 1660 delete grFactory; | 1781 delete grFactory; |
| 1661 #endif | 1782 #endif |
| 1662 SkGraphics::Term(); | 1783 SkGraphics::Term(); |
| 1663 | 1784 |
| 1664 return (0 == testsFailed) ? 0 : -1; | 1785 return (0 == gmmain.NumSignificantErrors()) ? 0 : -1; |
| 1665 } | 1786 } |
| 1666 | 1787 |
| 1667 void GMMain::installFilter(SkCanvas* canvas) { | 1788 void GMMain::installFilter(SkCanvas* canvas) { |
| 1668 if (FLAGS_forceBWtext) { | 1789 if (FLAGS_forceBWtext) { |
| 1669 canvas->setDrawFilter(SkNEW(BWTextDrawFilter))->unref(); | 1790 canvas->setDrawFilter(SkNEW(BWTextDrawFilter))->unref(); |
| 1670 } | 1791 } |
| 1671 } | 1792 } |
| 1672 | 1793 |
| 1673 #if !defined(SK_BUILD_FOR_IOS) && !defined(SK_BUILD_FOR_NACL) | 1794 #if !defined(SK_BUILD_FOR_IOS) && !defined(SK_BUILD_FOR_NACL) |
| 1674 int main(int argc, char * const argv[]) { | 1795 int main(int argc, char * const argv[]) { |
| 1675 return tool_main(argc, (char**) argv); | 1796 return tool_main(argc, (char**) argv); |
| 1676 } | 1797 } |
| 1677 #endif | 1798 #endif |
| OLD | NEW |