Index: gm/gmmain.cpp
===================================================================
--- gm/gmmain.cpp	(revision 8399)
+++ gm/gmmain.cpp	(working copy)
@@ -34,6 +34,7 @@
 #include "SkRefCnt.h"
 #include "SkStream.h"
 #include "SkTArray.h"
+#include "SkTDict.h"
 #include "SkTileGridPicture.h"
 #include "SamplePipeControllers.h"
@@ -64,6 +65,7 @@
 #endif
 extern bool gSkSuppressFontCachePurgeSpew;
+const static int kAnyIntValue = 1;  // when it doesn't matter what int value, just SOMETHING
borenet 2013/04/03 12:46:48
What if we counted the number of times we encountered…

epoger 2013/04/03 14:28:25
Done. Note that this now takes twice as long, since…
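For context on the exchange above: the patch uses SkTDict<int> purely as a set of strings; the mapped int values are never read, which is why the kAnyIntValue placeholder exists. A minimal sketch of that pattern, using only the SkTDict operations that appear in this patch (the size-hint constructor, set(), and Iter::next() with a NULL value pointer):

    #include "SkString.h"
    #include "SkTDict.h"

    static const int kAnyIntValue = 1;  // arbitrary placeholder, as in the patch

    static void sketch_dict_as_set() {
        SkTDict<int> modesSeen(1);  // ctor arg is a storage-size hint, as in fRenderModesEncountered(1)

        modesSeen.set("-replay", kAnyIntValue);     // record a render mode...
        modesSeen.set("-replay", kAnyIntValue);     // ...duplicates collapse into a single entry
        modesSeen.set("-serialize", kAnyIntValue);

        // Iterate over the distinct keys; pass NULL because the values are irrelevant.
        SkTDict<int>::Iter iter(modesSeen);
        const char* mode;
        while ((mode = iter.next(NULL)) != NULL) {
            SkDebugf("encountered mode: %s\n", mode);
        }
    }

Treat this as a sketch of the idiom rather than part of the patch; the mode strings are hypothetical.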
 #ifdef SK_SUPPORT_PDF
 #include "SkPDFDevice.h"
@@ -86,14 +88,6 @@
 using namespace skiagm;
-struct FailRec {
-    SkString fName;
-    bool fIsPixelError;
-
-    FailRec() : fIsPixelError(false) {}
-    FailRec(const SkString& name) : fName(name), fIsPixelError(false) {}
-};
-
 class Iter {
 public:
     Iter() {
@@ -184,12 +178,14 @@
 class GMMain {
 public:
-    GMMain() {
+    GMMain() : fRenderModesEncountered(1) {
borenet 2013/04/03 12:46:48
Do you mind moving fUseFileHierarchy, fMismatchPath…

epoger 2013/04/03 14:28:25
Done.
         // Set default values of member variables, which tool_main()
         // may override.
         fUseFileHierarchy = false;
         fIgnorableErrorCombination.add(kMissingExpectations_ErrorType);
         fMismatchPath = NULL;
+        fTestsRun = 0;
+        fRenderModesEncountered.reset();
     }
     SkString make_name(const char shortName[], const char configName[]) {
@@ -243,34 +239,100 @@
     }
     /**
-     * Records the errors encountered in fFailedTests, except for any error
-     * types we want to ignore.
+     * Add all render modes encountered thus far to the "modes" array.
      */
-    void RecordError(const ErrorCombination& errorCombination, const SkString& name,
-                     const char renderModeDescriptor []) {
-        // The common case: no error means nothing to record.
+    void GetRenderModesEncountered(SkTArray<SkString> &modes) {
+        SkTDict<int>::Iter iter(this->fRenderModesEncountered);
+        const char* mode;
+        while ((mode = iter.next(NULL)) != NULL) {
+            SkString modeAsString = SkString(mode);
+            // TODO(epoger): It seems a bit silly that all of these modes were
+            // recorded with a leading "-" which we have to remove here
+            // (except for mode "", which means plain old original mode).
+            // But that's how renderModeDescriptor has been passed into
+            // compare_test_results_to_reference_bitmap() historically,
+            // and changing that now may affect other parts of our code.
+            if (modeAsString.startsWith("-")) {
+                modeAsString.remove(0, 1);
+                modes.push_back(modeAsString);
+            }
+        }
+    }
+
+    /**
+     * Records the results of this test in fTestsRun and fFailedTests.
+     *
+     * We even record successes, and errors that we regard as
+     * "ignorable"; we can filter them out later.
+     */
+    void RecordTestResults(const ErrorCombination& errorCombination, const SkString& name,
+                           const char renderModeDescriptor []) {
+        // Things to do regardless of errorCombination.
+        fTestsRun++;
+        this->fRenderModesEncountered.set(renderModeDescriptor, kAnyIntValue);
+
         if (errorCombination.isEmpty()) {
             return;
         }
-        // If only certain error type(s) were reported, we know we can ignore them.
-        if (errorCombination.minus(fIgnorableErrorCombination).isEmpty()) {
-            return;
+        // Things to do only if there is some error condition.
+        SkString fullName = make_name(name.c_str(), renderModeDescriptor);
+        for (int typeInt = 0; typeInt <= kLast_ErrorType; typeInt++) {
+            ErrorType type = static_cast<ErrorType>(typeInt);
+            if (errorCombination.includes(type)) {
+                fFailedTests[type].push_back(fullName);
+            }
         }
+    }
-        FailRec& rec = fFailedTests.push_back(make_name(name.c_str(), renderModeDescriptor));
-        rec.fIsPixelError = errorCombination.includes(kImageMismatch_ErrorType);
+    /**
+     * Return the number of significant (non-ignorable) errors we have
+     * encountered so far.
+     */
+    int NumSignificantErrors() {
+        int significantErrors = 0;
+        for (int typeInt = 0; typeInt <= kLast_ErrorType; typeInt++) {
+            ErrorType type = static_cast<ErrorType>(typeInt);
+            if (!fIgnorableErrorCombination.includes(type)) {
+                significantErrors += fFailedTests[type].count();
+            }
+        }
+        return significantErrors;
     }
-    // List contents of fFailedTests via SkDebug.
+    /**
+     * List contents of fFailedTests to stdout.
+     */
     void ListErrors() {
-        for (int i = 0; i < fFailedTests.count(); ++i) {
-            if (fFailedTests[i].fIsPixelError) {
-                gm_fprintf(stderr, "\t\t%s pixel_error\n", fFailedTests[i].fName.c_str());
+        // First, print a single summary line.
+        SkString summary;
+        summary.appendf("Ran %d tests:", fTestsRun);
+        for (int typeInt = 0; typeInt <= kLast_ErrorType; typeInt++) {
+            ErrorType type = static_cast<ErrorType>(typeInt);
+            summary.appendf(" %s=%d", getErrorTypeName(type), fFailedTests[type].count());
+        }
+        gm_fprintf(stdout, "%s\n", summary.c_str());
+
+        // Now, for each failure type, list the tests that failed that way.
+        for (int typeInt = 0; typeInt <= kLast_ErrorType; typeInt++) {
+            SkString line;
+            ErrorType type = static_cast<ErrorType>(typeInt);
+            if (fIgnorableErrorCombination.includes(type)) {
+                line.append("[ ] ");
             } else {
-                gm_fprintf(stderr, "\t\t%s\n", fFailedTests[i].fName.c_str());
+                line.append("[*] ");
             }
+
+            SkTArray<SkString> *failedTestsOfThisType = &fFailedTests[type];
+            int count = failedTestsOfThisType->count();
+            line.appendf("%d %s:", count, getErrorTypeName(type));
+            for (int i = 0; i < count; ++i) {
+                line.append(" ");
+                line.append((*failedTestsOfThisType)[i]);
+            }
+            gm_fprintf(stdout, "%s\n", line.c_str());
         }
+        gm_fprintf(stdout, "(results marked with [*] will cause nonzero return value)\n");
     }
     static bool write_document(const SkString& path,
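For a sense of the new reporting, here is a hypothetical run of ListErrors() following the format strings above; the error-type names, test names, and counts are all invented for illustration (the real names come from getErrorTypeName() and the ErrorType enum in gm_error.h):

    Ran 240 tests: NoGpuContext=0 ImageMismatch=3 MissingExpectations=3 WritingReferenceImage=0
    [*] 0 NoGpuContext:
    [*] 3 ImageMismatch: aaclip_8888 shadertext_565 shadertext_8888
    [ ] 3 MissingExpectations: bigmatrix_565 bigmatrix_8888 colortype_565
    (results marked with [*] will cause nonzero return value)

Note that only the [*] rows feed into NumSignificantErrors(), and hence into the process exit code.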
@@ -510,7 +572,16 @@
         } else {
             gm_fprintf(stderr, "FAILED to write %s\n", path.c_str());
             ErrorCombination errors(kWritingReferenceImage_ErrorType);
-            RecordError(errors, name, renderModeDescriptor);
+            // TODO(epoger): Don't call RecordTestResults() here...
+            // Instead, we should make sure to call RecordTestResults()
+            // exactly ONCE per test. (Otherwise, gmmain.fTestsRun
+            // will be incremented twice for this test: once in
+            // compare_test_results_to_stored_expectations() before
+            // that method calls this one, and again here.)
+            //
+            // When we make that change, we should probably add a
+            // WritingReferenceImage test to the gm self-tests.
+            RecordTestResults(errors, name, renderModeDescriptor);
             return errors;
         }
     }
@@ -622,7 +693,7 @@
                 report_bitmap_diffs(*expectedBitmapPtr, actualBitmap, completeName);
             }
         }
-        RecordError(errors, baseNameString, renderModeDescriptor);
+        RecordTestResults(errors, baseNameString, renderModeDescriptor);
         if (addToJsonSummary) {
             add_actual_results_to_json_summary(completeName, actualChecksum, errors,
@@ -738,6 +809,7 @@
             add_actual_results_to_json_summary(name.c_str(), actualChecksum,
                                                ErrorCombination(kMissingExpectations_ErrorType),
                                                false);
+            RecordTestResults(ErrorCombination(kMissingExpectations_ErrorType), name, "");
         }
         // TODO: Consider moving this into compare_to_expectations(),
@@ -765,6 +837,13 @@
             GM* gm, const ConfigData& gRec, const char renderModeDescriptor [],
             SkBitmap& actualBitmap, const SkBitmap* referenceBitmap) {
+        // TODO(epoger): This method is run to compare results across
+        // different rendering modes (as opposed to
+        // compare_test_results_to_stored_expectations(), which
+        // compares results against expectations stored on disk). If
+        // we would like the GenerateGMs step to distinguish between
+        // those two types of mismatches, we should report image
+        // mismatches in here with a different ErrorType.
         SkASSERT(referenceBitmap);
         SkString name = make_name(gm->shortName(), gRec.fName);
         Expectations expectations(*referenceBitmap);
@@ -880,6 +959,8 @@
         // ('image-surface gm test is failing in "deferred" mode,
         // and gm is not reporting the failure')
         if (errors.isEmpty()) {
+            // TODO(epoger): Report this as a new ErrorType,
+            // something like kImageGeneration_ErrorType?
             return kEmpty_ErrorCombination;
         }
         return compare_test_results_to_reference_bitmap(
@@ -953,8 +1034,10 @@
     const char* fMismatchPath;
-    // information about all failed tests we have encountered so far
-    SkTArray<FailRec> fFailedTests;
+    // collection of tests that have failed with each ErrorType
+    SkTArray<SkString> fFailedTests[kLast_ErrorType+1];
+    int fTestsRun;
+    SkTDict<int> fRenderModesEncountered;
     // Where to read expectations (expected image checksums, etc.) from.
     // If unset, we don't do comparisons.
@@ -1265,7 +1348,9 @@
     ErrorCombination errorsForAllModes;
     uint32_t gmFlags = gm->getFlags();
-    // run the picture centric GM steps
+    // TODO(epoger): We should start recording any per-GM skipped
+    // modes (i.e. those we skipped due to gmFlags) with a new
+    // ErrorType, perhaps named kIntentionallySkipped_ErrorType.
     if (!(gmFlags & GM::kSkipPicture_Flag)) {
         ErrorCombination pictErrors;
@@ -1359,6 +1444,45 @@
     return errorsForAllModes;
 }
+/**
+ * Return a list of all entries in an array of strings as a single string
+ * of this form:
+ * "item1", "item2", "item3"
+ */
+SkString list_all(const SkTArray<SkString> &stringArray);
+SkString list_all(const SkTArray<SkString> &stringArray) {
+    SkString total;
+    for (int i = 0; i < stringArray.count(); i++) {
+        if (i > 0) {
+            total.append(", ");
+        }
+        total.append("\"");
+        total.append(stringArray[i]);
+        total.append("\"");
+    }
+    return total;
+}
+
+/**
+ * Return a list of configuration names, as a single string of this form:
+ * "item1", "item2", "item3"
+ *
+ * @param configs configurations, as a list of indices into gRec
+ */
+SkString list_all_config_names(const SkTDArray<size_t> &configs);
+SkString list_all_config_names(const SkTDArray<size_t> &configs) {
+    SkString total;
+    for (int i = 0; i < configs.count(); i++) {
+        if (i > 0) {
+            total.append(", ");
+        }
+        total.append("\"");
+        total.append(gRec[configs[i]].fName);
+        total.append("\"");
+    }
+    return total;
+}
+
 int tool_main(int argc, char** argv);
 int tool_main(int argc, char** argv) {
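A brief usage sketch for the new list_all() helper (the mode names are hypothetical; SkTArray and SkString as used throughout this file):

    SkTArray<SkString> modes;
    modes.push_back(SkString("replay"));
    modes.push_back(SkString("serialize"));
    SkString listed = list_all(modes);
    // listed now holds: "replay", "serialize"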
@@ -1540,12 +1664,7 @@
         moduloRemainder = -1;
     }
-    // Accumulate success of all tests.
-    int testsRun = 0;
-    int testsPassed = 0;
-    int testsFailed = 0;
-    int testsMissingReferenceImages = 0;
-
+    int gmsRun = 0;
     int gmIndex = -1;
     SkString moduloStr;
@@ -1585,43 +1704,44 @@
             continue;
         }
+        gmsRun++;
         SkISize size = gm->getISize();
         gm_fprintf(stdout, "%sdrawing... %s [%d %d]\n", moduloStr.c_str(), shortName,
                    size.width(), size.height());
-        ErrorCombination testErrors;
-        testErrors.add(run_multiple_configs(gmmain, gm, configs, grFactory));
+        run_multiple_configs(gmmain, gm, configs, grFactory);
         SkBitmap comparisonBitmap;
         const ConfigData compareConfig =
             { SkBitmap::kARGB_8888_Config, kRaster_Backend, kDontCare_GLContextType, 0, kRW_ConfigFlag, "comparison", false };
-        testErrors.add(gmmain.generate_image(
-            gm, compareConfig, NULL, NULL, &comparisonBitmap, false));
+        gmmain.generate_image(gm, compareConfig, NULL, NULL, &comparisonBitmap, false);
         // TODO(epoger): only run this if gmmain.generate_image() succeeded?
         // Otherwise, what are we comparing against?
-        testErrors.add(run_multiple_modes(gmmain, gm, compareConfig, comparisonBitmap,
-                                          tileGridReplayScales));
+        run_multiple_modes(gmmain, gm, compareConfig, comparisonBitmap, tileGridReplayScales);
-        // Update overall results.
-        // We only tabulate the particular error types that we currently
-        // care about (e.g., missing reference images). Later on, if we
-        // want to also tabulate other error types, we can do so.
-        testsRun++;
-        if (!gmmain.fExpectationsSource.get() ||
-            (testErrors.includes(kMissingExpectations_ErrorType))) {
-            testsMissingReferenceImages++;
-        }
-        if (testErrors.minus(gmmain.fIgnorableErrorCombination).isEmpty()) {
-            testsPassed++;
-        } else {
-            testsFailed++;
-        }
-
         SkDELETE(gm);
     }
-    gm_fprintf(stdout, "Ran %d tests: %d passed, %d failed, %d missing reference images\n",
-               testsRun, testsPassed, testsFailed, testsMissingReferenceImages);
+
+    SkTArray<SkString> modes;
+    gmmain.GetRenderModesEncountered(modes);
+
+    // Output summary to stdout.
+    gm_fprintf(stdout, "Ran %d GMs\n", gmsRun);
+    gm_fprintf(stdout, "... over %2d configs [%s]\n", configs.count(),
+               list_all_config_names(configs).c_str());
+    gm_fprintf(stdout, "... and %2d modes [%s]\n", modes.count(), list_all(modes).c_str());
+    gm_fprintf(stdout, "... so there should be a total of %d tests.\n",
+               gmsRun * (configs.count() + modes.count()));
+
+    // TODO(epoger): Ultimately, we should signal an error if the
+    // expected total number of tests (displayed above) does not match
+    // gmmain.fTestsRun. But for now, there are cases where those
+    // numbers won't match: specifically, if some configs/modes are
+    // skipped on a per-GM basis (due to gm->getFlags() for a specific
+    // GM). Later on, we should record tests like that using some new
+    // ErrorType, like kIntentionallySkipped_ErrorType. Then we could
+    // signal an error if the totals didn't match up.
     gmmain.ListErrors();
     if (FLAGS_writeJsonSummaryPath.count() == 1) {
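To make the arithmetic concrete: each GM is run once per config plus once per mode, so a hypothetical run of 5 GMs over 2 configs and 3 modes (names invented) expects 5 × (2 + 3) = 25 tests, and the summary block above would print:

    Ran 5 GMs
    ... over  2 configs ["8888", "565"]
    ... and  3 modes ["replay", "serialize", "pipe"]
    ... so there should be a total of 25 tests.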
@@ -1661,7 +1781,7 @@
 #endif
     SkGraphics::Term();
-    return (0 == testsFailed) ? 0 : -1;
+    return (0 == gmmain.NumSignificantErrors()) ? 0 : -1;
 }
 void GMMain::installFilter(SkCanvas* canvas) {