Index: gm/gmmain.cpp
===================================================================
--- gm/gmmain.cpp	(revision 8292)
+++ gm/gmmain.cpp	(working copy)
@@ -262,8 +262,9 @@
             return;
         }
-        FailRec& rec = fFailedTests.push_back(make_name(
-            name.c_str(), renderModeDescriptor));
+        SkString completeName = name;
+        completeName.append(renderModeDescriptor);
+        FailRec& rec = fFailedTests.push_back(completeName);
         rec.fIsPixelError =
             (kEmptyErrorBitfield != (errorType & kImageMismatch_ErrorBitmask));
     }
@@ -591,18 +592,17 @@
      * @param baseNameString name of test without renderModeDescriptor added
      * @param renderModeDescriptor e.g., "-rtree", "-deferred"
      * @param addToJsonSummary whether to add these results (both actual and
-     *        expected) to the JSON summary
-     *
-     * TODO: For now, addToJsonSummary is only set to true within
-     * compare_test_results_to_stored_expectations(), so results of our
-     * in-memory comparisons (Rtree vs regular, etc.) are not written to the
-     * JSON summary. We may wish to change that.
+     *        expected) to the JSON summary. Regardless of this setting, if
+     *        we find an image mismatch in this test, we will write these
+     *        results to the JSON summary. (This is so that we will always
+     *        report errors across rendering modes, such as pipe vs tiled.
+     *        See https://codereview.chromium.org/12825005/ )
      */
     ErrorBitfield compare_to_expectations(Expectations expectations,
                                           const SkBitmap& actualBitmap,
                                           const SkString& baseNameString,
                                           const char renderModeDescriptor[],
-                                          bool addToJsonSummary=false) {
+                                          bool addToJsonSummary) {
         ErrorBitfield retval;
         Checksum actualChecksum = SkBitmapChecksummer::Compute64(actualBitmap);
         SkString completeNameString = baseNameString;
@@ -614,6 +614,7 @@
         } else if (expectations.match(actualChecksum)) {
             retval = kEmptyErrorBitfield;
         } else {
+            addToJsonSummary = true;
             retval = kImageMismatch_ErrorBitmask;
             // Write out the "actuals" for any mismatches, if we have
@@ -781,7 +782,7 @@
         SkString name = make_name(gm->shortName(), gRec.fName);
         Expectations expectations(*referenceBitmap);
         return compare_to_expectations(expectations, actualBitmap,
-                                       name, renderModeDescriptor);
+                                       name, renderModeDescriptor, false);
     }
     static SkPicture* generate_new_picture(GM* gm, BbhType bbhType, uint32_t recordFlags,
@@ -889,9 +890,8 @@
         return kEmptyErrorBitfield;
     }
-    ErrorBitfield test_pipe_playback(GM* gm,
-                                     const ConfigData& gRec,
-                                     const SkBitmap& referenceBitmap) {
+    ErrorBitfield test_pipe_playback(GM* gm, const ConfigData& gRec,
+                                     const SkBitmap& referenceBitmap, bool simulateFailure) {
         ErrorBitfield errors = kEmptyErrorBitfield;
         for (size_t i = 0; i < SK_ARRAY_COUNT(gPipeWritingFlagCombos); ++i) {
             SkBitmap bitmap;
@@ -903,7 +903,9 @@
             SkGPipeWriter writer;
             SkCanvas* pipeCanvas = writer.startRecording(
                 &pipeController, gPipeWritingFlagCombos[i].flags);
-            invokeGM(gm, pipeCanvas, false, false);
+            if (!simulateFailure) {
+                invokeGM(gm, pipeCanvas, false, false);
+            }
             complete_bitmap(&bitmap);
             writer.endRecording();
             SkString string("-pipe");
@@ -1061,6 +1063,7 @@
 DEFINE_string2(resourcePath, i, "", "Directory that stores image resources.");
 DEFINE_bool(rtree, true, "Exercise the R-Tree variant of SkPicture test pass.");
 DEFINE_bool(serialize, true, "Exercise the SkPicture serialization & deserialization test pass.");
+DEFINE_bool(simulatePipePlaybackFailure, false, "Simulate a rendering failure in pipe mode only.");
 DEFINE_bool(tiledPipe, false, "Exercise tiled SkGPipe replay.");
 DEFINE_bool(tileGrid, true, "Exercise the tile grid variant of SkPicture.");
 DEFINE_string(tileGridReplayScales, "", "Space separated list of floating-point scale "
@@ -1372,7 +1375,8 @@
     ErrorBitfield pipeErrors = kEmptyErrorBitfield;
     if (FLAGS_pipe) {
-        pipeErrors |= gmmain.test_pipe_playback(gm, compareConfig, comparisonBitmap);
+        pipeErrors |= gmmain.test_pipe_playback(gm, compareConfig, comparisonBitmap,
+                                                FLAGS_simulatePipePlaybackFailure);
     }
     if ((kEmptyErrorBitfield == pipeErrors) &&
@@ -1539,6 +1543,7 @@
     int testsRun = 0;
     int testsPassed = 0;
     int testsFailed = 0;
+    int testsWithDrawingModeDiscrepancies = 0;
     int testsMissingReferenceImages = 0;
     int gmIndex = -1;
@@ -1584,37 +1589,49 @@
         gm_fprintf(stdout, "%sdrawing... %s [%d %d]\n", moduloStr.c_str(), shortName,
                    size.width(), size.height());
-        ErrorBitfield testErrors = kEmptyErrorBitfield;
-        testErrors |= run_multiple_configs(gmmain, gm, configs, grFactory);
+        ErrorBitfield compositeErrors = kEmptyErrorBitfield;
+        ErrorBitfield multipleConfigErrors = run_multiple_configs(gmmain, gm, configs, grFactory);
+        compositeErrors |= multipleConfigErrors;
         SkBitmap comparisonBitmap;
         const ConfigData compareConfig =
-            { SkBitmap::kARGB_8888_Config, kRaster_Backend, kDontCare_GLContextType, 0, kRW_ConfigFlag, "comparison", false };
-        testErrors |= gmmain.generate_image(gm, compareConfig, NULL, NULL, &comparisonBitmap, false);
+            { SkBitmap::kARGB_8888_Config, kRaster_Backend, kDontCare_GLContextType, 0,
+              kRW_ConfigFlag, "comparison", false };
+        ErrorBitfield generateModeBaselineErrors = gmmain.generate_image(
+            gm, compareConfig, NULL, NULL, &comparisonBitmap, false);
+        compositeErrors |= generateModeBaselineErrors;
-        // TODO(epoger): only run this if gmmain.generate_image() succeeded?
+        // TODO(epoger): only run this if generateModeBaselineErrors is kEmptyErrorBitfield?
         // Otherwise, what are we comparing against?
-        testErrors |= run_multiple_modes(gmmain, gm, compareConfig, comparisonBitmap);
+        ErrorBitfield multipleModeErrors = run_multiple_modes(gmmain, gm, compareConfig,
+                                                              comparisonBitmap);
+        compositeErrors |= multipleModeErrors;
-        // Update overall results.
-        // We only tabulate the particular error types that we currently
-        // care about (e.g., missing reference images). Later on, if we
-        // want to also tabulate other error types, we can do so.
+        // A non-ignorable error in run_multiple_configs(), or ANY error in
+        // generate_image()/run_multiple_modes(), counts as a failure.
         testsRun++;
-        if (!gmmain.fExpectationsSource.get() ||
-            (kEmptyErrorBitfield != (kMissingExpectations_ErrorBitmask & testErrors))) {
-            testsMissingReferenceImages++;
-        }
-        if (testErrors == (testErrors & kIgnorable_ErrorBitmask)) {
+        if (kEmptyErrorBitfield != multipleModeErrors) {
+            testsWithDrawingModeDiscrepancies++;
+            testsFailed++;
+        } else if (kEmptyErrorBitfield != generateModeBaselineErrors) {
+            testsFailed++;
+        } else if (compositeErrors == (compositeErrors & kIgnorable_ErrorBitmask)) {
            testsPassed++;
         } else {
             testsFailed++;
         }
+        // Tabulate any other result categories we care to report.
+        if (!gmmain.fExpectationsSource.get() ||
+            (kEmptyErrorBitfield != (kMissingExpectations_ErrorBitmask & compositeErrors))) {
+            testsMissingReferenceImages++;
+        }
         SkDELETE(gm);
     }
-    gm_fprintf(stdout, "Ran %d tests: %d passed, %d failed, %d missing reference images\n",
-               testsRun, testsPassed, testsFailed, testsMissingReferenceImages);
+    gm_fprintf(stdout, "Ran %d tests: %d passed, %d failed, %d with drawing mode discrepancies, "
+               "%d missing reference images\n",
+               testsRun, testsPassed, testsFailed, testsWithDrawingModeDiscrepancies,
+               testsMissingReferenceImages);
     gmmain.ListErrors();
     if (FLAGS_writeJsonSummaryPath.count() == 1) {
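
Note (not part of the diff): a quick way to exercise the new reporting path end
to end, assuming a Debug build of the gm binary at out/Debug/gm (the path is
hypothetical) and that pipe mode (FLAGS_pipe) is enabled:

  out/Debug/gm --simulatePipePlaybackFailure --writeJsonSummaryPath /tmp/actuals.json

Because simulateFailure skips invokeGM() only inside test_pipe_playback(), the
"-pipe" bitmap is left undrawn while the comparison bitmap renders normally. The
resulting mismatch should make compare_to_expectations() force
addToJsonSummary = true, so the failure appears both in the JSON summary and in
the new "with drawing mode discrepancies" count on the final summary line.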