Index: gm/gmmain.cpp
===================================================================
--- gm/gmmain.cpp (revision 8160)
+++ gm/gmmain.cpp (working copy)
@@ -194,6 +194,7 @@
// Set default values of member variables, which tool_main()
// may override.
fUseFileHierarchy = false;
+ fSimulatePipePlaybackFailure = false;
fMismatchPath = NULL;
}
@@ -261,8 +262,9 @@
return;
}
- FailRec& rec = fFailedTests.push_back(make_name(
- name.c_str(), renderModeDescriptor));
+ SkString completeName = name;
+ completeName.append(renderModeDescriptor);
+ FailRec& rec = fFailedTests.push_back(completeName);
rec.fIsPixelError =
(kEmptyErrorBitfield != (errorType & kImageMismatch_ErrorBitmask));
}
@@ -594,18 +596,17 @@
* @param baseNameString name of test without renderModeDescriptor added
* @param renderModeDescriptor e.g., "-rtree", "-deferred"
* @param addToJsonSummary whether to add these results (both actual and
- * expected) to the JSON summary
- *
- * TODO: For now, addToJsonSummary is only set to true within
- * compare_test_results_to_stored_expectations(), so results of our
- * in-memory comparisons (Rtree vs regular, etc.) are not written to the
- * JSON summary. We may wish to change that.
+ * expected) to the JSON summary. Regardless of this setting, if
+ * we find an image mismatch in this test, we will write these
+ * results to the JSON summary. (This is so that we will always
+ * report errors across rendering modes, such as pipe vs tiled.
+ * See https://codereview.chromium.org/12825005/ )
*/
ErrorBitfield compare_to_expectations(Expectations expectations,
const SkBitmap& actualBitmap,
const SkString& baseNameString,
const char renderModeDescriptor[],
- bool addToJsonSummary=false) {
+ bool addToJsonSummary) {
ErrorBitfield retval;
Checksum actualChecksum = SkBitmapChecksummer::Compute64(actualBitmap);
SkString completeNameString = baseNameString;
@@ -617,6 +618,7 @@
} else if (expectations.match(actualChecksum)) {
retval = kEmptyErrorBitfield;
} else {
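+ // An image mismatch must always appear in the JSON summary, even when
+ // the caller did not request it, so that failures in render-mode
+ // comparisons (pipe vs tiled, etc.) are always reported.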
+ addToJsonSummary = true;
retval = kImageMismatch_ErrorBitmask;
// Write out the "actuals" for any mismatches, if we have
@@ -784,7 +786,7 @@
SkString name = make_name(gm->shortName(), gRec.fName);
Expectations expectations(*referenceBitmap);
return compare_to_expectations(expectations, actualBitmap,
- name, renderModeDescriptor);
+ name, renderModeDescriptor, false);
}
static SkPicture* generate_new_picture(GM* gm, BbhType bbhType, uint32_t recordFlags,
@@ -905,7 +907,9 @@
SkGPipeWriter writer;
SkCanvas* pipeCanvas = writer.startRecording(
&pipeController, gPipeWritingFlagCombos[i].flags);
- invokeGM(gm, pipeCanvas, false, false);
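+ // Skipping the draw here leaves the pipe output blank, guaranteeing a
+ // mismatch against the reference bitmap (see --simulatePipePlaybackFailure).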
+ if (!this->fSimulatePipePlaybackFailure) {
+ invokeGM(gm, pipeCanvas, false, false);
+ }
complete_bitmap(&bitmap);
writer.endRecording();
SkString string("-pipe");
@@ -951,6 +955,7 @@
//
bool fUseFileHierarchy;
+ bool fSimulatePipePlaybackFailure;
const char* fMismatchPath;
@@ -1054,6 +1059,7 @@
" [--resourcePath|-i <path>]: directory that stores image resources\n"
" [--nortree]: Do not exercise the R-Tree variant of SkPicture\n"
" [--noserialize]: do not exercise SkPicture serialization & deserialization\n"
+" [--simulatePipePlaybackFailure]: simulate a rendering failure in pipe mode only\n"
" [--tiledPipe]: Exercise tiled SkGPipe replay\n"
" [--notileGrid]: Do not exercise the tile grid variant of SkPicture\n"
" [--tileGridReplayScales <scales>]: Comma separated list of floating-point scale\n"
@@ -1136,6 +1142,233 @@
}
}
+/**
+ * Run this test in a number of different configs (8888, 565, PDF,
+ * etc.), confirming that the resulting bitmaps match expectations
+ * (which may be different for each config).
+ */
+ErrorBitfield run_multiple_configs(GMMain &gmmain, GM *gm, const SkTDArray<size_t> &configs,
+ GrContextFactory *grFactory, int gpuCacheSizeBytes,
+ int gpuCacheSizeCount, const char *writePath, bool doPDF,
+ bool doDeferred) {
+ uint32_t gmFlags = gm->getFlags();
+ ErrorBitfield testErrors = kEmptyErrorBitfield;
+ for (int i = 0; i < configs.count(); i++) {
+ ConfigData config = gRec[configs[i]];
+
+ // Skip any tests that we don't even need to try.
+ if ((kPDF_Backend == config.fBackend) &&
+ (!doPDF || (gmFlags & GM::kSkipPDF_Flag)))
+ {
+ continue;
+ }
+ if ((gmFlags & GM::kSkip565_Flag) &&
+ (kRaster_Backend == config.fBackend) &&
+ (SkBitmap::kRGB_565_Config == config.fConfig)) {
+ continue;
+ }
+ if ((gmFlags & GM::kSkipGPU_Flag) &&
+ kGPU_Backend == config.fBackend) {
+ continue;
+ }
+
+ // Now we know that we want to run this test and record its
+ // success or failure.
+ ErrorBitfield renderErrors = kEmptyErrorBitfield;
+ GrRenderTarget* renderTarget = NULL;
+#if SK_SUPPORT_GPU
+ SkAutoTUnref<GrRenderTarget> rt;
+ AutoResetGr autogr;
+ if ((kEmptyErrorBitfield == renderErrors) &&
+ kGPU_Backend == config.fBackend) {
+ GrContext* gr = grFactory->get(config.fGLContextType);
+ bool grSuccess = false;
+ if (gr) {
+ // create a render target to back the device
+ GrTextureDesc desc;
+ desc.fConfig = kSkia8888_GrPixelConfig;
+ desc.fFlags = kRenderTarget_GrTextureFlagBit;
+ desc.fWidth = gm->getISize().width();
+ desc.fHeight = gm->getISize().height();
+ desc.fSampleCnt = config.fSampleCnt;
+ GrTexture* tex = gr->createUncachedTexture(desc, NULL, 0);
+ if (tex) {
+ rt.reset(tex->asRenderTarget());
+ rt.get()->ref();
+ tex->unref();
+ autogr.set(gr);
+ renderTarget = rt.get();
+ grSuccess = NULL != renderTarget;
+ }
+ // Set the user specified cache limits if non-default.
+ size_t bytes;
+ int count;
+ gr->getTextureCacheLimits(&count, &bytes);
+ if (-1 != gpuCacheSizeBytes) {
+ bytes = static_cast<size_t>(gpuCacheSizeBytes);
+ }
+ if (-1 != gpuCacheSizeCount) {
+ count = gpuCacheSizeCount;
+ }
+ gr->setTextureCacheLimits(count, bytes);
+ }
+ if (!grSuccess) {
+ renderErrors |= kNoGpuContext_ErrorBitmask;
+ }
+ }
+#endif
+
+ SkBitmap comparisonBitmap;
+
+ if (kEmptyErrorBitfield == renderErrors) {
+ renderErrors |= gmmain.test_drawing(gm, config, writePath,
+ GetGr(),
+ renderTarget,
+ &comparisonBitmap);
+ }
+
+ if (doDeferred && !renderErrors &&
+ (kGPU_Backend == config.fBackend ||
+ kRaster_Backend == config.fBackend)) {
+ renderErrors |= gmmain.test_deferred_drawing(gm, config,
+ comparisonBitmap,
+ GetGr(),
+ renderTarget);
+ }
+
+ testErrors |= renderErrors;
+ }
+ return testErrors;
+}
+
+/**
+ * Run this test in a number of different drawing modes (pipe,
+ * deferred, tiled, etc.), confirming that the resulting bitmaps are
+ * *exactly* the same in all drawing modes.
+ *
+ * TODO(epoger): Right now, we only run the different drawing modes
+ * with the 8888 config. Would there be value in running all those
+ * different drawing modes in whatever configs (8888, 565, PDF) we are
+ * testing?
+ */
+ErrorBitfield run_multiple_drawing_modes(GMMain &gmmain, GM *gm,
+ const char *writePicturePath, bool doReplay,
+ bool doSerialize, bool doRTree, bool doTileGrid,
+ const SkTDArray<SkScalar> &tileGridReplayScales,
+ bool doPipe, bool doTiledPipe) {
+ uint32_t gmFlags = gm->getFlags();
+ SkBitmap comparisonBitmap;
+ const ConfigData compareConfig =
+ { SkBitmap::kARGB_8888_Config, kRaster_Backend, kDontCare_GLContextType, 0,
+ kRW_ConfigFlag, "comparison" };
+ ErrorBitfield testErrors = gmmain.generate_image(gm, compareConfig, NULL, NULL,
+ &comparisonBitmap, false);
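+ // comparisonBitmap now holds the plain raster 8888 rendering that every
+ // drawing mode exercised below must reproduce exactly.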
+
+ // run the picture centric GM steps
+ if (!(gmFlags & GM::kSkipPicture_Flag)) {
+
+ ErrorBitfield pictErrors = kEmptyErrorBitfield;
+
+ //SkAutoTUnref<SkPicture> pict(generate_new_picture(gm));
+ SkPicture* pict = gmmain.generate_new_picture(gm, kNone_BbhType, 0);
+ SkAutoUnref aur(pict);
+
+ if ((kEmptyErrorBitfield == testErrors) && doReplay) {
+ SkBitmap bitmap;
+ gmmain.generate_image_from_picture(gm, compareConfig, pict,
+ &bitmap);
+ pictErrors |= gmmain.compare_test_results_to_reference_bitmap(
+ gm, compareConfig, "-replay", bitmap, &comparisonBitmap);
+ }
+
+ if ((kEmptyErrorBitfield == testErrors) &&
+ (kEmptyErrorBitfield == pictErrors) &&
+ doSerialize) {
+ SkPicture* repict = gmmain.stream_to_new_picture(*pict);
+ SkAutoUnref aurr(repict);
+
+ SkBitmap bitmap;
+ gmmain.generate_image_from_picture(gm, compareConfig, repict,
+ &bitmap);
+ pictErrors |= gmmain.compare_test_results_to_reference_bitmap(
+ gm, compareConfig, "-serialize", bitmap, &comparisonBitmap);
+ }
+
+ if (writePicturePath) {
+ const char* pictureSuffix = "skp";
+ SkString path = make_filename(writePicturePath, "",
+ gm->shortName(),
+ pictureSuffix);
+ SkFILEWStream stream(path.c_str());
+ pict->serialize(&stream);
+ }
+
+ testErrors |= pictErrors;
+ }
+
+ // TODO: add a test in which the RTree rendering results in a
+ // different bitmap than the standard rendering. It should
+ // show up as failed in the JSON summary, and should be listed
+ // in the stdout also.
+ if (!(gmFlags & GM::kSkipPicture_Flag) && doRTree) {
+ SkPicture* pict = gmmain.generate_new_picture(
+ gm, kRTree_BbhType, SkPicture::kUsePathBoundsForClip_RecordingFlag);
+ SkAutoUnref aur(pict);
+ SkBitmap bitmap;
+ gmmain.generate_image_from_picture(gm, compareConfig, pict,
+ &bitmap);
+ testErrors |= gmmain.compare_test_results_to_reference_bitmap(
+ gm, compareConfig, "-rtree", bitmap, &comparisonBitmap);
+ }
+
+ if (!(gmFlags & GM::kSkipPicture_Flag) && doTileGrid) {
+ for(int scaleIndex = 0; scaleIndex < tileGridReplayScales.count(); ++scaleIndex) {
+ SkScalar replayScale = tileGridReplayScales[scaleIndex];
+ if ((gmFlags & GM::kSkipScaledReplay_Flag) && replayScale != 1)
+ continue;
+ // We record with the reciprocal scale to obtain a replay
+ // result that can be validated against comparisonBitmap.
+ SkScalar recordScale = SkScalarInvert(replayScale);
+ SkPicture* pict = gmmain.generate_new_picture(
+ gm, kTileGrid_BbhType, SkPicture::kUsePathBoundsForClip_RecordingFlag,
+ recordScale);
+ SkAutoUnref aur(pict);
+ SkBitmap bitmap;
+ gmmain.generate_image_from_picture(gm, compareConfig, pict,
+ &bitmap, replayScale);
+ SkString suffix("-tilegrid");
+ if (SK_Scalar1 != replayScale) {
+ suffix += "-scale-";
+ suffix.appendScalar(replayScale);
+ }
+ testErrors |= gmmain.compare_test_results_to_reference_bitmap(
+ gm, compareConfig, suffix.c_str(), bitmap,
+ &comparisonBitmap);
+ }
+ }
+
+ // run the pipe centric GM steps
+ if (!(gmFlags & GM::kSkipPipe_Flag)) {
+
+ ErrorBitfield pipeErrors = kEmptyErrorBitfield;
+
+ if ((kEmptyErrorBitfield == testErrors) && doPipe) {
+ pipeErrors |= gmmain.test_pipe_playback(gm, compareConfig,
+ comparisonBitmap);
+ }
+
+ if ((kEmptyErrorBitfield == testErrors) &&
+ (kEmptyErrorBitfield == pipeErrors) &&
+ doTiledPipe && !(gmFlags & GM::kSkipTiled_Flag)) {
+ pipeErrors |= gmmain.test_tiled_pipe_playback(gm, compareConfig,
+ comparisonBitmap);
+ }
+
+ testErrors |= pipeErrors;
+ }
+ return testErrors;
+}
+
int tool_main(int argc, char** argv);
int tool_main(int argc, char** argv) {
@@ -1180,12 +1413,9 @@
int moduloRemainder = -1;
int moduloDivisor = -1;
-#if SK_SUPPORT_GPU
- struct {
- int fBytes;
- int fCount;
- } gpuCacheSize = { -1, -1 }; // -1s mean use the default
-#endif
+ int gpuCacheSizeBytes = -1;
+ int gpuCacheSizeCount = -1;
+ // -1 means use the default
const char* const commandName = argv[0];
char* const* stop = argv + argc;
@@ -1261,8 +1491,8 @@
#if SK_SUPPORT_GPU
} else if (strcmp(*argv, "--gpuCacheSize") == 0) {
if (stop - argv > 2) {
- gpuCacheSize.fBytes = atoi(*++argv);
- gpuCacheSize.fCount = atoi(*++argv);
+ gpuCacheSizeBytes = atoi(*++argv);
+ gpuCacheSizeCount = atoi(*++argv);
} else {
gm_fprintf(stderr, "missing arg for --gpuCacheSize\n");
usage(commandName);
@@ -1320,6 +1550,8 @@
doSerialize = true;
} else if (strcmp(*argv, "--noserialize") == 0) {
doSerialize = false;
+ } else if (strcmp(*argv, "--simulatePipePlaybackFailure") == 0) {
+ gmmain.fSimulatePipePlaybackFailure = true;
} else if (strcmp(*argv, "--tiledPipe") == 0) {
doTiledPipe = true;
} else if (!strcmp(*argv, "--verbose") || !strcmp(*argv, "-v")) {
@@ -1415,6 +1647,7 @@
int testsRun = 0;
int testsPassed = 0;
int testsFailed = 0;
+ int testsWithDrawingModeDiscrepancies = 0;
int testsMissingReferenceImages = 0;
#if SK_SUPPORT_GPU
@@ -1464,222 +1697,39 @@
gm_fprintf(stdout, "%sdrawing... %s [%d %d]\n", moduloStr.c_str(), shortName,
size.width(), size.height());
- ErrorBitfield testErrors = kEmptyErrorBitfield;
- uint32_t gmFlags = gm->getFlags();
+ ErrorBitfield compositeErrors = kEmptyErrorBitfield;
+ ErrorBitfield multipleConfigErrors = run_multiple_configs(
+ gmmain, gm, configs, grFactory, gpuCacheSizeBytes, gpuCacheSizeCount, writePath,
+ doPDF, doDeferred);
+ compositeErrors |= multipleConfigErrors;
+ ErrorBitfield multipleModeErrors = run_multiple_drawing_modes(
+ gmmain, gm, writePicturePath, doReplay, doSerialize, doRTree, doTileGrid,
+ tileGridReplayScales, doPipe, doTiledPipe);
+ compositeErrors |= multipleModeErrors;
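+ // multipleModeErrors is kept separate from compositeErrors so that
+ // drawing-mode discrepancies can be tallied on their own below.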
- for (int i = 0; i < configs.count(); i++) {
- ConfigData config = gRec[configs[i]];
-
- // Skip any tests that we don't even need to try.
- if ((kPDF_Backend == config.fBackend) &&
- (!doPDF || (gmFlags & GM::kSkipPDF_Flag)))
- {
- continue;
- }
- if ((gmFlags & GM::kSkip565_Flag) &&
- (kRaster_Backend == config.fBackend) &&
- (SkBitmap::kRGB_565_Config == config.fConfig)) {
- continue;
- }
- if ((gmFlags & GM::kSkipGPU_Flag) &&
- kGPU_Backend == config.fBackend) {
- continue;
- }
-
- // Now we know that we want to run this test and record its
- // success or failure.
- ErrorBitfield renderErrors = kEmptyErrorBitfield;
- GrRenderTarget* renderTarget = NULL;
-#if SK_SUPPORT_GPU
- SkAutoTUnref<GrRenderTarget> rt;
- AutoResetGr autogr;
- if ((kEmptyErrorBitfield == renderErrors) &&
- kGPU_Backend == config.fBackend) {
- GrContext* gr = grFactory->get(config.fGLContextType);
- bool grSuccess = false;
- if (gr) {
- // create a render target to back the device
- GrTextureDesc desc;
- desc.fConfig = kSkia8888_GrPixelConfig;
- desc.fFlags = kRenderTarget_GrTextureFlagBit;
- desc.fWidth = gm->getISize().width();
- desc.fHeight = gm->getISize().height();
- desc.fSampleCnt = config.fSampleCnt;
- GrTexture* tex = gr->createUncachedTexture(desc, NULL, 0);
- if (tex) {
- rt.reset(tex->asRenderTarget());
- rt.get()->ref();
- tex->unref();
- autogr.set(gr);
- renderTarget = rt.get();
- grSuccess = NULL != renderTarget;
- }
- // Set the user specified cache limits if non-default.
- size_t bytes;
- int count;
- gr->getTextureCacheLimits(&count, &bytes);
- if (-1 != gpuCacheSize.fBytes) {
- bytes = static_cast<size_t>(gpuCacheSize.fBytes);
- }
- if (-1 != gpuCacheSize.fCount) {
- count = gpuCacheSize.fCount;
- }
- gr->setTextureCacheLimits(count, bytes);
- }
- if (!grSuccess) {
- renderErrors |= kNoGpuContext_ErrorBitmask;
- }
- }
-#endif
-
- SkBitmap comparisonBitmap;
-
- if (kEmptyErrorBitfield == renderErrors) {
- renderErrors |= gmmain.test_drawing(gm, config, writePath,
- GetGr(),
- renderTarget,
- &comparisonBitmap);
- }
-
- if (doDeferred && !renderErrors &&
- (kGPU_Backend == config.fBackend ||
- kRaster_Backend == config.fBackend)) {
- renderErrors |= gmmain.test_deferred_drawing(gm, config,
- comparisonBitmap,
- GetGr(),
- renderTarget);
- }
-
- testErrors |= renderErrors;
- }
-
- SkBitmap comparisonBitmap;
- const ConfigData compareConfig =
- { SkBitmap::kARGB_8888_Config, kRaster_Backend, kDontCare_GLContextType, 0, kRW_ConfigFlag, "comparison" };
- testErrors |= gmmain.generate_image(gm, compareConfig, NULL, NULL, &comparisonBitmap, false);
-
- // run the picture centric GM steps
- if (!(gmFlags & GM::kSkipPicture_Flag)) {
-
- ErrorBitfield pictErrors = kEmptyErrorBitfield;
-
- //SkAutoTUnref<SkPicture> pict(generate_new_picture(gm));
- SkPicture* pict = gmmain.generate_new_picture(gm, kNone_BbhType, 0);
- SkAutoUnref aur(pict);
-
- if ((kEmptyErrorBitfield == testErrors) && doReplay) {
- SkBitmap bitmap;
- gmmain.generate_image_from_picture(gm, compareConfig, pict,
- &bitmap);
- pictErrors |= gmmain.compare_test_results_to_reference_bitmap(
- gm, compareConfig, "-replay", bitmap, &comparisonBitmap);
- }
-
- if ((kEmptyErrorBitfield == testErrors) &&
- (kEmptyErrorBitfield == pictErrors) &&
- doSerialize) {
- SkPicture* repict = gmmain.stream_to_new_picture(*pict);
- SkAutoUnref aurr(repict);
-
- SkBitmap bitmap;
- gmmain.generate_image_from_picture(gm, compareConfig, repict,
- &bitmap);
- pictErrors |= gmmain.compare_test_results_to_reference_bitmap(
- gm, compareConfig, "-serialize", bitmap, &comparisonBitmap);
- }
-
- if (writePicturePath) {
- const char* pictureSuffix = "skp";
- SkString path = make_filename(writePicturePath, "",
- gm->shortName(),
- pictureSuffix);
- SkFILEWStream stream(path.c_str());
- pict->serialize(&stream);
- }
-
- testErrors |= pictErrors;
- }
-
- // TODO: add a test in which the RTree rendering results in a
- // different bitmap than the standard rendering. It should
- // show up as failed in the JSON summary, and should be listed
- // in the stdout also.
- if (!(gmFlags & GM::kSkipPicture_Flag) && doRTree) {
- SkPicture* pict = gmmain.generate_new_picture(
- gm, kRTree_BbhType, SkPicture::kUsePathBoundsForClip_RecordingFlag);
- SkAutoUnref aur(pict);
- SkBitmap bitmap;
- gmmain.generate_image_from_picture(gm, compareConfig, pict,
- &bitmap);
- testErrors |= gmmain.compare_test_results_to_reference_bitmap(
- gm, compareConfig, "-rtree", bitmap, &comparisonBitmap);
- }
-
- if (!(gmFlags & GM::kSkipPicture_Flag) && doTileGrid) {
- for(int scaleIndex = 0; scaleIndex < tileGridReplayScales.count(); ++scaleIndex) {
- SkScalar replayScale = tileGridReplayScales[scaleIndex];
- if ((gmFlags & GM::kSkipScaledReplay_Flag) && replayScale != 1)
- continue;
- // We record with the reciprocal scale to obtain a replay
- // result that can be validated against comparisonBitmap.
- SkScalar recordScale = SkScalarInvert(replayScale);
- SkPicture* pict = gmmain.generate_new_picture(
- gm, kTileGrid_BbhType, SkPicture::kUsePathBoundsForClip_RecordingFlag,
- recordScale);
- SkAutoUnref aur(pict);
- SkBitmap bitmap;
- gmmain.generate_image_from_picture(gm, compareConfig, pict,
- &bitmap, replayScale);
- SkString suffix("-tilegrid");
- if (SK_Scalar1 != replayScale) {
- suffix += "-scale-";
- suffix.appendScalar(replayScale);
- }
- testErrors |= gmmain.compare_test_results_to_reference_bitmap(
- gm, compareConfig, suffix.c_str(), bitmap,
- &comparisonBitmap);
- }
- }
-
- // run the pipe centric GM steps
- if (!(gmFlags & GM::kSkipPipe_Flag)) {
-
- ErrorBitfield pipeErrors = kEmptyErrorBitfield;
-
- if ((kEmptyErrorBitfield == testErrors) && doPipe) {
- pipeErrors |= gmmain.test_pipe_playback(gm, compareConfig,
- comparisonBitmap);
- }
-
- if ((kEmptyErrorBitfield == testErrors) &&
- (kEmptyErrorBitfield == pipeErrors) &&
- doTiledPipe && !(gmFlags & GM::kSkipTiled_Flag)) {
- pipeErrors |= gmmain.test_tiled_pipe_playback(gm, compareConfig,
- comparisonBitmap);
- }
-
- testErrors |= pipeErrors;
- }
-
- // Update overall results.
- // We only tabulate the particular error types that we currently
- // care about (e.g., missing reference images). Later on, if we
- // want to also tabulate other error types, we can do so.
+ // A non-ignorable error in run_multiple_configs, or ANY error in
+ // run_multiple_drawing_modes, counts as a failure.
testsRun++;
- if (!gmmain.fExpectationsSource.get() ||
- (kEmptyErrorBitfield != (kMissingExpectations_ErrorBitmask & testErrors))) {
- testsMissingReferenceImages++;
- }
- if (testErrors == (testErrors & kIgnorable_ErrorBitmask)) {
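+ // Drawing-mode discrepancies are never ignorable: they count as
+ // failures and are also tallied separately.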
+ if (kEmptyErrorBitfield != multipleModeErrors) {
+ testsWithDrawingModeDiscrepancies++;
+ testsFailed++;
+ } else if (compositeErrors == (compositeErrors & kIgnorable_ErrorBitmask)) {
testsPassed++;
} else {
testsFailed++;
}
+ // Tally any other result categories we care to report.
+ if (!gmmain.fExpectationsSource.get() ||
+ (kEmptyErrorBitfield != (kMissingExpectations_ErrorBitmask & compositeErrors))) {
+ testsMissingReferenceImages++;
+ }
SkDELETE(gm);
}
- gm_fprintf(stdout, "Ran %d tests: %d passed, %d failed, %d missing reference images\n",
- testsRun, testsPassed, testsFailed, testsMissingReferenceImages);
+ gm_fprintf(stdout, "Ran %d tests: %d passed, %d failed, %d with drawing mode discrepancies, "
+ "%d missing reference images\n",
+ testsRun, testsPassed, testsFailed, testsWithDrawingModeDiscrepancies,
+ testsMissingReferenceImages);
gmmain.ListErrors();
if (NULL != writeJsonSummaryPath) {