| Index: gm/gmmain.cpp
| ===================================================================
| --- gm/gmmain.cpp (revision 8579)
| +++ gm/gmmain.cpp (working copy)
| @@ -179,6 +179,7 @@
| GMMain() : fUseFileHierarchy(false), fMismatchPath(NULL), fTestsRun(0),
| fRenderModesEncountered(1) {
| fIgnorableErrorCombination.add(kMissingExpectations_ErrorType);
| + fIgnorableErrorCombination.add(kIntentionallySkipped_ErrorType);
| }
|
| SkString make_name(const char shortName[], const char configName[]) {
| @@ -864,21 +865,19 @@
| /**
| * Compare actualBitmap to referenceBitmap.
| *
| - * @param gm which test generated the bitmap
| - * @param gRec
| + * @param baseNameString name of test without renderModeDescriptor added
| * @param renderModeDescriptor
| * @param actualBitmap actual bitmap generated by this run
| * @param referenceBitmap bitmap we expected to be generated
| */
| ErrorCombination compare_test_results_to_reference_bitmap(
| - GM* gm, const ConfigData& gRec, const char renderModeDescriptor [],
| + const SkString& baseNameString, const char renderModeDescriptor[],
| SkBitmap& actualBitmap, const SkBitmap* referenceBitmap) {
|
| SkASSERT(referenceBitmap);
| - SkString name = make_name(gm->shortName(), gRec.fName);
| Expectations expectations(*referenceBitmap);
| return compare_to_expectations(expectations, actualBitmap,
| - name, renderModeDescriptor, false);
| + baseNameString, renderModeDescriptor, false);
| }
|
| static SkPicture* generate_new_picture(GM* gm, BbhType bbhType, uint32_t recordFlags,
| @@ -971,6 +970,7 @@
|
| if (gRec.fBackend == kRaster_Backend ||
| gRec.fBackend == kGPU_Backend) {
| + const char renderModeDescriptor[] = "-deferred";
| SkBitmap bitmap;
| // Early exit if we can't generate the image, but this is
| // expected in some cases, so don't report a test failure.
| @@ -991,63 +991,82 @@
| // something like kImageGeneration_ErrorType?
| return kEmpty_ErrorCombination;
| }
| + const SkString name = make_name(gm->shortName(), gRec.fName);
| return compare_test_results_to_reference_bitmap(
| - gm, gRec, "-deferred", bitmap, &referenceBitmap);
| + name, renderModeDescriptor, bitmap, &referenceBitmap);
| }
| return kEmpty_ErrorCombination;
| }
|
| ErrorCombination test_pipe_playback(GM* gm, const ConfigData& gRec,
| const SkBitmap& referenceBitmap, bool simulateFailure) {
| + const SkString name = make_name(gm->shortName(), gRec.fName);
| ErrorCombination errors;
| for (size_t i = 0; i < SK_ARRAY_COUNT(gPipeWritingFlagCombos); ++i) {
| - SkBitmap bitmap;
| - SkISize size = gm->getISize();
| - setup_bitmap(gRec, size, &bitmap);
| - SkCanvas canvas(bitmap);
| - installFilter(&canvas);
| - PipeController pipeController(&canvas);
| - SkGPipeWriter writer;
| - SkCanvas* pipeCanvas = writer.startRecording(
| - &pipeController, gPipeWritingFlagCombos[i].flags);
| - if (!simulateFailure) {
| - invokeGM(gm, pipeCanvas, false, false);
| + SkString renderModeDescriptor("-pipe");
| + renderModeDescriptor.append(gPipeWritingFlagCombos[i].name);
| +
| + if (gm->getFlags() & GM::kSkipPipe_Flag) {
| + RecordTestResults(kIntentionallySkipped_ErrorType, name,
| + renderModeDescriptor.c_str());
| + errors.add(kIntentionallySkipped_ErrorType);
| + } else {
| + SkBitmap bitmap;
| + SkISize size = gm->getISize();
| + setup_bitmap(gRec, size, &bitmap);
| + SkCanvas canvas(bitmap);
| + installFilter(&canvas);
| + PipeController pipeController(&canvas);
| + SkGPipeWriter writer;
| + SkCanvas* pipeCanvas = writer.startRecording(&pipeController,
| + gPipeWritingFlagCombos[i].flags);
| + if (!simulateFailure) {
| + invokeGM(gm, pipeCanvas, false, false);
| + }
| + complete_bitmap(&bitmap);
| + writer.endRecording();
| + errors.add(compare_test_results_to_reference_bitmap(
| + name, renderModeDescriptor.c_str(), bitmap, &referenceBitmap));
| + if (!errors.isEmpty()) {
| + break;
| + }
| }
| - complete_bitmap(&bitmap);
| - writer.endRecording();
| - SkString string("-pipe");
| - string.append(gPipeWritingFlagCombos[i].name);
| - errors.add(compare_test_results_to_reference_bitmap(
| - gm, gRec, string.c_str(), bitmap, &referenceBitmap));
| - if (!errors.isEmpty()) {
| - break;
| - }
| }
| return errors;
| }
|
| ErrorCombination test_tiled_pipe_playback(GM* gm, const ConfigData& gRec,
| const SkBitmap& referenceBitmap) {
| + const SkString name = make_name(gm->shortName(), gRec.fName);
| ErrorCombination errors;
| for (size_t i = 0; i < SK_ARRAY_COUNT(gPipeWritingFlagCombos); ++i) {
| - SkBitmap bitmap;
| - SkISize size = gm->getISize();
| - setup_bitmap(gRec, size, &bitmap);
| - SkCanvas canvas(bitmap);
| - installFilter(&canvas);
| - TiledPipeController pipeController(bitmap);
| - SkGPipeWriter writer;
| - SkCanvas* pipeCanvas = writer.startRecording(
| - &pipeController, gPipeWritingFlagCombos[i].flags);
| - invokeGM(gm, pipeCanvas, false, false);
| - complete_bitmap(&bitmap);
| - writer.endRecording();
| - SkString string("-tiled pipe");
| - string.append(gPipeWritingFlagCombos[i].name);
| - errors.add(compare_test_results_to_reference_bitmap(
| - gm, gRec, string.c_str(), bitmap, &referenceBitmap));
| - if (!errors.isEmpty()) {
| - break;
| + SkString renderModeDescriptor("-tiled pipe");
| + renderModeDescriptor.append(gPipeWritingFlagCombos[i].name);
| +
| + if ((gm->getFlags() & GM::kSkipPipe_Flag) ||
| + (gm->getFlags() & GM::kSkipTiled_Flag)) {
| + RecordTestResults(kIntentionallySkipped_ErrorType, name,
| + renderModeDescriptor.c_str());
| + errors.add(kIntentionallySkipped_ErrorType);
| + } else {
| + SkBitmap bitmap;
| + SkISize size = gm->getISize();
| + setup_bitmap(gRec, size, &bitmap);
| + SkCanvas canvas(bitmap);
| + installFilter(&canvas);
| + TiledPipeController pipeController(bitmap);
| + SkGPipeWriter writer;
| + SkCanvas* pipeCanvas = writer.startRecording(&pipeController,
| + gPipeWritingFlagCombos[i].flags);
| + invokeGM(gm, pipeCanvas, false, false);
| + complete_bitmap(&bitmap);
| + writer.endRecording();
| + errors.add(compare_test_results_to_reference_bitmap(name,
| + renderModeDescriptor.c_str(),
| + bitmap, &referenceBitmap));
| + if (!errors.isEmpty()) {
| + break;
| + }
| }
| }
| return errors;
| @@ -1272,24 +1291,41 @@
| GrContextFactory *grFactory);
| ErrorCombination run_multiple_configs(GMMain &gmmain, GM *gm, const SkTDArray<size_t> &configs,
| GrContextFactory *grFactory) {
| + const char renderModeDescriptor[] = "";
| ErrorCombination errorsForAllConfigs;
| uint32_t gmFlags = gm->getFlags();
|
| for (int i = 0; i < configs.count(); i++) {
| ConfigData config = gRec[configs[i]];
| + const SkString name = gmmain.make_name(gm->shortName(), config.fName);
|
| // Skip any tests that we don't even need to try.
| - if ((kPDF_Backend == config.fBackend) &&
| - (!FLAGS_pdf|| (gmFlags & GM::kSkipPDF_Flag))) {
| + // If any of these were skipped on a per-GM basis, record them as
| + // kIntentionallySkipped.
| + if (kPDF_Backend == config.fBackend) {
| + if (!FLAGS_pdf) {
| continue;
| }
| + if (gmFlags & GM::kSkipPDF_Flag) {
| + gmmain.RecordTestResults(kIntentionallySkipped_ErrorType, name,
| + renderModeDescriptor);
| + errorsForAllConfigs.add(kIntentionallySkipped_ErrorType);
| + continue;
| + }
| + }
| if ((gmFlags & GM::kSkip565_Flag) &&
| (kRaster_Backend == config.fBackend) &&
| (SkBitmap::kRGB_565_Config == config.fConfig)) {
| + gmmain.RecordTestResults(kIntentionallySkipped_ErrorType, name,
| + renderModeDescriptor);
| + errorsForAllConfigs.add(kIntentionallySkipped_ErrorType);
| continue;
| }
| if ((gmFlags & GM::kSkipGPU_Flag) &&
| kGPU_Backend == config.fBackend) {
| + gmmain.RecordTestResults(kIntentionallySkipped_ErrorType, name,
| + renderModeDescriptor);
| + errorsForAllConfigs.add(kIntentionallySkipped_ErrorType);
| continue;
| }
|
| @@ -1374,100 +1410,105 @@
| const SkTDArray<SkScalar> &tileGridReplayScales) {
| ErrorCombination errorsForAllModes;
| uint32_t gmFlags = gm->getFlags();
| + const SkString name = gmmain.make_name(gm->shortName(), compareConfig.fName);
|
| - // TODO(epoger): We should start recording any per-GM skipped
| - // modes (i.e. those we skipped due to gmFlags) with a new
| - // ErrorType, perhaps named kIntentionallySkipped_ErrorType.
| - if (!(gmFlags & GM::kSkipPicture_Flag)) {
| -
| - ErrorCombination pictErrors;
| -
| - //SkAutoTUnref<SkPicture> pict(generate_new_picture(gm));
| - SkPicture* pict = gmmain.generate_new_picture(gm, kNone_BbhType, 0);
| - SkAutoUnref aur(pict);
| -
| - if (FLAGS_replay) {
| + SkPicture* pict = gmmain.generate_new_picture(gm, kNone_BbhType, 0);
| + SkAutoUnref aur(pict);
| + if (FLAGS_replay) {
| + const char renderModeDescriptor[] = "-replay";
| + if (gmFlags & GM::kSkipPicture_Flag) {
| + gmmain.RecordTestResults(kIntentionallySkipped_ErrorType, name, renderModeDescriptor);
| + errorsForAllModes.add(kIntentionallySkipped_ErrorType);
| + } else {
| SkBitmap bitmap;
| gmmain.generate_image_from_picture(gm, compareConfig, pict, &bitmap);
| - pictErrors.add(gmmain.compare_test_results_to_reference_bitmap(
| - gm, compareConfig, "-replay", bitmap, &comparisonBitmap));
| + errorsForAllModes.add(gmmain.compare_test_results_to_reference_bitmap(
| + name, renderModeDescriptor, bitmap, &comparisonBitmap));
| }
| + }
|
| - if ((pictErrors.isEmpty()) && FLAGS_serialize) {
| + if (FLAGS_serialize) {
| + const char renderModeDescriptor[] = "-serialize";
| + if (gmFlags & GM::kSkipPicture_Flag) {
| + gmmain.RecordTestResults(kIntentionallySkipped_ErrorType, name, renderModeDescriptor);
| + errorsForAllModes.add(kIntentionallySkipped_ErrorType);
| + } else {
| SkPicture* repict = gmmain.stream_to_new_picture(*pict);
| SkAutoUnref aurr(repict);
| -
| SkBitmap bitmap;
| gmmain.generate_image_from_picture(gm, compareConfig, repict, &bitmap);
| - pictErrors.add(gmmain.compare_test_results_to_reference_bitmap(
| - gm, compareConfig, "-serialize", bitmap, &comparisonBitmap));
| + errorsForAllModes.add(gmmain.compare_test_results_to_reference_bitmap(
| + name, renderModeDescriptor, bitmap, &comparisonBitmap));
| }
| -
| - if (FLAGS_writePicturePath.count() == 1) {
| - const char* pictureSuffix = "skp";
| - SkString path = make_filename(FLAGS_writePicturePath[0], "",
| - gm->shortName(), pictureSuffix);
| - SkFILEWStream stream(path.c_str());
| - pict->serialize(&stream);
| - }
| -
| - errorsForAllModes.add(pictErrors);
| }
|
| - if (!(gmFlags & GM::kSkipPicture_Flag) && FLAGS_rtree) {
| - SkPicture* pict = gmmain.generate_new_picture(
| - gm, kRTree_BbhType, SkPicture::kUsePathBoundsForClip_RecordingFlag);
| - SkAutoUnref aur(pict);
| - SkBitmap bitmap;
| - gmmain.generate_image_from_picture(gm, compareConfig, pict, &bitmap);
| - errorsForAllModes.add(gmmain.compare_test_results_to_reference_bitmap(
| - gm, compareConfig, "-rtree", bitmap, &comparisonBitmap));
| + if ((1 == FLAGS_writePicturePath.count()) &&
| + !(gmFlags & GM::kSkipPicture_Flag)) {
| + const char* pictureSuffix = "skp";
| + SkString path = make_filename(FLAGS_writePicturePath[0], "",
| + gm->shortName(), pictureSuffix);
| + SkFILEWStream stream(path.c_str());
| + pict->serialize(&stream);
| }
|
| - if (!(gmFlags & GM::kSkipPicture_Flag) && FLAGS_tileGrid) {
| - for(int scaleIndex = 0; scaleIndex < tileGridReplayScales.count(); ++scaleIndex) {
| - SkScalar replayScale = tileGridReplayScales[scaleIndex];
| - if ((gmFlags & GM::kSkipScaledReplay_Flag) && replayScale != 1) {
| - continue;
| - }
| - // We record with the reciprocal scale to obtain a replay
| - // result that can be validated against comparisonBitmap.
| - SkScalar recordScale = SkScalarInvert(replayScale);
| + if (FLAGS_rtree) {
| + const char renderModeDescriptor[] = "-rtree";
| + if (gmFlags & GM::kSkipPicture_Flag) {
| + gmmain.RecordTestResults(kIntentionallySkipped_ErrorType, name, renderModeDescriptor);
| + errorsForAllModes.add(kIntentionallySkipped_ErrorType);
| + } else {
| SkPicture* pict = gmmain.generate_new_picture(
| - gm, kTileGrid_BbhType, SkPicture::kUsePathBoundsForClip_RecordingFlag, recordScale);
| + gm, kRTree_BbhType, SkPicture::kUsePathBoundsForClip_RecordingFlag);
| SkAutoUnref aur(pict);
| SkBitmap bitmap;
| - // We cannot yet pass 'true' to generate_image_from_picture to
| - // perform actual tiled rendering (see Issue 1198 -
| - // https://code.google.com/p/skia/issues/detail?id=1198)
| - gmmain.generate_image_from_picture(gm, compareConfig, pict, &bitmap,
| - replayScale /*, true */);
| - SkString suffix("-tilegrid");
| - if (SK_Scalar1 != replayScale) {
| - suffix += "-scale-";
| - suffix.appendScalar(replayScale);
| - }
| + gmmain.generate_image_from_picture(gm, compareConfig, pict, &bitmap);
| errorsForAllModes.add(gmmain.compare_test_results_to_reference_bitmap(
| - gm, compareConfig, suffix.c_str(), bitmap, &comparisonBitmap));
| + name, renderModeDescriptor, bitmap, &comparisonBitmap));
| }
| }
|
| - // run the pipe centric GM steps
| - if (!(gmFlags & GM::kSkipPipe_Flag)) {
| + if (FLAGS_tileGrid) {
| + for(int scaleIndex = 0; scaleIndex < tileGridReplayScales.count(); ++scaleIndex) {
| + SkScalar replayScale = tileGridReplayScales[scaleIndex];
| + SkString renderModeDescriptor("-tilegrid");
| + if (SK_Scalar1 != replayScale) {
| + renderModeDescriptor += "-scale-";
| + renderModeDescriptor.appendScalar(replayScale);
| + }
|
| - ErrorCombination pipeErrors;
| -
| - if (FLAGS_pipe) {
| - pipeErrors.add(gmmain.test_pipe_playback(gm, compareConfig, comparisonBitmap,
| - FLAGS_simulatePipePlaybackFailure));
| + if ((gmFlags & GM::kSkipPicture_Flag) ||
| + ((gmFlags & GM::kSkipScaledReplay_Flag) && replayScale != 1)) {
| + gmmain.RecordTestResults(kIntentionallySkipped_ErrorType, name,
| + renderModeDescriptor.c_str());
| + errorsForAllModes.add(kIntentionallySkipped_ErrorType);
| + } else {
| + // We record with the reciprocal scale to obtain a replay
| + // result that can be validated against comparisonBitmap.
| + SkScalar recordScale = SkScalarInvert(replayScale);
| + SkPicture* pict = gmmain.generate_new_picture(
| + gm, kTileGrid_BbhType, SkPicture::kUsePathBoundsForClip_RecordingFlag,
| + recordScale);
| + SkAutoUnref aur(pict);
| + SkBitmap bitmap;
| + // We cannot yet pass 'true' to generate_image_from_picture to
| + // perform actual tiled rendering (see Issue 1198 -
| + // https://code.google.com/p/skia/issues/detail?id=1198)
| + gmmain.generate_image_from_picture(gm, compareConfig, pict, &bitmap,
| + replayScale /*, true */);
| + errorsForAllModes.add(gmmain.compare_test_results_to_reference_bitmap(
| + name, renderModeDescriptor.c_str(), bitmap, &comparisonBitmap));
| + }
| }
| + }
|
| - if ((pipeErrors.isEmpty()) &&
| - FLAGS_tiledPipe && !(gmFlags & GM::kSkipTiled_Flag)) {
| - pipeErrors.add(gmmain.test_tiled_pipe_playback(gm, compareConfig, comparisonBitmap));
| + // run the pipe centric GM steps
| + if (FLAGS_pipe) {
| + errorsForAllModes.add(gmmain.test_pipe_playback(gm, compareConfig, comparisonBitmap,
| + FLAGS_simulatePipePlaybackFailure));
| + if (FLAGS_tiledPipe) {
| + errorsForAllModes.add(gmmain.test_tiled_pipe_playback(gm, compareConfig,
| + comparisonBitmap));
| }
| -
| - errorsForAllModes.add(pipeErrors);
| }
| return errorsForAllModes;
| }
| @@ -1753,25 +1794,29 @@
|
| SkTArray<SkString> modes;
| gmmain.GetRenderModesEncountered(modes);
| + bool reportError = false;
| + if (gmmain.NumSignificantErrors() > 0) {
| + reportError = true;
| + }
|
| // Output summary to stdout.
| gm_fprintf(stdout, "Ran %d GMs\n", gmsRun);
| gm_fprintf(stdout, "... over %2d configs [%s]\n", configs.count(),
| list_all_config_names(configs).c_str());
| gm_fprintf(stdout, "... and %2d modes [%s]\n", modes.count(), list_all(modes).c_str());
| - gm_fprintf(stdout, "... so there should be a total of %d tests.\n",
| - gmsRun * (configs.count() + modes.count()));
| -
| - // TODO(epoger): Ultimately, we should signal an error if the
| - // expected total number of tests (displayed above) does not match
| - // gmmain.fTestsRun. But for now, there are cases where those
| - // numbers won't match: specifically, if some configs/modes are
| - // skipped on a per-GM basis (due to gm->getFlags() for a specific
| - // GM). Later on, we should record tests like that using some new
| - // ErrorType, like kIntentionallySkipped_ErrorType. Then we could
| - // signal an error if the totals didn't match up.
| + int expectedNumberOfTests = gmsRun * (configs.count() + modes.count());
| + gm_fprintf(stdout, "... so there should be a total of %d tests.\n", expectedNumberOfTests);
| gmmain.ListErrors();
|
| + // TODO(epoger): in a standalone CL, enable this new check.
| +#if 0
| + if (expectedNumberOfTests != gmmain.fTestsRun) {
| + gm_fprintf(stderr, "expected %d tests, but ran or skipped %d tests\n",
| + expectedNumberOfTests, gmmain.fTestsRun);
| + reportError = true;
| + }
| +#endif
| +
| if (FLAGS_writeJsonSummaryPath.count() == 1) {
| Json::Value actualResults;
| actualResults[kJsonKey_ActualResults_Failed] =
| @@ -1809,7 +1854,7 @@
| #endif
| SkGraphics::Term();
|
| - return (0 == gmmain.NumSignificantErrors()) ? 0 : -1;
| + return (reportError) ? -1 : 0;
| }
|
| void GMMain::installFilter(SkCanvas* canvas) {
|
|
|
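Reviewer note (not part of the patch): the approach above is to record each per-GM skip as a result of the new kIntentionallySkipped_ErrorType and to list that type in fIgnorableErrorCombination, so skipped configs/modes show up in the run totals without making the run fail; that is what should eventually let the currently #if 0'd expectedNumberOfTests-vs-fTestsRun check be enabled. The snippet below is a minimal standalone sketch of that pattern, for illustration only: ErrorType and ErrorCombination are re-implemented here just for the example, and names such as kImageMismatch_ErrorType and the helper variables are invented, so this is not the real gm_error.h API.

    // Standalone sketch of the "record everything, ignore a known-benign
    // subset" pattern used by the CL. Compile with any C++11 compiler.
    #include <cstdio>
    #include <vector>

    enum ErrorType {
        kMissingExpectations_ErrorType,   // from the patch
        kIntentionallySkipped_ErrorType,  // new in the patch
        kImageMismatch_ErrorType,         // invented stand-in for a real failure
    };

    class ErrorCombination {
    public:
        ErrorCombination() : fBits(0) {}
        void add(ErrorType type)            { fBits |= 1u << type; }
        bool includes(ErrorType type) const { return 0 != (fBits & (1u << type)); }
        bool isEmpty() const                { return 0 == fBits; }
    private:
        unsigned fBits;
    };

    int main() {
        // Mirrors the GMMain constructor change: intentional skips are
        // recorded but never counted as significant errors.
        ErrorCombination ignorable;
        ignorable.add(kMissingExpectations_ErrorType);
        ignorable.add(kIntentionallySkipped_ErrorType);

        // Pretend three test outcomes were recorded: a pass (empty
        // combination), an intentional skip, and a genuine mismatch.
        std::vector<ErrorCombination> recorded(3);
        recorded[1].add(kIntentionallySkipped_ErrorType);
        recorded[2].add(kImageMismatch_ErrorType);

        const ErrorType allTypes[] = { kMissingExpectations_ErrorType,
                                       kIntentionallySkipped_ErrorType,
                                       kImageMismatch_ErrorType };
        int testsRun = static_cast<int>(recorded.size()); // analogous to fTestsRun
        int significantErrors = 0;                        // analogous to NumSignificantErrors()
        for (const ErrorCombination& result : recorded) {
            for (ErrorType type : allTypes) {
                if (result.includes(type) && !ignorable.includes(type)) {
                    ++significantErrors;
                }
            }
        }

        // Because skips are recorded, the number of tests run or skipped can
        // be reconciled with the "expected total" printed in the summary.
        std::printf("ran or skipped %d tests, %d significant errors\n",
                    testsRun, significantErrors);
        return (significantErrors > 0) ? -1 : 0;
    }

With skips accounted for this way, run_multiple_configs() can keep plain `continue` for configs that are irrelevant to the whole run (e.g. !FLAGS_pdf) while per-GM skips still contribute to the totals.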