Chromium Code Reviews| Index: gm/gmmain.cpp |
| =================================================================== |
| --- gm/gmmain.cpp (revision 8020) |
| +++ gm/gmmain.cpp (working copy) |
| @@ -80,19 +80,16 @@ |
| #define CAN_IMAGE_PDF 0 |
| #endif |
| -// TODO(epoger): We created this ErrorBitfield so that we could record |
| -// multiple error types for the same comparison. But in practice, we |
| -// process its final value in switch() statements, which inherently |
| -// assume that only one error type will be set. |
| -// I think we should probably change this to be an enum, and thus |
| -// constrain ourselves to a single error type per comparison. |
| typedef int ErrorBitfield; |
| -const static ErrorBitfield ERROR_NONE = 0x00; |
| -const static ErrorBitfield ERROR_NO_GPU_CONTEXT = 0x01; |
| -const static ErrorBitfield ERROR_IMAGE_MISMATCH = 0x02; |
| -// const static ErrorBitfield ERROR_DIMENSION_MISMATCH = 0x04; DEPRECATED in https://codereview.appspot.com/7064047 |
| -const static ErrorBitfield ERROR_READING_REFERENCE_IMAGE = 0x08; |
| -const static ErrorBitfield ERROR_WRITING_REFERENCE_IMAGE = 0x10; |
| +// an empty bitfield means no errors: |
| +const static ErrorBitfield kEmptyErrorBitfield = 0x00; |
| +// individual error types: |
| +const static ErrorBitfield kNoGpuContext_ErrorBitmask = 0x01; |
|
epoger
2013/03/07 19:51:03
While I was at it, changed the constant names to follow the kConstantName naming convention.
|
| +const static ErrorBitfield kImageMismatch_ErrorBitmask = 0x02; |
| +const static ErrorBitfield kReadingReferenceImage_ErrorBitmask = 0x04; |
| +const static ErrorBitfield kWritingReferenceImage_ErrorBitmask = 0x08; |
| +// we typically ignore any errors matching this bitmask: |
| +const static ErrorBitfield kIgnorable_ErrorBitmask = kReadingReferenceImage_ErrorBitmask; |
| using namespace skiagm; |
| @@ -254,23 +251,19 @@ |
| // of this type. |
| void RecordError(ErrorBitfield errorType, const SkString& name, |
| const char renderModeDescriptor []) { |
| - bool isPixelError; |
| - switch (errorType) { |
| - case ERROR_NONE: |
| + // The common case: no error means nothing to record. |
| + if (kEmptyErrorBitfield == errorType) { |
| return; |
| - case ERROR_READING_REFERENCE_IMAGE: |
| + } |
| + |
| + // If only certain error type(s) were reported, we know we can ignore them. |
| + if (errorType == (errorType & kIgnorable_ErrorBitmask)) { |
|
borenet
2013/03/07 20:10:28
I like this, because it allows us to have multiple ignorable error types in the future.
|
| return; |
| - case ERROR_IMAGE_MISMATCH: |
| - isPixelError = true; |
| - break; |
| - default: |
| - isPixelError = false; |
| - break; |
| } |
| FailRec& rec = fFailedTests.push_back(make_name( |
| name.c_str(), renderModeDescriptor)); |
| - rec.fIsPixelError = isPixelError; |
| + rec.fIsPixelError = (errorType & kImageMismatch_ErrorBitmask); |
| } |
| // List contents of fFailedTests via SkDebug. |
| @@ -410,7 +403,7 @@ |
| #if SK_SUPPORT_GPU |
| else { // GPU |
| if (NULL == context) { |
| - return ERROR_NO_GPU_CONTEXT; |
| + return kNoGpuContext_ErrorBitmask; |
| } |
| SkAutoTUnref<SkDevice> device(new SkGpuDevice(context, rt)); |
| if (deferred) { |
| @@ -428,7 +421,7 @@ |
| } |
| #endif |
| complete_bitmap(bitmap); |
| - return ERROR_NONE; |
| + return kEmptyErrorBitfield; |
| } |
| static void generate_image_from_picture(GM* gm, const ConfigData& gRec, |
| @@ -522,12 +515,12 @@ |
| success = write_document(path, *document); |
| } |
| if (success) { |
| - return ERROR_NONE; |
| + return kEmptyErrorBitfield; |
| } else { |
| fprintf(stderr, "FAILED to write %s\n", path.c_str()); |
| - RecordError(ERROR_WRITING_REFERENCE_IMAGE, name, |
| + RecordError(kWritingReferenceImage_ErrorBitmask, name, |
| renderModeDescriptor); |
| - return ERROR_WRITING_REFERENCE_IMAGE; |
| + return kWritingReferenceImage_ErrorBitmask; |
| } |
| } |
| @@ -587,8 +580,9 @@ |
| } |
| /** |
| - * Compares actual checksum to expectations. |
| - * Returns ERROR_NONE if they match, or some particular error code otherwise |
| + * Compares actual checksum to expectations. Returns |
| + * kEmptyErrorBitfield if they match, or some combination of |
| + * _ErrorBitmask values otherwise. |
| * |
| * If fMismatchPath has been set, and there are pixel diffs, then the |
| * actual bitmap will be written out to a file within fMismatchPath. |
| @@ -617,11 +611,11 @@ |
| const char* completeName = completeNameString.c_str(); |
| if (expectations.empty()) { |
| - retval = ERROR_READING_REFERENCE_IMAGE; |
| + retval = kReadingReferenceImage_ErrorBitmask; |
|
borenet
2013/03/07 20:10:28
So kReadingReferenceImage_ErrorBitmask occurs for missing expectations, not just failed reads? The name seems misleading in that case.
epoger
2013/03/07 20:24:20
Good thoughts, thanks... I changed it to kMissingExpectations_ErrorBitmask.
borenet
2013/03/07 20:27:44
LGTM!
|
| } else if (expectations.match(actualChecksum)) { |
| - retval = ERROR_NONE; |
| + retval = kEmptyErrorBitfield; |
| } else { |
| - retval = ERROR_IMAGE_MISMATCH; |
| + retval = kImageMismatch_ErrorBitmask; |
| // Write out the "actuals" for any mismatches, if we have |
| // been directed to do so. |
| @@ -662,7 +656,7 @@ |
| Json::Value actualResults; |
| actualResults[kJsonKey_ActualResults_AnyStatus_Checksum] = |
| asJsonValue(actualChecksum); |
| - if (ERROR_NONE == result) { |
| + if (kEmptyErrorBitfield == result) { |
| this->fJsonActualResults_Succeeded[testName] = actualResults; |
| } else { |
| if (ignoreFailure) { |
| @@ -670,13 +664,12 @@ |
| // actual results against expectations in a JSON file |
| // (where we can set ignore-failure to either true or |
| // false), add test cases that exercise ignored |
| - // failures (both for ERROR_READING_REFERENCE_IMAGE |
| - // and ERROR_IMAGE_MISMATCH). |
| + // failures (both for kReadingReferenceImage_ErrorBitmask |
| + // and kImageMismatch_ErrorBitmask). |
| this->fJsonActualResults_FailureIgnored[testName] = |
| actualResults; |
| } else { |
| - switch(result) { |
| - case ERROR_READING_REFERENCE_IMAGE: |
| + if (result & kReadingReferenceImage_ErrorBitmask) { |
| // TODO: What about the case where there IS an |
| // expected image checksum, but that gm test |
| // doesn't actually run? For now, those cases |
| @@ -690,15 +683,9 @@ |
| // is given but the test is never run). |
| this->fJsonActualResults_NoComparison[testName] = |
| actualResults; |
| - break; |
| - case ERROR_IMAGE_MISMATCH: |
| + } |
| + if (result & kImageMismatch_ErrorBitmask) { |
| this->fJsonActualResults_Failed[testName] = actualResults; |
| - break; |
| - default: |
| - fprintf(stderr, "encountered unexpected result %d\n", |
| - result); |
| - SkDEBUGFAIL("encountered unexpected result"); |
| - break; |
| } |
| } |
| } |
| @@ -735,7 +722,7 @@ |
| SkBitmap& actualBitmap, SkDynamicMemoryWStream* pdf) { |
| SkString name = make_name(gm->shortName(), gRec.fName); |
| - ErrorBitfield retval = ERROR_NONE; |
| + ErrorBitfield retval = kEmptyErrorBitfield; |
| ExpectationsSource *expectationsSource = |
| this->fExpectationsSource.get(); |
| @@ -762,7 +749,7 @@ |
| Checksum actualChecksum = |
| SkBitmapChecksummer::Compute64(actualBitmap); |
| add_actual_results_to_json_summary(name.c_str(), actualChecksum, |
| - ERROR_READING_REFERENCE_IMAGE, |
| + kReadingReferenceImage_ErrorBitmask, |
| false); |
| } |
| @@ -862,7 +849,7 @@ |
| // Early exit if we can't generate the image. |
| ErrorBitfield errors = generate_image(gm, gRec, context, rt, bitmap, |
| false); |
| - if (ERROR_NONE != errors) { |
| + if (kEmptyErrorBitfield != errors) { |
| // TODO: Add a test to exercise what the stdout and |
| // JSON look like if we get an "early error" while |
| // trying to generate the image. |
| @@ -895,18 +882,18 @@ |
| // Early exit if we can't generate the image, but this is |
| // expected in some cases, so don't report a test failure. |
| if (!generate_image(gm, gRec, context, rt, &bitmap, true)) { |
| - return ERROR_NONE; |
| + return kEmptyErrorBitfield; |
| } |
| return compare_test_results_to_reference_bitmap( |
| gm, gRec, "-deferred", bitmap, &referenceBitmap); |
| } |
| - return ERROR_NONE; |
| + return kEmptyErrorBitfield; |
| } |
| ErrorBitfield test_pipe_playback(GM* gm, |
| const ConfigData& gRec, |
| const SkBitmap& referenceBitmap) { |
| - ErrorBitfield errors = ERROR_NONE; |
| + ErrorBitfield errors = kEmptyErrorBitfield; |
| for (size_t i = 0; i < SK_ARRAY_COUNT(gPipeWritingFlagCombos); ++i) { |
| SkBitmap bitmap; |
| SkISize size = gm->getISize(); |
| @@ -923,7 +910,7 @@ |
| string.append(gPipeWritingFlagCombos[i].name); |
| errors |= compare_test_results_to_reference_bitmap( |
| gm, gRec, string.c_str(), bitmap, &referenceBitmap); |
| - if (errors != ERROR_NONE) { |
| + if (errors != kEmptyErrorBitfield) { |
| break; |
| } |
| } |
| @@ -932,7 +919,7 @@ |
| ErrorBitfield test_tiled_pipe_playback( |
| GM* gm, const ConfigData& gRec, const SkBitmap& referenceBitmap) { |
| - ErrorBitfield errors = ERROR_NONE; |
| + ErrorBitfield errors = kEmptyErrorBitfield; |
| for (size_t i = 0; i < SK_ARRAY_COUNT(gPipeWritingFlagCombos); ++i) { |
| SkBitmap bitmap; |
| SkISize size = gm->getISize(); |
| @@ -949,7 +936,7 @@ |
| string.append(gPipeWritingFlagCombos[i].name); |
| errors |= compare_test_results_to_reference_bitmap( |
| gm, gRec, string.c_str(), bitmap, &referenceBitmap); |
| - if (errors != ERROR_NONE) { |
| + if (errors != kEmptyErrorBitfield) { |
| break; |
| } |
| } |
| @@ -1464,7 +1451,7 @@ |
| SkDebugf("%sdrawing... %s [%d %d]\n", moduloStr.c_str(), shortName, |
| size.width(), size.height()); |
| - ErrorBitfield testErrors = ERROR_NONE; |
| + ErrorBitfield testErrors = kEmptyErrorBitfield; |
| uint32_t gmFlags = gm->getFlags(); |
| for (int i = 0; i < configs.count(); i++) { |
| @@ -1484,12 +1471,12 @@ |
| // Now we know that we want to run this test and record its |
| // success or failure. |
| - ErrorBitfield renderErrors = ERROR_NONE; |
| + ErrorBitfield renderErrors = kEmptyErrorBitfield; |
| GrRenderTarget* renderTarget = NULL; |
| #if SK_SUPPORT_GPU |
| SkAutoTUnref<GrRenderTarget> rt; |
| AutoResetGr autogr; |
| - if ((ERROR_NONE == renderErrors) && |
| + if ((kEmptyErrorBitfield == renderErrors) && |
| kGPU_Backend == config.fBackend) { |
| GrContext* gr = grFactory->get(config.fGLContextType); |
| bool grSuccess = false; |
| @@ -1512,14 +1499,14 @@ |
| } |
| } |
| if (!grSuccess) { |
| - renderErrors |= ERROR_NO_GPU_CONTEXT; |
| + renderErrors |= kNoGpuContext_ErrorBitmask; |
| } |
| } |
| #endif |
| SkBitmap comparisonBitmap; |
| - if (ERROR_NONE == renderErrors) { |
| + if (kEmptyErrorBitfield == renderErrors) { |
| renderErrors |= gmmain.test_drawing(gm, config, writePath, |
| GetGr(), |
| renderTarget, |
| @@ -1546,13 +1533,13 @@ |
| // run the picture centric GM steps |
| if (!(gmFlags & GM::kSkipPicture_Flag)) { |
| - ErrorBitfield pictErrors = ERROR_NONE; |
| + ErrorBitfield pictErrors = kEmptyErrorBitfield; |
| //SkAutoTUnref<SkPicture> pict(generate_new_picture(gm)); |
| SkPicture* pict = gmmain.generate_new_picture(gm, kNone_BbhType, 0); |
| SkAutoUnref aur(pict); |
| - if ((ERROR_NONE == testErrors) && doReplay) { |
| + if ((kEmptyErrorBitfield == testErrors) && doReplay) { |
| SkBitmap bitmap; |
| gmmain.generate_image_from_picture(gm, compareConfig, pict, |
| &bitmap); |
| @@ -1560,8 +1547,8 @@ |
| gm, compareConfig, "-replay", bitmap, &comparisonBitmap); |
| } |
| - if ((ERROR_NONE == testErrors) && |
| - (ERROR_NONE == pictErrors) && |
| + if ((kEmptyErrorBitfield == testErrors) && |
| + (kEmptyErrorBitfield == pictErrors) && |
| doSerialize) { |
| SkPicture* repict = gmmain.stream_to_new_picture(*pict); |
| SkAutoUnref aurr(repict); |
| @@ -1629,15 +1616,15 @@ |
| // run the pipe centric GM steps |
| if (!(gmFlags & GM::kSkipPipe_Flag)) { |
| - ErrorBitfield pipeErrors = ERROR_NONE; |
| + ErrorBitfield pipeErrors = kEmptyErrorBitfield; |
| - if ((ERROR_NONE == testErrors) && doPipe) { |
| + if ((kEmptyErrorBitfield == testErrors) && doPipe) { |
| pipeErrors |= gmmain.test_pipe_playback(gm, compareConfig, |
| comparisonBitmap); |
| } |
| - if ((ERROR_NONE == testErrors) && |
| - (ERROR_NONE == pipeErrors) && |
| + if ((kEmptyErrorBitfield == testErrors) && |
| + (kEmptyErrorBitfield == pipeErrors) && |
| doTiledPipe && !(gmFlags & GM::kSkipTiled_Flag)) { |
| pipeErrors |= gmmain.test_tiled_pipe_playback(gm, compareConfig, |
| comparisonBitmap); |
| @@ -1652,10 +1639,10 @@ |
| // want to also tabulate other error types, we can do so. |
| testsRun++; |
| if (!gmmain.fExpectationsSource.get() || |
| - (ERROR_READING_REFERENCE_IMAGE & testErrors)) { |
| + (kReadingReferenceImage_ErrorBitmask & testErrors)) { |
| testsMissingReferenceImages++; |
| } |
| - if (ERROR_NONE == testErrors || ERROR_READING_REFERENCE_IMAGE == testErrors) { |
| + if (testErrors == (testErrors & kIgnorable_ErrorBitmask)) { |
| testsPassed++; |
| } else { |
| testsFailed++; |