Index: gm/gmmain.cpp
===================================================================
--- gm/gmmain.cpp (revision 9724)
+++ gm/gmmain.cpp (working copy)
@@ -193,7 +193,8 @@
 public:
     GMMain() : fUseFileHierarchy(false), fWriteChecksumBasedFilenames(false),
                fIgnorableErrorTypes(kDefaultIgnorableErrorTypes),
-               fMismatchPath(NULL), fTestsRun(0), fRenderModesEncountered(1) {}
+               fMismatchPath(NULL), fMissingExpectationsPath(NULL), fTestsRun(0),
[inline review comments on the line above]
epoger  2013/06/21 17:46:44  Patchset 2 adds the new missingExpectationsPath fl
borenet 2013/06/21 18:01:14  So, we put the actuals for GMs with no expectation
epoger  2013/06/21 18:06:46  As you'll see in https://codereview.chromium.org/1
borenet 2013/06/21 18:14:09  I think that's fine, I just can't think of a case

+               fRenderModesEncountered(1) {}
 
     /**
      * Assemble shortNamePlusConfig from (surprise!) shortName and configName.
@@ -767,6 +768,7 @@
      *
      * If fMismatchPath has been set, and there are pixel diffs, then the
      * actual bitmap will be written out to a file within fMismatchPath.
+     * Similarly, when expectations are missing, the actual bitmap is written out within fMissingExpectationsPath.
      *
      * @param expectations what expectations to compare actualBitmap against
      * @param actualBitmapAndDigest the SkBitmap we actually generated, and its GmResultDigest
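
The write-out policy described in the comment above reduces to a small decision: when a test has no expectations at all, the actual image may be dumped under fMissingExpectationsPath; when expectations exist but the pixels differ, it may be dumped under fMismatchPath; on an exact match nothing extra is written. The sketch below is a standalone illustration of that policy, not code from gmmain.cpp: ExpectationsSketch and choose_dump_dir are hypothetical names, and the real Expectations::match() takes the actual digest as an argument (as seen in the next hunk) rather than no arguments.

#include <cstddef>  // for NULL

// Hypothetical stand-in for the Expectations type used by gmmain.cpp,
// reduced to the two questions this policy needs to ask.
struct ExpectationsSketch {
    bool fEmpty;    // no expectations are recorded for this test
    bool fMatches;  // the actual digest matches a recorded expectation
    bool empty() const { return fEmpty; }
    bool match() const { return fMatches; }
};

// Returns the directory the actual bitmap should be copied into, or NULL
// if no extra copy needs to be written. Either path argument may be NULL,
// meaning the corresponding flag was not supplied.
static const char* choose_dump_dir(const ExpectationsSketch& expectations,
                                   const char* mismatchPath,
                                   const char* missingExpectationsPath) {
    if (expectations.empty()) {
        // No expectations yet: save the actual image so it can be reviewed
        // and promoted into a new expectation.
        return missingExpectationsPath;
    }
    if (!expectations.match()) {
        // Expectations exist but the rendered pixels differ: save the actual
        // image for side-by-side comparison against the expected one.
        return mismatchPath;
    }
    return NULL;  // exact match; nothing extra to write
}
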
@@ -795,6 +797,16 @@
         if (expectations.empty()) {
             errors.add(kMissingExpectations_ErrorType);
+
+            // Write out the "actuals" for any tests without expectations, if we have
+            // been directed to do so.
+            if (fMissingExpectationsPath) {
+                SkString path = make_bitmap_filename(fMissingExpectationsPath, shortName,
+                                                     configName, renderModeDescriptor,
+                                                     actualBitmapAndDigest.fDigest);
+                write_bitmap(path, actualBitmapAndDigest.fBitmap);
+            }
+
         } else if (!expectations.match(actualBitmapAndDigest.fDigest)) {
             addToJsonSummary = true;
             // The error mode we record depends on whether this was running
@@ -1178,6 +1190,7 @@
     ErrorCombination fIgnorableErrorTypes;
     const char* fMismatchPath;
+    const char* fMissingExpectationsPath;
     // collection of tests that have failed with each ErrorType
     SkTArray<SkString> fFailedTests[kLast_ErrorType+1];
@@ -1317,6 +1330,8 @@
               "^ and $ requires an exact match\n"
               "If a test does not match any list entry,\n"
               "it is skipped unless some list entry starts with ~");
+DEFINE_string(missingExpectationsPath, "", "Write images for tests without expectations "
+              "into this directory.");
 DEFINE_string(mismatchPath, "", "Write images for tests that failed due to "
               "pixel mismatches into this directory.");
 DEFINE_string(modulo, "", "[--modulo <remainder> <divisor>]: only run tests for which "
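
Both directory flags are consumed the same way later in the patch. As a point of reference, a hypothetical command line for the gm tool built from this file (the output directories are purely illustrative) could be:

    gm --mismatchPath /tmp/gm-mismatch --missingExpectationsPath /tmp/gm-missing

With those flags set, a test whose expectations are absent has its actual image written under /tmp/gm-missing, while a test that fails on pixel differences has its actual image written under /tmp/gm-mismatch.
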
@@ -1799,6 +1814,9 @@
     if (FLAGS_mismatchPath.count() == 1) {
         gmmain.fMismatchPath = FLAGS_mismatchPath[0];
     }
+    if (FLAGS_missingExpectationsPath.count() == 1) {
+        gmmain.fMissingExpectationsPath = FLAGS_missingExpectationsPath[0];
+    }
 
     for (int i = 0; i < FLAGS_config.count(); i++) {
         const char* config = FLAGS_config[i];
@@ -1987,6 +2005,10 @@
     if (NULL != gmmain.fMismatchPath) {
         gm_fprintf(stdout, "writing mismatches to %s\n", gmmain.fMismatchPath);
     }
+    if (NULL != gmmain.fMissingExpectationsPath) {
+        gm_fprintf(stdout, "writing images without expectations to %s\n",
+                   gmmain.fMissingExpectationsPath);
+    }
     if (FLAGS_writePicturePath.count() == 1) {
         gm_fprintf(stdout, "writing pictures to %s\n", FLAGS_writePicturePath[0]);
     }
@@ -2017,6 +2039,12 @@
             return -1;
         }
     }
+    if (NULL != gmmain.fMissingExpectationsPath) {
+        if (!prepare_subdirectories(gmmain.fMissingExpectationsPath, gmmain.fUseFileHierarchy,
+                                    configs)) {
+            return -1;
+        }
+    }
     if (FLAGS_pdfJpegQuality < -1 || FLAGS_pdfJpegQuality > 100) {
         gm_fprintf(stderr, "%s\n", "pdfJpegQuality must be in [-1 .. 100] range.");