Index: dm/DMSrcSink.cpp
diff --git a/dm/DMSrcSink.cpp b/dm/DMSrcSink.cpp
index e27628ad043aeedc9676dfd1a00187c843ad2fa8..73b076fe18a6f77b333fe3d4205f3115ef58bbfd 100644
--- a/dm/DMSrcSink.cpp
+++ b/dm/DMSrcSink.cpp
@@ -240,9 +240,16 @@ CodecSrc::CodecSrc(Path path, Mode mode, DstColorType dstColorType, float scale)
 {}

 bool CodecSrc::veto(SinkFlags flags) const {
+    if (kYUV_Mode == fMode) {
+        // YUV mode does not support scaling or non-canvas color types.
+        if (CodecSrc::kGetFromCanvas_DstColorType != fDstColorType || 1.0f != fScale) {
    scroggo (2016/01/13 21:04:54): For the DstColorType, can we just limit creating C…
    msarett (2016/01/15 18:57:34): Done.
+            return true;
+        }
+
+        return flags.type != SinkFlags::kGPU;
+    }
+
     // No need to test decoding to non-raster or indirect backend.
-    // TODO: Once we implement GPU paths (e.g. JPEG YUV), we should use a deferred decode to
-    // let the GPU handle it.
     return flags.type != SinkFlags::kRaster
         || flags.approach != SinkFlags::kDirect;
 }
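
The scroggo/msarett thread embedded in this hunk asks whether the DstColorType restriction could be enforced when the CodecSrcs are created, rather than vetoed here. A minimal sketch of such a caller-side guard is shown below; push_codec_src and its parameter list are assumed from the DM.cpp of this era, and push_codec_src_checked is a hypothetical wrapper, not part of this patch.

    // Hypothetical guard at the registration site (names assumed, not in this patch):
    // only register kYUV_Mode runs for the native color type at full scale, so that
    // veto() never has to reject a mismatched combination.
    static void push_codec_src_checked(Path path, CodecSrc::Mode mode,
                                       CodecSrc::DstColorType dstColorType, float scale) {
        if (CodecSrc::kYUV_Mode == mode &&
                (CodecSrc::kGetFromCanvas_DstColorType != dstColorType || 1.0f != scale)) {
            return;  // skip combinations YUV mode cannot satisfy
        }
        push_codec_src(path, mode, dstColorType, scale);
    }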
@@ -274,6 +281,36 @@ bool get_decode_info(SkImageInfo* decodeInfo, const SkImageInfo& defaultInfo,
     return true;
 }

+Error test_yuv(SkCanvas* canvas, SkCodec* codec) {
+    SkAutoTDelete<SkCodec> deleter(codec);
+
+    SkCodec::YUVPlanesSizes sizes;
+    SkCodec::YUVPlanesWidthBytes widthBytes;
+    SkYUVColorSpace colorSpace;
+    if (!codec->queryYUV8(&sizes, &widthBytes, &colorSpace)) {
+        return Error::Nonfatal("YUV not supported.");
+    }
+
+    const size_t totalBytes = sizes.YSize.height() * widthBytes.YWidthBytes +
+                              sizes.USize.height() * widthBytes.UWidthBytes +
+                              sizes.VSize.height() * widthBytes.VWidthBytes;
+    SkAutoMalloc storage(totalBytes);
+    void* planes[3];
+    planes[0] = storage.get();
+    planes[1] = SkTAddOffset<void>(planes[0], sizes.YSize.height() * widthBytes.YWidthBytes);
+    planes[2] = SkTAddOffset<void>(planes[1], sizes.USize.height() * widthBytes.UWidthBytes);
+
+    switch (codec->getYUV8Planes(&sizes, planes, &widthBytes)) {
+        case SkCodec::kSuccess:
+        case SkCodec::kIncompleteInput: {
+            /* How do we draw this to canvas? */
+            return "";
+        }
+        default:
+            return SkStringPrintf("Couldn't getYUV8Planes.");
+    }
+}
+
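
The kSuccess branch above stops at an open question. One possible placeholder, offered only as a sketch and not as what this patch settles on, is to wrap the Y plane in a grayscale bitmap and draw that, so the success path at least produces comparable output; drawing only the Y plane, kGray_8_SkColorType, and the YSize.width() accessor are assumptions here.

    // Sketch only: visualize the Y plane as grayscale so the success path draws
    // something. Assumes a kGray_8 raster is acceptable output for this config.
    SkImageInfo yInfo = SkImageInfo::Make(sizes.YSize.width(), sizes.YSize.height(),
                                          kGray_8_SkColorType, kOpaque_SkAlphaType);
    SkBitmap bitmap;
    if (!bitmap.installPixels(yInfo, planes[0], widthBytes.YWidthBytes)) {
        return "Could not wrap Y plane in a bitmap.";
    }
    canvas->drawBitmap(bitmap, 0, 0);
    return "";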
 Error CodecSrc::draw(SkCanvas* canvas) const {
     SkAutoTUnref<SkData> encoded(SkData::NewFromFileName(fPath.c_str()));
     if (!encoded) {
@@ -284,6 +321,12 @@ Error CodecSrc::draw(SkCanvas* canvas) const {
         return SkStringPrintf("Couldn't create codec for %s.", fPath.c_str());
     }

+    // The YUV test does not share much code with the other tests, so we will handle
+    // it in its own function.
+    if (kYUV_Mode == fMode) {
+        return test_yuv(canvas, codec.detach());
    scroggo (2016/01/13 21:04:54): Alternatively, you could let this method delete th…
    msarett (2016/01/15 18:57:35): Done.
+    }
+
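
The thread above concerns who deletes the codec: in this snapshot, draw() detaches and test_yuv adopts the raw pointer with SkAutoTDelete. Purely as a sketch of the alternative ownership shape under discussion, and as a hypothetical reading of the truncated comment rather than what the patch does, the helper could instead take the owning wrapper by reference so no detach()/re-wrap pair is needed:

    // Hypothetical alternative (not this patch): hand the owning wrapper to the
    // helper, so the codec is still destroyed when draw()'s local goes out of scope.
    static Error test_yuv(SkCanvas* canvas, SkAutoTDelete<SkCodec>& codec) {
        SkCodec::YUVPlanesSizes sizes;
        SkCodec::YUVPlanesWidthBytes widthBytes;
        SkYUVColorSpace colorSpace;
        if (!codec->queryYUV8(&sizes, &widthBytes, &colorSpace)) {
            return Error::Nonfatal("YUV not supported.");
        }
        // ... remainder identical to the version above ...
        return "";
    }

    // Call site in CodecSrc::draw():
    //     if (kYUV_Mode == fMode) {
    //         return test_yuv(canvas, codec);
    //     }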
     SkImageInfo decodeInfo;
     if (!get_decode_info(&decodeInfo, codec->getInfo(), canvas->imageInfo().colorType(),
                          fDstColorType)) {
@@ -501,6 +544,9 @@ Error CodecSrc::draw(SkCanvas* canvas) const {
             }
             return "";
         }
+        default:
+            SkASSERT(false);
+            return "Invalid fMode";
     }
     return "";
 }
@@ -533,8 +579,6 @@ AndroidCodecSrc::AndroidCodecSrc(Path path, Mode mode, CodecSrc::DstColorType ds

 bool AndroidCodecSrc::veto(SinkFlags flags) const {
     // No need to test decoding to non-raster or indirect backend.
-    // TODO: Once we implement GPU paths (e.g. JPEG YUV), we should use a deferred decode to
-    // let the GPU handle it.
     return flags.type != SinkFlags::kRaster
         || flags.approach != SinkFlags::kDirect;
 }