Index: cc/resources/media/skcanvas_video_renderer.cc |
diff --git a/cc/resources/media/skcanvas_video_renderer.cc b/cc/resources/media/skcanvas_video_renderer.cc |
new file mode 100644 |
index 0000000000000000000000000000000000000000..3a89a98576344ffb9d8657af78fc7e321283a84b |
--- /dev/null |
+++ b/cc/resources/media/skcanvas_video_renderer.cc |
@@ -0,0 +1,380 @@ |
+// Copyright (c) 2012 The Chromium Authors. All rights reserved. |
+// Use of this source code is governed by a BSD-style license that can be |
+// found in the LICENSE file. |
+ |
+#include "cc/resources/media/skcanvas_video_renderer.h" |
+ |
+#include "base/logging.h" |
+#include "gpu/GLES2/gl2extchromium.h" |
+#include "gpu/command_buffer/client/gles2_interface.h" |
+#include "gpu/command_buffer/common/mailbox_holder.h" |
+#include "media/base/video_frame.h" |
+#include "media/base/yuv_convert.h" |
+#include "skia/ext/refptr.h" |
+#include "third_party/libyuv/include/libyuv.h" |
+#include "third_party/skia/include/core/SkCanvas.h" |
+#include "third_party/skia/include/gpu/GrContext.h" |
+#include "third_party/skia/include/gpu/SkGrPixelRef.h" |
+ |
+// Skia internal format depends on a platform. On Android it is ABGR, on others |
+// it is ARGB. |
+#if SK_B32_SHIFT == 0 && SK_G32_SHIFT == 8 && SK_R32_SHIFT == 16 && \ |
+ SK_A32_SHIFT == 24 |
+#define LIBYUV_I420_TO_ARGB libyuv::I420ToARGB |
+#define LIBYUV_I422_TO_ARGB libyuv::I422ToARGB |
+#elif SK_R32_SHIFT == 0 && SK_G32_SHIFT == 8 && SK_B32_SHIFT == 16 && \ |
+ SK_A32_SHIFT == 24 |
+#define LIBYUV_I420_TO_ARGB libyuv::I420ToABGR |
+#define LIBYUV_I422_TO_ARGB libyuv::I422ToABGR |
[Inline review comment — dshwang, 2014/08/06 14:10:59]
Now cc depends on third_party/libyuv directly.
However… (remainder of the comment truncated in this capture)
+#else |
+#error Unexpected Skia ARGB_8888 layout! |
+#endif |
+ |
+namespace cc { |
+ |
+namespace { |
+ |
+bool IsYUV(media::VideoFrame::Format format) { |
+ return format == media::VideoFrame::YV12 || |
+ format == media::VideoFrame::YV16 || |
+ format == media::VideoFrame::I420 || |
+ format == media::VideoFrame::YV12A || |
+ format == media::VideoFrame::YV12J || |
+ format == media::VideoFrame::YV24; |
+} |
+ |
+bool IsYUVOrNative(media::VideoFrame::Format format) { |
+ return IsYUV(format) || format == media::VideoFrame::NATIVE_TEXTURE; |
+} |
+ |
// Converts a VideoFrame containing YUV data to a SkBitmap containing RGB data.
//
// |bitmap| will be (re)allocated to match the dimensions of |video_frame|.
// Only the formats accepted by IsYUVOrNative() are supported; anything else
// hits NOTREACHED().
void ConvertVideoFrameToBitmap(media::VideoFrame* video_frame,
                               SkBitmap* bitmap) {
  DCHECK(IsYUVOrNative(video_frame->format())) << video_frame->format();
  if (IsYUV(video_frame->format())) {
    // The conversion calls below pass a single stride for both chroma planes,
    // so the U and V strides must be identical.
    DCHECK_EQ(video_frame->stride(media::VideoFrame::kUPlane),
              video_frame->stride(media::VideoFrame::kVPlane));
  }

  // Check if |bitmap| needs to be (re)allocated.
  if (bitmap->isNull() ||
      bitmap->width() != video_frame->visible_rect().width() ||
      bitmap->height() != video_frame->visible_rect().height()) {
    bitmap->allocN32Pixels(video_frame->visible_rect().width(),
                           video_frame->visible_rect().height());
    // Volatile: the pixels are rewritten every frame, so Skia should not
    // cache derived data (e.g. texture uploads) for them.
    bitmap->setIsVolatile(true);
  }

  bitmap->lockPixels();

  size_t y_offset = 0;
  size_t uv_offset = 0;
  if (IsYUV(video_frame->format())) {
    // Chroma rows are halved vertically for 4:2:0 formats but not for YV16
    // (4:2:2), hence the conditional shift.
    int y_shift = (video_frame->format() == media::VideoFrame::YV16) ? 0 : 1;
    // Use the "left" and "top" of the destination rect to locate the offset
    // in Y, U and V planes.
    y_offset = (video_frame->stride(media::VideoFrame::kYPlane) *
                video_frame->visible_rect().y()) +
               video_frame->visible_rect().x();
    // For format YV12, there is one U, V value per 2x2 block.
    // For format YV16, there is one U, V value per 2x1 block.
    uv_offset = (video_frame->stride(media::VideoFrame::kUPlane) *
                 (video_frame->visible_rect().y() >> y_shift)) +
                (video_frame->visible_rect().x() >> 1);
  }

  switch (video_frame->format()) {
    case media::VideoFrame::YV12:
    case media::VideoFrame::I420:
      // LIBYUV_I420_TO_ARGB is #defined at the top of this file to the libyuv
      // routine that emits Skia's native N32 byte order for this platform.
      LIBYUV_I420_TO_ARGB(
          video_frame->data(media::VideoFrame::kYPlane) + y_offset,
          video_frame->stride(media::VideoFrame::kYPlane),
          video_frame->data(media::VideoFrame::kUPlane) + uv_offset,
          video_frame->stride(media::VideoFrame::kUPlane),
          video_frame->data(media::VideoFrame::kVPlane) + uv_offset,
          video_frame->stride(media::VideoFrame::kVPlane),
          static_cast<uint8*>(bitmap->getPixels()),
          bitmap->rowBytes(),
          video_frame->visible_rect().width(),
          video_frame->visible_rect().height());
      break;

    case media::VideoFrame::YV12J:
      // YV12J goes through media's converter with an explicit YV12J type tag;
      // presumably this selects different color-range coefficients than the
      // libyuv path above — confirm against media::ConvertYUVToRGB32.
      media::ConvertYUVToRGB32(
          video_frame->data(media::VideoFrame::kYPlane) + y_offset,
          video_frame->data(media::VideoFrame::kUPlane) + uv_offset,
          video_frame->data(media::VideoFrame::kVPlane) + uv_offset,
          static_cast<uint8*>(bitmap->getPixels()),
          video_frame->visible_rect().width(),
          video_frame->visible_rect().height(),
          video_frame->stride(media::VideoFrame::kYPlane),
          video_frame->stride(media::VideoFrame::kUPlane),
          bitmap->rowBytes(),
          media::YV12J);
      break;

    case media::VideoFrame::YV16:
      LIBYUV_I422_TO_ARGB(
          video_frame->data(media::VideoFrame::kYPlane) + y_offset,
          video_frame->stride(media::VideoFrame::kYPlane),
          video_frame->data(media::VideoFrame::kUPlane) + uv_offset,
          video_frame->stride(media::VideoFrame::kUPlane),
          video_frame->data(media::VideoFrame::kVPlane) + uv_offset,
          video_frame->stride(media::VideoFrame::kVPlane),
          static_cast<uint8*>(bitmap->getPixels()),
          bitmap->rowBytes(),
          video_frame->visible_rect().width(),
          video_frame->visible_rect().height());
      break;

    case media::VideoFrame::YV12A:
      // Since libyuv doesn't support YUVA, fallback to media, which is not ARM
      // optimized.
      // TODO(fbarchard, mtomasz): Use libyuv, then copy the alpha channel.
      media::ConvertYUVAToARGB(
          video_frame->data(media::VideoFrame::kYPlane) + y_offset,
          video_frame->data(media::VideoFrame::kUPlane) + uv_offset,
          video_frame->data(media::VideoFrame::kVPlane) + uv_offset,
          video_frame->data(media::VideoFrame::kAPlane),
          static_cast<uint8*>(bitmap->getPixels()),
          video_frame->visible_rect().width(),
          video_frame->visible_rect().height(),
          video_frame->stride(media::VideoFrame::kYPlane),
          video_frame->stride(media::VideoFrame::kUPlane),
          video_frame->stride(media::VideoFrame::kAPlane),
          bitmap->rowBytes(),
          media::YV12);
      break;

    case media::VideoFrame::YV24:
      libyuv::I444ToARGB(
          video_frame->data(media::VideoFrame::kYPlane) + y_offset,
          video_frame->stride(media::VideoFrame::kYPlane),
          video_frame->data(media::VideoFrame::kUPlane) + uv_offset,
          video_frame->stride(media::VideoFrame::kUPlane),
          video_frame->data(media::VideoFrame::kVPlane) + uv_offset,
          video_frame->stride(media::VideoFrame::kVPlane),
          static_cast<uint8*>(bitmap->getPixels()),
          bitmap->rowBytes(),
          video_frame->visible_rect().width(),
          video_frame->visible_rect().height());
// libyuv has no I444ToABGR, so on platforms where Skia's N32 layout is
// R/G/B/A (the second #if branch at the top of this file) the ARGB output
// is swizzled in place as a second pass.
#if SK_R32_SHIFT == 0 && SK_G32_SHIFT == 8 && SK_B32_SHIFT == 16 && \
    SK_A32_SHIFT == 24
      libyuv::ARGBToABGR(static_cast<uint8*>(bitmap->getPixels()),
                         bitmap->rowBytes(),
                         static_cast<uint8*>(bitmap->getPixels()),
                         bitmap->rowBytes(),
                         video_frame->visible_rect().width(),
                         video_frame->visible_rect().height());
#endif
      break;

    case media::VideoFrame::NATIVE_TEXTURE:
      // Software readback path for texture-backed frames (the accelerated
      // path lives in ConvertVideoFrameToTexture).
      DCHECK_EQ(video_frame->format(), media::VideoFrame::NATIVE_TEXTURE);
      video_frame->ReadPixelsFromNativeTexture(*bitmap);
      break;

    default:
      NOTREACHED();
      break;
  }
  // Pixels were written behind Skia's back; invalidate any cached state.
  bitmap->notifyPixelsChanged();
  bitmap->unlockPixels();
}
+ |
// Ensures |bitmap| is backed by a GPU texture of |size| with the given
// |origin| and pixel |config|, reusing the existing texture when it already
// matches. Returns false if |gr| is null or texture allocation fails.
bool EnsureTextureBackedSkBitmap(GrContext* gr,
                                 SkBitmap* bitmap,
                                 const gfx::Size& size,
                                 GrSurfaceOrigin origin,
                                 GrPixelConfig config) {
  // Reallocate only when the bitmap is not texture-backed or the size changed.
  if (!bitmap->getTexture() || bitmap->width() != size.width() ||
      bitmap->height() != size.height()) {
    if (!gr)
      return false;
    GrTextureDesc desc;
    desc.fConfig = config;
    // Render-target so it can be a CopyTextureCHROMIUM destination; no
    // stencil buffer is needed for a plain copy.
    desc.fFlags = kRenderTarget_GrTextureFlagBit | kNoStencil_GrTextureFlagBit;
    desc.fSampleCnt = 0;
    desc.fOrigin = origin;
    desc.fWidth = size.width();
    desc.fHeight = size.height();
    skia::RefPtr<GrTexture> texture;
    // Uncached: this texture is owned by the bitmap, not Skia's texture cache.
    texture = skia::AdoptRef(gr->createUncachedTexture(desc, 0, 0));
    if (!texture.get())
      return false;

    SkImageInfo info = SkImageInfo::MakeN32Premul(desc.fWidth, desc.fHeight);
    SkGrPixelRef* pixelRef = SkNEW_ARGS(SkGrPixelRef, (info, texture.get()));
    if (!pixelRef)
      return false;
    bitmap->setInfo(info);
    // setPixelRef refs; balance with unref so the bitmap holds the only ref.
    bitmap->setPixelRef(pixelRef)->unref();
  }

  return true;
}
+ |
// Copies a NATIVE_TEXTURE |video_frame| into a texture-backed |bitmap| using
// |context_provider|'s GL context. Returns false if a texture-backed bitmap
// could not be (re)allocated.
bool ConvertVideoFrameToTexture(media::VideoFrame* video_frame,
                                SkBitmap* bitmap,
                                ContextProvider* context_provider) {
  DCHECK(context_provider &&
         video_frame->format() == media::VideoFrame::NATIVE_TEXTURE);
  gpu::gles2::GLES2Interface* gl = context_provider->ContextGL();
  DCHECK(gl);

  // Check if we could reuse existing texture based bitmap.
  // Otherwise, release existing texture based bitmap and allocate
  // a new one based on video size.
  if (!EnsureTextureBackedSkBitmap(context_provider->GrContext(),
                                   bitmap,
                                   video_frame->visible_rect().size(),
                                   kTopLeft_GrSurfaceOrigin,
                                   kSkia8888_GrPixelConfig)) {
    return false;
  }

  unsigned textureId =
      static_cast<unsigned>((bitmap->getTexture())->getTextureHandle());
  // premultiply_alpha=true because the bitmap is N32 *premul*; flip_y=false
  // to keep the frame's intrinsic orientation.
  SkCanvasVideoRenderer::CopyVideoFrameToTexture(
      gl, video_frame, textureId, 0, GL_RGBA, GL_UNSIGNED_BYTE, true, false);
  return true;
}
+ |
+class SyncPointClientImpl : public media::VideoFrame::SyncPointClient { |
+ public: |
+ explicit SyncPointClientImpl(gpu::gles2::GLES2Interface* gl) : gl_(gl) {} |
+ virtual ~SyncPointClientImpl() {} |
+ virtual uint32 InsertSyncPoint() OVERRIDE { |
+ return gl_->InsertSyncPointCHROMIUM(); |
+ } |
+ virtual void WaitSyncPoint(uint32 sync_point) OVERRIDE { |
+ gl_->WaitSyncPointCHROMIUM(sync_point); |
+ } |
+ |
+ private: |
+ gpu::gles2::GLES2Interface* gl_; |
+}; |
+ |
+} // anonymous namespace |
+ |
+SkCanvasVideoRenderer::SkCanvasVideoRenderer() |
+ : last_frame_timestamp_(media::kNoTimestamp()), |
+ accelerated_last_frame_timestamp_(media::kNoTimestamp()) { |
+} |
+ |
// The cached SkBitmap members release their pixel refs (including any
// GPU-texture backing) via their own destructors.
SkCanvasVideoRenderer::~SkCanvasVideoRenderer() {
}
+ |
// Paints |video_frame| scaled into |dest_rect| on |canvas| with the given
// |alpha|. Uses a GPU copy when the frame is texture-backed and
// |context_provider| plus a Ganesh context are available; otherwise converts
// on the CPU. Both paths cache the converted frame keyed by its timestamp.
void SkCanvasVideoRenderer::Paint(media::VideoFrame* video_frame,
                                  SkCanvas* canvas,
                                  const gfx::RectF& dest_rect,
                                  uint8 alpha,
                                  ContextProvider* context_provider) {
  // Fully transparent: nothing would be visible, skip all work.
  if (alpha == 0) {
    return;
  }

  SkRect dest;
  dest.set(dest_rect.x(), dest_rect.y(), dest_rect.right(), dest_rect.bottom());

  SkPaint paint;
  paint.setAlpha(alpha);

  // Paint black rectangle if there isn't a frame available or the
  // frame has an unexpected format.
  if (!video_frame || video_frame->natural_size().IsEmpty() ||
      !IsYUVOrNative(video_frame->format())) {
    canvas->drawRect(dest, paint);
    return;
  }

  bool accelerated = false;
  if (context_provider &&
      video_frame->format() == media::VideoFrame::NATIVE_TEXTURE &&
      canvas->getGrContext()) {
    // TODO(dshwang): Android video decoder doesn't update the timestamp on a
    // VideoFrame. To reduce redundant copy, Android should update the
    // timestamp.
    // A zero timestamp therefore forces a re-copy on every paint rather than
    // trusting the stale cache entry.
    if (video_frame->timestamp() != accelerated_last_frame_timestamp_ ||
        video_frame->timestamp() == base::TimeDelta()) {
      accelerated = ConvertVideoFrameToTexture(
          video_frame, &accelerated_last_frame_, context_provider);
      if (accelerated) {
        accelerated_last_frame_timestamp_ = video_frame->timestamp();
      }
    } else {
      // Cache hit: the texture-backed bitmap already holds this frame.
      DCHECK(accelerated_last_frame_.getTexture());
      accelerated = true;
    }
  }

  // Check if we should convert and update |last_frame_|.
  if (!accelerated && video_frame->timestamp() != last_frame_timestamp_) {
    ConvertVideoFrameToBitmap(video_frame, &last_frame_);
    last_frame_timestamp_ = video_frame->timestamp();
  }

  canvas->drawBitmapRect(
      accelerated ? accelerated_last_frame_ : last_frame_, NULL, dest, &paint);

  // Drop whichever cached bitmap has gone unused too long.
  CleanUpTemporaryBuffers();
}
+ |
+// If a buffer is not used by 3 sec, remove it. |
+void SkCanvasVideoRenderer::CleanUpTemporaryBuffers() { |
+ static const base::TimeDelta buffer_time = base::TimeDelta::FromSeconds(3); |
+ base::TimeDelta last_timestamp = |
+ accelerated_last_frame_timestamp_ > last_frame_timestamp_ |
+ ? accelerated_last_frame_timestamp_ |
+ : last_frame_timestamp_; |
+ if (last_timestamp > last_frame_timestamp_ + buffer_time && |
+ !last_frame_.isNull()) |
+ last_frame_.reset(); |
+ if (last_timestamp > accelerated_last_frame_timestamp_ + buffer_time && |
+ !accelerated_last_frame_.isNull()) |
+ accelerated_last_frame_.reset(); |
+} |
+ |
// static
// Copies the texture behind a NATIVE_TEXTURE |video_frame| into |texture|
// using CopyTextureCHROMIUM on |gl|, honoring the requested premultiply and
// flip options, then hands the frame a release sync point so the producer
// knows when the source texture is safe to reuse.
void SkCanvasVideoRenderer::CopyVideoFrameToTexture(
    gpu::gles2::GLES2Interface* gl,
    media::VideoFrame* video_frame,
    unsigned int texture,
    unsigned int level,
    unsigned int internal_format,
    unsigned int type,
    bool premultiply_alpha,
    bool flip_y) {
  DCHECK(video_frame &&
         video_frame->format() == media::VideoFrame::NATIVE_TEXTURE);
  const gpu::MailboxHolder* mailbox_holder = video_frame->mailbox_holder();
  DCHECK(mailbox_holder->texture_target == GL_TEXTURE_2D ||
         mailbox_holder->texture_target == GL_TEXTURE_EXTERNAL_OES);

  // Wait until the producer's commands affecting the mailbox texture have
  // been processed before consuming it in this context.
  gl->WaitSyncPointCHROMIUM(mailbox_holder->sync_point);
  uint32 source_texture = gl->CreateAndConsumeTextureCHROMIUM(
      mailbox_holder->texture_target, mailbox_holder->mailbox.name);

  // The video is stored in a unmultiplied format, so premultiply
  // if necessary.
  gl->PixelStorei(GL_UNPACK_PREMULTIPLY_ALPHA_CHROMIUM, premultiply_alpha);
  // Application itself needs to take care of setting the right flip_y
  // value down to get the expected result.
  // flip_y==true means to reverse the video orientation while
  // flip_y==false means to keep the intrinsic orientation.
  gl->PixelStorei(GL_UNPACK_FLIP_Y_CHROMIUM, flip_y);
  gl->CopyTextureCHROMIUM(
      GL_TEXTURE_2D, source_texture, texture, level, internal_format, type);
  // Restore the unpack state so later uploads on this context are unaffected.
  gl->PixelStorei(GL_UNPACK_FLIP_Y_CHROMIUM, false);
  gl->PixelStorei(GL_UNPACK_PREMULTIPLY_ALPHA_CHROMIUM, false);

  gl->DeleteTextures(1, &source_texture);
  gl->Flush();

  // Give the frame a new release sync point so the producer can tell when
  // this copy has completed and the source texture may be recycled.
  SyncPointClientImpl client(gl);
  video_frame->UpdateReleaseSyncPoint(&client);
}
+ |
+} // namespace cc |