| OLD | NEW |
| (Empty) |
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 #include "media/filters/skcanvas_video_renderer.h" | |
| 6 | |
| 7 #include "gpu/GLES2/gl2extchromium.h" | |
| 8 #include "gpu/command_buffer/client/gles2_interface.h" | |
| 9 #include "gpu/command_buffer/common/mailbox_holder.h" | |
| 10 #include "media/base/video_frame.h" | |
| 11 #include "media/base/yuv_convert.h" | |
| 12 #include "skia/ext/refptr.h" | |
| 13 #include "third_party/libyuv/include/libyuv.h" | |
| 14 #include "third_party/skia/include/core/SkCanvas.h" | |
| 15 #include "third_party/skia/include/core/SkImageGenerator.h" | |
| 16 #include "third_party/skia/include/gpu/GrContext.h" | |
| 17 #include "third_party/skia/include/gpu/SkGrPixelRef.h" | |
| 18 #include "ui/gfx/skbitmap_operations.h" | |
| 19 | |
| 20 // Skia internal format depends on a platform. On Android it is ABGR, on others | |
| 21 // it is ARGB. | |
| 22 #if SK_B32_SHIFT == 0 && SK_G32_SHIFT == 8 && SK_R32_SHIFT == 16 && \ | |
| 23 SK_A32_SHIFT == 24 | |
| 24 #define LIBYUV_I420_TO_ARGB libyuv::I420ToARGB | |
| 25 #define LIBYUV_I422_TO_ARGB libyuv::I422ToARGB | |
| 26 #elif SK_R32_SHIFT == 0 && SK_G32_SHIFT == 8 && SK_B32_SHIFT == 16 && \ | |
| 27 SK_A32_SHIFT == 24 | |
| 28 #define LIBYUV_I420_TO_ARGB libyuv::I420ToABGR | |
| 29 #define LIBYUV_I422_TO_ARGB libyuv::I422ToABGR | |
| 30 #else | |
| 31 #error Unexpected Skia ARGB_8888 layout! | |
| 32 #endif | |
| 33 | |
| 34 namespace media { | |
| 35 | |
| 36 namespace { | |
| 37 | |
// This class keeps two temporary resources: a software bitmap and a hardware
// bitmap. If both bitmaps are created and then only the software bitmap is
// updated every frame, the hardware bitmap would outlive until the media
// player dies. So we delete a temporary resource if it is not used for 3 sec.
const int kTemporaryResourceDeletionDelay = 3;  // Seconds.
| 43 | |
| 44 bool IsYUV(media::VideoFrame::Format format) { | |
| 45 switch (format) { | |
| 46 case VideoFrame::YV12: | |
| 47 case VideoFrame::YV16: | |
| 48 case VideoFrame::I420: | |
| 49 case VideoFrame::YV12A: | |
| 50 case VideoFrame::YV12J: | |
| 51 case VideoFrame::YV12HD: | |
| 52 case VideoFrame::YV24: | |
| 53 case VideoFrame::NV12: | |
| 54 return true; | |
| 55 case VideoFrame::UNKNOWN: | |
| 56 case VideoFrame::NATIVE_TEXTURE: | |
| 57 #if defined(VIDEO_HOLE) | |
| 58 case VideoFrame::HOLE: | |
| 59 #endif // defined(VIDEO_HOLE) | |
| 60 case VideoFrame::ARGB: | |
| 61 return false; | |
| 62 } | |
| 63 NOTREACHED() << "Invalid videoframe format provided: " << format; | |
| 64 return false; | |
| 65 } | |
| 66 | |
| 67 bool IsJPEGColorSpace(media::VideoFrame::Format format) { | |
| 68 switch (format) { | |
| 69 case VideoFrame::YV12J: | |
| 70 return true; | |
| 71 case VideoFrame::YV12: | |
| 72 case VideoFrame::YV12HD: | |
| 73 case VideoFrame::YV16: | |
| 74 case VideoFrame::I420: | |
| 75 case VideoFrame::YV12A: | |
| 76 case VideoFrame::YV24: | |
| 77 case VideoFrame::NV12: | |
| 78 case VideoFrame::UNKNOWN: | |
| 79 case VideoFrame::NATIVE_TEXTURE: | |
| 80 #if defined(VIDEO_HOLE) | |
| 81 case VideoFrame::HOLE: | |
| 82 #endif // defined(VIDEO_HOLE) | |
| 83 case VideoFrame::ARGB: | |
| 84 return false; | |
| 85 } | |
| 86 NOTREACHED() << "Invalid videoframe format provided: " << format; | |
| 87 return false; | |
| 88 } | |
| 89 | |
| 90 bool IsYUVOrNative(media::VideoFrame::Format format) { | |
| 91 return IsYUV(format) || format == media::VideoFrame::NATIVE_TEXTURE; | |
| 92 } | |
| 93 | |
| 94 bool IsSkBitmapProperlySizedTexture(const SkBitmap* bitmap, | |
| 95 const gfx::Size& size) { | |
| 96 return bitmap->getTexture() && bitmap->width() == size.width() && | |
| 97 bitmap->height() == size.height(); | |
| 98 } | |
| 99 | |
// Backs |bitmap| with a freshly acquired GPU render-target texture of the
// given |size|, allocated from |gr|'s scratch-texture pool. Returns false if
// the texture or pixel ref could not be created; on success |bitmap| owns a
// ref on the pixel ref (the local ref is dropped via unref()).
bool AllocateSkBitmapTexture(GrContext* gr,
                             SkBitmap* bitmap,
                             const gfx::Size& size) {
  DCHECK(gr);
  GrTextureDesc desc;
  // Use kRGBA_8888_GrPixelConfig, not kSkia8888_GrPixelConfig, to avoid
  // RGBA to BGRA conversion.
  desc.fConfig = kRGBA_8888_GrPixelConfig;
  desc.fFlags = kRenderTarget_GrTextureFlagBit | kNoStencil_GrTextureFlagBit;
  desc.fSampleCnt = 0;
  desc.fOrigin = kTopLeft_GrSurfaceOrigin;
  desc.fWidth = size.width();
  desc.fHeight = size.height();
  // kExact_ScratchTexMatch: the scratch texture must be exactly |size|, since
  // the bitmap's reported dimensions come straight from this texture.
  skia::RefPtr<GrTexture> texture = skia::AdoptRef(
      gr->refScratchTexture(desc, GrContext::kExact_ScratchTexMatch));
  if (!texture.get())
    return false;

  SkImageInfo info = SkImageInfo::MakeN32Premul(desc.fWidth, desc.fHeight);
  SkGrPixelRef* pixel_ref = SkNEW_ARGS(SkGrPixelRef, (info, texture.get()));
  if (!pixel_ref)
    return false;
  bitmap->setInfo(info);
  // setPixelRef takes its own ref; drop ours so |bitmap| is the sole owner.
  bitmap->setPixelRef(pixel_ref)->unref();
  return true;
}
| 126 | |
// Copies the native texture of |video_frame| into the GPU texture backing
// |bitmap|, (re)allocating the bitmap's texture when its size does not match
// the frame's visible rect. Returns false only if texture allocation fails.
bool CopyVideoFrameTextureToSkBitmapTexture(VideoFrame* video_frame,
                                            SkBitmap* bitmap,
                                            const Context3D& context_3d) {
  // Check if we could reuse existing texture based bitmap.
  // Otherwise, release existing texture based bitmap and allocate
  // a new one based on video size.
  if (!IsSkBitmapProperlySizedTexture(bitmap,
                                      video_frame->visible_rect().size())) {
    if (!AllocateSkBitmapTexture(context_3d.gr_context, bitmap,
                                 video_frame->visible_rect().size())) {
      return false;
    }
  }

  unsigned texture_id =
      static_cast<unsigned>((bitmap->getTexture())->getTextureHandle());
  // If CopyVideoFrameTextureToGLTexture() changes the state of the
  // |texture_id|, it's needed to invalidate the state cached in skia,
  // but currently the state isn't changed.
  SkCanvasVideoRenderer::CopyVideoFrameTextureToGLTexture(
      context_3d.gl, video_frame, texture_id, 0, GL_RGBA, GL_UNSIGNED_BYTE,
      true, false);
  // The texture contents changed behind Skia's back; mark pixels dirty.
  bitmap->notifyPixelsChanged();
  return true;
}
| 152 | |
// Adapts a GLES2Interface to VideoFrame's SyncPointClient interface so a
// frame's release sync point can be inserted/waited on via the given context.
// Does not own |gl|; the caller must keep the interface alive for the
// lifetime of this object.
class SyncPointClientImpl : public VideoFrame::SyncPointClient {
 public:
  explicit SyncPointClientImpl(gpu::gles2::GLES2Interface* gl) : gl_(gl) {}
  ~SyncPointClientImpl() override {}
  uint32 InsertSyncPoint() override { return gl_->InsertSyncPointCHROMIUM(); }
  void WaitSyncPoint(uint32 sync_point) override {
    gl_->WaitSyncPointCHROMIUM(sync_point);
  }

 private:
  gpu::gles2::GLES2Interface* gl_;  // Not owned.

  DISALLOW_IMPLICIT_CONSTRUCTORS(SyncPointClientImpl);
};
| 167 | |
| 168 } // anonymous namespace | |
| 169 | |
// Generates an RGB image from a VideoFrame. Conversion from YUV to RGB is
// done on the GPU when Skia supports it (via onGetYUV8Planes); otherwise Skia
// falls back to onGetPixels, which converts on the CPU.
class VideoImageGenerator : public SkImageGenerator {
 public:
  VideoImageGenerator(const scoped_refptr<VideoFrame>& frame) : frame_(frame) {
    DCHECK(frame_.get());
  }
  ~VideoImageGenerator() override {}

  // Swaps in a new frame (or nullptr to release the current one so the frame
  // is not kept alive past its use — see the caller in Paint()).
  void set_frame(const scoped_refptr<VideoFrame>& frame) { frame_ = frame; }

 protected:
  // Reports the image dimensions as the frame's visible rect, in Skia's
  // native premultiplied N32 format.
  bool onGetInfo(SkImageInfo* info) override {
    info->fWidth = frame_->visible_rect().width();
    info->fHeight = frame_->visible_rect().height();
    info->fColorType = kN32_SkColorType;
    info->fAlphaType = kPremul_SkAlphaType;
    return true;
  }

  // CPU fallback: converts the whole frame to RGB directly into |pixels|.
  bool onGetPixels(const SkImageInfo& info,
                   void* pixels,
                   size_t row_bytes,
                   SkPMColor ctable[],
                   int* ctable_count) override {
    if (!frame_.get())
      return false;
    if (!pixels)
      return false;
    // If skia couldn't do the YUV conversion on GPU, we will on CPU.
    SkCanvasVideoRenderer::ConvertVideoFrameToRGBPixels(
        frame_, pixels, row_bytes);
    return true;
  }

  // GPU path: hands Skia the Y/U/V planes (copied into the supplied buffers)
  // so it can do the YUV->RGB conversion in a shader. Returns false for
  // formats Skia cannot convert, triggering the onGetPixels fallback.
  bool onGetYUV8Planes(SkISize sizes[3],
                       void* planes[3],
                       size_t row_bytes[3],
                       SkYUVColorSpace* color_space) override {
    if (!frame_.get() || !IsYUV(frame_->format()) ||
        // TODO(rileya): Skia currently doesn't support Rec709 YUV conversion,
        // or YUVA conversion. Remove this case once it does. As-is we will
        // fall back on the pure-software path in this case.
        frame_->format() == VideoFrame::YV12HD ||
        frame_->format() == VideoFrame::YV12A) {
      return false;
    }

    if (color_space) {
      if (IsJPEGColorSpace(frame_->format()))
        *color_space = kJPEG_SkYUVColorSpace;
      else
        *color_space = kRec601_SkYUVColorSpace;
    }

    for (int plane = VideoFrame::kYPlane; plane <= VideoFrame::kVPlane;
         ++plane) {
      if (sizes) {
        gfx::Size size;
        size =
            VideoFrame::PlaneSize(frame_->format(),
                                  plane,
                                  gfx::Size(frame_->visible_rect().width(),
                                            frame_->visible_rect().height()));
        sizes[plane].set(size.width(), size.height());
      }
      if (row_bytes && planes) {
        size_t offset;
        // YV16 subsamples chroma horizontally only; all other YUV formats
        // here subsample vertically as well (one chroma row per 2 luma rows).
        int y_shift = (frame_->format() == media::VideoFrame::YV16) ? 0 : 1;
        if (plane == media::VideoFrame::kYPlane) {
          offset = (frame_->stride(media::VideoFrame::kYPlane) *
                    frame_->visible_rect().y()) +
                   frame_->visible_rect().x();
        } else {
          offset = (frame_->stride(media::VideoFrame::kUPlane) *
                    (frame_->visible_rect().y() >> y_shift)) +
                   (frame_->visible_rect().x() >> 1);
        }

        // Copy the frame to the supplied memory.
        // TODO: Find a way (API change?) to avoid this copy.
        char* out_line = static_cast<char*>(planes[plane]);
        int out_line_stride = row_bytes[plane];
        uint8* in_line = frame_->data(plane) + offset;
        int in_line_stride = frame_->stride(plane);
        // NOTE(review): |sizes| is dereferenced here without a null check,
        // while the branch above guards on |sizes| separately — this assumes
        // Skia never passes row_bytes/planes without sizes. Confirm against
        // the SkImageGenerator contract.
        int plane_height = sizes[plane].height();
        if (in_line_stride == out_line_stride) {
          // Matching strides: copy the whole plane in one shot.
          memcpy(out_line, in_line, plane_height * in_line_stride);
        } else {
          // Different line padding so need to copy one line at a time.
          int bytes_to_copy_per_line = out_line_stride < in_line_stride
                                           ? out_line_stride
                                           : in_line_stride;
          for (int line_no = 0; line_no < plane_height; line_no++) {
            memcpy(out_line, in_line, bytes_to_copy_per_line);
            in_line += in_line_stride;
            out_line += out_line_stride;
          }
        }
      }
    }
    return true;
  }

 private:
  scoped_refptr<VideoFrame> frame_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(VideoImageGenerator);
};
| 278 | |
// Both cached bitmaps (software |last_frame_| and accelerated
// |accelerated_last_frame_|) are torn down by their delay timers when unused
// for kTemporaryResourceDeletionDelay seconds; see the constant's comment.
SkCanvasVideoRenderer::SkCanvasVideoRenderer()
    : last_frame_timestamp_(media::kNoTimestamp()),
      frame_deleting_timer_(
          FROM_HERE,
          base::TimeDelta::FromSeconds(kTemporaryResourceDeletionDelay),
          this,
          &SkCanvasVideoRenderer::ResetLastFrame),
      accelerated_generator_(nullptr),
      accelerated_last_frame_timestamp_(media::kNoTimestamp()),
      accelerated_frame_deleting_timer_(
          FROM_HERE,
          base::TimeDelta::FromSeconds(kTemporaryResourceDeletionDelay),
          this,
          &SkCanvasVideoRenderer::ResetAcceleratedLastFrame) {
  // Volatile: tells Skia not to cache derived data for this bitmap, since
  // its pixels are rewritten every frame.
  last_frame_.setIsVolatile(true);
}
| 295 | |
// Members (bitmaps, timers) clean themselves up; nothing explicit to do.
SkCanvasVideoRenderer::~SkCanvasVideoRenderer() {}
| 297 | |
// Paints |video_frame| onto |canvas| at |dest_rect| with the given alpha,
// transfer mode and rotation. Three paths are handled, each with its own
// cached bitmap keyed by frame timestamp:
//   1. HW frame (NATIVE_TEXTURE) -> any canvas: copy the frame's texture into
//      a Ganesh-backed SkBitmap (|accelerated_last_frame_|).
//   2. SW frame -> HW (Ganesh) canvas: wrap the frame in a
//      VideoImageGenerator so Skia converts YUV->RGB on the GPU.
//   3. SW frame -> SW canvas: convert YUV->RGB on the CPU into |last_frame_|.
// Frames with no data or an unpaintable format draw a plain rect instead.
// |context_3d| must carry a valid GL interface and GrContext for path 1.
void SkCanvasVideoRenderer::Paint(const scoped_refptr<VideoFrame>& video_frame,
                                  SkCanvas* canvas,
                                  const gfx::RectF& dest_rect,
                                  uint8 alpha,
                                  SkXfermode::Mode mode,
                                  VideoRotation video_rotation,
                                  const Context3D& context_3d) {
  // Fully transparent: nothing would be visible, so skip all work.
  if (alpha == 0) {
    return;
  }

  SkRect dest;
  dest.set(dest_rect.x(), dest_rect.y(), dest_rect.right(), dest_rect.bottom());

  SkPaint paint;
  paint.setAlpha(alpha);

  // Paint black rectangle if there isn't a frame available or the
  // frame has an unexpected format.
  if (!video_frame.get() || video_frame->natural_size().IsEmpty() ||
      !IsYUVOrNative(video_frame->format())) {
    canvas->drawRect(dest, paint);
    canvas->flush();
    return;
  }

  SkBitmap* target_frame = nullptr;

  if (video_frame->format() == VideoFrame::NATIVE_TEXTURE) {
    // Draw HW Video on both SW and HW Canvas.
    // In SW Canvas case, rely on skia drawing Ganesh SkBitmap on SW SkCanvas.
    if (accelerated_last_frame_.isNull() ||
        video_frame->timestamp() != accelerated_last_frame_timestamp_) {
      DCHECK(context_3d.gl);
      DCHECK(context_3d.gr_context);
      if (accelerated_generator_) {
        // Reset SkBitmap used in SWVideo-to-HWCanvas path.
        accelerated_last_frame_.reset();
        accelerated_generator_ = nullptr;
      }
      if (!CopyVideoFrameTextureToSkBitmapTexture(
              video_frame.get(), &accelerated_last_frame_, context_3d)) {
        NOTREACHED();
        return;
      }
      DCHECK(video_frame->visible_rect().width() ==
                 accelerated_last_frame_.width() &&
             video_frame->visible_rect().height() ==
                 accelerated_last_frame_.height());

      accelerated_last_frame_timestamp_ = video_frame->timestamp();
    }
    target_frame = &accelerated_last_frame_;
    // Keep the cached texture bitmap alive; the timer frees it when idle.
    accelerated_frame_deleting_timer_.Reset();
  } else if (canvas->getGrContext()) {
    DCHECK(video_frame->format() != VideoFrame::NATIVE_TEXTURE);
    if (accelerated_last_frame_.isNull() ||
        video_frame->timestamp() != accelerated_last_frame_timestamp_) {
      // Draw SW Video on HW Canvas.
      if (!accelerated_generator_ && !accelerated_last_frame_.isNull()) {
        // Reset SkBitmap used in HWVideo-to-HWCanvas path.
        accelerated_last_frame_.reset();
      }
      accelerated_generator_ = new VideoImageGenerator(video_frame);

      // Note: This takes ownership of |accelerated_generator_|.
      if (!SkInstallDiscardablePixelRef(accelerated_generator_,
                                        &accelerated_last_frame_)) {
        NOTREACHED();
        return;
      }
      DCHECK(video_frame->visible_rect().width() ==
                 accelerated_last_frame_.width() &&
             video_frame->visible_rect().height() ==
                 accelerated_last_frame_.height());

      accelerated_last_frame_timestamp_ = video_frame->timestamp();
    } else if (accelerated_generator_) {
      // Same timestamp, reuse the generator but point it at the new frame
      // object (set to nullptr again after flush below).
      accelerated_generator_->set_frame(video_frame);
    }
    target_frame = &accelerated_last_frame_;
    accelerated_frame_deleting_timer_.Reset();
  } else {
    // Draw SW Video on SW Canvas.
    DCHECK(video_frame->format() != VideoFrame::NATIVE_TEXTURE);
    if (last_frame_.isNull() ||
        video_frame->timestamp() != last_frame_timestamp_) {
      // Check if |bitmap| needs to be (re)allocated.
      if (last_frame_.isNull() ||
          last_frame_.width() != video_frame->visible_rect().width() ||
          last_frame_.height() != video_frame->visible_rect().height()) {
        last_frame_.allocN32Pixels(video_frame->visible_rect().width(),
                                   video_frame->visible_rect().height());
        last_frame_.setIsVolatile(true);
      }
      last_frame_.lockPixels();
      ConvertVideoFrameToRGBPixels(
          video_frame, last_frame_.getPixels(), last_frame_.rowBytes());
      last_frame_.notifyPixelsChanged();
      last_frame_.unlockPixels();
      last_frame_timestamp_ = video_frame->timestamp();
    }
    target_frame = &last_frame_;
    frame_deleting_timer_.Reset();
  }

  paint.setXfermodeMode(mode);
  paint.setFilterLevel(SkPaint::kLow_FilterLevel);

  // Only set up a canvas transform when scaling, translating or rotating is
  // actually required.
  bool need_transform =
      video_rotation != VIDEO_ROTATION_0 ||
      dest_rect.size() != video_frame->visible_rect().size() ||
      !dest_rect.origin().IsOrigin();
  if (need_transform) {
    canvas->save();
    // Rotate about the center of the destination rect.
    canvas->translate(
        SkFloatToScalar(dest_rect.x() + (dest_rect.width() * 0.5f)),
        SkFloatToScalar(dest_rect.y() + (dest_rect.height() * 0.5f)));
    SkScalar angle = SkFloatToScalar(0.0f);
    switch (video_rotation) {
      case VIDEO_ROTATION_0:
        break;
      case VIDEO_ROTATION_90:
        angle = SkFloatToScalar(90.0f);
        break;
      case VIDEO_ROTATION_180:
        angle = SkFloatToScalar(180.0f);
        break;
      case VIDEO_ROTATION_270:
        angle = SkFloatToScalar(270.0f);
        break;
    }
    canvas->rotate(angle);

    // A 90/270 rotation swaps the roles of width and height for scaling.
    gfx::SizeF rotated_dest_size = dest_rect.size();
    if (video_rotation == VIDEO_ROTATION_90 ||
        video_rotation == VIDEO_ROTATION_270) {
      rotated_dest_size =
          gfx::SizeF(rotated_dest_size.height(), rotated_dest_size.width());
    }
    canvas->scale(
        SkFloatToScalar(rotated_dest_size.width() / target_frame->width()),
        SkFloatToScalar(rotated_dest_size.height() / target_frame->height()));
    canvas->translate(-SkFloatToScalar(target_frame->width() * 0.5f),
                      -SkFloatToScalar(target_frame->height() * 0.5f));
  }
  canvas->drawBitmap(*target_frame, 0, 0, &paint);
  if (need_transform)
    canvas->restore();
  canvas->flush();
  // SkCanvas::flush() causes the generator to generate SkImage, so delete
  // |video_frame| not to be outlived.
  if (canvas->getGrContext() && accelerated_generator_)
    accelerated_generator_->set_frame(nullptr);
}
| 453 | |
// Convenience wrapper: blits |video_frame| 1:1 into |canvas| at the frame's
// visible rect, fully opaque, with kSrc (replace) blending and no rotation.
void SkCanvasVideoRenderer::Copy(const scoped_refptr<VideoFrame>& video_frame,
                                 SkCanvas* canvas,
                                 const Context3D& context_3d) {
  Paint(video_frame, canvas, video_frame->visible_rect(), 0xff,
        SkXfermode::kSrc_Mode, media::VIDEO_ROTATION_0, context_3d);
}
| 460 | |
// static
// CPU conversion of |video_frame|'s visible region into 32-bit RGB at
// |rgb_pixels| with the given |row_bytes| stride. The output channel order
// matches Skia's N32 layout: LIBYUV_I420_TO_ARGB / LIBYUV_I422_TO_ARGB are
// mapped at the top of this file to the libyuv function producing the
// platform's layout (ARGB vs ABGR). Formats libyuv cannot handle (YV12J,
// YV12HD, YV12A) fall back to media's own converters; NATIVE_TEXTURE and
// non-YUV formats must never reach this function.
void SkCanvasVideoRenderer::ConvertVideoFrameToRGBPixels(
    const scoped_refptr<media::VideoFrame>& video_frame,
    void* rgb_pixels,
    size_t row_bytes) {
  DCHECK(IsYUVOrNative(video_frame->format()))
      << video_frame->format();
  if (IsYUV(video_frame->format())) {
    // The converters below share one stride for both chroma planes.
    DCHECK_EQ(video_frame->stride(media::VideoFrame::kUPlane),
              video_frame->stride(media::VideoFrame::kVPlane));
  }

  size_t y_offset = 0;
  size_t uv_offset = 0;
  if (IsYUV(video_frame->format())) {
    int y_shift = (video_frame->format() == media::VideoFrame::YV16) ? 0 : 1;
    // Use the "left" and "top" of the destination rect to locate the offset
    // in Y, U and V planes.
    y_offset = (video_frame->stride(media::VideoFrame::kYPlane) *
                video_frame->visible_rect().y()) +
               video_frame->visible_rect().x();
    // For format YV12, there is one U, V value per 2x2 block.
    // For format YV16, there is one U, V value per 2x1 block.
    uv_offset = (video_frame->stride(media::VideoFrame::kUPlane) *
                 (video_frame->visible_rect().y() >> y_shift)) +
                (video_frame->visible_rect().x() >> 1);
  }

  switch (video_frame->format()) {
    case VideoFrame::YV12:
    case VideoFrame::I420:
      LIBYUV_I420_TO_ARGB(
          video_frame->data(VideoFrame::kYPlane) + y_offset,
          video_frame->stride(VideoFrame::kYPlane),
          video_frame->data(VideoFrame::kUPlane) + uv_offset,
          video_frame->stride(VideoFrame::kUPlane),
          video_frame->data(VideoFrame::kVPlane) + uv_offset,
          video_frame->stride(VideoFrame::kVPlane),
          static_cast<uint8*>(rgb_pixels),
          row_bytes,
          video_frame->visible_rect().width(),
          video_frame->visible_rect().height());
      break;

    // Full-range (JPEG) color space: libyuv has no variant, use media's.
    case VideoFrame::YV12J:
      ConvertYUVToRGB32(
          video_frame->data(VideoFrame::kYPlane) + y_offset,
          video_frame->data(VideoFrame::kUPlane) + uv_offset,
          video_frame->data(VideoFrame::kVPlane) + uv_offset,
          static_cast<uint8*>(rgb_pixels),
          video_frame->visible_rect().width(),
          video_frame->visible_rect().height(),
          video_frame->stride(VideoFrame::kYPlane),
          video_frame->stride(VideoFrame::kUPlane),
          row_bytes,
          YV12J);
      break;

    // Rec709 (HD) color space: likewise handled by media's converter.
    case VideoFrame::YV12HD:
      ConvertYUVToRGB32(
          video_frame->data(VideoFrame::kYPlane) + y_offset,
          video_frame->data(VideoFrame::kUPlane) + uv_offset,
          video_frame->data(VideoFrame::kVPlane) + uv_offset,
          static_cast<uint8*>(rgb_pixels),
          video_frame->visible_rect().width(),
          video_frame->visible_rect().height(),
          video_frame->stride(VideoFrame::kYPlane),
          video_frame->stride(VideoFrame::kUPlane),
          row_bytes,
          YV12HD);
      break;

    case VideoFrame::YV16:
      LIBYUV_I422_TO_ARGB(
          video_frame->data(VideoFrame::kYPlane) + y_offset,
          video_frame->stride(VideoFrame::kYPlane),
          video_frame->data(VideoFrame::kUPlane) + uv_offset,
          video_frame->stride(VideoFrame::kUPlane),
          video_frame->data(VideoFrame::kVPlane) + uv_offset,
          video_frame->stride(VideoFrame::kVPlane),
          static_cast<uint8*>(rgb_pixels),
          row_bytes,
          video_frame->visible_rect().width(),
          video_frame->visible_rect().height());
      break;

    case VideoFrame::YV12A:
      // Since libyuv doesn't support YUVA, fallback to media, which is not ARM
      // optimized.
      // TODO(fbarchard, mtomasz): Use libyuv, then copy the alpha channel.
      ConvertYUVAToARGB(
          video_frame->data(VideoFrame::kYPlane) + y_offset,
          video_frame->data(VideoFrame::kUPlane) + uv_offset,
          video_frame->data(VideoFrame::kVPlane) + uv_offset,
          video_frame->data(VideoFrame::kAPlane),
          static_cast<uint8*>(rgb_pixels),
          video_frame->visible_rect().width(),
          video_frame->visible_rect().height(),
          video_frame->stride(VideoFrame::kYPlane),
          video_frame->stride(VideoFrame::kUPlane),
          video_frame->stride(VideoFrame::kAPlane),
          row_bytes,
          YV12);
      break;

    case VideoFrame::YV24:
      libyuv::I444ToARGB(
          video_frame->data(VideoFrame::kYPlane) + y_offset,
          video_frame->stride(VideoFrame::kYPlane),
          video_frame->data(VideoFrame::kUPlane) + uv_offset,
          video_frame->stride(VideoFrame::kUPlane),
          video_frame->data(VideoFrame::kVPlane) + uv_offset,
          video_frame->stride(VideoFrame::kVPlane),
          static_cast<uint8*>(rgb_pixels),
          row_bytes,
          video_frame->visible_rect().width(),
          video_frame->visible_rect().height());
      // libyuv only provides an ARGB output for I444; on platforms where
      // Skia's N32 layout is ABGR, swap channels in place afterwards.
#if SK_R32_SHIFT == 0 && SK_G32_SHIFT == 8 && SK_B32_SHIFT == 16 && \
    SK_A32_SHIFT == 24
      libyuv::ARGBToABGR(static_cast<uint8*>(rgb_pixels),
                         row_bytes,
                         static_cast<uint8*>(rgb_pixels),
                         row_bytes,
                         video_frame->visible_rect().width(),
                         video_frame->visible_rect().height());
#endif
      break;

    case VideoFrame::NATIVE_TEXTURE:
      NOTREACHED();
      break;
#if defined(VIDEO_HOLE)
    case VideoFrame::HOLE:
#endif  // defined(VIDEO_HOLE)
    case VideoFrame::ARGB:
    case VideoFrame::UNKNOWN:
    case VideoFrame::NV12:
      NOTREACHED();
  }
}
| 601 | |
// static
// Copies the native texture backing |video_frame| into the caller-provided
// GL |texture| via CopyTextureCHROMIUM, converting to |internal_format| /
// |type| with optional alpha premultiply and vertical flip. Waits on the
// frame's producer sync point first, and updates the frame's release sync
// point afterwards so the producer knows when it may reuse the texture.
// Pixel-store state touched here is restored to its default (false) values.
void SkCanvasVideoRenderer::CopyVideoFrameTextureToGLTexture(
    gpu::gles2::GLES2Interface* gl,
    VideoFrame* video_frame,
    unsigned int texture,
    unsigned int level,
    unsigned int internal_format,
    unsigned int type,
    bool premultiply_alpha,
    bool flip_y) {
  DCHECK(video_frame && video_frame->format() == VideoFrame::NATIVE_TEXTURE);
  const gpu::MailboxHolder* mailbox_holder = video_frame->mailbox_holder();
  DCHECK(mailbox_holder->texture_target == GL_TEXTURE_2D ||
         mailbox_holder->texture_target == GL_TEXTURE_RECTANGLE_ARB ||
         mailbox_holder->texture_target == GL_TEXTURE_EXTERNAL_OES);

  // Don't read the texture before the producer's work is complete.
  gl->WaitSyncPointCHROMIUM(mailbox_holder->sync_point);
  uint32 source_texture = gl->CreateAndConsumeTextureCHROMIUM(
      mailbox_holder->texture_target, mailbox_holder->mailbox.name);

  // The video is stored in a unmultiplied format, so premultiply
  // if necessary.
  gl->PixelStorei(GL_UNPACK_PREMULTIPLY_ALPHA_CHROMIUM, premultiply_alpha);
  // Application itself needs to take care of setting the right |flip_y|
  // value down to get the expected result.
  // "flip_y == true" means to reverse the video orientation while
  // "flip_y == false" means to keep the intrinsic orientation.
  gl->PixelStorei(GL_UNPACK_FLIP_Y_CHROMIUM, flip_y);
  gl->CopyTextureCHROMIUM(GL_TEXTURE_2D, source_texture, texture, level,
                          internal_format, type);
  // Restore pixel-store state so later uploads are unaffected.
  gl->PixelStorei(GL_UNPACK_FLIP_Y_CHROMIUM, false);
  gl->PixelStorei(GL_UNPACK_PREMULTIPLY_ALPHA_CHROMIUM, false);

  gl->DeleteTextures(1, &source_texture);
  gl->Flush();

  // Hand the frame a new sync point so the producer can safely recycle it.
  SyncPointClientImpl client(gl);
  video_frame->UpdateReleaseSyncPoint(&client);
}
| 641 | |
| 642 void SkCanvasVideoRenderer::ResetLastFrame() { | |
| 643 last_frame_.reset(); | |
| 644 last_frame_timestamp_ = media::kNoTimestamp(); | |
| 645 } | |
| 646 | |
| 647 void SkCanvasVideoRenderer::ResetAcceleratedLastFrame() { | |
| 648 accelerated_last_frame_.reset(); | |
| 649 accelerated_generator_ = nullptr; | |
| 650 accelerated_last_frame_timestamp_ = media::kNoTimestamp(); | |
| 651 } | |
| 652 | |
| 653 } // namespace media | |
| OLD | NEW |