Chromium Code Reviews

Unified diff: media/blink/skcanvas_video_renderer.cc

Issue 1154153003: Relanding 1143663007: VideoFrame: Separate Pixel Format from Storage Type. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Added NV12 support in CrOS (created 5 years, 6 months ago)
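The core of this change in one sentence: questions about pixel layout now go through the pixel format, while questions about where the frame's memory lives go through the new storage type. A minimal sketch of the distinction, using only the helpers that appear in this diff (frame stands for any scoped_refptr<VideoFrame>; the variable names are illustrative):

    // Before: a single format enum answered both questions.
    bool drawable_old = IsYUVOrNative(frame->format());  // local helper, removed below

    // After: pixel layout and backing storage are separate queries.
    bool is_yuv       = media::VideoFrame::IsYuvPlanar(frame->format());
    bool is_texture   = frame->storage_type() == media::VideoFrame::STORAGE_TEXTURE;
    bool is_mappable  = media::VideoFrame::IsMappable(frame->storage_type());
    bool drawable_new = is_yuv || is_texture;  // the condition Paint() now uses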
 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "media/blink/skcanvas_video_renderer.h"

 #include "gpu/GLES2/gl2extchromium.h"
 #include "gpu/command_buffer/client/gles2_interface.h"
 #include "gpu/command_buffer/common/mailbox_holder.h"
 #include "media/base/video_frame.h"
(...skipping 24 matching lines...)
 namespace media {

 namespace {

 // This class keeps two temporary resources; software bitmap, hardware bitmap.
 // If both bitmap are created and then only software bitmap is updated every
 // frame, hardware bitmap outlives until the media player dies. So we delete
 // a temporary resource if it is not used for 3 sec.
 const int kTemporaryResourceDeletionDelay = 3;  // Seconds;

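The comment above describes a simple idle-eviction policy: every use of a temporary bitmap restarts a countdown, and the resource is dropped only if it goes unused for kTemporaryResourceDeletionDelay seconds. The class does this with the timer member that appears later in this diff (accelerated_frame_deleting_timer_); purely as an illustration of the idea in plain C++, not the Chromium timer API:

    #include <chrono>

    using Clock = std::chrono::steady_clock;

    struct TemporaryResource {
      Clock::time_point last_use = Clock::now();

      void MarkUsed() { last_use = Clock::now(); }  // call on every paint
      bool Expired() const {                        // poll from a periodic task
        return Clock::now() - last_use >
               std::chrono::seconds(kTemporaryResourceDeletionDelay);
      }
    };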
-bool IsYUV(media::VideoFrame::Format format) {
-  switch (format) {
-    case VideoFrame::YV12:
-    case VideoFrame::YV16:
-    case VideoFrame::I420:
-    case VideoFrame::YV12A:
-    case VideoFrame::YV24:
-    case VideoFrame::NV12:
-      return true;
-    case VideoFrame::UNKNOWN:
-    case VideoFrame::NATIVE_TEXTURE:
-#if defined(VIDEO_HOLE)
-    case VideoFrame::HOLE:
-#endif  // defined(VIDEO_HOLE)
-    case VideoFrame::ARGB:
-      return false;
-  }
-  NOTREACHED() << "Invalid videoframe format provided: " << format;
-  return false;
-}
-
 bool CheckColorSpace(const scoped_refptr<VideoFrame>& video_frame,
                      VideoFrame::ColorSpace color_space) {
   int result;
   return video_frame->metadata()->GetInteger(
              media::VideoFrameMetadata::COLOR_SPACE, &result) &&
          result == color_space;
 }

-bool IsYUVOrNative(media::VideoFrame::Format format) {
-  return IsYUV(format) || format == media::VideoFrame::NATIVE_TEXTURE;
-}
-
 bool IsSkBitmapProperlySizedTexture(const SkBitmap* bitmap,
                                     const gfx::Size& size) {
   return bitmap->getTexture() && bitmap->width() == size.width() &&
          bitmap->height() == size.height();
 }

 bool AllocateSkBitmapTexture(GrContext* gr,
                              SkBitmap* bitmap,
                              const gfx::Size& size) {
   DCHECK(gr);
(...skipping 89 matching lines...)
   // If skia couldn't do the YUV conversion on GPU, we will on CPU.
   SkCanvasVideoRenderer::ConvertVideoFrameToRGBPixels(
       frame_, pixels, row_bytes);
   return kSuccess;
 }

 bool onGetYUV8Planes(SkISize sizes[3],
                      void* planes[3],
                      size_t row_bytes[3],
                      SkYUVColorSpace* color_space) override {
-  if (!frame_.get() || !IsYUV(frame_->format()) ||
+  if (!frame_.get() || !media::VideoFrame::IsYuvPlanar(frame_->format()) ||
       // TODO(rileya): Skia currently doesn't support Rec709 YUV conversion,
       // or YUVA conversion. Remove this case once it does. As-is we will
       // fall back on the pure-software path in this case.
       CheckColorSpace(frame_, VideoFrame::COLOR_SPACE_HD_REC709) ||
       frame_->format() == VideoFrame::YV12A) {
     return false;
   }

   if (color_space) {
     if (CheckColorSpace(frame_, VideoFrame::COLOR_SPACE_JPEG))
(...skipping 89 matching lines...)

   SkRect dest;
   dest.set(dest_rect.x(), dest_rect.y(), dest_rect.right(), dest_rect.bottom());

   SkPaint paint;
   paint.setAlpha(alpha);

   // Paint black rectangle if there isn't a frame available or the
   // frame has an unexpected format.
   if (!video_frame.get() || video_frame->natural_size().IsEmpty() ||
-      !IsYUVOrNative(video_frame->format())) {
+      !(media::VideoFrame::IsYuvPlanar(video_frame->format()) ||
+        (video_frame->storage_type() == media::VideoFrame::STORAGE_TEXTURE))) {
     canvas->drawRect(dest, paint);
     canvas->flush();
     return;
   }

   SkBitmap* target_frame = nullptr;

-  if (video_frame->format() == VideoFrame::NATIVE_TEXTURE) {
+  if (video_frame->storage_type() == VideoFrame::STORAGE_TEXTURE) {
     // Draw HW Video on both SW and HW Canvas.
     // In SW Canvas case, rely on skia drawing Ganesh SkBitmap on SW SkCanvas.
     if (accelerated_last_frame_.isNull() ||
         video_frame->timestamp() != accelerated_last_frame_timestamp_) {
       DCHECK(context_3d.gl);
       DCHECK(context_3d.gr_context);
       if (accelerated_generator_) {
         // Reset SkBitmap used in SWVideo-to-HWCanvas path.
         accelerated_last_frame_.reset();
         accelerated_generator_ = nullptr;
       }
       if (!CopyVideoFrameTextureToSkBitmapTexture(
               video_frame.get(), &accelerated_last_frame_, context_3d)) {
         NOTREACHED();
         return;
       }
       DCHECK(video_frame->visible_rect().width() ==
                  accelerated_last_frame_.width() &&
              video_frame->visible_rect().height() ==
                  accelerated_last_frame_.height());

       accelerated_last_frame_timestamp_ = video_frame->timestamp();
     }
     target_frame = &accelerated_last_frame_;
     accelerated_frame_deleting_timer_.Reset();
   } else if (canvas->getGrContext()) {
-    DCHECK(video_frame->format() != VideoFrame::NATIVE_TEXTURE);
     if (accelerated_last_frame_.isNull() ||
         video_frame->timestamp() != accelerated_last_frame_timestamp_) {
       // Draw SW Video on HW Canvas.
       if (!accelerated_generator_ && !accelerated_last_frame_.isNull()) {
         // Reset SkBitmap used in HWVideo-to-HWCanvas path.
         accelerated_last_frame_.reset();
       }
       accelerated_generator_ = new VideoImageGenerator(video_frame);

       // Note: This takes ownership of |accelerated_generator_|.
       if (!SkInstallDiscardablePixelRef(accelerated_generator_,
                                         &accelerated_last_frame_)) {
         NOTREACHED();
         return;
       }
       DCHECK(video_frame->visible_rect().width() ==
                  accelerated_last_frame_.width() &&
              video_frame->visible_rect().height() ==
                  accelerated_last_frame_.height());

       accelerated_last_frame_timestamp_ = video_frame->timestamp();
     } else if (accelerated_generator_) {
       accelerated_generator_->set_frame(video_frame);
     }
     target_frame = &accelerated_last_frame_;
     accelerated_frame_deleting_timer_.Reset();
   } else {
     // Draw SW Video on SW Canvas.
-    DCHECK(video_frame->format() != VideoFrame::NATIVE_TEXTURE);
+    DCHECK(VideoFrame::IsMappable(video_frame->storage_type()));
     if (last_frame_.isNull() ||
         video_frame->timestamp() != last_frame_timestamp_) {
       // Check if |bitmap| needs to be (re)allocated.
       if (last_frame_.isNull() ||
           last_frame_.width() != video_frame->visible_rect().width() ||
           last_frame_.height() != video_frame->visible_rect().height()) {
         last_frame_.allocN32Pixels(video_frame->visible_rect().width(),
                                    video_frame->visible_rect().height());
         last_frame_.setIsVolatile(true);
       }
(...skipping 63 matching lines...)
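The three branches above are Paint()'s render paths, and the selection is now keyed on where the frame lives rather than on a NATIVE_TEXTURE pseudo-format. A condensed outline of the dispatch, using only calls that appear in the hunk above (error handling and caching details omitted):

    if (video_frame->storage_type() == VideoFrame::STORAGE_TEXTURE) {
      // Hardware frame: copy its texture into a Ganesh-backed SkBitmap.
      CopyVideoFrameTextureToSkBitmapTexture(video_frame.get(),
                                             &accelerated_last_frame_, context_3d);
    } else if (canvas->getGrContext()) {
      // Software frame, hardware canvas: let Skia upload lazily through a
      // discardable pixel ref backed by VideoImageGenerator.
      SkInstallDiscardablePixelRef(new VideoImageGenerator(video_frame),
                                   &accelerated_last_frame_);
    } else {
      // Software frame, software canvas: the frame must be CPU-mappable.
      DCHECK(VideoFrame::IsMappable(video_frame->storage_type()));
      // Fill |last_frame_| via allocN32Pixels() + ConvertVideoFrameToRGBPixels().
    }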
     const Context3D& context_3d) {
   Paint(video_frame, canvas, video_frame->visible_rect(), 0xff,
         SkXfermode::kSrc_Mode, media::VIDEO_ROTATION_0, context_3d);
 }

 // static
 void SkCanvasVideoRenderer::ConvertVideoFrameToRGBPixels(
     const scoped_refptr<media::VideoFrame>& video_frame,
     void* rgb_pixels,
     size_t row_bytes) {
-  DCHECK(IsYUVOrNative(video_frame->format()))
-      << video_frame->format();
-  if (IsYUV(video_frame->format())) {
-    DCHECK_EQ(video_frame->stride(media::VideoFrame::kUPlane),
-              video_frame->stride(media::VideoFrame::kVPlane));
-  }
+  if (!VideoFrame::IsMappable(video_frame->storage_type())) {
+    NOTREACHED() << "Cannot extract pixels from non-CPU frame formats.";
+    return;
+  }
+  if (!media::VideoFrame::IsYuvPlanar(video_frame->format())) {
+    NOTREACHED() << "Non YUV formats are not supported";
+    return;
+  }

-  size_t y_offset = 0;
-  size_t uv_offset = 0;
-  if (IsYUV(video_frame->format())) {
-    int y_shift = (video_frame->format() == media::VideoFrame::YV16) ? 0 : 1;
-    // Use the "left" and "top" of the destination rect to locate the offset
-    // in Y, U and V planes.
-    y_offset = (video_frame->stride(media::VideoFrame::kYPlane) *
-                video_frame->visible_rect().y()) +
-               video_frame->visible_rect().x();
-    // For format YV12, there is one U, V value per 2x2 block.
-    // For format YV16, there is one U, V value per 2x1 block.
-    uv_offset = (video_frame->stride(media::VideoFrame::kUPlane) *
-                 (video_frame->visible_rect().y() >> y_shift)) +
-                (video_frame->visible_rect().x() >> 1);
-  }
+  DCHECK_EQ(video_frame->stride(media::VideoFrame::kUPlane),
+            video_frame->stride(media::VideoFrame::kVPlane));
+
+  const int y_shift =
+      (video_frame->format() == media::VideoFrame::YV16) ? 0 : 1;
+  // Use the "left" and "top" of the destination rect to locate the offset
+  // in Y, U and V planes.
+  const size_t y_offset = (video_frame->stride(media::VideoFrame::kYPlane) *
+                           video_frame->visible_rect().y()) +
+                          video_frame->visible_rect().x();
+  // For format YV12, there is one U, V value per 2x2 block.
+  // For format YV16, there is one U, V value per 2x1 block.
+  const size_t uv_offset = (video_frame->stride(media::VideoFrame::kUPlane) *
+                            (video_frame->visible_rect().y() >> y_shift)) +
+                           (video_frame->visible_rect().x() >> 1);

   switch (video_frame->format()) {
     case VideoFrame::YV12:
     case VideoFrame::I420:
       if (CheckColorSpace(video_frame, VideoFrame::COLOR_SPACE_JPEG)) {
         ConvertYUVToRGB32(
             video_frame->data(VideoFrame::kYPlane) + y_offset,
             video_frame->data(VideoFrame::kUPlane) + uv_offset,
             video_frame->data(VideoFrame::kVPlane) + uv_offset,
             static_cast<uint8*>(rgb_pixels),
(...skipping 76 matching lines...)
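To make the new offset computation concrete, here is the arithmetic for a hypothetical YV12 frame (all numbers chosen for illustration): Y-plane stride 640, U/V-plane stride 320, visible_rect() origin (16, 8). YV12 subsamples chroma 2x2, so y_shift is 1:

    // Hypothetical YV12 frame: stride(kYPlane) = 640, stride(kUPlane) = 320,
    // visible_rect().origin() = (16, 8), y_shift = 1.
    const size_t y_offset  = 640 * 8 + 16;                // 5136 bytes into the Y plane
    const size_t uv_offset = 320 * (8 >> 1) + (16 >> 1);  // 320 * 4 + 8 = 1288 bytes
    // For YV16 (chroma subsampled 2x1) the same origin gives y_shift = 0, so
    // uv_offset = 320 * 8 + (16 >> 1) = 2568 bytes.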
 #if SK_R32_SHIFT == 0 && SK_G32_SHIFT == 8 && SK_B32_SHIFT == 16 && \
     SK_A32_SHIFT == 24
       libyuv::ARGBToABGR(static_cast<uint8*>(rgb_pixels),
                          row_bytes,
                          static_cast<uint8*>(rgb_pixels),
                          row_bytes,
                          video_frame->visible_rect().width(),
                          video_frame->visible_rect().height());
 #endif
       break;
-
-    case VideoFrame::NATIVE_TEXTURE:
-      NOTREACHED();
-      break;
-#if defined(VIDEO_HOLE)
-    case VideoFrame::HOLE:
-#endif  // defined(VIDEO_HOLE)
+#if defined(OS_MACOSX) || defined(OS_CHROMEOS)
+    case VideoFrame::NV12:
+#endif
     case VideoFrame::ARGB:
+    case VideoFrame::XRGB:
     case VideoFrame::UNKNOWN:
-    case VideoFrame::NV12:
       NOTREACHED();
   }
 }

 // static
 void SkCanvasVideoRenderer::CopyVideoFrameTextureToGLTexture(
     gpu::gles2::GLES2Interface* gl,
     VideoFrame* video_frame,
     unsigned int texture,
     unsigned int internal_format,
     unsigned int type,
     bool premultiply_alpha,
     bool flip_y) {
-  DCHECK(video_frame && video_frame->format() == VideoFrame::NATIVE_TEXTURE);
-  DCHECK_EQ(1u, VideoFrame::NumTextures(video_frame->texture_format()));
+  DCHECK(video_frame);
+  DCHECK_EQ(video_frame->storage_type(), VideoFrame::STORAGE_TEXTURE);
+  DCHECK_EQ(1u, VideoFrame::NumPlanes(video_frame->format()));
+
   const gpu::MailboxHolder& mailbox_holder = video_frame->mailbox_holder(0);
   DCHECK(mailbox_holder.texture_target == GL_TEXTURE_2D ||
          mailbox_holder.texture_target == GL_TEXTURE_RECTANGLE_ARB ||
          mailbox_holder.texture_target == GL_TEXTURE_EXTERNAL_OES);

   gl->WaitSyncPointCHROMIUM(mailbox_holder.sync_point);
   uint32 source_texture = gl->CreateAndConsumeTextureCHROMIUM(
       mailbox_holder.texture_target, mailbox_holder.mailbox.name);

   // The video is stored in a unmultiplied format, so premultiply
(...skipping 21 matching lines...)
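For orientation on the helper above: callers hand in a destination GL texture plus the desired format/type, and the renderer consumes the frame's mailbox into a source texture and copies it across (the copy itself is in the elided lines). A hypothetical call site, with the texture id and GL constants chosen purely for illustration:

    // |gl| is a gpu::gles2::GLES2Interface*; |frame| is a texture-backed VideoFrame.
    GLuint dest_texture = 0;
    gl->GenTextures(1, &dest_texture);
    SkCanvasVideoRenderer::CopyVideoFrameTextureToGLTexture(
        gl, frame.get(), dest_texture, GL_RGBA, GL_UNSIGNED_BYTE,
        true /* premultiply_alpha */, false /* flip_y */);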
   last_frame_timestamp_ = media::kNoTimestamp();
 }

 void SkCanvasVideoRenderer::ResetAcceleratedLastFrame() {
   accelerated_last_frame_.reset();
   accelerated_generator_ = nullptr;
   accelerated_last_frame_timestamp_ = media::kNoTimestamp();
 }

 }  // namespace media
