| OLD | NEW |
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "media/renderers/skcanvas_video_renderer.h" | 5 #include "media/renderers/skcanvas_video_renderer.h" |
| 6 | 6 |
| 7 #include <limits> | 7 #include <limits> |
| 8 | 8 |
| 9 #include "base/macros.h" | 9 #include "base/macros.h" |
| 10 #include "gpu/GLES2/gl2extchromium.h" | 10 #include "gpu/GLES2/gl2extchromium.h" |
| (...skipping 207 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 218 } | 218 } |
| 219 ~VideoImageGenerator() override {} | 219 ~VideoImageGenerator() override {} |
| 220 | 220 |
| 221 protected: | 221 protected: |
| 222 bool onGetPixels(const SkImageInfo& info, | 222 bool onGetPixels(const SkImageInfo& info, |
| 223 void* pixels, | 223 void* pixels, |
| 224 size_t row_bytes, | 224 size_t row_bytes, |
| 225 SkPMColor ctable[], | 225 SkPMColor ctable[], |
| 226 int* ctable_count) override { | 226 int* ctable_count) override { |
| 227 // If skia couldn't do the YUV conversion on GPU, we will do it on CPU. | 227 // If skia couldn't do the YUV conversion on GPU, we will do it on CPU. |
| 228 SkCanvasVideoRenderer::ConvertVideoFrameToRGBPixels(frame_.get(), pixels, | 228 SkCanvasVideoRenderer::ConvertVideoFrameToRGBPixels( |
| 229 row_bytes); | 229 frame_.get(), SkCanvasVideoRenderer::ConvertingSize::VISUAL, pixels, |
| 230 row_bytes); |
| 230 return true; | 231 return true; |
| 231 } | 232 } |
| 232 | 233 |
| 233 bool onQueryYUV8(SkYUVSizeInfo* sizeInfo, | 234 bool onQueryYUV8(SkYUVSizeInfo* sizeInfo, |
| 234 SkYUVColorSpace* color_space) const override { | 235 SkYUVColorSpace* color_space) const override { |
| 235 if (!media::IsYuvPlanar(frame_->format()) || | 236 if (!media::IsYuvPlanar(frame_->format()) || |
| 236 // TODO(rileya): Skia currently doesn't support YUVA conversion. Remove | 237 // TODO(rileya): Skia currently doesn't support YUVA conversion. Remove |
| 237 // this case once it does. As-is we will fall back on the pure-software | 238 // this case once it does. As-is we will fall back on the pure-software |
| 238 // path in this case. | 239 // path in this case. |
| 239 frame_->format() == PIXEL_FORMAT_YV12A) { | 240 frame_->format() == PIXEL_FORMAT_YV12A) { |
| (...skipping 275 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 515 for (int x = 0; x < width; x++) { | 516 for (int x = 0; x < width; x++) { |
| 516 dst[x] = src[x] >> shift; | 517 dst[x] = src[x] >> shift; |
| 517 } | 518 } |
| 518 src += video_frame->stride(plane) / 2; | 519 src += video_frame->stride(plane) / 2; |
| 519 dst += ret->stride(plane); | 520 dst += ret->stride(plane); |
| 520 } | 521 } |
| 521 } | 522 } |
| 522 return ret; | 523 return ret; |
| 523 } | 524 } |
| 524 | 525 |
| 526 const uint8_t* FrameData(const VideoFrame* video_frame, |
| 527 SkCanvasVideoRenderer::ConvertingSize size_type, |
| 528 size_t plane) { |
| 529 if (size_type == SkCanvasVideoRenderer::ConvertingSize::VISUAL) |
| 530 return video_frame->visible_data(plane); |
| 531 DCHECK(size_type == SkCanvasVideoRenderer::ConvertingSize::CODED); |
| 532 return video_frame->data(plane); |
| 533 } |
| 534 |
| 525 // We take the upper 8 bits of 16-bit data and convert it as luminance to ARGB. | 535 // We take the upper 8 bits of 16-bit data and convert it as luminance to ARGB. |
| 526 // We lose precision here, but it is important not to render Y16 as RG_88. | 536 // We lose precision here, but it is important not to render Y16 as RG_88. |
| 527 // To get the full precision use float textures with WebGL1 and e.g. R16UI or | 537 // To get the full precision use float textures with WebGL1 and e.g. R16UI or |
| 528 // R32F textures with WebGL2. | 538 // R32F textures with WebGL2. |
| 529 void ConvertY16ToARGB(const VideoFrame* video_frame, | 539 void ConvertY16ToARGB(const VideoFrame* video_frame, |
| 540 SkCanvasVideoRenderer::ConvertingSize size_type, |
| 530 void* argb_pixels, | 541 void* argb_pixels, |
| 531 size_t argb_row_bytes) { | 542 size_t argb_row_bytes) { |
| 532 const uint8_t* row_head = video_frame->visible_data(0); | 543 const uint8_t* row_head = |
| 544 FrameData(video_frame, size_type, VideoFrame::kYPlane); |
| 533 uint8_t* out = static_cast<uint8_t*>(argb_pixels); | 545 uint8_t* out = static_cast<uint8_t*>(argb_pixels); |
| 534 const size_t stride = video_frame->stride(0); | 546 const size_t stride = video_frame->stride(0); |
| 535 for (int i = 0; i < video_frame->visible_rect().height(); ++i) { | 547 gfx::Size frame_size = video_frame->coded_size(); |
| 548 if (size_type == SkCanvasVideoRenderer::ConvertingSize::VISUAL) |
| 549 frame_size = video_frame->visible_rect().size(); |
| 550 for (int i = 0; i < frame_size.height(); ++i) { |
| 536 uint32_t* rgba = reinterpret_cast<uint32_t*>(out); | 551 uint32_t* rgba = reinterpret_cast<uint32_t*>(out); |
| 537 const uint8_t* row_end = row_head + video_frame->visible_rect().width() * 2; | 552 const uint8_t* row_end = row_head + frame_size.width() * 2; |
| 538 for (const uint8_t* row = row_head; row < row_end; ++row) { | 553 for (const uint8_t* row = row_head; row < row_end; ++row) { |
| 539 uint32_t gray_value = *++row; | 554 uint32_t gray_value = *++row; |
| 540 *rgba++ = SkColorSetRGB(gray_value, gray_value, gray_value); | 555 *rgba++ = SkColorSetRGB(gray_value, gray_value, gray_value); |
| 541 } | 556 } |
| 542 out += argb_row_bytes; | 557 out += argb_row_bytes; |
| 543 row_head += stride; | 558 row_head += stride; |
| 544 } | 559 } |
| 545 } | 560 } |
| 546 | 561 |
| 547 } // anonymous namespace | 562 } // namespace |
| 548 | 563 |
| 549 // static | 564 // static |
| 550 void SkCanvasVideoRenderer::ConvertVideoFrameToRGBPixels( | 565 void SkCanvasVideoRenderer::ConvertVideoFrameToRGBPixels( |
| 551 const VideoFrame* video_frame, | 566 const VideoFrame* video_frame, |
| 567 ConvertingSize size_type, |
| 552 void* rgb_pixels, | 568 void* rgb_pixels, |
| 553 size_t row_bytes) { | 569 size_t row_bytes) { |
| 554 if (!video_frame->IsMappable()) { | 570 if (!video_frame->IsMappable()) { |
| 555 NOTREACHED() << "Cannot extract pixels from non-CPU frame formats."; | 571 NOTREACHED() << "Cannot extract pixels from non-CPU frame formats."; |
| 556 return; | 572 return; |
| 557 } | 573 } |
| 558 | 574 |
| 575 gfx::Size frame_size = video_frame->coded_size(); |
| 576 if (size_type == SkCanvasVideoRenderer::ConvertingSize::VISUAL) |
| 577 frame_size = video_frame->visible_rect().size(); |
| 578 |
| 559 switch (video_frame->format()) { | 579 switch (video_frame->format()) { |
| 560 case PIXEL_FORMAT_YV12: | 580 case PIXEL_FORMAT_YV12: |
| 561 case PIXEL_FORMAT_I420: | 581 case PIXEL_FORMAT_I420: |
| 562 if (CheckColorSpace(video_frame, COLOR_SPACE_JPEG)) { | 582 if (CheckColorSpace(video_frame, COLOR_SPACE_JPEG)) { |
| 563 LIBYUV_J420_TO_ARGB(video_frame->visible_data(VideoFrame::kYPlane), | 583 LIBYUV_J420_TO_ARGB( |
| 564 video_frame->stride(VideoFrame::kYPlane), | 584 FrameData(video_frame, size_type, VideoFrame::kYPlane), |
| 565 video_frame->visible_data(VideoFrame::kUPlane), | 585 video_frame->stride(VideoFrame::kYPlane), |
| 566 video_frame->stride(VideoFrame::kUPlane), | 586 FrameData(video_frame, size_type, VideoFrame::kUPlane), |
| 567 video_frame->visible_data(VideoFrame::kVPlane), | 587 video_frame->stride(VideoFrame::kUPlane), |
| 568 video_frame->stride(VideoFrame::kVPlane), | 588 FrameData(video_frame, size_type, VideoFrame::kVPlane), |
| 569 static_cast<uint8_t*>(rgb_pixels), row_bytes, | 589 video_frame->stride(VideoFrame::kVPlane), |
| 570 video_frame->visible_rect().width(), | 590 static_cast<uint8_t*>(rgb_pixels), row_bytes, frame_size.width(), |
| 571 video_frame->visible_rect().height()); | 591 frame_size.height()); |
| 572 } else if (CheckColorSpace(video_frame, COLOR_SPACE_HD_REC709)) { | 592 } else if (CheckColorSpace(video_frame, COLOR_SPACE_HD_REC709)) { |
| 573 LIBYUV_H420_TO_ARGB(video_frame->visible_data(VideoFrame::kYPlane), | 593 LIBYUV_H420_TO_ARGB( |
| 574 video_frame->stride(VideoFrame::kYPlane), | 594 FrameData(video_frame, size_type, VideoFrame::kYPlane), |
| 575 video_frame->visible_data(VideoFrame::kUPlane), | 595 video_frame->stride(VideoFrame::kYPlane), |
| 576 video_frame->stride(VideoFrame::kUPlane), | 596 FrameData(video_frame, size_type, VideoFrame::kUPlane), |
| 577 video_frame->visible_data(VideoFrame::kVPlane), | 597 video_frame->stride(VideoFrame::kUPlane), |
| 578 video_frame->stride(VideoFrame::kVPlane), | 598 FrameData(video_frame, size_type, VideoFrame::kVPlane), |
| 579 static_cast<uint8_t*>(rgb_pixels), row_bytes, | 599 video_frame->stride(VideoFrame::kVPlane), |
| 580 video_frame->visible_rect().width(), | 600 static_cast<uint8_t*>(rgb_pixels), row_bytes, frame_size.width(), |
| 581 video_frame->visible_rect().height()); | 601 frame_size.height()); |
| 582 } else { | 602 } else { |
| 583 LIBYUV_I420_TO_ARGB(video_frame->visible_data(VideoFrame::kYPlane), | 603 LIBYUV_I420_TO_ARGB( |
| 584 video_frame->stride(VideoFrame::kYPlane), | 604 FrameData(video_frame, size_type, VideoFrame::kYPlane), |
| 585 video_frame->visible_data(VideoFrame::kUPlane), | 605 video_frame->stride(VideoFrame::kYPlane), |
| 586 video_frame->stride(VideoFrame::kUPlane), | 606 FrameData(video_frame, size_type, VideoFrame::kUPlane), |
| 587 video_frame->visible_data(VideoFrame::kVPlane), | 607 video_frame->stride(VideoFrame::kUPlane), |
| 588 video_frame->stride(VideoFrame::kVPlane), | 608 FrameData(video_frame, size_type, VideoFrame::kVPlane), |
| 589 static_cast<uint8_t*>(rgb_pixels), row_bytes, | 609 video_frame->stride(VideoFrame::kVPlane), |
| 590 video_frame->visible_rect().width(), | 610 static_cast<uint8_t*>(rgb_pixels), row_bytes, frame_size.width(), |
| 591 video_frame->visible_rect().height()); | 611 frame_size.height()); |
| 592 } | 612 } |
| 593 break; | 613 break; |
| 594 case PIXEL_FORMAT_YV16: | 614 case PIXEL_FORMAT_YV16: |
| 595 LIBYUV_I422_TO_ARGB(video_frame->visible_data(VideoFrame::kYPlane), | 615 LIBYUV_I422_TO_ARGB( |
| 596 video_frame->stride(VideoFrame::kYPlane), | 616 FrameData(video_frame, size_type, VideoFrame::kYPlane), |
| 597 video_frame->visible_data(VideoFrame::kUPlane), | 617 video_frame->stride(VideoFrame::kYPlane), |
| 598 video_frame->stride(VideoFrame::kUPlane), | 618 FrameData(video_frame, size_type, VideoFrame::kUPlane), |
| 599 video_frame->visible_data(VideoFrame::kVPlane), | 619 video_frame->stride(VideoFrame::kUPlane), |
| 600 video_frame->stride(VideoFrame::kVPlane), | 620 FrameData(video_frame, size_type, VideoFrame::kVPlane), |
| 601 static_cast<uint8_t*>(rgb_pixels), row_bytes, | 621 video_frame->stride(VideoFrame::kVPlane), |
| 602 video_frame->visible_rect().width(), | 622 static_cast<uint8_t*>(rgb_pixels), row_bytes, frame_size.width(), |
| 603 video_frame->visible_rect().height()); | 623 frame_size.height()); |
| 604 break; | 624 break; |
| 605 | 625 |
| 606 case PIXEL_FORMAT_YV12A: | 626 case PIXEL_FORMAT_YV12A: |
| 607 LIBYUV_I420ALPHA_TO_ARGB( | 627 LIBYUV_I420ALPHA_TO_ARGB( |
| 608 video_frame->visible_data(VideoFrame::kYPlane), | 628 FrameData(video_frame, size_type, VideoFrame::kYPlane), |
| 609 video_frame->stride(VideoFrame::kYPlane), | 629 video_frame->stride(VideoFrame::kYPlane), |
| 610 video_frame->visible_data(VideoFrame::kUPlane), | 630 FrameData(video_frame, size_type, VideoFrame::kUPlane), |
| 611 video_frame->stride(VideoFrame::kUPlane), | 631 video_frame->stride(VideoFrame::kUPlane), |
| 612 video_frame->visible_data(VideoFrame::kVPlane), | 632 FrameData(video_frame, size_type, VideoFrame::kVPlane), |
| 613 video_frame->stride(VideoFrame::kVPlane), | 633 video_frame->stride(VideoFrame::kVPlane), |
| 614 video_frame->visible_data(VideoFrame::kAPlane), | 634 FrameData(video_frame, size_type, VideoFrame::kAPlane), |
| 615 video_frame->stride(VideoFrame::kAPlane), | 635 video_frame->stride(VideoFrame::kAPlane), |
| 616 static_cast<uint8_t*>(rgb_pixels), row_bytes, | 636 static_cast<uint8_t*>(rgb_pixels), row_bytes, frame_size.width(), |
| 617 video_frame->visible_rect().width(), | 637 frame_size.height(), |
| 618 video_frame->visible_rect().height(), | |
| 619 1); // 1 = enable RGB premultiplication by Alpha. | 638 1); // 1 = enable RGB premultiplication by Alpha. |
| 620 break; | 639 break; |
| 621 | 640 |
| 622 case PIXEL_FORMAT_YV24: | 641 case PIXEL_FORMAT_YV24: |
| 623 LIBYUV_I444_TO_ARGB(video_frame->visible_data(VideoFrame::kYPlane), | 642 LIBYUV_I444_TO_ARGB( |
| 624 video_frame->stride(VideoFrame::kYPlane), | 643 FrameData(video_frame, size_type, VideoFrame::kYPlane), |
| 625 video_frame->visible_data(VideoFrame::kUPlane), | 644 video_frame->stride(VideoFrame::kYPlane), |
| 626 video_frame->stride(VideoFrame::kUPlane), | 645 FrameData(video_frame, size_type, VideoFrame::kUPlane), |
| 627 video_frame->visible_data(VideoFrame::kVPlane), | 646 video_frame->stride(VideoFrame::kUPlane), |
| 628 video_frame->stride(VideoFrame::kVPlane), | 647 FrameData(video_frame, size_type, VideoFrame::kVPlane), |
| 629 static_cast<uint8_t*>(rgb_pixels), row_bytes, | 648 video_frame->stride(VideoFrame::kVPlane), |
| 630 video_frame->visible_rect().width(), | 649 static_cast<uint8_t*>(rgb_pixels), row_bytes, frame_size.width(), |
| 631 video_frame->visible_rect().height()); | 650 frame_size.height()); |
| 632 break; | 651 break; |
| 633 | 652 |
| 634 case PIXEL_FORMAT_YUV420P9: | 653 case PIXEL_FORMAT_YUV420P9: |
| 635 case PIXEL_FORMAT_YUV422P9: | 654 case PIXEL_FORMAT_YUV422P9: |
| 636 case PIXEL_FORMAT_YUV444P9: | 655 case PIXEL_FORMAT_YUV444P9: |
| 637 case PIXEL_FORMAT_YUV420P10: | 656 case PIXEL_FORMAT_YUV420P10: |
| 638 case PIXEL_FORMAT_YUV422P10: | 657 case PIXEL_FORMAT_YUV422P10: |
| 639 case PIXEL_FORMAT_YUV444P10: | 658 case PIXEL_FORMAT_YUV444P10: |
| 640 case PIXEL_FORMAT_YUV420P12: | 659 case PIXEL_FORMAT_YUV420P12: |
| 641 case PIXEL_FORMAT_YUV422P12: | 660 case PIXEL_FORMAT_YUV422P12: |
| 642 case PIXEL_FORMAT_YUV444P12: { | 661 case PIXEL_FORMAT_YUV444P12: { |
| 643 scoped_refptr<VideoFrame> temporary_frame = | 662 scoped_refptr<VideoFrame> temporary_frame = |
| 644 DownShiftHighbitVideoFrame(video_frame); | 663 DownShiftHighbitVideoFrame(video_frame); |
| 645 ConvertVideoFrameToRGBPixels(temporary_frame.get(), rgb_pixels, | 664 ConvertVideoFrameToRGBPixels(temporary_frame.get(), size_type, rgb_pixels, |
| 646 row_bytes); | 665 row_bytes); |
| 647 break; | 666 break; |
| 648 } | 667 } |
| 649 | 668 |
| 650 case PIXEL_FORMAT_Y16: | 669 case PIXEL_FORMAT_Y16: |
| 651 ConvertY16ToARGB(video_frame, rgb_pixels, row_bytes); | 670 ConvertY16ToARGB(video_frame, size_type, rgb_pixels, row_bytes); |
| 652 break; | 671 break; |
| 653 | 672 |
| 654 case PIXEL_FORMAT_NV12: | 673 case PIXEL_FORMAT_NV12: |
| 655 case PIXEL_FORMAT_NV21: | 674 case PIXEL_FORMAT_NV21: |
| 656 case PIXEL_FORMAT_UYVY: | 675 case PIXEL_FORMAT_UYVY: |
| 657 case PIXEL_FORMAT_YUY2: | 676 case PIXEL_FORMAT_YUY2: |
| 658 case PIXEL_FORMAT_ARGB: | 677 case PIXEL_FORMAT_ARGB: |
| 659 case PIXEL_FORMAT_XRGB: | 678 case PIXEL_FORMAT_XRGB: |
| 660 case PIXEL_FORMAT_RGB24: | 679 case PIXEL_FORMAT_RGB24: |
| 661 case PIXEL_FORMAT_RGB32: | 680 case PIXEL_FORMAT_RGB32: |
| (...skipping 160 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 822 last_image_->bounds().contains(visible_rect)) { | 841 last_image_->bounds().contains(visible_rect)) { |
| 823 last_image_ = last_image_->makeSubset(visible_rect); | 842 last_image_ = last_image_->makeSubset(visible_rect); |
| 824 } | 843 } |
| 825 } | 844 } |
| 826 | 845 |
| 827 SkISize SkCanvasVideoRenderer::LastImageDimensionsForTesting() { | 846 SkISize SkCanvasVideoRenderer::LastImageDimensionsForTesting() { |
| 828 return last_image_dimensions_for_testing_; | 847 return last_image_dimensions_for_testing_; |
| 829 } | 848 } |
| 830 | 849 |
| 831 } // namespace media | 850 } // namespace media |
| OLD | NEW |