OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/renderers/skcanvas_video_renderer.h" | 5 #include "media/renderers/skcanvas_video_renderer.h" |
6 | 6 |
| 7 #include <GLES3/gl3.h> |
7 #include <limits> | 8 #include <limits> |
8 | 9 |
9 #include "base/macros.h" | 10 #include "base/macros.h" |
10 #include "gpu/GLES2/gl2extchromium.h" | 11 #include "gpu/GLES2/gl2extchromium.h" |
11 #include "gpu/command_buffer/client/gles2_interface.h" | 12 #include "gpu/command_buffer/client/gles2_interface.h" |
12 #include "gpu/command_buffer/common/mailbox_holder.h" | 13 #include "gpu/command_buffer/common/mailbox_holder.h" |
| 14 #include "media/base/data_buffer.h" |
13 #include "media/base/video_frame.h" | 15 #include "media/base/video_frame.h" |
14 #include "media/base/yuv_convert.h" | 16 #include "media/base/yuv_convert.h" |
15 #include "skia/ext/texture_handle.h" | 17 #include "skia/ext/texture_handle.h" |
16 #include "third_party/libyuv/include/libyuv.h" | 18 #include "third_party/libyuv/include/libyuv.h" |
17 #include "third_party/skia/include/core/SkCanvas.h" | 19 #include "third_party/skia/include/core/SkCanvas.h" |
18 #include "third_party/skia/include/core/SkImage.h" | 20 #include "third_party/skia/include/core/SkImage.h" |
19 #include "third_party/skia/include/core/SkImageGenerator.h" | 21 #include "third_party/skia/include/core/SkImageGenerator.h" |
20 #include "third_party/skia/include/gpu/GrContext.h" | 22 #include "third_party/skia/include/gpu/GrContext.h" |
21 #include "third_party/skia/include/gpu/GrPaint.h" | 23 #include "third_party/skia/include/gpu/GrPaint.h" |
22 #include "third_party/skia/include/gpu/GrTexture.h" | 24 #include "third_party/skia/include/gpu/GrTexture.h" |
(...skipping 317 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
340 return; | 342 return; |
341 } | 343 } |
342 | 344 |
343 SkRect dest; | 345 SkRect dest; |
344 dest.set(dest_rect.x(), dest_rect.y(), dest_rect.right(), dest_rect.bottom()); | 346 dest.set(dest_rect.x(), dest_rect.y(), dest_rect.right(), dest_rect.bottom()); |
345 | 347 |
346 // Paint black rectangle if there isn't a frame available or the | 348 // Paint black rectangle if there isn't a frame available or the |
347 // frame has an unexpected format. | 349 // frame has an unexpected format. |
348 if (!video_frame.get() || video_frame->natural_size().IsEmpty() || | 350 if (!video_frame.get() || video_frame->natural_size().IsEmpty() || |
349 !(media::IsYuvPlanar(video_frame->format()) || | 351 !(media::IsYuvPlanar(video_frame->format()) || |
| 352 video_frame->format() == media::PIXEL_FORMAT_Y16 || |
350 video_frame->HasTextures())) { | 353 video_frame->HasTextures())) { |
351 SkPaint blackWithAlphaPaint; | 354 SkPaint blackWithAlphaPaint; |
352 blackWithAlphaPaint.setAlpha(paint.getAlpha()); | 355 blackWithAlphaPaint.setAlpha(paint.getAlpha()); |
353 canvas->drawRect(dest, blackWithAlphaPaint); | 356 canvas->drawRect(dest, blackWithAlphaPaint); |
354 canvas->flush(); | 357 canvas->flush(); |
355 return; | 358 return; |
356 } | 359 } |
357 | 360 |
358 gpu::gles2::GLES2Interface* gl = context_3d.gl; | 361 gpu::gles2::GLES2Interface* gl = context_3d.gl; |
359 if (!UpdateLastImage(video_frame, context_3d)) | 362 if (!UpdateLastImage(video_frame, context_3d)) |
(...skipping 153 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
513 for (int row = 0; row < video_frame->rows(plane); row++) { | 516 for (int row = 0; row < video_frame->rows(plane); row++) { |
514 for (int x = 0; x < width; x++) { | 517 for (int x = 0; x < width; x++) { |
515 dst[x] = src[x] >> shift; | 518 dst[x] = src[x] >> shift; |
516 } | 519 } |
517 src += video_frame->stride(plane) / 2; | 520 src += video_frame->stride(plane) / 2; |
518 dst += ret->stride(plane); | 521 dst += ret->stride(plane); |
519 } | 522 } |
520 } | 523 } |
521 return ret; | 524 return ret; |
522 } | 525 } |
| 526 |
| 527 void ConvertY16ToARGB(const VideoFrame* video_frame, |
| 528 void* argb_pixels, |
| 529 size_t argb_row_bytes) { |
| 530 const uint8_t* source = |
| 531 reinterpret_cast<const uint8_t*>(video_frame->visible_data(0)); |
| 532 uint8_t* out = reinterpret_cast<uint8_t*>(argb_pixels); |
| 533 const size_t stride = video_frame->stride(0); |
| 534 for (int i = 0; i < video_frame->visible_rect().height(); ++i) { |
| 535 const uint8_t* row = source; |
| 536 uint32_t* rgba = reinterpret_cast<uint32_t*>(out); |
| 537 for (const uint8_t* row_end = row + video_frame->row_bytes(0); |
| 538 row < row_end; ++row) { |
| 539 // We loose the precision here and take only upper 8 bits of 16 bit data. |
| 540 // It is important not to render Y16 as RG_88. To get the full precision |
| 541 // use float textures with WebGL1 and e.g. R16UI or R32F textures with |
| 542 // WebGL2. |
| 543 uint32_t green = *++row; |
| 544 *rgba++ = SkColorSetARGB(0xFF, green, green, green); |
| 545 } |
| 546 out += argb_row_bytes; |
| 547 source += stride; |
| 548 } |
| 549 } |
| 550 |
| 551 void FlipAndConvertY16(const uint8_t* input, |
| 552 uint8_t* output, |
| 553 unsigned format, |
| 554 unsigned type, |
| 555 bool flip_y, |
| 556 size_t row_bytes, |
| 557 size_t stride, |
| 558 size_t output_row_bytes, |
| 559 size_t height) { |
| 560 DCHECK(input != output); |
| 561 for (size_t i = 0; i < height; ++i) { |
| 562 const uint16_t* in = reinterpret_cast<const uint16_t*>(input + i * stride); |
| 563 uint8_t* out = flip_y ? output + output_row_bytes * (height - i - 1) |
| 564 : output + output_row_bytes * i; |
| 565 if (type == GL_FLOAT) { |
| 566 float* out_row = reinterpret_cast<float*>(out); |
| 567 const uint16_t* in_end = in + row_bytes / 2; |
| 568 if (format == GL_RGBA) { |
| 569 while (in < in_end) { |
| 570 float red = *in++ / 65536.f; |
| 571 *out_row++ = red; |
| 572 *out_row++ = red; |
| 573 *out_row++ = red; |
| 574 *out_row++ = 1.0f; |
| 575 } |
| 576 } else if (format == GL_RGB) { |
| 577 while (in < in_end) { |
| 578 float red = *in++ / 65536.f; |
| 579 *out_row++ = red; |
| 580 *out_row++ = red; |
| 581 *out_row++ = red; |
| 582 } |
| 583 } else if (type == GL_RED) { |
| 584 while (in < in_end) |
| 585 *out_row++ = *in++ / 65536.f; |
| 586 } else { |
| 587 NOTREACHED(); |
| 588 } |
| 589 } else if ((format == GL_RG && type == GL_UNSIGNED_BYTE) || |
| 590 (format == GL_RED_INTEGER && type == GL_UNSIGNED_SHORT)) { |
| 591 memcpy(out, input + i * stride, row_bytes); |
| 592 } else { |
| 593 NOTREACHED(); |
| 594 } |
| 595 } |
| 596 } |
523 } | 597 } |
524 | 598 |
525 // static | 599 // static |
526 void SkCanvasVideoRenderer::ConvertVideoFrameToRGBPixels( | 600 void SkCanvasVideoRenderer::ConvertVideoFrameToRGBPixels( |
527 const VideoFrame* video_frame, | 601 const VideoFrame* video_frame, |
528 void* rgb_pixels, | 602 void* rgb_pixels, |
529 size_t row_bytes) { | 603 size_t row_bytes) { |
530 if (!video_frame->IsMappable()) { | 604 if (!video_frame->IsMappable()) { |
531 NOTREACHED() << "Cannot extract pixels from non-CPU frame formats."; | 605 NOTREACHED() << "Cannot extract pixels from non-CPU frame formats."; |
532 return; | 606 return; |
533 } | 607 } |
534 if (!media::IsYuvPlanar(video_frame->format())) { | |
535 NOTREACHED() << "Non YUV formats are not supported"; | |
536 return; | |
537 } | |
538 | 608 |
539 switch (video_frame->format()) { | 609 switch (video_frame->format()) { |
540 case PIXEL_FORMAT_YV12: | 610 case PIXEL_FORMAT_YV12: |
541 case PIXEL_FORMAT_I420: | 611 case PIXEL_FORMAT_I420: |
542 if (CheckColorSpace(video_frame, COLOR_SPACE_JPEG)) { | 612 if (CheckColorSpace(video_frame, COLOR_SPACE_JPEG)) { |
543 LIBYUV_J420_TO_ARGB(video_frame->visible_data(VideoFrame::kYPlane), | 613 LIBYUV_J420_TO_ARGB(video_frame->visible_data(VideoFrame::kYPlane), |
544 video_frame->stride(VideoFrame::kYPlane), | 614 video_frame->stride(VideoFrame::kYPlane), |
545 video_frame->visible_data(VideoFrame::kUPlane), | 615 video_frame->visible_data(VideoFrame::kUPlane), |
546 video_frame->stride(VideoFrame::kUPlane), | 616 video_frame->stride(VideoFrame::kUPlane), |
547 video_frame->visible_data(VideoFrame::kVPlane), | 617 video_frame->visible_data(VideoFrame::kVPlane), |
(...skipping 72 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
620 case PIXEL_FORMAT_YUV420P12: | 690 case PIXEL_FORMAT_YUV420P12: |
621 case PIXEL_FORMAT_YUV422P12: | 691 case PIXEL_FORMAT_YUV422P12: |
622 case PIXEL_FORMAT_YUV444P12: { | 692 case PIXEL_FORMAT_YUV444P12: { |
623 scoped_refptr<VideoFrame> temporary_frame = | 693 scoped_refptr<VideoFrame> temporary_frame = |
624 DownShiftHighbitVideoFrame(video_frame); | 694 DownShiftHighbitVideoFrame(video_frame); |
625 ConvertVideoFrameToRGBPixels(temporary_frame.get(), rgb_pixels, | 695 ConvertVideoFrameToRGBPixels(temporary_frame.get(), rgb_pixels, |
626 row_bytes); | 696 row_bytes); |
627 break; | 697 break; |
628 } | 698 } |
629 | 699 |
| 700 case PIXEL_FORMAT_Y16: |
| 701 ConvertY16ToARGB(video_frame, rgb_pixels, row_bytes); |
| 702 break; |
| 703 |
630 case PIXEL_FORMAT_NV12: | 704 case PIXEL_FORMAT_NV12: |
631 case PIXEL_FORMAT_NV21: | 705 case PIXEL_FORMAT_NV21: |
632 case PIXEL_FORMAT_UYVY: | 706 case PIXEL_FORMAT_UYVY: |
633 case PIXEL_FORMAT_YUY2: | 707 case PIXEL_FORMAT_YUY2: |
634 case PIXEL_FORMAT_ARGB: | 708 case PIXEL_FORMAT_ARGB: |
635 case PIXEL_FORMAT_XRGB: | 709 case PIXEL_FORMAT_XRGB: |
636 case PIXEL_FORMAT_RGB24: | 710 case PIXEL_FORMAT_RGB24: |
637 case PIXEL_FORMAT_RGB32: | 711 case PIXEL_FORMAT_RGB32: |
638 case PIXEL_FORMAT_MJPEG: | 712 case PIXEL_FORMAT_MJPEG: |
639 case PIXEL_FORMAT_MT21: | 713 case PIXEL_FORMAT_MT21: |
640 // TODO(dshwang): Use either I400ToARGB or J400ToARGB depending if we want | |
641 // BT.601 constrained range of 16 to 240, or JPEG full range BT.601 | |
642 // coefficients. Implement it when Y8/16 foramt is supported. | |
643 // crbug.com/624436 | |
644 case PIXEL_FORMAT_Y8: | 714 case PIXEL_FORMAT_Y8: |
645 case PIXEL_FORMAT_Y16: | |
646 case PIXEL_FORMAT_UNKNOWN: | 715 case PIXEL_FORMAT_UNKNOWN: |
647 NOTREACHED(); | 716 NOTREACHED() << "Only YUV formats and Y16 are supported."; |
648 } | 717 } |
649 } | 718 } |
650 | 719 |
651 // static | 720 // static |
652 void SkCanvasVideoRenderer::CopyVideoFrameSingleTextureToGLTexture( | 721 void SkCanvasVideoRenderer::CopyVideoFrameSingleTextureToGLTexture( |
653 gpu::gles2::GLES2Interface* gl, | 722 gpu::gles2::GLES2Interface* gl, |
654 VideoFrame* video_frame, | 723 VideoFrame* video_frame, |
655 unsigned int texture, | 724 unsigned int texture, |
656 unsigned int internal_format, | 725 unsigned int internal_format, |
657 unsigned int type, | 726 unsigned int type, |
(...skipping 84 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
742 dest_sync_token.GetData()); | 811 dest_sync_token.GetData()); |
743 canvas_gl->WaitSyncTokenCHROMIUM(dest_sync_token.GetConstData()); | 812 canvas_gl->WaitSyncTokenCHROMIUM(dest_sync_token.GetConstData()); |
744 | 813 |
745 SyncTokenClientImpl client(canvas_gl); | 814 SyncTokenClientImpl client(canvas_gl); |
746 video_frame->UpdateReleaseSyncToken(&client); | 815 video_frame->UpdateReleaseSyncToken(&client); |
747 } else { | 816 } else { |
748 CopyVideoFrameSingleTextureToGLTexture(destination_gl, video_frame.get(), | 817 CopyVideoFrameSingleTextureToGLTexture(destination_gl, video_frame.get(), |
749 texture, internal_format, type, | 818 texture, internal_format, type, |
750 premultiply_alpha, flip_y); | 819 premultiply_alpha, flip_y); |
751 } | 820 } |
752 | |
753 return true; | 821 return true; |
754 } | 822 } |
755 | 823 |
// Software-converts a CPU-mapped frame into the GL layout requested by
// |format|/|type| and uploads it via the GL call named by |functionID|
// ("texImage2D", "texSubImage2D" or "texSubImage3D"). Returns false when the
// frame format / GL format-type combination is not supported, so the caller
// can fall back to another path. Currently only PIXEL_FORMAT_Y16 sources with
// GL_RGBA/GL_RGB + GL_FLOAT destinations are accepted.
bool SkCanvasVideoRenderer::TexImageImpl(const char* functionID,
                                         unsigned target,
                                         gpu::gles2::GLES2Interface* gl,
                                         VideoFrame* frame,
                                         int level,
                                         int internalformat,
                                         unsigned format,
                                         unsigned type,
                                         int xoffset,
                                         int yoffset,
                                         int zoffset,
                                         bool flip_y,
                                         bool premultiplyAlpha) {
  DCHECK(frame);
  DCHECK(!frame->HasTextures());

  // No supported output carries meaningful alpha today (GL_RGBA gets a
  // constant 1.0f alpha), so this stays false on every accepted path.
  bool has_alpha = false;
  unsigned output_bytes_per_pixel;
  switch (frame->format()) {
    case PIXEL_FORMAT_Y16:
      // Allow reinterpreting RG8 buffer here as R component in FLOAT.
      switch (format) {
        case GL_RGBA:
          if (type == GL_FLOAT) {
            output_bytes_per_pixel = 4 * sizeof(GLfloat);
            break;
          }
          // Pass through.
        case GL_RGB:
          if (type == GL_FLOAT) {
            output_bytes_per_pixel = 3 * sizeof(GLfloat);
            break;
          }
          // Pass through.
        default:
          return false;
      }
      break;
    default:
      return false;
  }
  // Y16 plane 0 is 16 bits per pixel; the DCHECK guards the /8 truncation.
  unsigned source_bytes_per_pixel =
      VideoFrame::PlaneBitsPerPixel(frame->format(), 0) / 8;
  DCHECK_EQ(VideoFrame::PlaneBitsPerPixel(frame->format(), 0) % 8, 0);

  if (has_alpha && premultiplyAlpha) {
    NOTREACHED() << "Premultiply alpha is not supported.";
    return false;
  }
  if (xoffset || yoffset || zoffset) {
    NOTREACHED() << "Offsets are not supported.";
    return false;
  }

  uint8_t* data;
  scoped_refptr<DataBuffer> temp_buffer;
  size_t width = frame->visible_rect().width();
  size_t height = frame->visible_rect().height();
  // Scale the source row size by the per-pixel expansion factor to get the
  // tightly-packed destination row size.
  size_t output_row_bytes =
      frame->row_bytes(0) * output_bytes_per_pixel / source_bytes_per_pixel;
  temp_buffer = new DataBuffer(output_row_bytes * height);
  data = temp_buffer->writable_data();
  DCHECK_EQ(frame->format(), PIXEL_FORMAT_Y16);
  FlipAndConvertY16(frame->visible_data(0), data, format, type, flip_y,
                    frame->row_bytes(0), frame->stride(0), output_row_bytes,
                    height);

  if (!strcmp(functionID, "texImage2D")) {
    gl->TexImage2D(target, level, internalformat, width, height, 0, format,
                   type, data);
  } else if (!strcmp(functionID, "texSubImage2D")) {
    gl->TexSubImage2D(target, level, xoffset, yoffset, width, height, format,
                      type, data);
  } else {
    // Offsets were rejected above, so the sub-image uploads start at (0,0,0).
    DCHECK(!strcmp(functionID, "texSubImage3D"));
    gl->TexSubImage3D(target, level, xoffset, yoffset, zoffset, width, height,
                      1, format, type, data);
  }
  return true;
}
| 904 |
// Drops the cached image and its timestamp so the next paint/copy rebuilds
// the image from the current frame instead of reusing a stale one.
void SkCanvasVideoRenderer::ResetCache() {
  DCHECK(thread_checker_.CalledOnValidThread());
  // Clear cached values.
  last_image_ = nullptr;
  last_timestamp_ = kNoTimestamp;
}
762 | 911 |
763 bool SkCanvasVideoRenderer::UpdateLastImage( | 912 bool SkCanvasVideoRenderer::UpdateLastImage( |
764 const scoped_refptr<VideoFrame>& video_frame, | 913 const scoped_refptr<VideoFrame>& video_frame, |
765 const Context3D& context_3d) { | 914 const Context3D& context_3d) { |
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
803 last_image_->bounds().contains(visible_rect)) { | 952 last_image_->bounds().contains(visible_rect)) { |
804 last_image_ = last_image_->makeSubset(visible_rect); | 953 last_image_ = last_image_->makeSubset(visible_rect); |
805 } | 954 } |
806 } | 955 } |
807 | 956 |
// Test-only accessor: dimensions of the most recently produced image.
SkISize SkCanvasVideoRenderer::LastImageDimensionsForTesting() {
  return last_image_dimensions_for_testing_;
}
811 | 960 |
812 } // namespace media | 961 } // namespace media |
OLD | NEW |