Chromium Code Reviews

Diff: cc/resources/video_resource_updater.cc

Issue 2121043002: 16 bpp video stream capture, render and WebGL usage - RealSense R200 & SR300 support.
Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: rebase (created 4 years, 2 months ago)
 // Copyright 2013 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "cc/resources/video_resource_updater.h"

 #include <stddef.h>
 #include <stdint.h>

 #include <algorithm>
(...skipping 52 matching lines...)
         case GL_TEXTURE_EXTERNAL_OES:
         case GL_TEXTURE_2D:
           return VideoFrameExternalResources::YUV_RESOURCE;
         case GL_TEXTURE_RECTANGLE_ARB:
           return VideoFrameExternalResources::RGB_RESOURCE;
         default:
           NOTREACHED();
           break;
       }
       break;
+    case media::PIXEL_FORMAT_Y16:
+      return VideoFrameExternalResources::Y_RESOURCE;
+      break;
     case media::PIXEL_FORMAT_YV12:
     case media::PIXEL_FORMAT_YV16:
     case media::PIXEL_FORMAT_YV24:
     case media::PIXEL_FORMAT_YV12A:
     case media::PIXEL_FORMAT_NV21:
     case media::PIXEL_FORMAT_YUY2:
     case media::PIXEL_FORMAT_RGB24:
     case media::PIXEL_FORMAT_RGB32:
     case media::PIXEL_FORMAT_MJPEG:
     case media::PIXEL_FORMAT_MT21:
     case media::PIXEL_FORMAT_YUV420P9:
     case media::PIXEL_FORMAT_YUV422P9:
     case media::PIXEL_FORMAT_YUV444P9:
     case media::PIXEL_FORMAT_YUV420P10:
     case media::PIXEL_FORMAT_YUV422P10:
     case media::PIXEL_FORMAT_YUV444P10:
     case media::PIXEL_FORMAT_YUV420P12:
     case media::PIXEL_FORMAT_YUV422P12:
     case media::PIXEL_FORMAT_YUV444P12:
     case media::PIXEL_FORMAT_Y8:
-    case media::PIXEL_FORMAT_Y16:
     case media::PIXEL_FORMAT_UNKNOWN:
       break;
   }
   return VideoFrameExternalResources::NONE;
 }

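For texture-backed frames, the hunk above routes the new Y16 pixel format to its own Y_RESOURCE external-resource type rather than the existing YUV or RGB paths. A toy sketch of that classification, separate from the CL itself; only the enumerator names come from the diff, the surrounding types are simplified stand-ins rather than the real cc/media declarations:

    #include <cstdio>

    // Simplified stand-ins for media::VideoPixelFormat and
    // VideoFrameExternalResources::ResourceType; not the real declarations.
    enum class PixelFormat { Y16, YV12, Y8, UNKNOWN };
    enum class ResourceType { YUV_RESOURCE, RGB_RESOURCE, Y_RESOURCE, NONE };

    // Y16 maps to a dedicated single-channel resource type; formats without a
    // texture-backed upload path fall through to NONE, as in the switch above.
    // (The real YUV/RGB routing also depends on the texture target, omitted here.)
    ResourceType ClassifyTextureBackedFrame(PixelFormat format) {
      switch (format) {
        case PixelFormat::Y16:
          return ResourceType::Y_RESOURCE;
        default:
          return ResourceType::NONE;
      }
    }

    int main() {
      std::printf("%d\n", static_cast<int>(
                              ClassifyTextureBackedFrame(PixelFormat::Y16)));  // 2
      return 0;
    }
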
 class SyncTokenClientImpl : public media::VideoFrame::SyncTokenClient {
  public:
   SyncTokenClientImpl(gpu::gles2::GLES2Interface* gl,
                       const gpu::SyncToken& sync_token)
(...skipping 202 matching lines...)
   // Maximum value used in |src|.
   int max_value = (1 << bits_per_channel) - 1;
   int rows = 1;
   libyuv::HalfFloatPlane(src, stride, dst, stride, 1.0f / max_value, num, rows);
 }
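The scale factor above is the whole trick: libyuv::HalfFloatPlane multiplies each 16-bit sample by 1.0f / max_value while converting to half-float, so the largest code for the given bit depth lands exactly on 1.0. A standalone sketch of just the normalization arithmetic, separate from the patch; the ToNormalized helper is illustrative, and the actual half-float packing stays inside libyuv:

    #include <cstdint>
    #include <cstdio>

    // Illustrative helper: normalize an N-bit sample to [0.0, 1.0] using the
    // same 1.0f / ((1 << bits) - 1) scale that the code passes to
    // libyuv::HalfFloatPlane.
    float ToNormalized(uint16_t sample, int bits_per_channel) {
      const int max_value = (1 << bits_per_channel) - 1;  // 1023 for 10-bit input
      return sample * (1.0f / max_value);
    }

    int main() {
      std::printf("%f\n", ToNormalized(1023, 10));   // 1.000000: 10-bit full scale
      std::printf("%f\n", ToNormalized(256, 10));    // ~0.250244: quarter scale
      std::printf("%f\n", ToNormalized(65535, 16));  // 1.000000: Y16 full scale
      return 0;
    }
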

 VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
     scoped_refptr<media::VideoFrame> video_frame) {
   TRACE_EVENT0("cc", "VideoResourceUpdater::CreateForSoftwarePlanes");
   const media::VideoPixelFormat input_frame_format = video_frame->format();
-
   // TODO(hubbe): Make this a video frame method.
   int bits_per_channel = 0;
   switch (input_frame_format) {
     case media::PIXEL_FORMAT_UNKNOWN:
       NOTREACHED();
       // Fall through!
     case media::PIXEL_FORMAT_I420:
     case media::PIXEL_FORMAT_YV12:
     case media::PIXEL_FORMAT_YV16:
     case media::PIXEL_FORMAT_YV12A:
(...skipping 24 matching lines...)
     case media::PIXEL_FORMAT_YUV420P12:
     case media::PIXEL_FORMAT_YUV422P12:
     case media::PIXEL_FORMAT_YUV444P12:
       bits_per_channel = 12;
       break;
     case media::PIXEL_FORMAT_Y16:
       bits_per_channel = 16;
       break;
   }

-  // TODO(dshwang): support PIXEL_FORMAT_Y16. crbug.com/624436
-  DCHECK_NE(bits_per_channel, 16);
-
-  // Only YUV software video frames are supported.
-  if (!media::IsYuvPlanar(input_frame_format)) {
+  // Only YUV and Y16 software video frames are supported.
+  const bool isYuvPlanar = media::IsYuvPlanar(input_frame_format);
+  if (!(isYuvPlanar || input_frame_format == media::PIXEL_FORMAT_Y16)) {
     NOTREACHED() << media::VideoPixelFormatToString(input_frame_format);
     return VideoFrameExternalResources();
   }

   const bool software_compositor = context_provider_ == NULL;

   ResourceFormat output_resource_format =
-      resource_provider_->YuvResourceFormat(bits_per_channel);
+      (input_frame_format == media::PIXEL_FORMAT_Y16)
+          ? resource_provider_->Y16ResourceFormat()
+          : resource_provider_->YuvResourceFormat(bits_per_channel);

   // If GPU compositing is enabled, but the output resource format
   // returned by the resource provider is RGBA_8888, then a GPU driver
   // bug workaround requires that YUV frames must be converted to RGB
   // before texture upload.
   bool texture_needs_rgb_conversion =
       !software_compositor &&
-      output_resource_format == ResourceFormat::RGBA_8888;
+      output_resource_format == ResourceFormat::RGBA_8888 &&
+      input_frame_format != media::PIXEL_FORMAT_Y16;
   size_t output_plane_count = media::VideoFrame::NumPlanes(input_frame_format);

   // TODO(skaslev): If we're in software compositing mode, we do the YUV -> RGB
   // conversion here. That involves an extra copy of each frame to a bitmap.
   // Obviously, this is suboptimal and should be addressed once ubercompositor
   // starts shaping up.
   if (software_compositor || texture_needs_rgb_conversion) {
     output_resource_format = kRGBResourceFormat;
     output_plane_count = 1;
     bits_per_channel = 8;
(...skipping 88 matching lines...)
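Two decisions change in the hunk above: a Y16 frame now gets its resource format from Y16ResourceFormat() instead of the bit-depth-based YuvResourceFormat(), and Y16 is explicitly excluded from the RGBA_8888 driver-bug workaround that otherwise forces a YUV-to-RGB conversion before upload. A compact sketch of the workaround predicate as it reads after the patch, not taken from the CL, using simplified stand-in enums rather than cc's ResourceFormat:

    #include <cstdio>

    // Simplified stand-ins; the real code compares cc::ResourceFormat values
    // returned by the resource provider.
    enum class Format { RGBA_8888, LUMINANCE_8, LUMINANCE_F16 };
    enum class PixelFormat { I420, YUV420P10, Y16 };

    // After this patch, Y16 frames keep their dedicated upload path even when
    // the provider hands back RGBA_8888, so the workaround only fires for
    // genuine YUV input on affected GPU configurations.
    bool TextureNeedsRgbConversion(bool software_compositor,
                                   Format output_resource_format,
                                   PixelFormat input_frame_format) {
      return !software_compositor &&
             output_resource_format == Format::RGBA_8888 &&
             input_frame_format != PixelFormat::Y16;
    }

    int main() {
      std::printf("%d\n", TextureNeedsRgbConversion(false, Format::RGBA_8888,
                                                    PixelFormat::I420));  // 1
      std::printf("%d\n", TextureNeedsRgbConversion(false, Format::RGBA_8888,
                                                    PixelFormat::Y16));   // 0
      return 0;
    }
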
       external_resources.release_callbacks.push_back(base::Bind(
           &RecycleResource, AsWeakPtr(), plane_resource.resource_id()));
       external_resources.type = VideoFrameExternalResources::RGBA_RESOURCE;
     }
     return external_resources;
   }

   for (size_t i = 0; i < plane_resources.size(); ++i) {
     PlaneResource& plane_resource = *plane_resources[i];
     // Update each plane's resource id with its content.
-    DCHECK_EQ(plane_resource.resource_format(),
-              resource_provider_->YuvResourceFormat(bits_per_channel));
+    DCHECK_EQ(plane_resource.resource_format(), output_resource_format);

     if (!plane_resource.Matches(video_frame->unique_id(), i)) {
       // TODO(hubbe): Move all conversion (and upload?) code to media/.
       // We need to transfer data from |video_frame| to the plane resource.
       // TODO(reveman): Can use GpuMemoryBuffers here to improve performance.

       // The |resource_size_pixels| is the size of the resource we want to
       // upload to.
       gfx::Size resource_size_pixels = plane_resource.resource_size();
       // The |video_stride_bytes| is the width of the video frame we are
       // uploading (including non-frame data to fill in the stride).
       int video_stride_bytes = video_frame->stride(i);

       size_t bytes_per_row = ResourceUtil::CheckedWidthInBytes<size_t>(
           resource_size_pixels.width(), plane_resource.resource_format());
       // Use 4-byte row alignment (OpenGL default) for upload performance.
       // Assuming that GL_UNPACK_ALIGNMENT has not changed from default.
       size_t upload_image_stride =
           MathUtil::CheckedRoundUp<size_t>(bytes_per_row, 4u);

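The stride computation above relies on GL_UNPACK_ALIGNMENT staying at its default of 4, so every staging row is padded up to a multiple of 4 bytes. A minimal sketch of that round-up, separate from the patch; MathUtil::CheckedRoundUp additionally guards against overflow, which is omitted here:

    #include <cstddef>
    #include <cstdio>

    // Round a row size up to the next multiple of 4 bytes, matching the default
    // GL_UNPACK_ALIGNMENT assumed by the upload code.
    size_t RoundUpToAlignment(size_t bytes_per_row) {
      return (bytes_per_row + 3u) & ~static_cast<size_t>(3u);
    }

    int main() {
      // A 639-pixel-wide 8-bit luminance plane needs 639 bytes of pixel data
      // per row but is uploaded with a 640-byte stride.
      std::printf("%zu\n", RoundUpToAlignment(639));  // 640
      // Rows that are already aligned are left unchanged.
      std::printf("%zu\n", RoundUpToAlignment(640));  // 640
      return 0;
    }
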
       bool needs_conversion = false;
       int shift = 0;

       // LUMINANCE_F16 uses half-floats, so we always need a conversion step.
       if (plane_resource.resource_format() == LUMINANCE_F16) {
         needs_conversion = true;
+      } else if (input_frame_format == media::PIXEL_FORMAT_Y16) {
+        if (plane_resource.resource_format() == RGBA_8888)
+          needs_conversion = true;
       } else if (bits_per_channel > 8) {
         // If bits_per_channel > 8 and we can't use LUMINANCE_F16, we need to
         // shift the data down and create an 8-bit texture.
         needs_conversion = true;
         shift = bits_per_channel - 8;
       }
       const uint8_t* pixels;
       if (static_cast<int>(upload_image_stride) == video_stride_bytes &&
           !needs_conversion) {
         pixels = video_frame->data(i);
(...skipping 22 matching lines...)
               MakeHalfFloats(src, bits_per_channel, bytes_per_row / 2, dst);
             }
           } else if (shift != 0) {
             // We have more-than-8-bit input which we need to shift
             // down to fit it into an 8-bit texture.
             uint8_t* dst = &upload_pixels_[upload_image_stride * row];
             const uint16_t* src = reinterpret_cast<uint16_t*>(
                 video_frame->data(i) + (video_stride_bytes * row));
             for (size_t i = 0; i < bytes_per_row; i++)
               dst[i] = src[i] >> shift;
+          } else if (input_frame_format == media::PIXEL_FORMAT_Y16 &&
+                     plane_resource.resource_format() == RGBA_8888) {
+            uint32_t* dst = reinterpret_cast<uint32_t*>(
+                &upload_pixels_[upload_image_stride * row]);
+            const uint16_t* src = reinterpret_cast<uint16_t*>(
+                video_frame->data(i) + (video_stride_bytes * row));
+            for (size_t i = 0; i < bytes_per_row / 4; ++i)
+              *dst++ = *src++;
           } else {
             // Input and output are the same size and format, but
             // differ in stride, copy one row at a time.
             uint8_t* dst = &upload_pixels_[upload_image_stride * row];
             const uint8_t* src =
                 video_frame->data(i) + (video_stride_bytes * row);
             memcpy(dst, src, bytes_per_row);
           }
         }
         pixels = &upload_pixels_[0];
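Two of the row-conversion branches above can be checked by hand. The shift branch drops a more-than-8-bit sample to 8 bits (for 10-bit input, shift = 2), and the new Y16-to-RGBA_8888 branch widens each 16-bit sample into one 32-bit texel, leaving the upper two bytes zero so the value lands in the R and G channels of a little-endian RGBA_8888 layout. A small sketch of both conversions, written as commentary and not part of the patch:

    #include <cstdint>
    #include <cstdio>

    // Shift branch: reduce an N-bit sample to 8 bits (shift = N - 8).
    uint8_t ShiftDownTo8Bit(uint16_t sample, int shift) {
      return static_cast<uint8_t>(sample >> shift);
    }

    // Y16 branch: zero-extend one 16-bit sample into a 32-bit RGBA_8888 texel,
    // mirroring the *dst++ = *src++ copy in the patch (one sample per texel).
    uint32_t PackY16Texel(uint16_t sample) {
      return static_cast<uint32_t>(sample);
    }

    int main() {
      std::printf("%u\n", ShiftDownTo8Bit(1023, 2));  // 255 (10-bit full scale)
      std::printf("0x%08x\n", PackY16Texel(0xABCD));  // 0x0000abcd
      return 0;
    }
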
(...skipping 38 matching lines...)
     // sync token is not required.
     TextureMailbox mailbox(plane_resource.mailbox(), gpu::SyncToken(),
                            resource_provider_->GetResourceTextureTarget(
                                plane_resource.resource_id()));
     mailbox.set_color_space(video_frame->ColorSpace());
     external_resources.mailboxes.push_back(mailbox);
     external_resources.release_callbacks.push_back(base::Bind(
         &RecycleResource, AsWeakPtr(), plane_resource.resource_id()));
   }

-  external_resources.type = VideoFrameExternalResources::YUV_RESOURCE;
+  external_resources.type = (input_frame_format == media::PIXEL_FORMAT_Y16)
+                                ? VideoFrameExternalResources::Y_RESOURCE
+                                : VideoFrameExternalResources::YUV_RESOURCE;
   return external_resources;
 }

 // static
 void VideoResourceUpdater::ReturnTexture(
     base::WeakPtr<VideoResourceUpdater> updater,
     const scoped_refptr<media::VideoFrame>& video_frame,
     const gpu::SyncToken& sync_token,
     bool lost_resource,
     BlockingTaskRunner* main_thread_task_runner) {
(...skipping 96 matching lines...)
                             video_frame->coded_size(),
                             video_frame->metadata()->IsTrue(
                                 media::VideoFrameMetadata::ALLOW_OVERLAY),
                             false);
       mailbox.set_color_space(video_frame->ColorSpace());
       external_resources.mailboxes.push_back(mailbox);
       external_resources.release_callbacks.push_back(
           base::Bind(&ReturnTexture, AsWeakPtr(), video_frame));
     }
   }
+
+  external_resources.bits_per_channel =
+      (video_frame->format() == media::PIXEL_FORMAT_Y16) ? 16 : 8;
   return external_resources;
 }

 // static
 void VideoResourceUpdater::RecycleResource(
     base::WeakPtr<VideoResourceUpdater> updater,
     ResourceId resource_id,
     const gpu::SyncToken& sync_token,
     bool lost_resource,
     BlockingTaskRunner* main_thread_task_runner) {
(...skipping 18 matching lines...)
   if (lost_resource) {
     resource_it->clear_refs();
     updater->DeleteResource(resource_it);
     return;
   }

   resource_it->remove_ref();
 }

 }  // namespace cc