Chromium Code Reviews

Side by Side Diff: cc/resources/video_resource_updater.cc

Issue 2121043002: 16 bpp video stream capture, render and WebGL usage - Realsense R200 & SR300 support.
Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: scoping Y8 out. Created 4 years, 2 months ago
1 // Copyright 2013 The Chromium Authors. All rights reserved. 1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "cc/resources/video_resource_updater.h" 5 #include "cc/resources/video_resource_updater.h"
6 6
7 #include <stddef.h> 7 #include <stddef.h>
8 #include <stdint.h> 8 #include <stdint.h>
9 9
10 #include <algorithm> 10 #include <algorithm>
(...skipping 51 matching lines...)
62 switch (video_frame->mailbox_holder(0).texture_target) { 62 switch (video_frame->mailbox_holder(0).texture_target) {
63 case GL_TEXTURE_EXTERNAL_OES: 63 case GL_TEXTURE_EXTERNAL_OES:
64 return VideoFrameExternalResources::YUV_RESOURCE; 64 return VideoFrameExternalResources::YUV_RESOURCE;
65 case GL_TEXTURE_RECTANGLE_ARB: 65 case GL_TEXTURE_RECTANGLE_ARB:
66 return VideoFrameExternalResources::RGB_RESOURCE; 66 return VideoFrameExternalResources::RGB_RESOURCE;
67 default: 67 default:
68 NOTREACHED(); 68 NOTREACHED();
69 break; 69 break;
70 } 70 }
71 break; 71 break;
72 case media::PIXEL_FORMAT_Y16:
73 return VideoFrameExternalResources::Y_RESOURCE;
74 break;
72 case media::PIXEL_FORMAT_YV12: 75 case media::PIXEL_FORMAT_YV12:
73 case media::PIXEL_FORMAT_YV16: 76 case media::PIXEL_FORMAT_YV16:
74 case media::PIXEL_FORMAT_YV24: 77 case media::PIXEL_FORMAT_YV24:
75 case media::PIXEL_FORMAT_YV12A: 78 case media::PIXEL_FORMAT_YV12A:
76 case media::PIXEL_FORMAT_NV21: 79 case media::PIXEL_FORMAT_NV21:
77 case media::PIXEL_FORMAT_YUY2: 80 case media::PIXEL_FORMAT_YUY2:
78 case media::PIXEL_FORMAT_RGB24: 81 case media::PIXEL_FORMAT_RGB24:
79 case media::PIXEL_FORMAT_RGB32: 82 case media::PIXEL_FORMAT_RGB32:
80 case media::PIXEL_FORMAT_MJPEG: 83 case media::PIXEL_FORMAT_MJPEG:
81 case media::PIXEL_FORMAT_MT21: 84 case media::PIXEL_FORMAT_MT21:
82 case media::PIXEL_FORMAT_YUV420P9: 85 case media::PIXEL_FORMAT_YUV420P9:
83 case media::PIXEL_FORMAT_YUV422P9: 86 case media::PIXEL_FORMAT_YUV422P9:
84 case media::PIXEL_FORMAT_YUV444P9: 87 case media::PIXEL_FORMAT_YUV444P9:
85 case media::PIXEL_FORMAT_YUV420P10: 88 case media::PIXEL_FORMAT_YUV420P10:
86 case media::PIXEL_FORMAT_YUV422P10: 89 case media::PIXEL_FORMAT_YUV422P10:
87 case media::PIXEL_FORMAT_YUV444P10: 90 case media::PIXEL_FORMAT_YUV444P10:
88 case media::PIXEL_FORMAT_YUV420P12: 91 case media::PIXEL_FORMAT_YUV420P12:
89 case media::PIXEL_FORMAT_YUV422P12: 92 case media::PIXEL_FORMAT_YUV422P12:
90 case media::PIXEL_FORMAT_YUV444P12: 93 case media::PIXEL_FORMAT_YUV444P12:
91 case media::PIXEL_FORMAT_Y8: 94 case media::PIXEL_FORMAT_Y8:
92 case media::PIXEL_FORMAT_Y16:
93 case media::PIXEL_FORMAT_UNKNOWN: 95 case media::PIXEL_FORMAT_UNKNOWN:
94 break; 96 break;
95 } 97 }
96 return VideoFrameExternalResources::NONE; 98 return VideoFrameExternalResources::NONE;
97 } 99 }
98 100
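For reference, PIXEL_FORMAT_Y16 frames carry a single plane of little-endian 16-bit luminance samples, which is why the hunk above can map them to a dedicated single-texture resource type. A minimal sketch of that layout, using a hypothetical Y16Plane helper that is not part of this change:

#include <cstddef>
#include <cstdint>
#include <vector>

// Illustrative only: one plane, two bytes per pixel, rows tightly packed.
struct Y16Plane {
  int width = 0;
  int height = 0;
  std::vector<uint16_t> samples;  // width * height little-endian values.
};

// Stride in bytes of one tightly packed Y16 row.
size_t Y16StrideBytes(const Y16Plane& plane) {
  return static_cast<size_t>(plane.width) * sizeof(uint16_t);
}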
99 class SyncTokenClientImpl : public media::VideoFrame::SyncTokenClient { 101 class SyncTokenClientImpl : public media::VideoFrame::SyncTokenClient {
100 public: 102 public:
101 SyncTokenClientImpl(gpu::gles2::GLES2Interface* gl, 103 SyncTokenClientImpl(gpu::gles2::GLES2Interface* gl,
102 const gpu::SyncToken& sync_token) 104 const gpu::SyncToken& sync_token)
(...skipping 202 matching lines...)
305 // Maximum value used in |src|. 307 // Maximum value used in |src|.
306 int max_value = (1 << bits_per_channel) - 1; 308 int max_value = (1 << bits_per_channel) - 1;
307 int rows = 1; 309 int rows = 1;
308 libyuv::HalfFloatPlane(src, stride, dst, stride, 1.0f / max_value, num, rows); 310 libyuv::HalfFloatPlane(src, stride, dst, stride, 1.0f / max_value, num, rows);
309 } 311 }
310 312
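libyuv::HalfFloatPlane converts each 16-bit sample to an IEEE half float scaled by the given factor; the helper above flattens the whole plane into a single row before calling it. A rough usage sketch along the same lines, assuming a tightly packed 10-bit plane (the function name and parameters here are illustrative, not from this change):

#include <cstdint>
#include "third_party/libyuv/include/libyuv.h"

// Convert a tightly packed 10-bit plane to half floats in [0.0, 1.0].
void TenBitPlaneToHalfFloat(const uint16_t* src, int width, int height,
                            uint16_t* dst /* receives half floats */) {
  const int max_value = (1 << 10) - 1;  // 1023 for 10-bit samples.
  const int stride_bytes = width * 2;   // Two bytes per sample, no padding.
  // Treat the plane as one long row, as MakeHalfFloats() does above.
  libyuv::HalfFloatPlane(src, stride_bytes, dst, stride_bytes,
                         1.0f / max_value, width * height, /*height=*/1);
}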
311 VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes( 313 VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
312 scoped_refptr<media::VideoFrame> video_frame) { 314 scoped_refptr<media::VideoFrame> video_frame) {
313 TRACE_EVENT0("cc", "VideoResourceUpdater::CreateForSoftwarePlanes"); 315 TRACE_EVENT0("cc", "VideoResourceUpdater::CreateForSoftwarePlanes");
314 const media::VideoPixelFormat input_frame_format = video_frame->format(); 316 const media::VideoPixelFormat input_frame_format = video_frame->format();
315
316 // TODO(hubbe): Make this a video frame method. 317 // TODO(hubbe): Make this a video frame method.
317 int bits_per_channel = 0; 318 int bits_per_channel = 0;
318 switch (input_frame_format) { 319 switch (input_frame_format) {
319 case media::PIXEL_FORMAT_UNKNOWN: 320 case media::PIXEL_FORMAT_UNKNOWN:
320 NOTREACHED(); 321 NOTREACHED();
321 // Fall through! 322 // Fall through!
322 case media::PIXEL_FORMAT_I420: 323 case media::PIXEL_FORMAT_I420:
323 case media::PIXEL_FORMAT_YV12: 324 case media::PIXEL_FORMAT_YV12:
324 case media::PIXEL_FORMAT_YV16: 325 case media::PIXEL_FORMAT_YV16:
325 case media::PIXEL_FORMAT_YV12A: 326 case media::PIXEL_FORMAT_YV12A:
(...skipping 24 matching lines...)
350 case media::PIXEL_FORMAT_YUV420P12: 351 case media::PIXEL_FORMAT_YUV420P12:
351 case media::PIXEL_FORMAT_YUV422P12: 352 case media::PIXEL_FORMAT_YUV422P12:
352 case media::PIXEL_FORMAT_YUV444P12: 353 case media::PIXEL_FORMAT_YUV444P12:
353 bits_per_channel = 12; 354 bits_per_channel = 12;
354 break; 355 break;
355 case media::PIXEL_FORMAT_Y16: 356 case media::PIXEL_FORMAT_Y16:
356 bits_per_channel = 16; 357 bits_per_channel = 16;
357 break; 358 break;
358 } 359 }
359 360
360 // TODO(dshwang): support PIXEL_FORMAT_Y16. crbug.com/624436 361 // Only YUV and Y16 software video frames are supported.
361 DCHECK_NE(bits_per_channel, 16); 362 const bool isYuvPlanar = media::IsYuvPlanar(input_frame_format);
362 363 if (!(isYuvPlanar || input_frame_format == media::PIXEL_FORMAT_Y16)) {
363 // Only YUV software video frames are supported.
364 if (!media::IsYuvPlanar(input_frame_format)) {
365 NOTREACHED() << media::VideoPixelFormatToString(input_frame_format); 364 NOTREACHED() << media::VideoPixelFormatToString(input_frame_format);
366 return VideoFrameExternalResources(); 365 return VideoFrameExternalResources();
367 } 366 }
368 367
369 const bool software_compositor = context_provider_ == NULL; 368 const bool software_compositor = context_provider_ == NULL;
370 369
371 ResourceFormat output_resource_format = 370 ResourceFormat output_resource_format =
372 resource_provider_->YuvResourceFormat(bits_per_channel); 371 (input_frame_format == media::PIXEL_FORMAT_Y16)
372 ? resource_provider_->Y16ResourceFormat()
373 : resource_provider_->YuvResourceFormat(bits_per_channel);
373 374
374 // If GPU compositing is enabled, but the output resource format 375 // If GPU compositing is enabled, but the output resource format
375 // returned by the resource provider is RGBA_8888, then a GPU driver 376 // returned by the resource provider is RGBA_8888, then a GPU driver
376 // bug workaround requires that YUV frames must be converted to RGB 377 // bug workaround requires that YUV frames must be converted to RGB
377 // before texture upload. 378 // before texture upload.
378 bool texture_needs_rgb_conversion = 379 bool texture_needs_rgb_conversion =
379 !software_compositor && 380 !software_compositor &&
380 output_resource_format == ResourceFormat::RGBA_8888; 381 output_resource_format == ResourceFormat::RGBA_8888 &&
382 input_frame_format != media::PIXEL_FORMAT_Y16;
381 size_t output_plane_count = media::VideoFrame::NumPlanes(input_frame_format); 383 size_t output_plane_count = media::VideoFrame::NumPlanes(input_frame_format);
382 384
383 // TODO(skaslev): If we're in software compositing mode, we do the YUV -> RGB 385 // TODO(skaslev): If we're in software compositing mode, we do the YUV -> RGB
384 // conversion here. That involves an extra copy of each frame to a bitmap. 386 // conversion here. That involves an extra copy of each frame to a bitmap.
385 // Obviously, this is suboptimal and should be addressed once ubercompositor 387 // Obviously, this is suboptimal and should be addressed once ubercompositor
386 // starts shaping up. 388 // starts shaping up.
387 if (software_compositor || texture_needs_rgb_conversion) { 389 if (software_compositor || texture_needs_rgb_conversion) {
388 output_resource_format = kRGBResourceFormat; 390 output_resource_format = kRGBResourceFormat;
389 output_plane_count = 1; 391 output_plane_count = 1;
390 bits_per_channel = 8; 392 bits_per_channel = 8;
(...skipping 88 matching lines...)
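Pulling the pieces of this hunk together: the single-RGBA-plane fallback is taken either for the software compositor or for the driver-bug workaround, and this change exempts PIXEL_FORMAT_Y16 from the latter. A condensed restatement with local stand-ins for the real cc and media enums (illustrative only):

// Stand-in enums; the real code uses cc::ResourceFormat and
// media::VideoPixelFormat.
enum class Format { RGBA_8888, LUMINANCE_8, LUMINANCE_F16 };
enum class PixelFormat { I420, YUV420P10, Y16 };

bool NeedsSingleRgbaPlane(bool software_compositor, Format output_format,
                          PixelFormat input_format) {
  const bool texture_needs_rgb_conversion =
      !software_compositor && output_format == Format::RGBA_8888 &&
      input_format != PixelFormat::Y16;
  return software_compositor || texture_needs_rgb_conversion;
}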
479 external_resources.release_callbacks.push_back(base::Bind( 481 external_resources.release_callbacks.push_back(base::Bind(
480 &RecycleResource, AsWeakPtr(), plane_resource.resource_id())); 482 &RecycleResource, AsWeakPtr(), plane_resource.resource_id()));
481 external_resources.type = VideoFrameExternalResources::RGBA_RESOURCE; 483 external_resources.type = VideoFrameExternalResources::RGBA_RESOURCE;
482 } 484 }
483 return external_resources; 485 return external_resources;
484 } 486 }
485 487
486 for (size_t i = 0; i < plane_resources.size(); ++i) { 488 for (size_t i = 0; i < plane_resources.size(); ++i) {
487 PlaneResource& plane_resource = *plane_resources[i]; 489 PlaneResource& plane_resource = *plane_resources[i];
488 // Update each plane's resource id with its content. 490 // Update each plane's resource id with its content.
489 DCHECK_EQ(plane_resource.resource_format(), 491 DCHECK_EQ(plane_resource.resource_format(), output_resource_format);
490 resource_provider_->YuvResourceFormat(bits_per_channel));
491 492
492 if (!plane_resource.Matches(video_frame->unique_id(), i)) { 493 if (!plane_resource.Matches(video_frame->unique_id(), i)) {
493 // TODO(hubbe): Move all conversion (and upload?) code to media/. 494 // TODO(hubbe): Move all conversion (and upload?) code to media/.
494 // We need to transfer data from |video_frame| to the plane resource. 495 // We need to transfer data from |video_frame| to the plane resource.
495 // TODO(reveman): Can use GpuMemoryBuffers here to improve performance. 496 // TODO(reveman): Can use GpuMemoryBuffers here to improve performance.
496 497
497 // The |resource_size_pixels| is the size of the resource we want to 498 // The |resource_size_pixels| is the size of the resource we want to
498 // upload to. 499 // upload to.
499 gfx::Size resource_size_pixels = plane_resource.resource_size(); 500 gfx::Size resource_size_pixels = plane_resource.resource_size();
500 // The |video_stride_bytes| is the width of the video frame we are 501 // The |video_stride_bytes| is the width of the video frame we are
(...skipping 34 matching lines...)
535 // https://en.wikipedia.org/wiki/Half-precision_floating-point_format 536 // https://en.wikipedia.org/wiki/Half-precision_floating-point_format
536 // 537 //
537 // PLEASE NOTE: 538 // PLEASE NOTE:
538 // All planes are assumed to use the same multiplier/offset. 539 // All planes are assumed to use the same multiplier/offset.
539 external_resources.offset = 0.5f; 540 external_resources.offset = 0.5f;
540 // Max value from input data. 541 // Max value from input data.
541 int max_input_value = (1 << bits_per_channel) - 1; 542 int max_input_value = (1 << bits_per_channel) - 1;
542 // 1 << 11 = 2048 would be 1.0 with our exponent. 543 // 1 << 11 = 2048 would be 1.0 with our exponent.
543 external_resources.multiplier = 2048.0 / max_input_value; 544 external_resources.multiplier = 2048.0 / max_input_value;
544 } 545 }
546 } else if (input_frame_format == media::PIXEL_FORMAT_Y16) {
547 if (plane_resource.resource_format() == RGBA_8888)
548 needs_conversion = true;
545 } else if (bits_per_channel > 8) { 549 } else if (bits_per_channel > 8) {
546 // If bits_per_channel > 8 and we can't use LUMINANCE_F16, we need to 550 // If bits_per_channel > 8 and we can't use LUMINANCE_F16, we need to
547 // shift the data down and create an 8-bit texture. 551 // shift the data down and create an 8-bit texture.
548 needs_conversion = true; 552 needs_conversion = true;
549 shift = bits_per_channel - 8; 553 shift = bits_per_channel - 8;
550 } 554 }
551 const uint8_t* pixels; 555 const uint8_t* pixels;
552 if (static_cast<int>(upload_image_stride) == video_stride_bytes && 556 if (static_cast<int>(upload_image_stride) == video_stride_bytes &&
553 !needs_conversion) { 557 !needs_conversion) {
554 pixels = video_frame->data(i); 558 pixels = video_frame->data(i);
(...skipping 22 matching lines...)
577 MakeHalfFloats(src, bits_per_channel, bytes_per_row / 2, dst); 581 MakeHalfFloats(src, bits_per_channel, bytes_per_row / 2, dst);
578 } 582 }
579 } else if (shift != 0) { 583 } else if (shift != 0) {
580 // We have more-than-8-bit input which we need to shift 584 // We have more-than-8-bit input which we need to shift
581 // down to fit it into an 8-bit texture. 585 // down to fit it into an 8-bit texture.
582 uint8_t* dst = &upload_pixels_[upload_image_stride * row]; 586 uint8_t* dst = &upload_pixels_[upload_image_stride * row];
583 const uint16_t* src = reinterpret_cast<uint16_t*>( 587 const uint16_t* src = reinterpret_cast<uint16_t*>(
584 video_frame->data(i) + (video_stride_bytes * row)); 588 video_frame->data(i) + (video_stride_bytes * row));
585 for (size_t i = 0; i < bytes_per_row; i++) 589 for (size_t i = 0; i < bytes_per_row; i++)
586 dst[i] = src[i] >> shift; 590 dst[i] = src[i] >> shift;
591 } else if (input_frame_format == media::PIXEL_FORMAT_Y16 &&
592 plane_resource.resource_format() == RGBA_8888) {
593 uint32_t* dst = reinterpret_cast<uint32_t*>(
594 &upload_pixels_[upload_image_stride * row]);
595 const uint16_t* src = reinterpret_cast<uint16_t*>(
596 video_frame->data(i) + (video_stride_bytes * row));
597 for (size_t i = 0; i < bytes_per_row / 4; ++i)
598 *dst++ = *src++;
587 } else { 599 } else {
588 // Input and output are the same size and format, but 600 // Input and output are the same size and format, but
589 // differ in stride, copy one row at a time. 601 // differ in stride, copy one row at a time.
590 uint8_t* dst = &upload_pixels_[upload_image_stride * row]; 602 uint8_t* dst = &upload_pixels_[upload_image_stride * row];
591 const uint8_t* src = 603 const uint8_t* src =
592 video_frame->data(i) + (video_stride_bytes * row); 604 video_frame->data(i) + (video_stride_bytes * row);
593 memcpy(dst, src, bytes_per_row); 605 memcpy(dst, src, bytes_per_row);
594 } 606 }
595 } 607 }
596 pixels = &upload_pixels_[0]; 608 pixels = &upload_pixels_[0];
597 } 609 }
598 610
599 resource_provider_->CopyToResource(plane_resource.resource_id(), pixels, 611 resource_provider_->CopyToResource(plane_resource.resource_id(), pixels,
600 resource_size_pixels); 612 resource_size_pixels);
601 plane_resource.SetUniqueId(video_frame->unique_id(), i); 613 plane_resource.SetUniqueId(video_frame->unique_id(), i);
602 } 614 }
603 615
604 616
605 // VideoResourceUpdater shares a context with the compositor so a 617 // VideoResourceUpdater shares a context with the compositor so a
606 // sync token is not required. 618 // sync token is not required.
607 TextureMailbox mailbox(plane_resource.mailbox(), gpu::SyncToken(), 619 TextureMailbox mailbox(plane_resource.mailbox(), gpu::SyncToken(),
608 resource_provider_->GetResourceTextureTarget( 620 resource_provider_->GetResourceTextureTarget(
609 plane_resource.resource_id())); 621 plane_resource.resource_id()));
610 mailbox.set_color_space(video_frame->ColorSpace()); 622 mailbox.set_color_space(video_frame->ColorSpace());
611 external_resources.mailboxes.push_back(mailbox); 623 external_resources.mailboxes.push_back(mailbox);
612 external_resources.release_callbacks.push_back(base::Bind( 624 external_resources.release_callbacks.push_back(base::Bind(
613 &RecycleResource, AsWeakPtr(), plane_resource.resource_id())); 625 &RecycleResource, AsWeakPtr(), plane_resource.resource_id()));
614 } 626 }
615 627
616 external_resources.type = VideoFrameExternalResources::YUV_RESOURCE; 628 external_resources.type = (input_frame_format == media::PIXEL_FORMAT_Y16)
629 ? VideoFrameExternalResources::Y_RESOURCE
630 : VideoFrameExternalResources::YUV_RESOURCE;
617 return external_resources; 631 return external_resources;
618 } 632 }
619 633
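To make the LUMINANCE_F16 constants set earlier in this function concrete: with 10-bit input, max_input_value is 1023, so the multiplier becomes 2048.0 / 1023 (about 2.002) while the offset stays 0.5. A standalone arithmetic sketch under that assumption:

#include <cstdio>

int main() {
  const int bits_per_channel = 10;  // e.g. PIXEL_FORMAT_YUV420P10.
  const int max_input_value = (1 << bits_per_channel) - 1;   // 1023
  const float offset = 0.5f;                                 // As in the code above.
  const float multiplier = 2048.0f / max_input_value;        // ~2.002
  std::printf("max=%d offset=%.1f multiplier=%.3f\n", max_input_value, offset,
              multiplier);
  return 0;
}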
620 // static 634 // static
621 void VideoResourceUpdater::ReturnTexture( 635 void VideoResourceUpdater::ReturnTexture(
622 base::WeakPtr<VideoResourceUpdater> updater, 636 base::WeakPtr<VideoResourceUpdater> updater,
623 const scoped_refptr<media::VideoFrame>& video_frame, 637 const scoped_refptr<media::VideoFrame>& video_frame,
624 const gpu::SyncToken& sync_token, 638 const gpu::SyncToken& sync_token,
625 bool lost_resource, 639 bool lost_resource,
626 BlockingTaskRunner* main_thread_task_runner) { 640 BlockingTaskRunner* main_thread_task_runner) {
(...skipping 96 matching lines...)
723 video_frame->coded_size(), 737 video_frame->coded_size(),
724 video_frame->metadata()->IsTrue( 738 video_frame->metadata()->IsTrue(
725 media::VideoFrameMetadata::ALLOW_OVERLAY), 739 media::VideoFrameMetadata::ALLOW_OVERLAY),
726 false); 740 false);
727 mailbox.set_color_space(video_frame->ColorSpace()); 741 mailbox.set_color_space(video_frame->ColorSpace());
728 external_resources.mailboxes.push_back(mailbox); 742 external_resources.mailboxes.push_back(mailbox);
729 external_resources.release_callbacks.push_back( 743 external_resources.release_callbacks.push_back(
730 base::Bind(&ReturnTexture, AsWeakPtr(), video_frame)); 744 base::Bind(&ReturnTexture, AsWeakPtr(), video_frame));
731 } 745 }
732 } 746 }
747
748 external_resources.bits_per_channel =
749 (video_frame->format() == media::PIXEL_FORMAT_Y16) ? 16 : 8;
733 return external_resources; 750 return external_resources;
734 } 751 }
735 752
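CreateForHardwarePlanes now reports bits_per_channel = 16 for Y16 frames, and on the software path above each 16-bit sample is copied into the first two bytes of an RGBA_8888 texel. A consumer could therefore rebuild the normalized value from two 8-bit channels; the sketch below assumes little-endian packing as in that copy loop and is not code from this change:

#include <cstdint>

// Rebuild a normalized Y16 sample from the two 8-bit channels holding the
// low and high bytes of the original little-endian value.
float UnpackY16(uint8_t low_byte, uint8_t high_byte) {
  const uint16_t value = static_cast<uint16_t>(low_byte) |
                         (static_cast<uint16_t>(high_byte) << 8);
  return value / 65535.0f;  // (1 << 16) - 1, the full 16-bit range.
}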
736 // static 753 // static
737 void VideoResourceUpdater::RecycleResource( 754 void VideoResourceUpdater::RecycleResource(
738 base::WeakPtr<VideoResourceUpdater> updater, 755 base::WeakPtr<VideoResourceUpdater> updater,
739 ResourceId resource_id, 756 ResourceId resource_id,
740 const gpu::SyncToken& sync_token, 757 const gpu::SyncToken& sync_token,
741 bool lost_resource, 758 bool lost_resource,
742 BlockingTaskRunner* main_thread_task_runner) { 759 BlockingTaskRunner* main_thread_task_runner) {
(...skipping 18 matching lines...)
761 if (lost_resource) { 778 if (lost_resource) {
762 resource_it->clear_refs(); 779 resource_it->clear_refs();
763 updater->DeleteResource(resource_it); 780 updater->DeleteResource(resource_it);
764 return; 781 return;
765 } 782 }
766 783
767 resource_it->remove_ref(); 784 resource_it->remove_ref();
768 } 785 }
769 786
770 } // namespace cc 787 } // namespace cc