Index: cc/resources/video_resource_updater.cc
diff --git a/cc/resources/video_resource_updater.cc b/cc/resources/video_resource_updater.cc
index cdeb363e1ecdd4d6a3144fe6dfc105986996f61a..90c7acdbc63de995bb61fe2061a94ed55fdfcdaa 100644
--- a/cc/resources/video_resource_updater.cc
+++ b/cc/resources/video_resource_updater.cc
@@ -67,6 +67,10 @@ VideoFrameExternalResources::ResourceType ResourceTypeForVideoFrame(
break;
}
break;
+ case media::PIXEL_FORMAT_Y8:
+ case media::PIXEL_FORMAT_Y16:
+ return VideoFrameExternalResources::Y_RESOURCE;
+ break;
case media::PIXEL_FORMAT_YV12:
case media::PIXEL_FORMAT_YV16:
case media::PIXEL_FORMAT_YV24:
@@ -89,6 +93,26 @@ VideoFrameExternalResources::ResourceType ResourceTypeForVideoFrame(
return VideoFrameExternalResources::NONE;
}
+static const uint8_t exp_lookup_table_lower[256] = {
+#define LT(n) n, n, n, n, n, n, n, n, n, n, n, n, n, n, n, n
+ 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2,
+ 2, 2, 2, 2, 2, LT(3), LT(4), LT(4), LT(5), LT(5), LT(5),
+ LT(5), LT(6), LT(6), LT(6), LT(6), LT(6), LT(6), LT(6), LT(6)};
+
+static const uint8_t exp_lookup_table_upper[256] = {
+#define LT(n) n, n, n, n, n, n, n, n, n, n, n, n, n, n, n, n
+ 7, 7, 8, 8, 9, 9, 9, 9,
+ 10, 10, 10, 10, 10, 10, 10, 10,
+ LT(11), LT(12), LT(12), LT(13), LT(13), LT(13), LT(13), LT(14),
+ LT(14), LT(14), LT(14), LT(14), LT(14), LT(14), LT(14)};
+
+uint16_t ushort_to_half_float(uint16_t value) {
+ unsigned short upper = value >> 8;
+ unsigned short exponent =
+ upper ? exp_lookup_table_upper[upper] : exp_lookup_table_lower[value];
+ return (exponent << 10) | (((value << (15 - exponent)) >> 6) & 0x3FF);
+}
+
class SyncTokenClientImpl : public media::VideoFrame::SyncTokenClient {
public:
SyncTokenClientImpl(gpu::gles2::GLES2Interface* gl,
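
Not part of the patch: a reference sketch of what ushort_to_half_float() above computes. It maps a 16-bit unsigned sample onto the half-float interval [0, 1) by encoding value / 65536 (exactly representable in single precision) with a truncated mantissa. For inputs of 4 and above this should produce the same bit pattern as the table-based routine; the very smallest inputs fall into the half-float denormal range, where the table version is coarser. Assumes IEEE-754 floats; the function name is illustrative only.

#include <cstdint>
#include <cstring>

uint16_t ReferenceUshortToHalfFloat(uint16_t value) {
  const float scaled = value / 65536.0f;  // Exact in a float; lies in [0, 1).
  uint32_t bits;
  std::memcpy(&bits, &scaled, sizeof(bits));
  if (bits == 0)
    return 0;
  const int exponent = static_cast<int>((bits >> 23) & 0xFF) - 127;
  const uint32_t significand = (bits & 0x7FFFFF) | 0x800000;
  if (exponent < -14) {
    // Too small for a normalized half-float; emit a denormal.
    return static_cast<uint16_t>((significand >> (-exponent - 1)) & 0x3FF);
  }
  // Biased 5-bit exponent plus the top 10 mantissa bits, truncated.
  return static_cast<uint16_t>(((exponent + 15) << 10) |
                               ((significand & 0x7FFFFF) >> 13));
}
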
@@ -250,7 +274,6 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
scoped_refptr<media::VideoFrame> video_frame) {
TRACE_EVENT0("cc", "VideoResourceUpdater::CreateForSoftwarePlanes");
const media::VideoPixelFormat input_frame_format = video_frame->format();
-
// TODO(hubbe): Make this a video frame method.
int bits_per_channel = 0;
switch (input_frame_format) {
@@ -272,6 +295,7 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
case media::PIXEL_FORMAT_RGB32:
case media::PIXEL_FORMAT_MJPEG:
case media::PIXEL_FORMAT_MT21:
+ case media::PIXEL_FORMAT_Y8:
bits_per_channel = 8;
break;
case media::PIXEL_FORMAT_YUV420P9:
@@ -284,18 +308,33 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
case media::PIXEL_FORMAT_YUV444P10:
bits_per_channel = 10;
break;
+ case media::PIXEL_FORMAT_Y16:
+ bits_per_channel = 16;
+ break;
}
- // Only YUV software video frames are supported.
- if (!media::IsYuvPlanar(input_frame_format)) {
+ // Only YUV, Y8 and Y16 software video frames are supported.
+ const bool isYuvPlanar = media::IsYuvPlanar(input_frame_format);
+ if (!(isYuvPlanar || input_frame_format == media::PIXEL_FORMAT_Y16 ||
+ input_frame_format == media::PIXEL_FORMAT_Y8)) {
NOTREACHED() << media::VideoPixelFormatToString(input_frame_format);
return VideoFrameExternalResources();
}
const bool software_compositor = context_provider_ == NULL;
+ if ((input_frame_format == media::PIXEL_FORMAT_Y8 ||
+ input_frame_format == media::PIXEL_FORMAT_Y16) &&
+ software_compositor) {
+ // TODO(astojilj): Y8 and Y16 software compositor support.
+ NOTREACHED() << "Software compositor doesn't support PIXEL_FORMAT_Y8/Y16";
+ return VideoFrameExternalResources();
+ }
+
ResourceFormat output_resource_format =
- resource_provider_->YuvResourceFormat(bits_per_channel);
+ (input_frame_format == media::PIXEL_FORMAT_Y16)
+ ? resource_provider_->Y16ResourceFormat()
+ : resource_provider_->YuvResourceFormat(bits_per_channel);
size_t output_plane_count = media::VideoFrame::NumPlanes(input_frame_format);
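
Y16ResourceFormat() comes from the ResourceProvider side of this change and is not shown here. Judging from the two Y16 upload paths later in this file, it presumably returns LUMINANCE_F16 when half-float textures are available and falls back to RGBA_8888 otherwise; the sketch below is that assumption spelled out, not code from the patch, and the function and argument names are hypothetical.

// Hypothetical restatement of the format choice this call relies on.
ResourceFormat ChooseY16Format(bool supports_half_float_textures) {
  return supports_half_float_textures
             ? LUMINANCE_F16  // One half-float sample per texel.
             : RGBA_8888;     // One 16-bit sample widened into one RGBA texel.
}
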
@@ -410,8 +449,7 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
for (size_t i = 0; i < plane_resources.size(); ++i) {
PlaneResource& plane_resource = *plane_resources[i];
// Update each plane's resource id with its content.
- DCHECK_EQ(plane_resource.resource_format(),
- resource_provider_->YuvResourceFormat(bits_per_channel));
+ DCHECK_EQ(plane_resource.resource_format(), output_resource_format);
if (!plane_resource.Matches(video_frame->unique_id(), i)) {
// We need to transfer data from |video_frame| to the plane resource.
@@ -439,7 +477,10 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
needs_conversion = true;
// Note that the current method of converting integers to half-floats
// stops working if you have more than 10 bits of data.
- DCHECK_LE(bits_per_channel, 10);
+ DCHECK(bits_per_channel <= 10 || !isYuvPlanar);
+ } else if (input_frame_format == media::PIXEL_FORMAT_Y16) {
+ if (plane_resource.resource_format() == RGBA_8888)
+ needs_conversion = true;
} else if (bits_per_channel > 8) {
// If bits_per_channel > 8 and we can't use LUMINANCE_F16, we need to
// shift the data down and create an 8-bit texture.
@@ -463,6 +504,12 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
&upload_pixels_[upload_image_stride * row]);
const uint16_t* src = reinterpret_cast<uint16_t*>(
video_frame->data(i) + (video_stride_bytes * row));
+
+ if (input_frame_format == media::PIXEL_FORMAT_Y16) {
+ for (size_t i = 0; i < bytes_per_row / 2; i++)
+ dst[i] = ushort_to_half_float(src[i]);
+ continue;
+ }
// Micro-benchmarking indicates that the compiler does
// a good enough job of optimizing this loop that trying
// to manually operate on one uint64 at a time is not
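
A few spot values for the Y16-to-LUMINANCE_F16 row conversion added above (illustrative only; assumes the ushort_to_half_float() helper from earlier in this patch and <cassert>):

#include <cassert>

void SanityCheckY16ToHalfFloat() {
  assert(ushort_to_half_float(0) == 0x0000);      // 0.0
  assert(ushort_to_half_float(32768) == 0x3800);  // Exactly 0.5.
  assert(ushort_to_half_float(65535) == 0x3BFF);  // ~0.99951, just under 1.0.
}
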
@@ -478,6 +525,14 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
video_frame->data(i) + (video_stride_bytes * row));
for (size_t i = 0; i < bytes_per_row; i++)
dst[i] = src[i] >> shift;
+ } else if (input_frame_format == media::PIXEL_FORMAT_Y16 &&
+ plane_resource.resource_format() == RGBA_8888) {
+ uint32_t* dst = reinterpret_cast<uint32_t*>(
+ &upload_pixels_[upload_image_stride * row]);
+ const uint16_t* src = reinterpret_cast<uint16_t*>(
+ video_frame->data(i) + (video_stride_bytes * row));
+ for (size_t i = 0; i < bytes_per_row / 4; ++i)
+ *dst++ = *src++;
} else {
// Input and output are the same size and format, but
// differ in stride, copy one row at a time.
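
When LUMINANCE_F16 is not available, the loop added above widens each 16-bit sample into one RGBA_8888 texel: each of the bytes_per_row / 4 iterations consumes one 16-bit sample and writes one 32-bit texel, so on a little-endian machine (and assuming RGBA_8888 keeps bytes in R, G, B, A order) the low byte lands in R, the high byte in G, and B/A stay zero. Whoever samples the texture has to stitch the value back together; a sketch of that arithmetic follows (names and the normalization step are assumptions, not code from this patch):

// Rebuild a normalized Y16 sample from the R (low byte) and G (high byte)
// channels of the packed RGBA_8888 texture; r and g are sampled channel
// values in [0, 1].
float RebuildY16FromRG(float r, float g) {
  const float value = r * 255.0f + g * 255.0f * 256.0f;  // Back to [0, 65535].
  return value / 65535.0f;  // Normalized sample in [0, 1].
}
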
@@ -495,7 +550,7 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
plane_resource.SetUniqueId(video_frame->unique_id(), i);
}
- if (plane_resource.resource_format() == LUMINANCE_F16) {
+ if (plane_resource.resource_format() == LUMINANCE_F16 && isYuvPlanar) {
// By OR-ing with 0x3800, 10-bit numbers become half-floats in the
// range [0.5..1) and 9-bit numbers get the range [0.5..0.75).
//
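
The 0x3800 trick in the comment above can be checked by hand: 0x3800 is a half-float with sign 0, exponent field 14 and zero mantissa, i.e. 0.5. OR-ing in a value v that fits in the 10 mantissa bits gives 2^(14-15) * (1 + v/1024) = 0.5 + v/2048, so 10-bit inputs cover [0.5, 1) and 9-bit inputs [0.5, 0.75). A value needing 11 bits would spill into the exponent field, which is what the relaxed DCHECK earlier in this change still guards against on the YUV planar path. A minimal restatement (illustrative, not code from the patch):

// Map a value with at most 10 significant bits onto a half-float in [0.5, 1).
uint16_t BiasToHalfFloat(uint16_t v) {
  // Assumes v < 1024; larger values would corrupt the exponent bits.
  return static_cast<uint16_t>(0x3800 | v);
}
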
@@ -527,7 +582,11 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
&RecycleResource, AsWeakPtr(), plane_resource.resource_id()));
}
- external_resources.type = VideoFrameExternalResources::YUV_RESOURCE;
+ external_resources.type =
+ (input_frame_format == media::PIXEL_FORMAT_Y16)
+ ? VideoFrameExternalResources::Y_RESOURCE
+ : (isYuvPlanar ? VideoFrameExternalResources::YUV_RESOURCE
+ : VideoFrameExternalResources::RGB_RESOURCE);
return external_resources;
}
@@ -664,6 +723,9 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForHardwarePlanes(
base::Bind(&ReturnTexture, AsWeakPtr(), video_frame));
}
}
+
+ external_resources.bits_per_channel =
+ (video_frame->format() == media::PIXEL_FORMAT_Y16) ? 16 : 8;
return external_resources;
}