Index: content/renderer/media/video_capture_impl.cc
diff --git a/content/renderer/media/video_capture_impl.cc b/content/renderer/media/video_capture_impl.cc
index 583fb97bb9e68b9fffc02977cecb96879447632e..35020eac6ed68341a0d020a4d3cfe450cc7024b6 100644
--- a/content/renderer/media/video_capture_impl.cc
+++ b/content/renderer/media/video_capture_impl.cc
@@ -53,18 +53,19 @@ class VideoCaptureImpl::ClientBuffer
 class VideoCaptureImpl::ClientBuffer2
     : public base::RefCountedThreadSafe<ClientBuffer2> {
  public:
-  ClientBuffer2(
-      const std::vector<gfx::GpuMemoryBufferHandle>& client_handles,
-      const gfx::Size& size)
-      : handles_(client_handles),
-        size_(size) {
-    const media::VideoPixelFormat format = media::PIXEL_FORMAT_I420;
+  ClientBuffer2(const std::vector<gfx::GpuMemoryBufferHandle>& client_handles,
+                const gfx::Size& size,
+                media::VideoPixelFormat format)
+      : handles_(client_handles) {
+    DCHECK(format == media::PIXEL_FORMAT_I420 ||
+           format == media::PIXEL_FORMAT_Y16);
     DCHECK_EQ(handles_.size(), media::VideoFrame::NumPlanes(format));
     for (size_t i = 0; i < handles_.size(); ++i) {
-      const size_t width = media::VideoFrame::Columns(i, format, size_.width());
-      const size_t height = media::VideoFrame::Rows(i, format, size_.height());
+      const size_t width = media::VideoFrame::Columns(i, format, size.width());
+      const size_t height = media::VideoFrame::Rows(i, format, size.height());
       buffers_.push_back(gpu::GpuMemoryBufferImpl::CreateFromHandle(
-          handles_[i], gfx::Size(width, height), gfx::BufferFormat::R_8,
+          handles_[i], gfx::Size(width, height),
+          media::VideoFrame::BufferFormat(format),
           gfx::BufferUsage::GPU_READ_CPU_READ_WRITE,
           base::Bind(&ClientBuffer2::DestroyGpuMemoryBuffer,
                      base::Unretained(this))));
@@ -92,7 +93,6 @@ class VideoCaptureImpl::ClientBuffer2
   void DestroyGpuMemoryBuffer(const gpu::SyncToken& sync_token) {}
 
   const std::vector<gfx::GpuMemoryBufferHandle> handles_;
-  const gfx::Size size_;
   ScopedVector<gfx::GpuMemoryBuffer> buffers_;
   uint8_t* data_[media::VideoFrame::kMaxPlanes];
   int32_t strides_[media::VideoFrame::kMaxPlanes];
@@ -259,6 +259,7 @@ void VideoCaptureImpl::OnBufferCreated(base::SharedMemoryHandle handle,
 void VideoCaptureImpl::OnBufferCreated2(
     const std::vector<gfx::GpuMemoryBufferHandle>& handles,
     const gfx::Size& size,
+    media::VideoPixelFormat format,
     int buffer_id) {
   DCHECK(io_task_runner_->BelongsToCurrentThread());
 
@@ -268,8 +269,9 @@ void VideoCaptureImpl::OnBufferCreated2(
     return;
 
   const bool inserted =
-      client_buffer2s_.insert(std::make_pair(buffer_id,
-                                             new ClientBuffer2(handles, size)))
+      client_buffer2s_
+          .insert(std::make_pair(buffer_id,
+                                 new ClientBuffer2(handles, size, format)))
           .second;
   DCHECK(inserted);
 }
@@ -302,12 +304,14 @@ void VideoCaptureImpl::OnBufferReceived(
     const gfx::Rect& visible_rect) {
   DCHECK(io_task_runner_->BelongsToCurrentThread());
   if (state_ != VIDEO_CAPTURE_STATE_STARTED || suspended_ ||
-      pixel_format != media::PIXEL_FORMAT_I420 ||
+      (pixel_format != media::PIXEL_FORMAT_I420 &&
+       pixel_format != media::PIXEL_FORMAT_Y16) ||
       (storage_type != media::VideoFrame::STORAGE_SHMEM &&
       storage_type != media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS)) {
     // Crash in debug builds since the host should not have provided a buffer
     // with an unsupported pixel format or storage type.
-    DCHECK_EQ(media::PIXEL_FORMAT_I420, pixel_format);
+    DCHECK(media::PIXEL_FORMAT_I420 == pixel_format ||
+           media::PIXEL_FORMAT_Y16 == pixel_format);
     DCHECK(storage_type == media::VideoFrame::STORAGE_SHMEM ||
            storage_type == media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS);
     Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id,
@@ -349,17 +353,22 @@ void VideoCaptureImpl::OnBufferReceived(
     DCHECK(iter != client_buffer2s_.end());
     scoped_refptr<ClientBuffer2> buffer = iter->second;
     const auto& handles = buffer->gpu_memory_buffer_handles();
-    frame = media::VideoFrame::WrapExternalYuvGpuMemoryBuffers(
-        media::PIXEL_FORMAT_I420, coded_size, gfx::Rect(coded_size),
-        coded_size, buffer->stride(media::VideoFrame::kYPlane),
-        buffer->stride(media::VideoFrame::kUPlane),
-        buffer->stride(media::VideoFrame::kVPlane),
-        buffer->data(media::VideoFrame::kYPlane),
-        buffer->data(media::VideoFrame::kUPlane),
-        buffer->data(media::VideoFrame::kVPlane),
-        handles[media::VideoFrame::kYPlane],
-        handles[media::VideoFrame::kUPlane],
-        handles[media::VideoFrame::kVPlane], timestamp);
+    frame =
+        (pixel_format == media::PIXEL_FORMAT_I420)
+            ? media::VideoFrame::WrapExternalYuvGpuMemoryBuffers(
+                  media::PIXEL_FORMAT_I420, coded_size, gfx::Rect(coded_size),
+                  coded_size, buffer->stride(media::VideoFrame::kYPlane),
+                  buffer->stride(media::VideoFrame::kUPlane),
+                  buffer->stride(media::VideoFrame::kVPlane),
+                  buffer->data(media::VideoFrame::kYPlane),
+                  buffer->data(media::VideoFrame::kUPlane),
+                  buffer->data(media::VideoFrame::kVPlane),
+                  handles[media::VideoFrame::kYPlane],
+                  handles[media::VideoFrame::kUPlane],
+                  handles[media::VideoFrame::kVPlane], timestamp)
+            : media::VideoFrame::WrapExternalGpuMemoryBuffer(
+                  pixel_format, coded_size, gfx::Rect(coded_size), coded_size,
+                  buffer->data(0), handles[0], timestamp);
     buffer_finished_callback = media::BindToCurrentLoop(
         base::Bind(&VideoCaptureImpl::OnClientBufferFinished2,
                    weak_factory_.GetWeakPtr(), buffer_id, buffer));
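
Note on the change: the constructor now takes the pixel format because plane count and per-plane dimensions differ between the two accepted formats. I420 carries three planes (Y full resolution, U and V subsampled by two in each direction), while Y16 is a single full-resolution plane of 16-bit samples. The following standalone sketch illustrates that sizing logic under those assumptions; it is plain C++ with no Chromium dependencies, and PixelFormat, NumPlanes() and PlaneSize() are illustrative stand-ins for the media::VideoFrame helpers used in the patch, not the real API.

// Standalone sketch (not Chromium code): per-plane sizing for I420 vs. Y16,
// mirroring what the updated ClientBuffer2 constructor asks VideoFrame for.
#include <cstddef>
#include <initializer_list>
#include <iostream>

enum class PixelFormat { kI420, kY16 };

struct PlaneDims {
  size_t width;
  size_t height;
};

// I420 carries Y, U and V planes; Y16 is a single plane of 16-bit samples.
size_t NumPlanes(PixelFormat format) {
  return format == PixelFormat::kI420 ? 3 : 1;
}

// Plane 0 is always full resolution; I420 chroma planes are subsampled by two
// in each direction, rounding up for odd frame sizes.
PlaneDims PlaneSize(PixelFormat format, size_t plane, size_t width,
                    size_t height) {
  if (format == PixelFormat::kI420 && plane > 0)
    return {(width + 1) / 2, (height + 1) / 2};
  return {width, height};
}

int main() {
  const size_t kWidth = 641;
  const size_t kHeight = 481;
  for (PixelFormat format : {PixelFormat::kI420, PixelFormat::kY16}) {
    std::cout << (format == PixelFormat::kI420 ? "I420" : "Y16") << ":\n";
    for (size_t i = 0; i < NumPlanes(format); ++i) {
      const PlaneDims dims = PlaneSize(format, i, kWidth, kHeight);
      std::cout << "  plane " << i << ": " << dims.width << "x" << dims.height
                << "\n";
    }
  }
  return 0;
}

Under these assumptions a 641x481 I420 frame yields planes of 641x481, 321x241 and 321x241, while Y16 yields one 641x481 plane. This is why the constructor needs the format both to size each gfx::GpuMemoryBuffer and to pick media::VideoFrame::BufferFormat(format) instead of the previously hard-coded gfx::BufferFormat::R_8, and why the single-plane Y16 path wraps the frame with one handle rather than three.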