Index: content/renderer/media/video_capture_impl.cc |
diff --git a/content/renderer/media/video_capture_impl.cc b/content/renderer/media/video_capture_impl.cc |
index 2b62823b7630203c4a5d1436906ac55ac856af37..b18cf7fd250a289200f07a86d2938586e2b9990e 100644 |
--- a/content/renderer/media/video_capture_impl.cc |
+++ b/content/renderer/media/video_capture_impl.cc |
@@ -15,6 +15,7 @@ |
#include "base/stl_util.h" |
#include "base/thread_task_runner_handle.h" |
#include "content/child/child_process.h" |
+#include "content/common/gpu/client/gpu_memory_buffer_impl.h" |
#include "content/common/media/video_capture_messages.h" |
#include "media/base/bind_to_current_loop.h" |
#include "media/base/limits.h" |
@@ -38,24 +39,80 @@ void SaveReleaseSyncPoint(uint32* storage, uint32 release_sync_point) { |
} // namespace |
+// A holder of a memory-backed buffer and accessors to it. |
class VideoCaptureImpl::ClientBuffer |
: public base::RefCountedThreadSafe<ClientBuffer> { |
public: |
- ClientBuffer(scoped_ptr<base::SharedMemory> buffer, |
- size_t buffer_size) |
- : buffer(buffer.Pass()), |
- buffer_size(buffer_size) {} |
- const scoped_ptr<base::SharedMemory> buffer; |
- const size_t buffer_size; |
+ ClientBuffer(scoped_ptr<base::SharedMemory> buffer, size_t buffer_size) |
+ : buffer_(buffer.Pass()), buffer_size_(buffer_size) {} |
+ |
+ base::SharedMemory* buffer() const { return buffer_.get(); } |
+ size_t buffer_size() const { return buffer_size_; } |
private: |
friend class base::RefCountedThreadSafe<ClientBuffer>; |
virtual ~ClientBuffer() {} |
+ const scoped_ptr<base::SharedMemory> buffer_; |
+ const size_t buffer_size_; |
+ |
DISALLOW_COPY_AND_ASSIGN(ClientBuffer); |
}; |
+// A holder of a GpuMemoryBuffer-backed buffer, Map()ed on ctor and Unmap()ed on |
+// dtor. Creates and owns GpuMemoryBuffer instances. |
+class VideoCaptureImpl::ClientBuffer2 |
+ : public base::RefCountedThreadSafe<ClientBuffer2> { |
+ public: |
+ ClientBuffer2( |
+ const std::vector<gfx::GpuMemoryBufferHandle>& client_handles, |
+ const gfx::Size& size) |
+ : handles_(client_handles), |
+ size_(size) { |
+ const media::VideoPixelFormat format = media::PIXEL_FORMAT_I420; |
+ for (size_t i = 0; i < handles_.size(); ++i) { |
+ const size_t width = media::VideoFrame::Columns(i, format, size_.width()); |
+ const size_t height = media::VideoFrame::Rows(i, format, size_.height()); |
+ buffers_.push_back(GpuMemoryBufferImpl::CreateFromHandle( |
+ handles_[i], |
+ gfx::Size(width, height), |
+ gfx::BufferFormat::R_8, |
+ gfx::BufferUsage::MAP, |
+ base::Bind(&ClientBuffer2::DestroyGpuMemoryBuffer, |
+ base::Unretained(this)))); |
+ void* data_ptr = nullptr; |
+ buffers_[i]->Map(&data_ptr); |
+ data_[i] = reinterpret_cast<uint8*>(data_ptr); |
+ strides_[i] = width; |
+ } |
+ } |
+ |
+ uint8* data(int plane) const { return data_[plane]; } |
+ int32 stride(int plane) const { return strides_[plane]; } |
+ std::vector<gfx::GpuMemoryBufferHandle> gpu_memory_buffer_handles() { |
+ return handles_; |
+ } |
+ |
+ private: |
+ friend class base::RefCountedThreadSafe<ClientBuffer2>; |
+ |
+ virtual ~ClientBuffer2() { |
+ for (auto& buffer : buffers_) |
+ buffer->Unmap(); |
+ } |
+ |
+ void DestroyGpuMemoryBuffer(uint32 sync_point) {} |
+ |
+ const std::vector<gfx::GpuMemoryBufferHandle> handles_; |
+ const gfx::Size size_; |
+ ScopedVector<gfx::GpuMemoryBuffer> buffers_; |
+ uint8* data_[media::VideoFrame::kMaxPlanes]; |
+ int32 strides_[media::VideoFrame::kMaxPlanes]; |
+ |
+ DISALLOW_COPY_AND_ASSIGN(ClientBuffer2); |
+}; |
+ |
VideoCaptureImpl::VideoCaptureImpl( |
const media::VideoCaptureSessionId session_id, |
VideoCaptureMessageFilter* filter) |
@@ -146,6 +203,7 @@ void VideoCaptureImpl::StopCapture() { |
state_update_cb_.Run(VIDEO_CAPTURE_STATE_STOPPED); |
StopDevice(); |
client_buffers_.clear(); |
+ client_buffer2s_.clear(); |
ResetClient(); |
weak_factory_.InvalidateWeakPtrs(); |
} |
@@ -185,7 +243,6 @@ void VideoCaptureImpl::OnBufferCreated(base::SharedMemoryHandle handle, |
DLOG(ERROR) << "OnBufferCreated: Map failed."; |
return; |
} |
- |
const bool inserted = |
client_buffers_.insert(std::make_pair(buffer_id, new ClientBuffer( |
shm.Pass(), length))) |
@@ -193,16 +250,40 @@ void VideoCaptureImpl::OnBufferCreated(base::SharedMemoryHandle handle, |
DCHECK(inserted); |
} |
-void VideoCaptureImpl::OnBufferDestroyed(int buffer_id) { |
+void VideoCaptureImpl::OnBufferCreated2( |
+ const std::vector<gfx::GpuMemoryBufferHandle>& handles, |
+ const gfx::Size& size, |
+ int buffer_id) { |
DCHECK(io_task_runner_->BelongsToCurrentThread()); |
- const ClientBufferMap::iterator iter = client_buffers_.find(buffer_id); |
- if (iter == client_buffers_.end()) |
+ // If the client calls StopCapture before the created buffer arrives, |
+ // just close this buffer and return. |
+ if (state_ != VIDEO_CAPTURE_STATE_STARTED) |
return; |
- DCHECK(!iter->second.get() || iter->second->HasOneRef()) |
- << "Instructed to delete buffer we are still using."; |
- client_buffers_.erase(iter); |
+ const bool inserted = |
+ client_buffer2s_.insert(std::make_pair(buffer_id, |
+ new ClientBuffer2(handles, size))) |
+ .second; |
+ DCHECK(inserted); |
+} |
+ |
+void VideoCaptureImpl::OnBufferDestroyed(int buffer_id) { |
+ DCHECK(io_task_runner_->BelongsToCurrentThread()); |
+ |
+ const auto& cb_iter = client_buffers_.find(buffer_id); |
+ if (cb_iter != client_buffers_.end()) { |
+ DCHECK(!cb_iter->second.get() || cb_iter->second->HasOneRef()) |
+ << "Instructed to delete buffer we are still using."; |
+ client_buffers_.erase(cb_iter); |
+ } else { |
+ const auto& cb2_iter = client_buffer2s_.find(buffer_id); |
+ if (cb2_iter != client_buffer2s_.end()) { |
+ DCHECK(!cb2_iter->second.get() || cb2_iter->second->HasOneRef()) |
+ << "Instructed to delete buffer we are still using."; |
+ client_buffer2s_.erase(cb2_iter); |
+ } |
+ } |
} |
void VideoCaptureImpl::OnBufferReceived( |
@@ -227,15 +308,15 @@ void VideoCaptureImpl::OnBufferReceived( |
TRACE_EVENT_SCOPE_THREAD, "timestamp", |
timestamp.ToInternalValue(), "time_delta", |
(timestamp - first_frame_timestamp_).ToInternalValue()); |
+ // TODO(emircan): Handle texture upload and video frame creation for GMB |
+ // backed buffers. |
scoped_refptr<media::VideoFrame> frame; |
uint32* release_sync_point_storage = nullptr; |
scoped_refptr<ClientBuffer> buffer; |
- |
if (mailbox_holder.mailbox.IsZero()) { |
DCHECK_EQ(media::PIXEL_FORMAT_I420, pixel_format); |
- const ClientBufferMap::const_iterator iter = |
- client_buffers_.find(buffer_id); |
+ const auto& iter = client_buffers_.find(buffer_id); |
DCHECK(iter != client_buffers_.end()); |
buffer = iter->second; |
frame = media::VideoFrame::WrapExternalSharedMemory( |
@@ -243,35 +324,27 @@ void VideoCaptureImpl::OnBufferReceived( |
coded_size, |
visible_rect, |
gfx::Size(visible_rect.width(), visible_rect.height()), |
- reinterpret_cast<uint8*>(buffer->buffer->memory()), |
- buffer->buffer_size, |
- buffer->buffer->handle(), |
- 0 /* shared_memory_offset */, |
+ reinterpret_cast<uint8*>(buffer->buffer()->memory()), |
+ buffer->buffer_size(), |
+ buffer->buffer()->handle(), |
+ 0 /* shared_memory_offset */, |
timestamp - first_frame_timestamp_); |
- |
} else { |
DCHECK_EQ(media::PIXEL_FORMAT_ARGB, pixel_format); |
DCHECK(mailbox_holder.mailbox.Verify()); // Paranoia? |
// To be deleted in DidFinishConsumingFrame(). |
release_sync_point_storage = new uint32(0); |
frame = media::VideoFrame::WrapNativeTexture( |
- pixel_format, |
- mailbox_holder, |
- base::Bind(&SaveReleaseSyncPoint, release_sync_point_storage), |
- coded_size, |
- gfx::Rect(coded_size), |
- coded_size, |
- timestamp - first_frame_timestamp_); |
+ pixel_format, |
+ mailbox_holder, |
+ base::Bind(&SaveReleaseSyncPoint, release_sync_point_storage), |
+ coded_size, |
+ gfx::Rect(coded_size), |
+ coded_size, |
+ timestamp - first_frame_timestamp_); |
} |
frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME, |
timestamp); |
- frame->AddDestructionObserver( |
- base::Bind(&VideoCaptureImpl::DidFinishConsumingFrame, frame->metadata(), |
- release_sync_point_storage, |
- media::BindToCurrentLoop(base::Bind( |
- &VideoCaptureImpl::OnClientBufferFinished, |
- weak_factory_.GetWeakPtr(), buffer_id, buffer)))); |
- |
frame->metadata()->MergeInternalValuesFrom(metadata); |
deliver_frame_cb_.Run(frame, timestamp); |
} |
@@ -286,6 +359,14 @@ void VideoCaptureImpl::OnClientBufferFinished( |
release_sync_point, |
consumer_resource_utilization)); |
} |
+void VideoCaptureImpl::OnClientBufferFinished2( |
+ int buffer_id, |
+ const scoped_refptr<ClientBuffer2>& gpu_memory_buffer, |
+ uint32 release_sync_point, |
+ double consumer_resource_utilization) { |
+ OnClientBufferFinished(buffer_id, scoped_refptr<ClientBuffer>(), |
+ release_sync_point, consumer_resource_utilization); |
+} |
void VideoCaptureImpl::OnStateChanged(VideoCaptureState state) { |
// TODO(ajose): http://crbug.com/522155 improve this state machine. |
@@ -295,6 +376,7 @@ void VideoCaptureImpl::OnStateChanged(VideoCaptureState state) { |
if (state == VIDEO_CAPTURE_STATE_STOPPED) { |
DVLOG(1) << "OnStateChanged: stopped!, device_id = " << device_id_; |
client_buffers_.clear(); |
+ client_buffer2s_.clear(); |
weak_factory_.InvalidateWeakPtrs(); |
return; |
} |