Index: content/renderer/media/video_capture_impl.cc
diff --git a/content/renderer/media/video_capture_impl.cc b/content/renderer/media/video_capture_impl.cc
index 46ab7c1cdf0953fb4fe1f777e93eb1559fae7c9f..cf863d248fb7f0c7c3de82fbdea4b8d1b345e828 100644
--- a/content/renderer/media/video_capture_impl.cc
+++ b/content/renderer/media/video_capture_impl.cc
@@ -31,8 +31,12 @@ namespace {
 // VideoCaptureImpl::DidFinishConsumingFrame() will read the value saved here,
 // and pass it back to the IO thread to pass back to the host via the
 // BufferReady IPC.
-void SaveReleaseSyncPoint(uint32* storage, uint32 release_sync_point) {
-  *storage = release_sync_point;
+void SaveReleaseSyncPoint(uint32* sync_point_storage,
+                          gpu::SyncToken* sync_token_storage,
+                          uint32 release_sync_point,
+                          const gpu::SyncToken& release_sync_token) {
+  *sync_point_storage = release_sync_point;
+  *sync_token_storage = release_sync_token;
 }
 }  // namespace
@@ -314,7 +318,8 @@ void VideoCaptureImpl::OnBufferReceived(
     const std::vector<gpu::MailboxHolder>& mailbox_holders) {
   DCHECK(io_task_runner_->BelongsToCurrentThread());
   if (state_ != VIDEO_CAPTURE_STATE_STARTED || suspended_) {
-    Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id, 0, -1.0));
+    Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id, 0,
+                                             gpu::SyncToken(), -1.0));
     return;
   }
   if (first_frame_timestamp_.is_null())
@@ -327,8 +332,9 @@ void VideoCaptureImpl::OnBufferReceived(
                        (timestamp - first_frame_timestamp_).ToInternalValue());
   scoped_refptr<media::VideoFrame> frame;
-  base::Callback<void(uint32, double)> buffer_finished_callback;
+  BufferFinishedCB buffer_finished_callback;
   uint32* release_sync_point_storage = new uint32(0);
+  gpu::SyncToken* release_sync_token_storage = new gpu::SyncToken;
dcheng 2015/10/27 19:09:28
It doesn't have to be this CL, but can we make own
David Yen 2015/10/28 22:03:43
I will create a follow up CL for this.
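For context on the raw-pointer hand-off discussed above: the sync point / sync token storage is heap-allocated in OnBufferReceived(), filled in by the frame's mailbox release callback (SaveReleaseSyncPoint), and then read and freed by DidFinishConsumingFrame(), which forwards the values to the IO thread for the BufferReady IPC. Below is a minimal stand-alone sketch of that pattern; it uses plain lambdas and a simplified SyncToken stand-in instead of the real base::Bind and gpu::SyncToken, so it only illustrates the flow and does not reproduce the Chromium code.

#include <iostream>

// Simplified stand-in for gpu::SyncToken.
struct SyncToken {
  unsigned long long value = 0;
};

// Stand-in for the VideoCaptureHostMsg_BufferReady IPC send.
void SendBufferReady(int buffer_id, const SyncToken& token) {
  std::cout << "BufferReady: buffer " << buffer_id << ", token " << token.value
            << "\n";
}

int main() {
  // OnBufferReceived(): heap-allocate the storage so it outlives the frame.
  SyncToken* release_sync_token_storage = new SyncToken;

  // Equivalent of SaveReleaseSyncPoint(), bound as the frame's mailbox
  // release callback; it only writes into the shared storage.
  auto save_release_sync_token =
      [release_sync_token_storage](const SyncToken& release_sync_token) {
        *release_sync_token_storage = release_sync_token;
      };

  // Equivalent of DidFinishConsumingFrame(), run as a destruction observer
  // after the release callback: read the saved token, free the storage, and
  // forward the value (in the real code, via a callback bound to the IO
  // thread).
  auto did_finish_consuming_frame = [release_sync_token_storage]() {
    SyncToken token = *release_sync_token_storage;
    delete release_sync_token_storage;
    SendBufferReady(/*buffer_id=*/1, token);
  };

  // When the consumer releases the frame, the release callback runs first,
  // then the destruction observers.
  SyncToken token;
  token.value = 42;
  save_release_sync_token(token);
  did_finish_consuming_frame();
  return 0;
}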
   if (mailbox_holders.empty()) {
     DCHECK_EQ(media::PIXEL_FORMAT_I420, pixel_format);
     const auto& iter = client_buffers_.find(buffer_id);
@@ -359,12 +365,10 @@ void VideoCaptureImpl::OnBufferReceived(
         media::VideoFrame::NumPlanes(media::PIXEL_FORMAT_ARGB)) {
       DCHECK_EQ(media::PIXEL_FORMAT_ARGB, pixel_format);
       frame = media::VideoFrame::WrapNativeTexture(
-          pixel_format,
-          mailbox_holders[0],
-          base::Bind(&SaveReleaseSyncPoint, release_sync_point_storage),
-          coded_size,
-          gfx::Rect(coded_size),
-          coded_size,
+          pixel_format, mailbox_holders[0],
+          base::Bind(&SaveReleaseSyncPoint, release_sync_point_storage,
+                     release_sync_token_storage),
+          coded_size, gfx::Rect(coded_size), coded_size,
           timestamp - first_frame_timestamp_);
     } else if (mailbox_holders.size() ==
                media::VideoFrame::NumPlanes(media::PIXEL_FORMAT_I420)) {
@@ -376,10 +380,9 @@ void VideoCaptureImpl::OnBufferReceived(
           mailbox_holders[media::VideoFrame::kYPlane],
           mailbox_holders[media::VideoFrame::kUPlane],
           mailbox_holders[media::VideoFrame::kVPlane],
-          base::Bind(&SaveReleaseSyncPoint, release_sync_point_storage),
-          coded_size,
-          gfx::Rect(coded_size),
-          coded_size,
+          base::Bind(&SaveReleaseSyncPoint, release_sync_point_storage,
+                     release_sync_token_storage),
+          coded_size, gfx::Rect(coded_size), coded_size,
           timestamp - first_frame_timestamp_);
     }
     buffer_finished_callback = media::BindToCurrentLoop(
@@ -390,7 +393,8 @@ void VideoCaptureImpl::OnBufferReceived(
       timestamp);
   frame->AddDestructionObserver(
       base::Bind(&VideoCaptureImpl::DidFinishConsumingFrame, frame->metadata(),
-                 release_sync_point_storage, buffer_finished_callback));
+                 release_sync_point_storage, release_sync_token_storage,
+                 buffer_finished_callback));
   frame->metadata()->MergeInternalValuesFrom(metadata);
@@ -402,19 +406,22 @@ void VideoCaptureImpl::OnClientBufferFinished(
     int buffer_id,
     const scoped_refptr<ClientBuffer>& /* ignored_buffer */,
     uint32 release_sync_point,
+    const gpu::SyncToken& release_sync_token,
     double consumer_resource_utilization) {
   DCHECK(io_task_runner_->BelongsToCurrentThread());
-  Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id,
-                                           release_sync_point,
-                                           consumer_resource_utilization));
+  Send(new VideoCaptureHostMsg_BufferReady(
+      device_id_, buffer_id, release_sync_point, release_sync_token,
+      consumer_resource_utilization));
 }
 void VideoCaptureImpl::OnClientBufferFinished2(
     int buffer_id,
     const scoped_refptr<ClientBuffer2>& gpu_memory_buffer /* ignored_buffer */,
     uint32 release_sync_point,
+    const gpu::SyncToken& release_sync_token,
     double consumer_resource_utilization) {
   OnClientBufferFinished(buffer_id, scoped_refptr<ClientBuffer>(),
-                         release_sync_point, consumer_resource_utilization);
+                         release_sync_point, release_sync_token,
+                         consumer_resource_utilization);
 }
 void VideoCaptureImpl::OnStateChanged(VideoCaptureState state) {
@@ -551,7 +558,8 @@ bool VideoCaptureImpl::RemoveClient(int client_id, ClientInfoMap* clients) {
 void VideoCaptureImpl::DidFinishConsumingFrame(
     const media::VideoFrameMetadata* metadata,
     uint32* release_sync_point_storage,
-    const base::Callback<void(uint32, double)>& callback_to_io_thread) {
+    gpu::SyncToken* release_sync_token_storage,
+    const BufferFinishedCB& callback_to_io_thread) {
   // Note: This function may be called on any thread by the VideoFrame
   // destructor. |metadata| is still valid for read-access at this point.
@@ -561,13 +569,20 @@ void VideoCaptureImpl::DidFinishConsumingFrame(
     delete release_sync_point_storage;
   }
+  gpu::SyncToken release_sync_token;
+  if (release_sync_token_storage) {
+    release_sync_token = *release_sync_token_storage;
+    delete release_sync_token_storage;
+  }
+
   double consumer_resource_utilization = -1.0;
   if (!metadata->GetDouble(media::VideoFrameMetadata::RESOURCE_UTILIZATION,
                            &consumer_resource_utilization)) {
     consumer_resource_utilization = -1.0;
   }
-  callback_to_io_thread.Run(release_sync_point, consumer_resource_utilization);
+  callback_to_io_thread.Run(release_sync_point, release_sync_token,
+                            consumer_resource_utilization);
 }
 }  // namespace content
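BufferFinishedCB, used in the hunks above, replaces the old base::Callback<void(uint32, double)> type; its definition is not part of this file's diff (it presumably lives in video_capture_impl.h alongside the changed method declarations). Judging from the arguments passed to callback_to_io_thread.Run() and to OnClientBufferFinished(), the typedef would look roughly like the sketch below; this is an inference from the .cc changes shown here, not text from this CL.

// Sketch only: a callback carrying the legacy sync point, the new sync token,
// and the consumer resource utilization back to the IO thread.
#include "base/basictypes.h"
#include "base/callback.h"
#include "gpu/command_buffer/common/sync_token.h"

typedef base::Callback<void(uint32 release_sync_point,
                            const gpu::SyncToken& release_sync_token,
                            double consumer_resource_utilization)>
    BufferFinishedCB;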