Index: media/capture/video/video_capture_device_client.cc |
diff --git a/media/capture/video/video_capture_device_client.cc b/media/capture/video/video_capture_device_client.cc |
index 3590c7aa30f4331c52b5716645cd06ebe42e4a31..2e31a59adc896e72e09e832a37684e76367fe9b8 100644 |
--- a/media/capture/video/video_capture_device_client.cc |
+++ b/media/capture/video/video_capture_device_client.cc |
@@ -74,11 +74,16 @@ class AutoReleaseBuffer : public media::VideoCaptureDevice::Client::Buffer { |
VideoCaptureDeviceClient::VideoCaptureDeviceClient( |
std::unique_ptr<VideoFrameReceiver> receiver, |
scoped_refptr<VideoCaptureBufferPool> buffer_pool, |
- const VideoCaptureJpegDecoderFactoryCB& jpeg_decoder_factory) |
+ const VideoCaptureJpegDecoderFactoryCB& jpeg_decoder_factory, |
+ scoped_refptr<base::SingleThreadTaskRunner> |
+ utilization_reporting_task_runner) |
: receiver_(std::move(receiver)), |
jpeg_decoder_factory_callback_(jpeg_decoder_factory), |
external_jpeg_decoder_initialized_(false), |
buffer_pool_(std::move(buffer_pool)), |
+ optional_load_observer_(nullptr), |
+ utilization_reporting_task_runner_( |
+ std::move(utilization_reporting_task_runner)), |
last_captured_pixel_format_(media::PIXEL_FORMAT_UNKNOWN) {} |
VideoCaptureDeviceClient::~VideoCaptureDeviceClient() { |
@@ -87,13 +92,19 @@ VideoCaptureDeviceClient::~VideoCaptureDeviceClient() { |
// OnIncomingCapturedData. |
} |
+void VideoCaptureDeviceClient::SetConsumerLoadObserver( |
+ ConsumerLoadObserver* load_observer) { |
+ optional_load_observer_ = load_observer; |
+} |
+ |
void VideoCaptureDeviceClient::OnIncomingCapturedData( |
const uint8_t* data, |
int length, |
const VideoCaptureFormat& frame_format, |
int rotation, |
base::TimeTicks reference_time, |
- base::TimeDelta timestamp) { |
+ base::TimeDelta timestamp, |
+ int frame_id) { |
TRACE_EVENT0("video", "VideoCaptureDeviceClient::OnIncomingCapturedData"); |
DCHECK_EQ(media::PIXEL_STORAGE_CPU, frame_format.pixel_storage); |
@@ -115,7 +126,7 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData( |
if (frame_format.pixel_format == media::PIXEL_FORMAT_Y16) { |
return OnIncomingCapturedY16Data(data, length, frame_format, reference_time, |
- timestamp); |
+ timestamp, frame_id); |
} |
// |chopped_{width,height} and |new_unrotated_{width,height}| are the lowest |
@@ -259,7 +270,7 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData( |
VideoCaptureFormat(dimensions, frame_format.frame_rate, |
media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU); |
OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, |
- timestamp); |
+ timestamp, frame_id); |
} |
std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> |
@@ -276,8 +287,10 @@ VideoCaptureDeviceClient::ReserveOutputBuffer( |
int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId; |
const int buffer_id = buffer_pool_->ReserveForProducer( |
frame_size, pixel_format, pixel_storage, &buffer_id_to_drop); |
- if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) |
+ if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) { |
receiver_->OnBufferDestroyed(buffer_id_to_drop); |
+ EraseEntryFromBufferIdToFrameIdMap(buffer_id_to_drop); |
+ } |
if (buffer_id == VideoCaptureBufferPool::kInvalidId) |
return nullptr; |
return base::WrapUnique<Buffer>( |
@@ -288,7 +301,8 @@ void VideoCaptureDeviceClient::OnIncomingCapturedBuffer( |
std::unique_ptr<Buffer> buffer, |
const VideoCaptureFormat& frame_format, |
base::TimeTicks reference_time, |
- base::TimeDelta timestamp) { |
+ base::TimeDelta timestamp, |
+ int frame_id) { |
DCHECK(IsFormatSupported(frame_format.pixel_format)); |
DCHECK_EQ(media::PIXEL_STORAGE_CPU, frame_format.pixel_storage); |
@@ -311,12 +325,14 @@ void VideoCaptureDeviceClient::OnIncomingCapturedBuffer( |
frame_format.frame_rate); |
frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME, |
reference_time); |
- OnIncomingCapturedVideoFrame(std::move(buffer), std::move(frame)); |
+ OnIncomingCapturedVideoFrame(std::move(buffer), std::move(frame), frame_id); |
} |
void VideoCaptureDeviceClient::OnIncomingCapturedVideoFrame( |
std::unique_ptr<Buffer> buffer, |
- scoped_refptr<VideoFrame> frame) { |
+ scoped_refptr<VideoFrame> frame, |
+ int frame_id) { |
+ AddEntryToBufferIdToFrameIdMap(buffer->id(), frame_id); |
receiver_->OnIncomingCapturedVideoFrame(std::move(buffer), std::move(frame)); |
} |
@@ -354,6 +370,16 @@ double VideoCaptureDeviceClient::GetBufferPoolUtilization() const { |
return buffer_pool_->GetBufferPoolUtilization(); |
} |
+void VideoCaptureDeviceClient::OnReceiverReportingUtilization( |
+ int buffer_id, |
+ double utilization) { |
+ DCHECK(utilization_reporting_task_runner_->BelongsToCurrentThread()); |
+ if (optional_load_observer_ == nullptr) |
+ return; |
+ optional_load_observer_->OnConsumerReportingUtilization( |
+ buffer_id_to_frame_id_map_[buffer_id], utilization); |
+} |
+ |
std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> |
VideoCaptureDeviceClient::ReserveI420OutputBuffer( |
const gfx::Size& dimensions, |
@@ -387,7 +413,8 @@ void VideoCaptureDeviceClient::OnIncomingCapturedY16Data( |
int length, |
const VideoCaptureFormat& frame_format, |
base::TimeTicks reference_time, |
- base::TimeDelta timestamp) { |
+ base::TimeDelta timestamp, |
+ int frame_id) { |
std::unique_ptr<Buffer> buffer(ReserveOutputBuffer(frame_format.frame_size, |
media::PIXEL_FORMAT_Y16, |
media::PIXEL_STORAGE_CPU)); |
@@ -407,7 +434,33 @@ void VideoCaptureDeviceClient::OnIncomingCapturedY16Data( |
VideoCaptureFormat(frame_format.frame_size, frame_format.frame_rate, |
media::PIXEL_FORMAT_Y16, media::PIXEL_STORAGE_CPU); |
OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, |
- timestamp); |
+ timestamp, frame_id); |
+} |
+ |
+void VideoCaptureDeviceClient::AddEntryToBufferIdToFrameIdMap(int buffer_id, |
+ int frame_id) { |
+ if (utilization_reporting_task_runner_->BelongsToCurrentThread()) { |
[NOTE: The following lines are Rietveld inline review comments captured by the page extraction — they are NOT part of the patch and must be removed before applying this diff with `patch`/`git apply`.]
[review — miu, 2016/12/01 05:25:18]: "The threading in these two new methods feels a bit" [comment truncated in extraction]
[reply — chfremer, 2016/12/02 01:28:29]: "Luckily, we were able to eliminate the need for al" [comment truncated in extraction]
+ buffer_id_to_frame_id_map_[buffer_id] = frame_id; |
+ return; |
+ } |
+ utilization_reporting_task_runner_->PostTask( |
+ FROM_HERE, |
+ base::Bind(&VideoCaptureDeviceClient::AddEntryToBufferIdToFrameIdMap, |
+ base::Unretained(this), buffer_id, frame_id)); |
+} |
+ |
+void VideoCaptureDeviceClient::EraseEntryFromBufferIdToFrameIdMap( |
+ int buffer_id_to_drop) { |
+ if (utilization_reporting_task_runner_->BelongsToCurrentThread()) { |
+ if (buffer_id_to_frame_id_map_.find(buffer_id_to_drop) != |
+ buffer_id_to_frame_id_map_.end()) |
+ buffer_id_to_frame_id_map_.erase(buffer_id_to_drop); |
+ return; |
+ } |
+ utilization_reporting_task_runner_->PostTask( |
+ FROM_HERE, |
+ base::Bind(&VideoCaptureDeviceClient::EraseEntryFromBufferIdToFrameIdMap, |
+ base::Unretained(this), buffer_id_to_drop)); |
} |
} // namespace media |