Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(82)

Side by Side Diff: media/capture/video/video_capture_device_client.cc

Issue 2518143004: [Mojo Video Capture] Replace RESOURCE_UTILIZATION with interface ReceiverLoadObserver (Closed)
Patch Set: Fixes for failing bots Created 4 years ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright 2015 The Chromium Authors. All rights reserved. 1 // Copyright 2015 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "media/capture/video/video_capture_device_client.h" 5 #include "media/capture/video/video_capture_device_client.h"
6 6
7 #include <algorithm> 7 #include <algorithm>
8 #include <utility> 8 #include <utility>
9 9
10 #include "base/bind.h" 10 #include "base/bind.h"
(...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after
67 ~AutoReleaseBuffer() override { pool_->RelinquishProducerReservation(id_); } 67 ~AutoReleaseBuffer() override { pool_->RelinquishProducerReservation(id_); }
68 68
69 const int id_; 69 const int id_;
70 const scoped_refptr<VideoCaptureBufferPool> pool_; 70 const scoped_refptr<VideoCaptureBufferPool> pool_;
71 const std::unique_ptr<VideoCaptureBufferHandle> buffer_handle_; 71 const std::unique_ptr<VideoCaptureBufferHandle> buffer_handle_;
72 }; 72 };
73 73
// Constructs a client that forwards captured frames to |receiver| using
// buffers leased from |buffer_pool|.  |utilization_reporting_task_runner|
// is the task runner on which consumer utilization reports arrive and on
// which |buffer_id_to_frame_id_map_| is accessed.
VideoCaptureDeviceClient::VideoCaptureDeviceClient(
    std::unique_ptr<VideoFrameReceiver> receiver,
    scoped_refptr<VideoCaptureBufferPool> buffer_pool,
    const VideoCaptureJpegDecoderFactoryCB& jpeg_decoder_factory,
    scoped_refptr<base::SingleThreadTaskRunner>
        utilization_reporting_task_runner)
    : receiver_(std::move(receiver)),
      jpeg_decoder_factory_callback_(jpeg_decoder_factory),
      // The JPEG decoder is created lazily on the first MJPEG frame; see
      // OnIncomingCapturedData().
      external_jpeg_decoder_initialized_(false),
      buffer_pool_(std::move(buffer_pool)),
      // No load observer until SetConsumerLoadObserver() is called.
      optional_load_observer_(nullptr),
      utilization_reporting_task_runner_(
          std::move(utilization_reporting_task_runner)),
      last_captured_pixel_format_(media::PIXEL_FORMAT_UNKNOWN) {}
83 88
VideoCaptureDeviceClient::~VideoCaptureDeviceClient() {
  // This should be on the platform auxiliary thread since
  // |external_jpeg_decoder_| need to be destructed on the same thread as
  // OnIncomingCapturedData.
}
89 94
// Registers |load_observer| to receive per-frame consumer utilization
// reports; passing nullptr clears it.  NOTE(review): the raw pointer write
// is unsynchronized -- presumably this is called before capture starts and
// the observer outlives |this|; confirm with callers.
void VideoCaptureDeviceClient::SetConsumerLoadObserver(
    ConsumerLoadObserver* load_observer) {
  optional_load_observer_ = load_observer;
}
99
90 void VideoCaptureDeviceClient::OnIncomingCapturedData( 100 void VideoCaptureDeviceClient::OnIncomingCapturedData(
91 const uint8_t* data, 101 const uint8_t* data,
92 int length, 102 int length,
93 const VideoCaptureFormat& frame_format, 103 const VideoCaptureFormat& frame_format,
94 int rotation, 104 int rotation,
95 base::TimeTicks reference_time, 105 base::TimeTicks reference_time,
96 base::TimeDelta timestamp) { 106 base::TimeDelta timestamp,
107 int frame_id) {
97 TRACE_EVENT0("video", "VideoCaptureDeviceClient::OnIncomingCapturedData"); 108 TRACE_EVENT0("video", "VideoCaptureDeviceClient::OnIncomingCapturedData");
98 DCHECK_EQ(media::PIXEL_STORAGE_CPU, frame_format.pixel_storage); 109 DCHECK_EQ(media::PIXEL_STORAGE_CPU, frame_format.pixel_storage);
99 110
100 if (last_captured_pixel_format_ != frame_format.pixel_format) { 111 if (last_captured_pixel_format_ != frame_format.pixel_format) {
101 OnLog("Pixel format: " + 112 OnLog("Pixel format: " +
102 media::VideoPixelFormatToString(frame_format.pixel_format)); 113 media::VideoPixelFormatToString(frame_format.pixel_format));
103 last_captured_pixel_format_ = frame_format.pixel_format; 114 last_captured_pixel_format_ = frame_format.pixel_format;
104 115
105 if (frame_format.pixel_format == media::PIXEL_FORMAT_MJPEG && 116 if (frame_format.pixel_format == media::PIXEL_FORMAT_MJPEG &&
106 !external_jpeg_decoder_initialized_) { 117 !external_jpeg_decoder_initialized_) {
107 external_jpeg_decoder_initialized_ = true; 118 external_jpeg_decoder_initialized_ = true;
108 external_jpeg_decoder_ = jpeg_decoder_factory_callback_.Run(); 119 external_jpeg_decoder_ = jpeg_decoder_factory_callback_.Run();
109 external_jpeg_decoder_->Initialize(); 120 external_jpeg_decoder_->Initialize();
110 } 121 }
111 } 122 }
112 123
113 if (!frame_format.IsValid()) 124 if (!frame_format.IsValid())
114 return; 125 return;
115 126
116 if (frame_format.pixel_format == media::PIXEL_FORMAT_Y16) { 127 if (frame_format.pixel_format == media::PIXEL_FORMAT_Y16) {
117 return OnIncomingCapturedY16Data(data, length, frame_format, reference_time, 128 return OnIncomingCapturedY16Data(data, length, frame_format, reference_time,
118 timestamp); 129 timestamp, frame_id);
119 } 130 }
120 131
121 // |chopped_{width,height} and |new_unrotated_{width,height}| are the lowest 132 // |chopped_{width,height} and |new_unrotated_{width,height}| are the lowest
122 // bit decomposition of {width, height}, grabbing the odd and even parts. 133 // bit decomposition of {width, height}, grabbing the odd and even parts.
123 const int chopped_width = frame_format.frame_size.width() & 1; 134 const int chopped_width = frame_format.frame_size.width() & 1;
124 const int chopped_height = frame_format.frame_size.height() & 1; 135 const int chopped_height = frame_format.frame_size.height() & 1;
125 const int new_unrotated_width = frame_format.frame_size.width() & ~1; 136 const int new_unrotated_width = frame_format.frame_size.width() & ~1;
126 const int new_unrotated_height = frame_format.frame_size.height() & ~1; 137 const int new_unrotated_height = frame_format.frame_size.height() & ~1;
127 138
128 int destination_width = new_unrotated_width; 139 int destination_width = new_unrotated_width;
(...skipping 123 matching lines...) Expand 10 before | Expand all | Expand 10 after
252 rotation_mode, origin_colorspace) != 0) { 263 rotation_mode, origin_colorspace) != 0) {
253 DLOG(WARNING) << "Failed to convert buffer's pixel format to I420 from " 264 DLOG(WARNING) << "Failed to convert buffer's pixel format to I420 from "
254 << media::VideoPixelFormatToString(frame_format.pixel_format); 265 << media::VideoPixelFormatToString(frame_format.pixel_format);
255 return; 266 return;
256 } 267 }
257 268
258 const VideoCaptureFormat output_format = 269 const VideoCaptureFormat output_format =
259 VideoCaptureFormat(dimensions, frame_format.frame_rate, 270 VideoCaptureFormat(dimensions, frame_format.frame_rate,
260 media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU); 271 media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU);
261 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, 272 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time,
262 timestamp); 273 timestamp, frame_id);
263 } 274 }
264 275
265 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> 276 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer>
266 VideoCaptureDeviceClient::ReserveOutputBuffer( 277 VideoCaptureDeviceClient::ReserveOutputBuffer(
267 const gfx::Size& frame_size, 278 const gfx::Size& frame_size,
268 media::VideoPixelFormat pixel_format, 279 media::VideoPixelFormat pixel_format,
269 media::VideoPixelStorage pixel_storage) { 280 media::VideoPixelStorage pixel_storage) {
270 DCHECK_GT(frame_size.width(), 0); 281 DCHECK_GT(frame_size.width(), 0);
271 DCHECK_GT(frame_size.height(), 0); 282 DCHECK_GT(frame_size.height(), 0);
272 DCHECK(IsFormatSupported(pixel_format)); 283 DCHECK(IsFormatSupported(pixel_format));
273 284
274 // TODO(mcasas): For PIXEL_STORAGE_GPUMEMORYBUFFER, find a way to indicate if 285 // TODO(mcasas): For PIXEL_STORAGE_GPUMEMORYBUFFER, find a way to indicate if
275 // it's a ShMem GMB or a DmaBuf GMB. 286 // it's a ShMem GMB or a DmaBuf GMB.
276 int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId; 287 int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId;
277 const int buffer_id = buffer_pool_->ReserveForProducer( 288 const int buffer_id = buffer_pool_->ReserveForProducer(
278 frame_size, pixel_format, pixel_storage, &buffer_id_to_drop); 289 frame_size, pixel_format, pixel_storage, &buffer_id_to_drop);
279 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) 290 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) {
280 receiver_->OnBufferDestroyed(buffer_id_to_drop); 291 receiver_->OnBufferDestroyed(buffer_id_to_drop);
292 EraseEntryFromBufferIdToFrameIdMap(buffer_id_to_drop);
293 }
281 if (buffer_id == VideoCaptureBufferPool::kInvalidId) 294 if (buffer_id == VideoCaptureBufferPool::kInvalidId)
282 return nullptr; 295 return nullptr;
283 return base::WrapUnique<Buffer>( 296 return base::WrapUnique<Buffer>(
284 new AutoReleaseBuffer(buffer_pool_, buffer_id)); 297 new AutoReleaseBuffer(buffer_pool_, buffer_id));
285 } 298 }
286 299
// Wraps an already-filled pool |buffer| in a media::VideoFrame, stamps the
// frame-rate and reference-time metadata, and forwards it (together with
// |frame_id|) to OnIncomingCapturedVideoFrame().  Drops the frame if a
// VideoFrame cannot be created.
void VideoCaptureDeviceClient::OnIncomingCapturedBuffer(
    std::unique_ptr<Buffer> buffer,
    const VideoCaptureFormat& frame_format,
    base::TimeTicks reference_time,
    base::TimeDelta timestamp,
    int frame_id) {
  DCHECK(IsFormatSupported(frame_format.pixel_format));
  DCHECK_EQ(media::PIXEL_STORAGE_CPU, frame_format.pixel_storage);

  scoped_refptr<VideoFrame> frame;
  if (buffer->IsBackedByVideoFrame()) {
    // Buffer already carries a VideoFrame; only the timestamp needs setting.
    frame = buffer->GetVideoFrame();
    frame->set_timestamp(timestamp);
  } else {
    // Wrap the buffer's shared memory without taking ownership of it; the
    // NULLHandle means the frame does not hold the shm handle itself.
    frame = VideoFrame::WrapExternalSharedMemory(
        frame_format.pixel_format, frame_format.frame_size,
        gfx::Rect(frame_format.frame_size), frame_format.frame_size,
        reinterpret_cast<uint8_t*>(buffer->data()),
        VideoFrame::AllocationSize(frame_format.pixel_format,
                                   frame_format.frame_size),
        base::SharedMemory::NULLHandle(), 0u, timestamp);
  }
  if (!frame)
    return;
  frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE,
                               frame_format.frame_rate);
  frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME,
                                  reference_time);
  OnIncomingCapturedVideoFrame(std::move(buffer), std::move(frame), frame_id);
}
316 330
// Final delivery path for a captured frame: records the buffer-id ->
// frame-id association (needed later to translate per-buffer utilization
// reports back to frame ids) and hands buffer + frame to the receiver.
void VideoCaptureDeviceClient::OnIncomingCapturedVideoFrame(
    std::unique_ptr<Buffer> buffer,
    scoped_refptr<VideoFrame> frame,
    int frame_id) {
  // Record the mapping before |buffer| ownership moves to the receiver.
  AddEntryToBufferIdToFrameIdMap(buffer->id(), frame_id);
  receiver_->OnIncomingCapturedVideoFrame(std::move(buffer), std::move(frame));
}
322 338
323 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> 339 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer>
324 VideoCaptureDeviceClient::ResurrectLastOutputBuffer( 340 VideoCaptureDeviceClient::ResurrectLastOutputBuffer(
325 const gfx::Size& dimensions, 341 const gfx::Size& dimensions,
326 media::VideoPixelFormat format, 342 media::VideoPixelFormat format,
327 media::VideoPixelStorage storage) { 343 media::VideoPixelStorage storage) {
328 const int buffer_id = 344 const int buffer_id =
329 buffer_pool_->ResurrectLastForProducer(dimensions, format, storage); 345 buffer_pool_->ResurrectLastForProducer(dimensions, format, storage);
(...skipping 17 matching lines...) Expand all
347 } 363 }
348 364
// Forwards a human-readable log line to the receiver.
void VideoCaptureDeviceClient::OnLog(const std::string& message) {
  receiver_->OnLog(message);
}
352 368
// Returns the fraction of the buffer pool currently in use, as reported by
// the pool itself.
double VideoCaptureDeviceClient::GetBufferPoolUtilization() const {
  return buffer_pool_->GetBufferPoolUtilization();
}
356 372
373 void VideoCaptureDeviceClient::OnReceiverReportingUtilization(
374 int buffer_id,
375 double utilization) {
376 DCHECK(utilization_reporting_task_runner_->BelongsToCurrentThread());
377 if (optional_load_observer_ == nullptr)
378 return;
379 optional_load_observer_->OnConsumerReportingUtilization(
380 buffer_id_to_frame_id_map_[buffer_id], utilization);
381 }
382
357 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> 383 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer>
358 VideoCaptureDeviceClient::ReserveI420OutputBuffer( 384 VideoCaptureDeviceClient::ReserveI420OutputBuffer(
359 const gfx::Size& dimensions, 385 const gfx::Size& dimensions,
360 media::VideoPixelStorage storage, 386 media::VideoPixelStorage storage,
361 uint8_t** y_plane_data, 387 uint8_t** y_plane_data,
362 uint8_t** u_plane_data, 388 uint8_t** u_plane_data,
363 uint8_t** v_plane_data) { 389 uint8_t** v_plane_data) {
364 DCHECK(storage == media::PIXEL_STORAGE_CPU); 390 DCHECK(storage == media::PIXEL_STORAGE_CPU);
365 DCHECK(dimensions.height()); 391 DCHECK(dimensions.height());
366 DCHECK(dimensions.width()); 392 DCHECK(dimensions.width());
(...skipping 13 matching lines...) Expand all
380 *u_plane_data + 406 *u_plane_data +
381 VideoFrame::PlaneSize(format, VideoFrame::kUPlane, dimensions).GetArea(); 407 VideoFrame::PlaneSize(format, VideoFrame::kUPlane, dimensions).GetArea();
382 return buffer; 408 return buffer;
383 } 409 }
384 410
385 void VideoCaptureDeviceClient::OnIncomingCapturedY16Data( 411 void VideoCaptureDeviceClient::OnIncomingCapturedY16Data(
386 const uint8_t* data, 412 const uint8_t* data,
387 int length, 413 int length,
388 const VideoCaptureFormat& frame_format, 414 const VideoCaptureFormat& frame_format,
389 base::TimeTicks reference_time, 415 base::TimeTicks reference_time,
390 base::TimeDelta timestamp) { 416 base::TimeDelta timestamp,
417 int frame_id) {
391 std::unique_ptr<Buffer> buffer(ReserveOutputBuffer(frame_format.frame_size, 418 std::unique_ptr<Buffer> buffer(ReserveOutputBuffer(frame_format.frame_size,
392 media::PIXEL_FORMAT_Y16, 419 media::PIXEL_FORMAT_Y16,
393 media::PIXEL_STORAGE_CPU)); 420 media::PIXEL_STORAGE_CPU));
394 // The input |length| can be greater than the required buffer size because of 421 // The input |length| can be greater than the required buffer size because of
395 // paddings and/or alignments, but it cannot be smaller. 422 // paddings and/or alignments, but it cannot be smaller.
396 DCHECK_GE(static_cast<size_t>(length), frame_format.ImageAllocationSize()); 423 DCHECK_GE(static_cast<size_t>(length), frame_format.ImageAllocationSize());
397 #if DCHECK_IS_ON() 424 #if DCHECK_IS_ON()
398 dropped_frame_counter_ = buffer.get() ? 0 : dropped_frame_counter_ + 1; 425 dropped_frame_counter_ = buffer.get() ? 0 : dropped_frame_counter_ + 1;
399 if (dropped_frame_counter_ >= kMaxDroppedFrames) 426 if (dropped_frame_counter_ >= kMaxDroppedFrames)
400 OnError(FROM_HERE, "Too many frames dropped"); 427 OnError(FROM_HERE, "Too many frames dropped");
401 #endif 428 #endif
402 // Failed to reserve output buffer, so drop the frame. 429 // Failed to reserve output buffer, so drop the frame.
403 if (!buffer.get()) 430 if (!buffer.get())
404 return; 431 return;
405 memcpy(buffer->data(), data, length); 432 memcpy(buffer->data(), data, length);
406 const VideoCaptureFormat output_format = 433 const VideoCaptureFormat output_format =
407 VideoCaptureFormat(frame_format.frame_size, frame_format.frame_rate, 434 VideoCaptureFormat(frame_format.frame_size, frame_format.frame_rate,
408 media::PIXEL_FORMAT_Y16, media::PIXEL_STORAGE_CPU); 435 media::PIXEL_FORMAT_Y16, media::PIXEL_STORAGE_CPU);
409 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, 436 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time,
410 timestamp); 437 timestamp, frame_id);
438 }
439
// Records the association |buffer_id| -> |frame_id| so that later consumer
// utilization reports (keyed by buffer id) can be translated back to frame
// ids.  The map is only mutated on |utilization_reporting_task_runner_|;
// calls from any other thread re-post themselves onto that runner.
// NOTE(review): the posted task captures base::Unretained(this), which
// assumes |this| outlives every in-flight task on that runner -- confirm
// the destruction sequence guarantees this.
void VideoCaptureDeviceClient::AddEntryToBufferIdToFrameIdMap(int buffer_id,
                                                              int frame_id) {
  if (utilization_reporting_task_runner_->BelongsToCurrentThread()) {
    buffer_id_to_frame_id_map_[buffer_id] = frame_id;
    return;
  }
  utilization_reporting_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&VideoCaptureDeviceClient::AddEntryToBufferIdToFrameIdMap,
                 base::Unretained(this), buffer_id, frame_id));
}
451
452 void VideoCaptureDeviceClient::EraseEntryFromBufferIdToFrameIdMap(
453 int buffer_id_to_drop) {
454 if (utilization_reporting_task_runner_->BelongsToCurrentThread()) {
455 if (buffer_id_to_frame_id_map_.find(buffer_id_to_drop) !=
456 buffer_id_to_frame_id_map_.end())
457 buffer_id_to_frame_id_map_.erase(buffer_id_to_drop);
458 return;
459 }
460 utilization_reporting_task_runner_->PostTask(
461 FROM_HERE,
462 base::Bind(&VideoCaptureDeviceClient::EraseEntryFromBufferIdToFrameIdMap,
463 base::Unretained(this), buffer_id_to_drop));
411 } 464 }
412 465
413 } // namespace media 466 } // namespace media
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698