OLD | NEW |
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/capture/video/video_capture_device_client.h" | 5 #include "media/capture/video/video_capture_device_client.h" |
6 | 6 |
7 #include <algorithm> | 7 #include <algorithm> |
8 #include <utility> | 8 #include <utility> |
9 | 9 |
10 #include "base/bind.h" | 10 #include "base/bind.h" |
(...skipping 23 matching lines...) |
34 pixel_format == media::PIXEL_FORMAT_Y16); | 34 pixel_format == media::PIXEL_FORMAT_Y16); |
35 } | 35 } |
36 } | 36 } |
37 | 37 |
38 namespace media { | 38 namespace media { |
39 | 39 |
40 // Class combining a Client::Buffer interface implementation and a pool buffer | 40 // Class combining a Client::Buffer interface implementation and a pool buffer |
41 // implementation to guarantee proper cleanup on destruction on our side. | 41 // implementation to guarantee proper cleanup on destruction on our side. |
42 class AutoReleaseBuffer : public media::VideoCaptureDevice::Client::Buffer { | 42 class AutoReleaseBuffer : public media::VideoCaptureDevice::Client::Buffer { |
43 public: | 43 public: |
44 AutoReleaseBuffer(scoped_refptr<VideoCaptureBufferPool> pool, int buffer_id) | 44 AutoReleaseBuffer(scoped_refptr<VideoCaptureBufferPool> pool, |
45 : id_(buffer_id), | 45 int buffer_id, |
46 pool_(std::move(pool)), | 46 int frame_feedback_id) |
| 47 : pool_(std::move(pool)), |
| 48 id_(buffer_id), |
| 49 frame_feedback_id_(frame_feedback_id), |
47 buffer_handle_(pool_->GetBufferHandle(buffer_id)) { | 50 buffer_handle_(pool_->GetBufferHandle(buffer_id)) { |
48 DCHECK(pool_.get()); | 51 DCHECK(pool_.get()); |
49 } | 52 } |
50 int id() const override { return id_; } | 53 int id() const override { return id_; } |
| 54 int frame_feedback_id() const override { return frame_feedback_id_; } |
51 gfx::Size dimensions() const override { return buffer_handle_->dimensions(); } | 55 gfx::Size dimensions() const override { return buffer_handle_->dimensions(); } |
52 size_t mapped_size() const override { return buffer_handle_->mapped_size(); } | 56 size_t mapped_size() const override { return buffer_handle_->mapped_size(); } |
53 void* data(int plane) override { return buffer_handle_->data(plane); } | 57 void* data(int plane) override { return buffer_handle_->data(plane); } |
54 #if defined(OS_POSIX) && !defined(OS_MACOSX) | 58 #if defined(OS_POSIX) && !defined(OS_MACOSX) |
55 base::FileDescriptor AsPlatformFile() override { | 59 base::FileDescriptor AsPlatformFile() override { |
56 return buffer_handle_->AsPlatformFile(); | 60 return buffer_handle_->AsPlatformFile(); |
57 } | 61 } |
58 #endif | 62 #endif |
59 bool IsBackedByVideoFrame() const override { | 63 bool IsBackedByVideoFrame() const override { |
60 return buffer_handle_->IsBackedByVideoFrame(); | 64 return buffer_handle_->IsBackedByVideoFrame(); |
61 } | 65 } |
62 scoped_refptr<VideoFrame> GetVideoFrame() override { | 66 scoped_refptr<VideoFrame> GetVideoFrame() override { |
63 return buffer_handle_->GetVideoFrame(); | 67 return buffer_handle_->GetVideoFrame(); |
64 } | 68 } |
65 | 69 |
66 private: | 70 private: |
67 ~AutoReleaseBuffer() override { pool_->RelinquishProducerReservation(id_); } | 71 ~AutoReleaseBuffer() override { pool_->RelinquishProducerReservation(id_); } |
68 | 72 |
| 73 const scoped_refptr<VideoCaptureBufferPool> pool_; |
69 const int id_; | 74 const int id_; |
70 const scoped_refptr<VideoCaptureBufferPool> pool_; | 75 const int frame_feedback_id_; |
71 const std::unique_ptr<VideoCaptureBufferHandle> buffer_handle_; | 76 const std::unique_ptr<VideoCaptureBufferHandle> buffer_handle_; |
72 }; | 77 }; |
73 | 78 |
74 VideoCaptureDeviceClient::VideoCaptureDeviceClient( | 79 VideoCaptureDeviceClient::VideoCaptureDeviceClient( |
75 std::unique_ptr<VideoFrameReceiver> receiver, | 80 std::unique_ptr<VideoFrameReceiver> receiver, |
76 scoped_refptr<VideoCaptureBufferPool> buffer_pool, | 81 scoped_refptr<VideoCaptureBufferPool> buffer_pool, |
77 const VideoCaptureJpegDecoderFactoryCB& jpeg_decoder_factory) | 82 const VideoCaptureJpegDecoderFactoryCB& jpeg_decoder_factory) |
78 : receiver_(std::move(receiver)), | 83 : receiver_(std::move(receiver)), |
79 jpeg_decoder_factory_callback_(jpeg_decoder_factory), | 84 jpeg_decoder_factory_callback_(jpeg_decoder_factory), |
80 external_jpeg_decoder_initialized_(false), | 85 external_jpeg_decoder_initialized_(false), |
81 buffer_pool_(std::move(buffer_pool)), | 86 buffer_pool_(std::move(buffer_pool)), |
82 last_captured_pixel_format_(media::PIXEL_FORMAT_UNKNOWN) {} | 87 last_captured_pixel_format_(media::PIXEL_FORMAT_UNKNOWN) {} |
83 | 88 |
84 VideoCaptureDeviceClient::~VideoCaptureDeviceClient() { | 89 VideoCaptureDeviceClient::~VideoCaptureDeviceClient() { |
85 // This should be on the platform auxiliary thread since | 90 // This should be on the platform auxiliary thread since |
86 // |external_jpeg_decoder_| needs to be destructed on the same thread as | 91 // |external_jpeg_decoder_| needs to be destructed on the same thread as |
87 // OnIncomingCapturedData. | 92 // OnIncomingCapturedData. |
88 } | 93 } |
89 | 94 |
90 void VideoCaptureDeviceClient::OnIncomingCapturedData( | 95 void VideoCaptureDeviceClient::OnIncomingCapturedData( |
91 const uint8_t* data, | 96 const uint8_t* data, |
92 int length, | 97 int length, |
93 const VideoCaptureFormat& frame_format, | 98 const VideoCaptureFormat& frame_format, |
94 int rotation, | 99 int rotation, |
95 base::TimeTicks reference_time, | 100 base::TimeTicks reference_time, |
96 base::TimeDelta timestamp) { | 101 base::TimeDelta timestamp, |
| 102 int frame_feedback_id) { |
97 TRACE_EVENT0("video", "VideoCaptureDeviceClient::OnIncomingCapturedData"); | 103 TRACE_EVENT0("video", "VideoCaptureDeviceClient::OnIncomingCapturedData"); |
98 DCHECK_EQ(media::PIXEL_STORAGE_CPU, frame_format.pixel_storage); | 104 DCHECK_EQ(media::PIXEL_STORAGE_CPU, frame_format.pixel_storage); |
99 | 105 |
100 if (last_captured_pixel_format_ != frame_format.pixel_format) { | 106 if (last_captured_pixel_format_ != frame_format.pixel_format) { |
101 OnLog("Pixel format: " + | 107 OnLog("Pixel format: " + |
102 media::VideoPixelFormatToString(frame_format.pixel_format)); | 108 media::VideoPixelFormatToString(frame_format.pixel_format)); |
103 last_captured_pixel_format_ = frame_format.pixel_format; | 109 last_captured_pixel_format_ = frame_format.pixel_format; |
104 | 110 |
105 if (frame_format.pixel_format == media::PIXEL_FORMAT_MJPEG && | 111 if (frame_format.pixel_format == media::PIXEL_FORMAT_MJPEG && |
106 !external_jpeg_decoder_initialized_) { | 112 !external_jpeg_decoder_initialized_) { |
107 external_jpeg_decoder_initialized_ = true; | 113 external_jpeg_decoder_initialized_ = true; |
108 external_jpeg_decoder_ = jpeg_decoder_factory_callback_.Run(); | 114 external_jpeg_decoder_ = jpeg_decoder_factory_callback_.Run(); |
109 external_jpeg_decoder_->Initialize(); | 115 external_jpeg_decoder_->Initialize(); |
110 } | 116 } |
111 } | 117 } |
112 | 118 |
113 if (!frame_format.IsValid()) | 119 if (!frame_format.IsValid()) |
114 return; | 120 return; |
115 | 121 |
116 if (frame_format.pixel_format == media::PIXEL_FORMAT_Y16) { | 122 if (frame_format.pixel_format == media::PIXEL_FORMAT_Y16) { |
117 return OnIncomingCapturedY16Data(data, length, frame_format, reference_time, | 123 return OnIncomingCapturedY16Data(data, length, frame_format, reference_time, |
118 timestamp); | 124 timestamp, frame_feedback_id); |
119 } | 125 } |
120 | 126 |
121 // |chopped_{width,height}| and |new_unrotated_{width,height}| are the lowest | 127 // |chopped_{width,height}| and |new_unrotated_{width,height}| are the lowest |
122 // bit decomposition of {width, height}, grabbing the odd and even parts. | 128 // bit decomposition of {width, height}, grabbing the odd and even parts. |
123 const int chopped_width = frame_format.frame_size.width() & 1; | 129 const int chopped_width = frame_format.frame_size.width() & 1; |
124 const int chopped_height = frame_format.frame_size.height() & 1; | 130 const int chopped_height = frame_format.frame_size.height() & 1; |
125 const int new_unrotated_width = frame_format.frame_size.width() & ~1; | 131 const int new_unrotated_width = frame_format.frame_size.width() & ~1; |
126 const int new_unrotated_height = frame_format.frame_size.height() & ~1; | 132 const int new_unrotated_height = frame_format.frame_size.height() & ~1; |
127 | 133 |
128 int destination_width = new_unrotated_width; | 134 int destination_width = new_unrotated_width; |
129 int destination_height = new_unrotated_height; | 135 int destination_height = new_unrotated_height; |
130 if (rotation == 90 || rotation == 270) | 136 if (rotation == 90 || rotation == 270) |
131 std::swap(destination_width, destination_height); | 137 std::swap(destination_width, destination_height); |
132 | 138 |
133 DCHECK_EQ(0, rotation % 90) << " Rotation must be a multiple of 90, now: " | 139 DCHECK_EQ(0, rotation % 90) << " Rotation must be a multiple of 90, now: " |
134 << rotation; | 140 << rotation; |
135 libyuv::RotationMode rotation_mode = libyuv::kRotate0; | 141 libyuv::RotationMode rotation_mode = libyuv::kRotate0; |
136 if (rotation == 90) | 142 if (rotation == 90) |
137 rotation_mode = libyuv::kRotate90; | 143 rotation_mode = libyuv::kRotate90; |
138 else if (rotation == 180) | 144 else if (rotation == 180) |
139 rotation_mode = libyuv::kRotate180; | 145 rotation_mode = libyuv::kRotate180; |
140 else if (rotation == 270) | 146 else if (rotation == 270) |
141 rotation_mode = libyuv::kRotate270; | 147 rotation_mode = libyuv::kRotate270; |
142 | 148 |
143 const gfx::Size dimensions(destination_width, destination_height); | 149 const gfx::Size dimensions(destination_width, destination_height); |
144 uint8_t *y_plane_data, *u_plane_data, *v_plane_data; | 150 uint8_t *y_plane_data, *u_plane_data, *v_plane_data; |
145 std::unique_ptr<Buffer> buffer( | 151 std::unique_ptr<Buffer> buffer(ReserveI420OutputBuffer( |
146 ReserveI420OutputBuffer(dimensions, media::PIXEL_STORAGE_CPU, | 152 dimensions, media::PIXEL_STORAGE_CPU, frame_feedback_id, &y_plane_data, |
147 &y_plane_data, &u_plane_data, &v_plane_data)); | 153 &u_plane_data, &v_plane_data)); |
148 #if DCHECK_IS_ON() | 154 #if DCHECK_IS_ON() |
149 dropped_frame_counter_ = buffer.get() ? 0 : dropped_frame_counter_ + 1; | 155 dropped_frame_counter_ = buffer.get() ? 0 : dropped_frame_counter_ + 1; |
150 if (dropped_frame_counter_ >= kMaxDroppedFrames) | 156 if (dropped_frame_counter_ >= kMaxDroppedFrames) |
151 OnError(FROM_HERE, "Too many frames dropped"); | 157 OnError(FROM_HERE, "Too many frames dropped"); |
152 #endif | 158 #endif |
153 // Failed to reserve I420 output buffer, so drop the frame. | 159 // Failed to reserve I420 output buffer, so drop the frame. |
154 if (!buffer.get()) | 160 if (!buffer.get()) |
155 return; | 161 return; |
156 | 162 |
157 const int yplane_stride = dimensions.width(); | 163 const int yplane_stride = dimensions.width(); |
(...skipping 101 matching lines...) |
259 VideoCaptureFormat(dimensions, frame_format.frame_rate, | 265 VideoCaptureFormat(dimensions, frame_format.frame_rate, |
260 media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU); | 266 media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU); |
261 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, | 267 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, |
262 timestamp); | 268 timestamp); |
263 } | 269 } |
264 | 270 |
265 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> | 271 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> |
266 VideoCaptureDeviceClient::ReserveOutputBuffer( | 272 VideoCaptureDeviceClient::ReserveOutputBuffer( |
267 const gfx::Size& frame_size, | 273 const gfx::Size& frame_size, |
268 media::VideoPixelFormat pixel_format, | 274 media::VideoPixelFormat pixel_format, |
269 media::VideoPixelStorage pixel_storage) { | 275 media::VideoPixelStorage pixel_storage, |
| 276 int frame_feedback_id) { |
270 DCHECK_GT(frame_size.width(), 0); | 277 DCHECK_GT(frame_size.width(), 0); |
271 DCHECK_GT(frame_size.height(), 0); | 278 DCHECK_GT(frame_size.height(), 0); |
272 DCHECK(IsFormatSupported(pixel_format)); | 279 DCHECK(IsFormatSupported(pixel_format)); |
273 | 280 |
274 // TODO(mcasas): For PIXEL_STORAGE_GPUMEMORYBUFFER, find a way to indicate if | 281 // TODO(mcasas): For PIXEL_STORAGE_GPUMEMORYBUFFER, find a way to indicate if |
275 // it's a ShMem GMB or a DmaBuf GMB. | 282 // it's a ShMem GMB or a DmaBuf GMB. |
276 int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId; | 283 int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId; |
277 const int buffer_id = buffer_pool_->ReserveForProducer( | 284 const int buffer_id = |
278 frame_size, pixel_format, pixel_storage, &buffer_id_to_drop); | 285 buffer_pool_->ReserveForProducer(frame_size, pixel_format, pixel_storage, |
| 286 frame_feedback_id, &buffer_id_to_drop); |
279 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) | 287 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) |
280 receiver_->OnBufferDestroyed(buffer_id_to_drop); | 288 receiver_->OnBufferDestroyed(buffer_id_to_drop); |
281 if (buffer_id == VideoCaptureBufferPool::kInvalidId) | 289 if (buffer_id == VideoCaptureBufferPool::kInvalidId) |
282 return nullptr; | 290 return nullptr; |
283 return base::WrapUnique<Buffer>( | 291 return base::WrapUnique<Buffer>( |
284 new AutoReleaseBuffer(buffer_pool_, buffer_id)); | 292 new AutoReleaseBuffer(buffer_pool_, buffer_id, frame_feedback_id)); |
285 } | 293 } |
286 | 294 |
287 void VideoCaptureDeviceClient::OnIncomingCapturedBuffer( | 295 void VideoCaptureDeviceClient::OnIncomingCapturedBuffer( |
288 std::unique_ptr<Buffer> buffer, | 296 std::unique_ptr<Buffer> buffer, |
289 const VideoCaptureFormat& frame_format, | 297 const VideoCaptureFormat& frame_format, |
290 base::TimeTicks reference_time, | 298 base::TimeTicks reference_time, |
291 base::TimeDelta timestamp) { | 299 base::TimeDelta timestamp) { |
292 DCHECK(IsFormatSupported(frame_format.pixel_format)); | 300 DCHECK(IsFormatSupported(frame_format.pixel_format)); |
293 DCHECK_EQ(media::PIXEL_STORAGE_CPU, frame_format.pixel_storage); | 301 DCHECK_EQ(media::PIXEL_STORAGE_CPU, frame_format.pixel_storage); |
294 | 302 |
(...skipping 22 matching lines...) |
317 void VideoCaptureDeviceClient::OnIncomingCapturedVideoFrame( | 325 void VideoCaptureDeviceClient::OnIncomingCapturedVideoFrame( |
318 std::unique_ptr<Buffer> buffer, | 326 std::unique_ptr<Buffer> buffer, |
319 scoped_refptr<VideoFrame> frame) { | 327 scoped_refptr<VideoFrame> frame) { |
320 receiver_->OnIncomingCapturedVideoFrame(std::move(buffer), std::move(frame)); | 328 receiver_->OnIncomingCapturedVideoFrame(std::move(buffer), std::move(frame)); |
321 } | 329 } |
322 | 330 |
323 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> | 331 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> |
324 VideoCaptureDeviceClient::ResurrectLastOutputBuffer( | 332 VideoCaptureDeviceClient::ResurrectLastOutputBuffer( |
325 const gfx::Size& dimensions, | 333 const gfx::Size& dimensions, |
326 media::VideoPixelFormat format, | 334 media::VideoPixelFormat format, |
327 media::VideoPixelStorage storage) { | 335 media::VideoPixelStorage storage, |
| 336 int new_frame_feedback_id) { |
328 const int buffer_id = | 337 const int buffer_id = |
329 buffer_pool_->ResurrectLastForProducer(dimensions, format, storage); | 338 buffer_pool_->ResurrectLastForProducer(dimensions, format, storage); |
330 if (buffer_id == VideoCaptureBufferPool::kInvalidId) | 339 if (buffer_id == VideoCaptureBufferPool::kInvalidId) |
331 return nullptr; | 340 return nullptr; |
332 return base::WrapUnique<Buffer>( | 341 return base::WrapUnique<Buffer>( |
333 new AutoReleaseBuffer(buffer_pool_, buffer_id)); | 342 new AutoReleaseBuffer(buffer_pool_, buffer_id, new_frame_feedback_id)); |
334 } | 343 } |
335 | 344 |
336 void VideoCaptureDeviceClient::OnError( | 345 void VideoCaptureDeviceClient::OnError( |
337 const tracked_objects::Location& from_here, | 346 const tracked_objects::Location& from_here, |
338 const std::string& reason) { | 347 const std::string& reason) { |
339 const std::string log_message = base::StringPrintf( | 348 const std::string log_message = base::StringPrintf( |
340 "error@ %s, %s, OS message: %s", from_here.ToString().c_str(), | 349 "error@ %s, %s, OS message: %s", from_here.ToString().c_str(), |
341 reason.c_str(), | 350 reason.c_str(), |
342 logging::SystemErrorCodeToString(logging::GetLastSystemErrorCode()) | 351 logging::SystemErrorCodeToString(logging::GetLastSystemErrorCode()) |
343 .c_str()); | 352 .c_str()); |
344 DLOG(ERROR) << log_message; | 353 DLOG(ERROR) << log_message; |
345 OnLog(log_message); | 354 OnLog(log_message); |
346 receiver_->OnError(); | 355 receiver_->OnError(); |
347 } | 356 } |
348 | 357 |
349 void VideoCaptureDeviceClient::OnLog(const std::string& message) { | 358 void VideoCaptureDeviceClient::OnLog(const std::string& message) { |
350 receiver_->OnLog(message); | 359 receiver_->OnLog(message); |
351 } | 360 } |
352 | 361 |
353 double VideoCaptureDeviceClient::GetBufferPoolUtilization() const { | 362 double VideoCaptureDeviceClient::GetBufferPoolUtilization() const { |
354 return buffer_pool_->GetBufferPoolUtilization(); | 363 return buffer_pool_->GetBufferPoolUtilization(); |
355 } | 364 } |
356 | 365 |
357 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> | 366 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> |
358 VideoCaptureDeviceClient::ReserveI420OutputBuffer( | 367 VideoCaptureDeviceClient::ReserveI420OutputBuffer( |
359 const gfx::Size& dimensions, | 368 const gfx::Size& dimensions, |
360 media::VideoPixelStorage storage, | 369 media::VideoPixelStorage storage, |
| 370 int frame_feedback_id, |
361 uint8_t** y_plane_data, | 371 uint8_t** y_plane_data, |
362 uint8_t** u_plane_data, | 372 uint8_t** u_plane_data, |
363 uint8_t** v_plane_data) { | 373 uint8_t** v_plane_data) { |
364 DCHECK(storage == media::PIXEL_STORAGE_CPU); | 374 DCHECK(storage == media::PIXEL_STORAGE_CPU); |
365 DCHECK(dimensions.height()); | 375 DCHECK(dimensions.height()); |
366 DCHECK(dimensions.width()); | 376 DCHECK(dimensions.width()); |
367 | 377 |
368 const media::VideoPixelFormat format = media::PIXEL_FORMAT_I420; | 378 const media::VideoPixelFormat format = media::PIXEL_FORMAT_I420; |
369 std::unique_ptr<Buffer> buffer( | 379 std::unique_ptr<Buffer> buffer(ReserveOutputBuffer( |
370 ReserveOutputBuffer(dimensions, media::PIXEL_FORMAT_I420, storage)); | 380 dimensions, media::PIXEL_FORMAT_I420, storage, frame_feedback_id)); |
371 if (!buffer) | 381 if (!buffer) |
372 return std::unique_ptr<Buffer>(); | 382 return std::unique_ptr<Buffer>(); |
373 // TODO(emircan): See http://crbug.com/521068, move this pointer | 383 // TODO(emircan): See http://crbug.com/521068, move this pointer |
374 // arithmetic inside Buffer::data() when this bug is resolved. | 384 // arithmetic inside Buffer::data() when this bug is resolved. |
375 *y_plane_data = reinterpret_cast<uint8_t*>(buffer->data()); | 385 *y_plane_data = reinterpret_cast<uint8_t*>(buffer->data()); |
376 *u_plane_data = | 386 *u_plane_data = |
377 *y_plane_data + | 387 *y_plane_data + |
378 VideoFrame::PlaneSize(format, VideoFrame::kYPlane, dimensions).GetArea(); | 388 VideoFrame::PlaneSize(format, VideoFrame::kYPlane, dimensions).GetArea(); |
379 *v_plane_data = | 389 *v_plane_data = |
380 *u_plane_data + | 390 *u_plane_data + |
381 VideoFrame::PlaneSize(format, VideoFrame::kUPlane, dimensions).GetArea(); | 391 VideoFrame::PlaneSize(format, VideoFrame::kUPlane, dimensions).GetArea(); |
382 return buffer; | 392 return buffer; |
383 } | 393 } |
384 | 394 |
385 void VideoCaptureDeviceClient::OnIncomingCapturedY16Data( | 395 void VideoCaptureDeviceClient::OnIncomingCapturedY16Data( |
386 const uint8_t* data, | 396 const uint8_t* data, |
387 int length, | 397 int length, |
388 const VideoCaptureFormat& frame_format, | 398 const VideoCaptureFormat& frame_format, |
389 base::TimeTicks reference_time, | 399 base::TimeTicks reference_time, |
390 base::TimeDelta timestamp) { | 400 base::TimeDelta timestamp, |
391 std::unique_ptr<Buffer> buffer(ReserveOutputBuffer(frame_format.frame_size, | 401 int frame_feedback_id) { |
392 media::PIXEL_FORMAT_Y16, | 402 std::unique_ptr<Buffer> buffer( |
393 media::PIXEL_STORAGE_CPU)); | 403 ReserveOutputBuffer(frame_format.frame_size, media::PIXEL_FORMAT_Y16, |
| 404 media::PIXEL_STORAGE_CPU, frame_feedback_id)); |
394 // The input |length| can be greater than the required buffer size because of | 405 // The input |length| can be greater than the required buffer size because of |
395 // padding and/or alignment, but it cannot be smaller. | 406 // padding and/or alignment, but it cannot be smaller. |
396 DCHECK_GE(static_cast<size_t>(length), frame_format.ImageAllocationSize()); | 407 DCHECK_GE(static_cast<size_t>(length), frame_format.ImageAllocationSize()); |
397 #if DCHECK_IS_ON() | 408 #if DCHECK_IS_ON() |
398 dropped_frame_counter_ = buffer.get() ? 0 : dropped_frame_counter_ + 1; | 409 dropped_frame_counter_ = buffer.get() ? 0 : dropped_frame_counter_ + 1; |
399 if (dropped_frame_counter_ >= kMaxDroppedFrames) | 410 if (dropped_frame_counter_ >= kMaxDroppedFrames) |
400 OnError(FROM_HERE, "Too many frames dropped"); | 411 OnError(FROM_HERE, "Too many frames dropped"); |
401 #endif | 412 #endif |
402 // Failed to reserve output buffer, so drop the frame. | 413 // Failed to reserve output buffer, so drop the frame. |
403 if (!buffer.get()) | 414 if (!buffer.get()) |
404 return; | 415 return; |
405 memcpy(buffer->data(), data, length); | 416 memcpy(buffer->data(), data, length); |
406 const VideoCaptureFormat output_format = | 417 const VideoCaptureFormat output_format = |
407 VideoCaptureFormat(frame_format.frame_size, frame_format.frame_rate, | 418 VideoCaptureFormat(frame_format.frame_size, frame_format.frame_rate, |
408 media::PIXEL_FORMAT_Y16, media::PIXEL_STORAGE_CPU); | 419 media::PIXEL_FORMAT_Y16, media::PIXEL_STORAGE_CPU); |
409 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, | 420 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, |
410 timestamp); | 421 timestamp); |
411 } | 422 } |
412 | 423 |
413 } // namespace media | 424 } // namespace media |
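
For context, a minimal sketch of how a capture device implementation might hand a frame to this client through the new frame_feedback_id parameter. The helper, its per-device counter, and the assumption that the id is later used to match consumer feedback back to the originating frame are illustrative only; the OnIncomingCapturedData() signature itself is taken from the change above.

#include <stdint.h>

#include "base/time/time.h"
#include "media/capture/video/video_capture_device.h"

namespace media {

// Hypothetical caller: forwards one captured frame to the client and tags it
// with a monotonically increasing feedback id, so that any feedback reported
// for this frame can later be traced back to it.
void ForwardCapturedFrame(VideoCaptureDevice::Client* client,
                          const uint8_t* data,
                          int length,
                          const VideoCaptureFormat& frame_format,
                          int rotation,
                          base::TimeTicks reference_time,
                          base::TimeDelta timestamp) {
  static int next_frame_feedback_id = 0;  // Hypothetical per-device counter.
  client->OnIncomingCapturedData(data, length, frame_format, rotation,
                                 reference_time, timestamp,
                                 next_frame_feedback_id++);
}

}  // namespace media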
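
A worked example of the plane-pointer arithmetic in ReserveI420OutputBuffer(): for a hypothetical 640x480 I420 buffer, the U and V pointers land at the byte offsets below (full-resolution Y plane, quarter-resolution U and V planes).

// Illustrative offsets only; the real code derives them via
// VideoFrame::PlaneSize().
constexpr int kWidth = 640;
constexpr int kHeight = 480;
constexpr int kYPlaneBytes = kWidth * kHeight;              // 307200
constexpr int kUPlaneBytes = (kWidth / 2) * (kHeight / 2);  // 76800
// y_plane_data: offset 0
// u_plane_data: offset kYPlaneBytes                -> 307200
// v_plane_data: offset kYPlaneBytes + kUPlaneBytes -> 384000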