OLD | NEW |
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/capture/video/video_capture_device_client.h" | 5 #include "media/capture/video/video_capture_device_client.h" |
6 | 6 |
7 #include <algorithm> | 7 #include <algorithm> |
8 #include <utility> | 8 #include <utility> |
9 | 9 |
10 #include "base/bind.h" | 10 #include "base/bind.h" |
(...skipping 20 matching lines...) |
31 namespace { | 31 namespace { |
32 | 32 |
33 bool IsFormatSupported(media::VideoPixelFormat pixel_format) { | 33 bool IsFormatSupported(media::VideoPixelFormat pixel_format) { |
34 return (pixel_format == media::PIXEL_FORMAT_I420 || | 34 return (pixel_format == media::PIXEL_FORMAT_I420 || |
35 pixel_format == media::PIXEL_FORMAT_Y16); | 35 pixel_format == media::PIXEL_FORMAT_Y16); |
36 } | 36 } |
37 } | 37 } |
38 | 38 |
39 namespace media { | 39 namespace media { |
40 | 40 |
41 // Class combining a Client::Buffer interface implementation and a pool buffer | 41 class BufferPoolBufferAccessProvider : public BufferAccessProvider { |
42 // implementation to guarantee proper cleanup on destruction on our side. | |
43 class AutoReleaseBuffer : public media::VideoCaptureDevice::Client::Buffer { | |
44 public: | 42 public: |
45 AutoReleaseBuffer(scoped_refptr<VideoCaptureBufferPool> pool, | 43 BufferPoolBufferAccessProvider( |
46 int buffer_id, | 44 scoped_refptr<VideoCaptureBufferPool> buffer_pool, |
47 int frame_feedback_id) | 45 int buffer_id) |
48 : pool_(std::move(pool)), | 46 : buffer_pool_(std::move(buffer_pool)), buffer_id_(buffer_id) { |
49 id_(buffer_id), | 47 // The code invoking this constructor is expected to have already made a |
50 frame_feedback_id_(frame_feedback_id), | 48 // producer reservation on the buffer from the pool. |
51 buffer_handle_(pool_->GetBufferHandle(buffer_id)) { | |
52 DCHECK(pool_.get()); | |
53 } | 49 } |
54 int id() const override { return id_; } | 50 |
55 int frame_feedback_id() const override { return frame_feedback_id_; } | 51 ~BufferPoolBufferAccessProvider() override { |
56 gfx::Size dimensions() const override { return buffer_handle_->dimensions(); } | 52 buffer_pool_->RelinquishProducerReservation(buffer_id_); |
57 size_t mapped_size() const override { return buffer_handle_->mapped_size(); } | |
58 void* data(int plane) override { return buffer_handle_->data(plane); } | |
59 #if defined(OS_POSIX) && !defined(OS_MACOSX) | |
60 base::FileDescriptor AsPlatformFile() override { | |
61 return buffer_handle_->AsPlatformFile(); | |
62 } | 53 } |
63 #endif | 54 |
64 bool IsBackedByVideoFrame() const override { | 55 // Implementation of BufferAccessProvider: |
65 return buffer_handle_->IsBackedByVideoFrame(); | 56 mojo::ScopedSharedBufferHandle GetHandleForTransit() override { |
| 57 return buffer_pool_->GetHandleForTransit(buffer_id_); |
66 } | 58 } |
67 scoped_refptr<VideoFrame> GetVideoFrame() override { | 59 std::unique_ptr<VideoCaptureBufferHandle> GetReadWriteAccess() override { |
68 return buffer_handle_->GetVideoFrame(); | 60 return buffer_pool_->GetReadWriteAccess(buffer_id_); |
69 } | 61 } |
70 | 62 |
71 private: | 63 private: |
72 ~AutoReleaseBuffer() override { pool_->RelinquishProducerReservation(id_); } | 64 const scoped_refptr<VideoCaptureBufferPool> buffer_pool_; |
73 | 65 const int buffer_id_; |
74 const scoped_refptr<VideoCaptureBufferPool> pool_; | |
75 const int id_; | |
76 const int frame_feedback_id_; | |
77 const std::unique_ptr<VideoCaptureBufferHandle> buffer_handle_; | |
78 }; | 66 }; |
79 | 67 |
80 VideoCaptureDeviceClient::VideoCaptureDeviceClient( | 68 VideoCaptureDeviceClient::VideoCaptureDeviceClient( |
81 std::unique_ptr<VideoFrameReceiver> receiver, | 69 std::unique_ptr<VideoFrameReceiver> receiver, |
82 scoped_refptr<VideoCaptureBufferPool> buffer_pool, | 70 scoped_refptr<VideoCaptureBufferPool> buffer_pool, |
83 const VideoCaptureJpegDecoderFactoryCB& jpeg_decoder_factory) | 71 const VideoCaptureJpegDecoderFactoryCB& jpeg_decoder_factory) |
84 : receiver_(std::move(receiver)), | 72 : receiver_(std::move(receiver)), |
85 jpeg_decoder_factory_callback_(jpeg_decoder_factory), | 73 jpeg_decoder_factory_callback_(jpeg_decoder_factory), |
86 external_jpeg_decoder_initialized_(false), | 74 external_jpeg_decoder_initialized_(false), |
87 buffer_pool_(std::move(buffer_pool)), | 75 buffer_pool_(std::move(buffer_pool)), |
(...skipping 53 matching lines...) |
141 << rotation; | 129 << rotation; |
142 libyuv::RotationMode rotation_mode = libyuv::kRotate0; | 130 libyuv::RotationMode rotation_mode = libyuv::kRotate0; |
143 if (rotation == 90) | 131 if (rotation == 90) |
144 rotation_mode = libyuv::kRotate90; | 132 rotation_mode = libyuv::kRotate90; |
145 else if (rotation == 180) | 133 else if (rotation == 180) |
146 rotation_mode = libyuv::kRotate180; | 134 rotation_mode = libyuv::kRotate180; |
147 else if (rotation == 270) | 135 else if (rotation == 270) |
148 rotation_mode = libyuv::kRotate270; | 136 rotation_mode = libyuv::kRotate270; |
149 | 137 |
150 const gfx::Size dimensions(destination_width, destination_height); | 138 const gfx::Size dimensions(destination_width, destination_height); |
151 uint8_t *y_plane_data, *u_plane_data, *v_plane_data; | 139 Buffer buffer = |
152 std::unique_ptr<Buffer> buffer(ReserveI420OutputBuffer( | 140 ReserveOutputBuffer(dimensions, media::PIXEL_FORMAT_I420, |
153 dimensions, media::PIXEL_STORAGE_CPU, frame_feedback_id, &y_plane_data, | 141 media::PIXEL_STORAGE_CPU, frame_feedback_id); |
154 &u_plane_data, &v_plane_data)); | |
155 #if DCHECK_IS_ON() | 142 #if DCHECK_IS_ON() |
156 dropped_frame_counter_ = buffer.get() ? 0 : dropped_frame_counter_ + 1; | 143 dropped_frame_counter_ = buffer.is_valid() ? 0 : dropped_frame_counter_ + 1; |
157 if (dropped_frame_counter_ >= kMaxDroppedFrames) | 144 if (dropped_frame_counter_ >= kMaxDroppedFrames) |
158 OnError(FROM_HERE, "Too many frames dropped"); | 145 OnError(FROM_HERE, "Too many frames dropped"); |
159 #endif | 146 #endif |
160 // Failed to reserve I420 output buffer, so drop the frame. | 147 // Failed to reserve I420 output buffer, so drop the frame. |
161 if (!buffer.get()) | 148 if (!buffer.is_valid()) |
162 return; | 149 return; |
163 | 150 |
| 151 auto buffer_access = buffer.access_provider->GetReadWriteAccess(); |
| 152 uint8_t *y_plane_data, *u_plane_data, *v_plane_data; |
| 153 InitializeI420PlanePointers(dimensions, buffer_access->data(), &y_plane_data, |
| 154 &u_plane_data, &v_plane_data); |
| 155 |
164 const int yplane_stride = dimensions.width(); | 156 const int yplane_stride = dimensions.width(); |
165 const int uv_plane_stride = yplane_stride / 2; | 157 const int uv_plane_stride = yplane_stride / 2; |
166 int crop_x = 0; | 158 int crop_x = 0; |
167 int crop_y = 0; | 159 int crop_y = 0; |
168 libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY; | 160 libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY; |
169 | 161 |
170 bool flip = false; | 162 bool flip = false; |
171 switch (format.pixel_format) { | 163 switch (format.pixel_format) { |
172 case media::PIXEL_FORMAT_UNKNOWN: // Color format not set. | 164 case media::PIXEL_FORMAT_UNKNOWN: // Color format not set. |
173 break; | 165 break; |
(...skipping 86 matching lines...) |
260 return; | 252 return; |
261 } | 253 } |
262 | 254 |
263 const VideoCaptureFormat output_format = | 255 const VideoCaptureFormat output_format = |
264 VideoCaptureFormat(dimensions, format.frame_rate, | 256 VideoCaptureFormat(dimensions, format.frame_rate, |
265 media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU); | 257 media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU); |
266 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, | 258 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, |
267 timestamp); | 259 timestamp); |
268 } | 260 } |
269 | 261 |
270 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> | 262 media::VideoCaptureDevice::Client::Buffer |
271 VideoCaptureDeviceClient::ReserveOutputBuffer( | 263 VideoCaptureDeviceClient::ReserveOutputBuffer( |
272 const gfx::Size& frame_size, | 264 const gfx::Size& frame_size, |
273 media::VideoPixelFormat pixel_format, | 265 media::VideoPixelFormat pixel_format, |
274 media::VideoPixelStorage pixel_storage, | 266 media::VideoPixelStorage pixel_storage, |
275 int frame_feedback_id) { | 267 int frame_feedback_id) { |
276 DCHECK_GT(frame_size.width(), 0); | 268 DCHECK_GT(frame_size.width(), 0); |
277 DCHECK_GT(frame_size.height(), 0); | 269 DCHECK_GT(frame_size.height(), 0); |
278 DCHECK(IsFormatSupported(pixel_format)); | 270 DCHECK(IsFormatSupported(pixel_format)); |
279 | 271 |
280 // TODO(mcasas): For PIXEL_STORAGE_GPUMEMORYBUFFER, find a way to indicate if | |
281 // it's a ShMem GMB or a DmaBuf GMB. | |
282 int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId; | 272 int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId; |
283 const int buffer_id = | 273 const int buffer_id = |
284 buffer_pool_->ReserveForProducer(frame_size, pixel_format, pixel_storage, | 274 buffer_pool_->ReserveForProducer(frame_size, pixel_format, pixel_storage, |
285 frame_feedback_id, &buffer_id_to_drop); | 275 frame_feedback_id, &buffer_id_to_drop); |
286 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) | 276 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) |
287 receiver_->OnBufferDestroyed(buffer_id_to_drop); | 277 receiver_->OnBufferDestroyed(buffer_id_to_drop); |
288 if (buffer_id == VideoCaptureBufferPool::kInvalidId) | 278 if (buffer_id == VideoCaptureBufferPool::kInvalidId) |
289 return nullptr; | 279 return Buffer(); |
290 return base::WrapUnique<Buffer>( | 280 return MakeBufferStruct(buffer_pool_, buffer_id, frame_feedback_id); |
291 new AutoReleaseBuffer(buffer_pool_, buffer_id, frame_feedback_id)); | |
292 } | 281 } |
293 | 282 |
294 void VideoCaptureDeviceClient::OnIncomingCapturedBuffer( | 283 void VideoCaptureDeviceClient::OnIncomingCapturedBuffer( |
295 std::unique_ptr<Buffer> buffer, | 284 Buffer buffer, |
296 const VideoCaptureFormat& format, | 285 const VideoCaptureFormat& format, |
297 base::TimeTicks reference_time, | 286 base::TimeTicks reference_time, |
298 base::TimeDelta timestamp) { | 287 base::TimeDelta timestamp) { |
299 OnIncomingCapturedBufferExt(std::move(buffer), format, reference_time, | 288 OnIncomingCapturedBufferExt(std::move(buffer), format, reference_time, |
300 timestamp, gfx::Rect(format.frame_size), | 289 timestamp, gfx::Rect(format.frame_size), |
301 VideoFrameMetadata()); | 290 VideoFrameMetadata()); |
302 } | 291 } |
303 | 292 |
304 void VideoCaptureDeviceClient::OnIncomingCapturedBufferExt( | 293 void VideoCaptureDeviceClient::OnIncomingCapturedBufferExt( |
305 std::unique_ptr<Buffer> buffer, | 294 Buffer buffer, |
306 const VideoCaptureFormat& format, | 295 const VideoCaptureFormat& format, |
307 base::TimeTicks reference_time, | 296 base::TimeTicks reference_time, |
308 base::TimeDelta timestamp, | 297 base::TimeDelta timestamp, |
309 gfx::Rect visible_rect, | 298 gfx::Rect visible_rect, |
310 const VideoFrameMetadata& additional_metadata) { | 299 const VideoFrameMetadata& additional_metadata) { |
311 const int buffer_id = buffer->id(); | 300 auto buffer_mojo_handle = buffer_pool_->GetHandleForTransit(buffer.id()); |
312 | |
313 auto buffer_mojo_handle = buffer_pool_->GetHandleForTransit(buffer_id); | |
314 base::SharedMemoryHandle memory_handle; | 301 base::SharedMemoryHandle memory_handle; |
315 size_t memory_size = 0; | 302 size_t memory_size = 0; |
316 bool read_only_flag = false; | 303 bool read_only_flag = false; |
317 const MojoResult unwrap_result_code = mojo::UnwrapSharedMemoryHandle( | 304 const MojoResult unwrap_result_code = mojo::UnwrapSharedMemoryHandle( |
318 std::move(buffer_mojo_handle), &memory_handle, &memory_size, | 305 std::move(buffer_mojo_handle), &memory_handle, &memory_size, |
319 &read_only_flag); | 306 &read_only_flag); |
320 DCHECK_EQ(MOJO_RESULT_OK, unwrap_result_code); | 307 DCHECK_EQ(MOJO_RESULT_OK, unwrap_result_code); |
321 | 308 |
| 309 auto buffer_access = buffer.access_provider->GetReadWriteAccess(); |
322 scoped_refptr<media::VideoFrame> frame = | 310 scoped_refptr<media::VideoFrame> frame = |
323 media::VideoFrame::WrapExternalSharedMemory( | 311 media::VideoFrame::WrapExternalSharedMemory( |
324 format.pixel_format, // format | 312 format.pixel_format, // format |
325 format.frame_size, // coded_size | 313 format.frame_size, // coded_size |
326 visible_rect, // visible_rect | 314 visible_rect, // visible_rect |
327 format.frame_size, // natural_size | 315 format.frame_size, // natural_size |
328 static_cast<uint8_t*>(buffer->data()), // data | 316 buffer_access->data(), // data |
329 buffer->mapped_size(), // data_size | 317 buffer_access->mapped_size(), // data_size |
330 memory_handle, // handle | 318 memory_handle, // handle |
331 0, // shared_memory_offset | 319 0, // shared_memory_offset |
332 timestamp); // timestamp | 320 timestamp); // timestamp |
333 frame->metadata()->MergeMetadataFrom(&additional_metadata); | 321 frame->metadata()->MergeMetadataFrom(&additional_metadata); |
334 frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE, | 322 frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE, |
335 format.frame_rate); | 323 format.frame_rate); |
336 frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME, | 324 frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME, |
337 reference_time); | 325 reference_time); |
338 | 326 |
339 receiver_->OnIncomingCapturedVideoFrame(std::move(buffer), std::move(frame)); | 327 receiver_->OnIncomingCapturedVideoFrame(std::move(buffer), std::move(frame)); |
340 } | 328 } |
341 | 329 |
342 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> | 330 media::VideoCaptureDevice::Client::Buffer |
343 VideoCaptureDeviceClient::ResurrectLastOutputBuffer( | 331 VideoCaptureDeviceClient::ResurrectLastOutputBuffer( |
344 const gfx::Size& dimensions, | 332 const gfx::Size& dimensions, |
345 media::VideoPixelFormat format, | 333 media::VideoPixelFormat format, |
346 media::VideoPixelStorage storage, | 334 media::VideoPixelStorage storage, |
347 int new_frame_feedback_id) { | 335 int new_frame_feedback_id) { |
348 const int buffer_id = | 336 const int buffer_id = |
349 buffer_pool_->ResurrectLastForProducer(dimensions, format, storage); | 337 buffer_pool_->ResurrectLastForProducer(dimensions, format, storage); |
350 if (buffer_id == VideoCaptureBufferPool::kInvalidId) | 338 if (buffer_id == VideoCaptureBufferPool::kInvalidId) |
351 return nullptr; | 339 return Buffer(); |
352 return base::WrapUnique<Buffer>( | 340 return MakeBufferStruct(buffer_pool_, buffer_id, new_frame_feedback_id); |
353 new AutoReleaseBuffer(buffer_pool_, buffer_id, new_frame_feedback_id)); | |
354 } | 341 } |
355 | 342 |
356 void VideoCaptureDeviceClient::OnError( | 343 void VideoCaptureDeviceClient::OnError( |
357 const tracked_objects::Location& from_here, | 344 const tracked_objects::Location& from_here, |
358 const std::string& reason) { | 345 const std::string& reason) { |
359 const std::string log_message = base::StringPrintf( | 346 const std::string log_message = base::StringPrintf( |
360 "error@ %s, %s, OS message: %s", from_here.ToString().c_str(), | 347 "error@ %s, %s, OS message: %s", from_here.ToString().c_str(), |
361 reason.c_str(), | 348 reason.c_str(), |
362 logging::SystemErrorCodeToString(logging::GetLastSystemErrorCode()) | 349 logging::SystemErrorCodeToString(logging::GetLastSystemErrorCode()) |
363 .c_str()); | 350 .c_str()); |
364 DLOG(ERROR) << log_message; | 351 DLOG(ERROR) << log_message; |
365 OnLog(log_message); | 352 OnLog(log_message); |
366 receiver_->OnError(); | 353 receiver_->OnError(); |
367 } | 354 } |
368 | 355 |
369 void VideoCaptureDeviceClient::OnLog(const std::string& message) { | 356 void VideoCaptureDeviceClient::OnLog(const std::string& message) { |
370 receiver_->OnLog(message); | 357 receiver_->OnLog(message); |
371 } | 358 } |
372 | 359 |
373 double VideoCaptureDeviceClient::GetBufferPoolUtilization() const { | 360 double VideoCaptureDeviceClient::GetBufferPoolUtilization() const { |
374 return buffer_pool_->GetBufferPoolUtilization(); | 361 return buffer_pool_->GetBufferPoolUtilization(); |
375 } | 362 } |
376 | 363 |
377 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> | 364 // static |
378 VideoCaptureDeviceClient::ReserveI420OutputBuffer( | 365 VideoCaptureDevice::Client::Buffer VideoCaptureDeviceClient::MakeBufferStruct( |
| 366 scoped_refptr<VideoCaptureBufferPool> buffer_pool, |
| 367 int buffer_id, |
| 368 int frame_feedback_id) { |
| 369 return Buffer( |
| 370 buffer_id, frame_feedback_id, |
| 371 base::MakeUnique<BufferPoolBufferAccessProvider>(buffer_pool, buffer_id)); |
| 372 } |
| 373 |
| 374 void VideoCaptureDeviceClient::InitializeI420PlanePointers( |
379 const gfx::Size& dimensions, | 375 const gfx::Size& dimensions, |
380 media::VideoPixelStorage storage, | 376 uint8_t* const data, |
381 int frame_feedback_id, | |
382 uint8_t** y_plane_data, | 377 uint8_t** y_plane_data, |
383 uint8_t** u_plane_data, | 378 uint8_t** u_plane_data, |
384 uint8_t** v_plane_data) { | 379 uint8_t** v_plane_data) { |
385 DCHECK(storage == media::PIXEL_STORAGE_CPU); | |
386 DCHECK(dimensions.height()); | 380 DCHECK(dimensions.height()); |
387 DCHECK(dimensions.width()); | 381 DCHECK(dimensions.width()); |
388 | 382 |
389 const media::VideoPixelFormat format = media::PIXEL_FORMAT_I420; | 383 const media::VideoPixelFormat format = media::PIXEL_FORMAT_I420; |
390 std::unique_ptr<Buffer> buffer(ReserveOutputBuffer( | |
391 dimensions, media::PIXEL_FORMAT_I420, storage, frame_feedback_id)); | |
392 if (!buffer) | |
393 return std::unique_ptr<Buffer>(); | |
394 // TODO(emircan): See http://crbug.com/521068, move this pointer | 384 // TODO(emircan): See http://crbug.com/521068, move this pointer |
395 // arithmetic inside Buffer::data() when this bug is resolved. | 385 // arithmetic inside Buffer::data() when this bug is resolved. |
396 *y_plane_data = reinterpret_cast<uint8_t*>(buffer->data()); | 386 *y_plane_data = data; |
397 *u_plane_data = | 387 *u_plane_data = |
398 *y_plane_data + | 388 *y_plane_data + |
399 VideoFrame::PlaneSize(format, VideoFrame::kYPlane, dimensions).GetArea(); | 389 VideoFrame::PlaneSize(format, VideoFrame::kYPlane, dimensions).GetArea(); |
400 *v_plane_data = | 390 *v_plane_data = |
401 *u_plane_data + | 391 *u_plane_data + |
402 VideoFrame::PlaneSize(format, VideoFrame::kUPlane, dimensions).GetArea(); | 392 VideoFrame::PlaneSize(format, VideoFrame::kUPlane, dimensions).GetArea(); |
403 return buffer; | |
404 } | 393 } |
405 | 394 |
406 void VideoCaptureDeviceClient::OnIncomingCapturedY16Data( | 395 void VideoCaptureDeviceClient::OnIncomingCapturedY16Data( |
407 const uint8_t* data, | 396 const uint8_t* data, |
408 int length, | 397 int length, |
409 const VideoCaptureFormat& format, | 398 const VideoCaptureFormat& format, |
410 base::TimeTicks reference_time, | 399 base::TimeTicks reference_time, |
411 base::TimeDelta timestamp, | 400 base::TimeDelta timestamp, |
412 int frame_feedback_id) { | 401 int frame_feedback_id) { |
413 std::unique_ptr<Buffer> buffer( | 402 Buffer buffer = |
414 ReserveOutputBuffer(format.frame_size, media::PIXEL_FORMAT_Y16, | 403 ReserveOutputBuffer(format.frame_size, media::PIXEL_FORMAT_Y16, |
415 media::PIXEL_STORAGE_CPU, frame_feedback_id)); | 404 media::PIXEL_STORAGE_CPU, frame_feedback_id); |
416 // The input |length| can be greater than the required buffer size because of | 405 // The input |length| can be greater than the required buffer size because of |
417 // padding and/or alignment, but it cannot be smaller. | 406 // padding and/or alignment, but it cannot be smaller. |
418 DCHECK_GE(static_cast<size_t>(length), format.ImageAllocationSize()); | 407 DCHECK_GE(static_cast<size_t>(length), format.ImageAllocationSize()); |
419 #if DCHECK_IS_ON() | 408 #if DCHECK_IS_ON() |
420 dropped_frame_counter_ = buffer.get() ? 0 : dropped_frame_counter_ + 1; | 409 dropped_frame_counter_ = buffer.is_valid() ? 0 : dropped_frame_counter_ + 1; |
421 if (dropped_frame_counter_ >= kMaxDroppedFrames) | 410 if (dropped_frame_counter_ >= kMaxDroppedFrames) |
422 OnError(FROM_HERE, "Too many frames dropped"); | 411 OnError(FROM_HERE, "Too many frames dropped"); |
423 #endif | 412 #endif |
424 // Failed to reserve output buffer, so drop the frame. | 413 // Failed to reserve output buffer, so drop the frame. |
425 if (!buffer.get()) | 414 if (!buffer.is_valid()) |
426 return; | 415 return; |
427 memcpy(buffer->data(), data, length); | 416 auto buffer_access = buffer.access_provider->GetReadWriteAccess(); |
| 417 memcpy(buffer_access->data(), data, length); |
428 const VideoCaptureFormat output_format = | 418 const VideoCaptureFormat output_format = |
429 VideoCaptureFormat(format.frame_size, format.frame_rate, | 419 VideoCaptureFormat(format.frame_size, format.frame_rate, |
430 media::PIXEL_FORMAT_Y16, media::PIXEL_STORAGE_CPU); | 420 media::PIXEL_FORMAT_Y16, media::PIXEL_STORAGE_CPU); |
431 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, | 421 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, |
432 timestamp); | 422 timestamp); |
433 } | 423 } |
434 | 424 |
435 } // namespace media | 425 } // namespace media |
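For reviewers who want to see the refactored interface from the producer side, here is a minimal sketch (not part of this patch) of how a capture device implementation might push one Y16 frame through VideoCaptureDevice::Client. It mirrors the flow of OnIncomingCapturedY16Data() above and only uses the Buffer members (is_valid(), access_provider) and Client methods visible in this diff; the function CaptureOneY16Frame, its parameters, and the feedback id constant are hypothetical.

// Illustrative sketch only -- not part of this change.
#include <algorithm>
#include <cstring>
#include <utility>

#include "base/time/time.h"
#include "media/capture/video/video_capture_device.h"

namespace media {

void CaptureOneY16Frame(VideoCaptureDevice::Client* client,
                        const VideoCaptureFormat& format,
                        const uint8_t* packed_data,
                        size_t packed_length,
                        base::TimeTicks reference_time,
                        base::TimeDelta timestamp) {
  const int kFrameFeedbackId = 0;  // Hypothetical feedback id.
  // Reserve a pool-backed buffer; an invalid Buffer means the pool had no
  // room, so the frame is dropped.
  VideoCaptureDevice::Client::Buffer buffer = client->ReserveOutputBuffer(
      format.frame_size, media::PIXEL_FORMAT_Y16, media::PIXEL_STORAGE_CPU,
      kFrameFeedbackId);
  if (!buffer.is_valid())
    return;
  // Map the buffer through its access provider and copy the payload in,
  // clamped to the mapped size of the reserved buffer.
  auto access = buffer.access_provider->GetReadWriteAccess();
  std::memcpy(access->data(), packed_data,
              std::min(packed_length, access->mapped_size()));
  // Hand the filled buffer back; the producer reservation is released when
  // the Buffer's access provider is destroyed on the receiver side.
  client->OnIncomingCapturedBuffer(std::move(buffer), format, reference_time,
                                   timestamp);
}

}  // namespace media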