OLD | NEW |
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/capture/video/video_capture_device_client.h" | 5 #include "media/capture/video/video_capture_device_client.h" |
6 | 6 |
7 #include <algorithm> | 7 #include <algorithm> |
8 #include <utility> | 8 #include <utility> |
9 | 9 |
10 #include "base/bind.h" | 10 #include "base/bind.h" |
(...skipping 20 matching lines...) |
31 namespace { | 31 namespace { |
32 | 32 |
33 bool IsFormatSupported(media::VideoPixelFormat pixel_format) { | 33 bool IsFormatSupported(media::VideoPixelFormat pixel_format) { |
34 return (pixel_format == media::PIXEL_FORMAT_I420 || | 34 return (pixel_format == media::PIXEL_FORMAT_I420 || |
35 pixel_format == media::PIXEL_FORMAT_Y16); | 35 pixel_format == media::PIXEL_FORMAT_Y16); |
36 } | 36 } |
37 } | 37 } |
38 | 38 |
39 namespace media { | 39 namespace media { |
40 | 40 |
41 // Class combining a Client::Buffer interface implementation and a pool buffer | 41 class BufferPoolProducerReservation : public Ownership { |
42 // implementation to guarantee proper cleanup on destruction on our side. | |
43 class AutoReleaseBuffer : public media::VideoCaptureDevice::Client::Buffer { | |
44 public: | 42 public: |
45 AutoReleaseBuffer(scoped_refptr<VideoCaptureBufferPool> pool, | 43 BufferPoolProducerReservation( |
46 int buffer_id, | 44 scoped_refptr<VideoCaptureBufferPool> buffer_pool, |
47 int frame_feedback_id) | 45 int buffer_id) |
48 : pool_(std::move(pool)), | 46 : buffer_pool_(std::move(buffer_pool)), buffer_id_(buffer_id) { |
49 id_(buffer_id), | 47 // The code invoking this constructor is supposed to have already made a |
50 frame_feedback_id_(frame_feedback_id), | 48 // producer reservation on the buffer from the pool. |
51 buffer_handle_(pool_->GetBufferHandle(buffer_id)) { | |
52 DCHECK(pool_.get()); | |
53 } | 49 } |
54 int id() const override { return id_; } | 50 |
55 int frame_feedback_id() const override { return frame_feedback_id_; } | 51 ~BufferPoolProducerReservation() override { |
56 gfx::Size dimensions() const override { return buffer_handle_->dimensions(); } | 52 buffer_pool_->RelinquishProducerReservation(buffer_id_); |
57 size_t mapped_size() const override { return buffer_handle_->mapped_size(); } | |
58 void* data(int plane) override { return buffer_handle_->data(plane); } | |
59 #if defined(OS_POSIX) && !defined(OS_MACOSX) | |
60 base::FileDescriptor AsPlatformFile() override { | |
61 return buffer_handle_->AsPlatformFile(); | |
62 } | |
63 #endif | |
64 bool IsBackedByVideoFrame() const override { | |
65 return buffer_handle_->IsBackedByVideoFrame(); | |
66 } | |
67 scoped_refptr<VideoFrame> GetVideoFrame() override { | |
68 return buffer_handle_->GetVideoFrame(); | |
69 } | 53 } |
70 | 54 |
71 private: | 55 private: |
72 ~AutoReleaseBuffer() override { pool_->RelinquishProducerReservation(id_); } | 56 const scoped_refptr<VideoCaptureBufferPool> buffer_pool_; |
| 57 const int buffer_id_; |
| 58 }; |
73 | 59 |
74 const scoped_refptr<VideoCaptureBufferPool> pool_; | 60 class BufferPoolBufferHandleProvider : public BufferHandleProvider { |
75 const int id_; | 61 public: |
76 const int frame_feedback_id_; | 62 BufferPoolBufferHandleProvider( |
77 const std::unique_ptr<VideoCaptureBufferHandle> buffer_handle_; | 63 scoped_refptr<VideoCaptureBufferPool> buffer_pool, |
| 64 int buffer_id) |
| 65 : buffer_pool_(std::move(buffer_pool)), buffer_id_(buffer_id) {} |
| 66 |
| 67 // Implementation of BufferHandleProvider: |
| 68 mojo::ScopedSharedBufferHandle GetHandleForInterProcessTransit() override { |
| 69 return buffer_pool_->GetHandleForInterProcessTransit(buffer_id_); |
| 70 } |
| 71 std::unique_ptr<VideoCaptureBufferHandle> GetHandleForInProcessAccess() |
| 72 override { |
| 73 return buffer_pool_->GetHandleForInProcessAccess(buffer_id_); |
| 74 } |
| 75 |
| 76 private: |
| 77 const scoped_refptr<VideoCaptureBufferPool> buffer_pool_; |
| 78 const int buffer_id_; |
78 }; | 79 }; |
79 | 80 |
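These two helpers split what AutoReleaseBuffer used to do in a single object: BufferPoolBufferHandleProvider hands out shared-memory handles, while BufferPoolProducerReservation ties the producer reservation's lifetime to the Buffer that MakeBufferStruct() (near the bottom of this file) assembles. A minimal lifetime sketch, using only names visible in this change (feedback_id is a placeholder):

    {
      Buffer buffer = MakeBufferStruct(buffer_pool_, buffer_id, feedback_id);
      auto access = buffer.handle_provider->GetHandleForInProcessAccess();
      // ... fill access->data() ...
    }  // |buffer| goes out of scope: ~BufferPoolProducerReservation() runs and
       // calls RelinquishProducerReservation(buffer_id), which is exactly what
       // ~AutoReleaseBuffer() did before this change.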
80 VideoCaptureDeviceClient::VideoCaptureDeviceClient( | 81 VideoCaptureDeviceClient::VideoCaptureDeviceClient( |
81 std::unique_ptr<VideoFrameReceiver> receiver, | 82 std::unique_ptr<VideoFrameReceiver> receiver, |
82 scoped_refptr<VideoCaptureBufferPool> buffer_pool, | 83 scoped_refptr<VideoCaptureBufferPool> buffer_pool, |
83 const VideoCaptureJpegDecoderFactoryCB& jpeg_decoder_factory) | 84 const VideoCaptureJpegDecoderFactoryCB& jpeg_decoder_factory) |
84 : receiver_(std::move(receiver)), | 85 : receiver_(std::move(receiver)), |
85 jpeg_decoder_factory_callback_(jpeg_decoder_factory), | 86 jpeg_decoder_factory_callback_(jpeg_decoder_factory), |
86 external_jpeg_decoder_initialized_(false), | 87 external_jpeg_decoder_initialized_(false), |
87 buffer_pool_(std::move(buffer_pool)), | 88 buffer_pool_(std::move(buffer_pool)), |
(...skipping 53 matching lines...) |
141 << rotation; | 142 << rotation; |
142 libyuv::RotationMode rotation_mode = libyuv::kRotate0; | 143 libyuv::RotationMode rotation_mode = libyuv::kRotate0; |
143 if (rotation == 90) | 144 if (rotation == 90) |
144 rotation_mode = libyuv::kRotate90; | 145 rotation_mode = libyuv::kRotate90; |
145 else if (rotation == 180) | 146 else if (rotation == 180) |
146 rotation_mode = libyuv::kRotate180; | 147 rotation_mode = libyuv::kRotate180; |
147 else if (rotation == 270) | 148 else if (rotation == 270) |
148 rotation_mode = libyuv::kRotate270; | 149 rotation_mode = libyuv::kRotate270; |
149 | 150 |
150 const gfx::Size dimensions(destination_width, destination_height); | 151 const gfx::Size dimensions(destination_width, destination_height); |
151 uint8_t *y_plane_data, *u_plane_data, *v_plane_data; | 152 Buffer buffer = |
152 std::unique_ptr<Buffer> buffer(ReserveI420OutputBuffer( | 153 ReserveOutputBuffer(dimensions, media::PIXEL_FORMAT_I420, |
153 dimensions, media::PIXEL_STORAGE_CPU, frame_feedback_id, &y_plane_data, | 154 media::PIXEL_STORAGE_CPU, frame_feedback_id); |
154 &u_plane_data, &v_plane_data)); | |
155 #if DCHECK_IS_ON() | 155 #if DCHECK_IS_ON() |
156 dropped_frame_counter_ = buffer.get() ? 0 : dropped_frame_counter_ + 1; | 156 dropped_frame_counter_ = buffer.is_valid() ? 0 : dropped_frame_counter_ + 1; |
157 if (dropped_frame_counter_ >= kMaxDroppedFrames) | 157 if (dropped_frame_counter_ >= kMaxDroppedFrames) |
158 OnError(FROM_HERE, "Too many frames dropped"); | 158 OnError(FROM_HERE, "Too many frames dropped"); |
159 #endif | 159 #endif |
160 // Failed to reserve I420 output buffer, so drop the frame. | 160 // Failed to reserve I420 output buffer, so drop the frame. |
161 if (!buffer.get()) | 161 if (!buffer.is_valid()) |
162 return; | 162 return; |
163 | 163 |
| 164 auto buffer_access = buffer.handle_provider->GetHandleForInProcessAccess(); |
| 165 uint8_t *y_plane_data, *u_plane_data, *v_plane_data; |
| 166 InitializeI420PlanePointers(dimensions, buffer_access->data(), &y_plane_data, |
| 167 &u_plane_data, &v_plane_data); |
| 168 |
164 const int yplane_stride = dimensions.width(); | 169 const int yplane_stride = dimensions.width(); |
165 const int uv_plane_stride = yplane_stride / 2; | 170 const int uv_plane_stride = yplane_stride / 2; |
166 int crop_x = 0; | 171 int crop_x = 0; |
167 int crop_y = 0; | 172 int crop_y = 0; |
168 libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY; | 173 libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY; |
169 | 174 |
170 bool flip = false; | 175 bool flip = false; |
171 switch (format.pixel_format) { | 176 switch (format.pixel_format) { |
172 case media::PIXEL_FORMAT_UNKNOWN: // Color format not set. | 177 case media::PIXEL_FORMAT_UNKNOWN: // Color format not set. |
173 break; | 178 break; |
(...skipping 86 matching lines...) |
260 return; | 265 return; |
261 } | 266 } |
262 | 267 |
263 const VideoCaptureFormat output_format = | 268 const VideoCaptureFormat output_format = |
264 VideoCaptureFormat(dimensions, format.frame_rate, | 269 VideoCaptureFormat(dimensions, format.frame_rate, |
265 media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU); | 270 media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU); |
266 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, | 271 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, |
267 timestamp); | 272 timestamp); |
268 } | 273 } |
269 | 274 |
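The rotation handling near the top of this function maps the device-reported rotation in degrees onto libyuv's enum before the frame is converted. The same mapping, written as a small helper for reference only (a sketch; the patch itself keeps the if/else chain):

    libyuv::RotationMode ToLibyuvRotation(int rotation_degrees) {
      switch (rotation_degrees) {
        case 90:  return libyuv::kRotate90;
        case 180: return libyuv::kRotate180;
        case 270: return libyuv::kRotate270;
        default:  return libyuv::kRotate0;  // 0 or any unexpected value.
      }
    }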
270 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> | 275 media::VideoCaptureDevice::Client::Buffer |
271 VideoCaptureDeviceClient::ReserveOutputBuffer( | 276 VideoCaptureDeviceClient::ReserveOutputBuffer( |
272 const gfx::Size& frame_size, | 277 const gfx::Size& frame_size, |
273 media::VideoPixelFormat pixel_format, | 278 media::VideoPixelFormat pixel_format, |
274 media::VideoPixelStorage pixel_storage, | 279 media::VideoPixelStorage pixel_storage, |
275 int frame_feedback_id) { | 280 int frame_feedback_id) { |
276 DCHECK_GT(frame_size.width(), 0); | 281 DCHECK_GT(frame_size.width(), 0); |
277 DCHECK_GT(frame_size.height(), 0); | 282 DCHECK_GT(frame_size.height(), 0); |
278 DCHECK(IsFormatSupported(pixel_format)); | 283 DCHECK(IsFormatSupported(pixel_format)); |
279 | 284 |
280 // TODO(mcasas): For PIXEL_STORAGE_GPUMEMORYBUFFER, find a way to indicate if | |
281 // it's a ShMem GMB or a DmaBuf GMB. | |
282 int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId; | 285 int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId; |
283 const int buffer_id = | 286 const int buffer_id = |
284 buffer_pool_->ReserveForProducer(frame_size, pixel_format, pixel_storage, | 287 buffer_pool_->ReserveForProducer(frame_size, pixel_format, pixel_storage, |
285 frame_feedback_id, &buffer_id_to_drop); | 288 frame_feedback_id, &buffer_id_to_drop); |
286 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) | 289 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) |
287 receiver_->OnBufferDestroyed(buffer_id_to_drop); | 290 receiver_->OnBufferDestroyed(buffer_id_to_drop); |
288 if (buffer_id == VideoCaptureBufferPool::kInvalidId) | 291 if (buffer_id == VideoCaptureBufferPool::kInvalidId) |
289 return nullptr; | 292 return Buffer(); |
290 return base::WrapUnique<Buffer>( | 293 return MakeBufferStruct(buffer_pool_, buffer_id, frame_feedback_id); |
291 new AutoReleaseBuffer(buffer_pool_, buffer_id, frame_feedback_id)); | |
292 } | 294 } |
293 | 295 |
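ReserveOutputBuffer() now returns Buffer by value rather than through a unique_ptr, so callers test buffer.is_valid() instead of checking for null, and a slot the pool had to drop is retired via OnBufferDestroyed(). A sketch of a typical caller, following the same pattern the Y16 path below uses (frame_size, feedback_id, source_pixels, output_format, reference_time and timestamp are placeholders):

    Buffer buffer = ReserveOutputBuffer(frame_size, media::PIXEL_FORMAT_I420,
                                        media::PIXEL_STORAGE_CPU, feedback_id);
    if (!buffer.is_valid())
      return;  // No free slot in the pool; the frame is dropped.
    auto access = buffer.handle_provider->GetHandleForInProcessAccess();
    memcpy(access->data(), source_pixels, access->mapped_size());
    OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time,
                             timestamp);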
294 void VideoCaptureDeviceClient::OnIncomingCapturedBuffer( | 296 void VideoCaptureDeviceClient::OnIncomingCapturedBuffer( |
295 std::unique_ptr<Buffer> buffer, | 297 Buffer buffer, |
296 const VideoCaptureFormat& format, | 298 const VideoCaptureFormat& format, |
297 base::TimeTicks reference_time, | 299 base::TimeTicks reference_time, |
298 base::TimeDelta timestamp) { | 300 base::TimeDelta timestamp) { |
299 OnIncomingCapturedBufferExt(std::move(buffer), format, reference_time, | 301 OnIncomingCapturedBufferExt(std::move(buffer), format, reference_time, |
300 timestamp, gfx::Rect(format.frame_size), | 302 timestamp, gfx::Rect(format.frame_size), |
301 VideoFrameMetadata()); | 303 VideoFrameMetadata()); |
302 } | 304 } |
303 | 305 |
304 void VideoCaptureDeviceClient::OnIncomingCapturedBufferExt( | 306 void VideoCaptureDeviceClient::OnIncomingCapturedBufferExt( |
305 std::unique_ptr<Buffer> buffer, | 307 Buffer buffer, |
306 const VideoCaptureFormat& format, | 308 const VideoCaptureFormat& format, |
307 base::TimeTicks reference_time, | 309 base::TimeTicks reference_time, |
308 base::TimeDelta timestamp, | 310 base::TimeDelta timestamp, |
309 gfx::Rect visible_rect, | 311 gfx::Rect visible_rect, |
310 const VideoFrameMetadata& additional_metadata) { | 312 const VideoFrameMetadata& additional_metadata) { |
311 const int buffer_id = buffer->id(); | 313 auto buffer_mojo_handle = |
312 | 314 buffer_pool_->GetHandleForInterProcessTransit(buffer.id()); |
313 auto buffer_mojo_handle = buffer_pool_->GetHandleForTransit(buffer_id); | |
314 base::SharedMemoryHandle memory_handle; | 315 base::SharedMemoryHandle memory_handle; |
315 size_t memory_size = 0; | 316 size_t memory_size = 0; |
316 bool read_only_flag = false; | 317 bool read_only_flag = false; |
317 const MojoResult unwrap_result_code = mojo::UnwrapSharedMemoryHandle( | 318 const MojoResult unwrap_result_code = mojo::UnwrapSharedMemoryHandle( |
318 std::move(buffer_mojo_handle), &memory_handle, &memory_size, | 319 std::move(buffer_mojo_handle), &memory_handle, &memory_size, |
319 &read_only_flag); | 320 &read_only_flag); |
320 DCHECK_EQ(MOJO_RESULT_OK, unwrap_result_code); | 321 DCHECK_EQ(MOJO_RESULT_OK, unwrap_result_code); |
321 | 322 |
| 323 auto buffer_access = buffer.handle_provider->GetHandleForInProcessAccess(); |
322 scoped_refptr<media::VideoFrame> frame = | 324 scoped_refptr<media::VideoFrame> frame = |
323 media::VideoFrame::WrapExternalSharedMemory( | 325 media::VideoFrame::WrapExternalSharedMemory( |
324 format.pixel_format, // format | 326 format.pixel_format, // format |
325 format.frame_size, // coded_size | 327 format.frame_size, // coded_size |
326 visible_rect, // visible_rect | 328 visible_rect, // visible_rect |
327 format.frame_size, // natural_size | 329 format.frame_size, // natural_size |
328 static_cast<uint8_t*>(buffer->data()), // data | 330 buffer_access->data(), // data |
329 buffer->mapped_size(), // data_size | 331 buffer_access->mapped_size(), // data_size |
330 memory_handle, // handle | 332 memory_handle, // handle |
331 0, // shared_memory_offset | 333 0, // shared_memory_offset |
332 timestamp); // timestamp | 334 timestamp); // timestamp |
333 frame->metadata()->MergeMetadataFrom(&additional_metadata); | 335 frame->metadata()->MergeMetadataFrom(&additional_metadata); |
334 frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE, | 336 frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE, |
335 format.frame_rate); | 337 format.frame_rate); |
336 frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME, | 338 frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME, |
337 reference_time); | 339 reference_time); |
338 | 340 |
339 receiver_->OnIncomingCapturedVideoFrame(std::move(buffer), std::move(frame)); | 341 receiver_->OnIncomingCapturedVideoFrame(std::move(buffer), std::move(frame)); |
340 } | 342 } |
341 | 343 |
342 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> | 344 media::VideoCaptureDevice::Client::Buffer |
343 VideoCaptureDeviceClient::ResurrectLastOutputBuffer( | 345 VideoCaptureDeviceClient::ResurrectLastOutputBuffer( |
344 const gfx::Size& dimensions, | 346 const gfx::Size& dimensions, |
345 media::VideoPixelFormat format, | 347 media::VideoPixelFormat format, |
346 media::VideoPixelStorage storage, | 348 media::VideoPixelStorage storage, |
347 int new_frame_feedback_id) { | 349 int new_frame_feedback_id) { |
348 const int buffer_id = | 350 const int buffer_id = |
349 buffer_pool_->ResurrectLastForProducer(dimensions, format, storage); | 351 buffer_pool_->ResurrectLastForProducer(dimensions, format, storage); |
350 if (buffer_id == VideoCaptureBufferPool::kInvalidId) | 352 if (buffer_id == VideoCaptureBufferPool::kInvalidId) |
351 return nullptr; | 353 return Buffer(); |
352 return base::WrapUnique<Buffer>( | 354 return MakeBufferStruct(buffer_pool_, buffer_id, new_frame_feedback_id); |
353 new AutoReleaseBuffer(buffer_pool_, buffer_id, new_frame_feedback_id)); | |
354 } | 355 } |
355 | 356 |
356 void VideoCaptureDeviceClient::OnError( | 357 void VideoCaptureDeviceClient::OnError( |
357 const tracked_objects::Location& from_here, | 358 const tracked_objects::Location& from_here, |
358 const std::string& reason) { | 359 const std::string& reason) { |
359 const std::string log_message = base::StringPrintf( | 360 const std::string log_message = base::StringPrintf( |
360 "error@ %s, %s, OS message: %s", from_here.ToString().c_str(), | 361 "error@ %s, %s, OS message: %s", from_here.ToString().c_str(), |
361 reason.c_str(), | 362 reason.c_str(), |
362 logging::SystemErrorCodeToString(logging::GetLastSystemErrorCode()) | 363 logging::SystemErrorCodeToString(logging::GetLastSystemErrorCode()) |
363 .c_str()); | 364 .c_str()); |
364 DLOG(ERROR) << log_message; | 365 DLOG(ERROR) << log_message; |
365 OnLog(log_message); | 366 OnLog(log_message); |
366 receiver_->OnError(); | 367 receiver_->OnError(); |
367 } | 368 } |
368 | 369 |
369 void VideoCaptureDeviceClient::OnLog(const std::string& message) { | 370 void VideoCaptureDeviceClient::OnLog(const std::string& message) { |
370 receiver_->OnLog(message); | 371 receiver_->OnLog(message); |
371 } | 372 } |
372 | 373 |
373 double VideoCaptureDeviceClient::GetBufferPoolUtilization() const { | 374 double VideoCaptureDeviceClient::GetBufferPoolUtilization() const { |
374 return buffer_pool_->GetBufferPoolUtilization(); | 375 return buffer_pool_->GetBufferPoolUtilization(); |
375 } | 376 } |
376 | 377 |
377 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> | 378 // static |
378 VideoCaptureDeviceClient::ReserveI420OutputBuffer( | 379 VideoCaptureDevice::Client::Buffer VideoCaptureDeviceClient::MakeBufferStruct( |
| 380 scoped_refptr<VideoCaptureBufferPool> buffer_pool, |
| 381 int buffer_id, |
| 382 int frame_feedback_id) { |
| 383 return Buffer( |
| 384 buffer_id, frame_feedback_id, |
| 385 base::MakeUnique<BufferPoolBufferHandleProvider>(buffer_pool, buffer_id), |
| 386 base::MakeUnique<BufferPoolProducerReservation>(buffer_pool, buffer_id)); |
| 387 } |
| 388 |
| 389 void VideoCaptureDeviceClient::InitializeI420PlanePointers( |
379 const gfx::Size& dimensions, | 390 const gfx::Size& dimensions, |
380 media::VideoPixelStorage storage, | 391 uint8_t* const data, |
381 int frame_feedback_id, | |
382 uint8_t** y_plane_data, | 392 uint8_t** y_plane_data, |
383 uint8_t** u_plane_data, | 393 uint8_t** u_plane_data, |
384 uint8_t** v_plane_data) { | 394 uint8_t** v_plane_data) { |
385 DCHECK(storage == media::PIXEL_STORAGE_CPU); | |
386 DCHECK(dimensions.height()); | 395 DCHECK(dimensions.height()); |
387 DCHECK(dimensions.width()); | 396 DCHECK(dimensions.width()); |
388 | 397 |
389 const media::VideoPixelFormat format = media::PIXEL_FORMAT_I420; | 398 const media::VideoPixelFormat format = media::PIXEL_FORMAT_I420; |
390 std::unique_ptr<Buffer> buffer(ReserveOutputBuffer( | |
391 dimensions, media::PIXEL_FORMAT_I420, storage, frame_feedback_id)); | |
392 if (!buffer) | |
393 return std::unique_ptr<Buffer>(); | |
394 // TODO(emircan): See http://crbug.com/521068, move this pointer | 399 // TODO(emircan): See http://crbug.com/521068, move this pointer |
395 // arithmetic inside Buffer::data() when this bug is resolved. | 400 // arithmetic inside Buffer::data() when this bug is resolved. |
396 *y_plane_data = reinterpret_cast<uint8_t*>(buffer->data()); | 401 *y_plane_data = data; |
397 *u_plane_data = | 402 *u_plane_data = |
398 *y_plane_data + | 403 *y_plane_data + |
399 VideoFrame::PlaneSize(format, VideoFrame::kYPlane, dimensions).GetArea(); | 404 VideoFrame::PlaneSize(format, VideoFrame::kYPlane, dimensions).GetArea(); |
400 *v_plane_data = | 405 *v_plane_data = |
401 *u_plane_data + | 406 *u_plane_data + |
402 VideoFrame::PlaneSize(format, VideoFrame::kUPlane, dimensions).GetArea(); | 407 VideoFrame::PlaneSize(format, VideoFrame::kUPlane, dimensions).GetArea(); |
403 return buffer; | |
404 } | 408 } |
405 | 409 |
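InitializeI420PlanePointers() relies on the pool buffer being one contiguous I420 allocation, so the U and V pointers are plain offsets from the Y pointer. A worked example, assuming 640x480 output dimensions:

    // Y plane: 640 * 480         = 307200 bytes, starts at data + 0
    // U plane: (640/2) * (480/2) =  76800 bytes, starts at data + 307200
    // V plane: (640/2) * (480/2) =  76800 bytes, starts at data + 384000
    // i.e. u_plane_data = data + PlaneSize(I420, kYPlane, dimensions).GetArea()
    //      v_plane_data = u_plane_data +
    //                     PlaneSize(I420, kUPlane, dimensions).GetArea()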
406 void VideoCaptureDeviceClient::OnIncomingCapturedY16Data( | 410 void VideoCaptureDeviceClient::OnIncomingCapturedY16Data( |
407 const uint8_t* data, | 411 const uint8_t* data, |
408 int length, | 412 int length, |
409 const VideoCaptureFormat& format, | 413 const VideoCaptureFormat& format, |
410 base::TimeTicks reference_time, | 414 base::TimeTicks reference_time, |
411 base::TimeDelta timestamp, | 415 base::TimeDelta timestamp, |
412 int frame_feedback_id) { | 416 int frame_feedback_id) { |
413 std::unique_ptr<Buffer> buffer( | 417 Buffer buffer = |
414 ReserveOutputBuffer(format.frame_size, media::PIXEL_FORMAT_Y16, | 418 ReserveOutputBuffer(format.frame_size, media::PIXEL_FORMAT_Y16, |
415 media::PIXEL_STORAGE_CPU, frame_feedback_id)); | 419 media::PIXEL_STORAGE_CPU, frame_feedback_id); |
416 // The input |length| can be greater than the required buffer size because of | 420 // The input |length| can be greater than the required buffer size because of |
417 // padding and/or alignment, but it cannot be smaller. | 421 // padding and/or alignment, but it cannot be smaller. |
418 DCHECK_GE(static_cast<size_t>(length), format.ImageAllocationSize()); | 422 DCHECK_GE(static_cast<size_t>(length), format.ImageAllocationSize()); |
419 #if DCHECK_IS_ON() | 423 #if DCHECK_IS_ON() |
420 dropped_frame_counter_ = buffer.get() ? 0 : dropped_frame_counter_ + 1; | 424 dropped_frame_counter_ = buffer.is_valid() ? 0 : dropped_frame_counter_ + 1; |
421 if (dropped_frame_counter_ >= kMaxDroppedFrames) | 425 if (dropped_frame_counter_ >= kMaxDroppedFrames) |
422 OnError(FROM_HERE, "Too many frames dropped"); | 426 OnError(FROM_HERE, "Too many frames dropped"); |
423 #endif | 427 #endif |
424 // Failed to reserve output buffer, so drop the frame. | 428 // Failed to reserve output buffer, so drop the frame. |
425 if (!buffer.get()) | 429 if (!buffer.is_valid()) |
426 return; | 430 return; |
427 memcpy(buffer->data(), data, length); | 431 auto buffer_access = buffer.handle_provider->GetHandleForInProcessAccess(); |
| 432 memcpy(buffer_access->data(), data, length); |
428 const VideoCaptureFormat output_format = | 433 const VideoCaptureFormat output_format = |
429 VideoCaptureFormat(format.frame_size, format.frame_rate, | 434 VideoCaptureFormat(format.frame_size, format.frame_rate, |
430 media::PIXEL_FORMAT_Y16, media::PIXEL_STORAGE_CPU); | 435 media::PIXEL_FORMAT_Y16, media::PIXEL_STORAGE_CPU); |
431 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, | 436 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, |
432 timestamp); | 437 timestamp); |
433 } | 438 } |
434 | 439 |
435 } // namespace media | 440 } // namespace media |