OLD | NEW |
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/capture/video/video_capture_device_client.h" | 5 #include "media/capture/video/video_capture_device_client.h" |
6 | 6 |
7 #include <algorithm> | 7 #include <algorithm> |
8 #include <utility> | 8 #include <utility> |
9 | 9 |
10 #include "base/bind.h" | 10 #include "base/bind.h" |
(...skipping 20 matching lines...) |
31 namespace { | 31 namespace { |
32 | 32 |
33 bool IsFormatSupported(media::VideoPixelFormat pixel_format) { | 33 bool IsFormatSupported(media::VideoPixelFormat pixel_format) { |
34 return (pixel_format == media::PIXEL_FORMAT_I420 || | 34 return (pixel_format == media::PIXEL_FORMAT_I420 || |
35 pixel_format == media::PIXEL_FORMAT_Y16); | 35 pixel_format == media::PIXEL_FORMAT_Y16); |
36 } | 36 } |
37 } | 37 } |
38 | 38 |
39 namespace media { | 39 namespace media { |
40 | 40 |
41 class BufferPoolProducerReservationReleaser | 41 // Class combining a Client::Buffer interface implementation and a pool buffer |
42 : public VideoCaptureDevice::Client::Buffer::ScopedAccessPermission { | 42 // implementation to guarantee proper cleanup on destruction on our side. |
| 43 class AutoReleaseBuffer : public media::VideoCaptureDevice::Client::Buffer { |
43 public: | 44 public: |
44 BufferPoolProducerReservationReleaser( | 45 AutoReleaseBuffer(scoped_refptr<VideoCaptureBufferPool> pool, |
45 scoped_refptr<VideoCaptureBufferPool> buffer_pool, | 46 int buffer_id, |
46 int buffer_id) | 47 int frame_feedback_id) |
47 : buffer_pool_(std::move(buffer_pool)), buffer_id_(buffer_id) {} | 48 : pool_(std::move(pool)), |
48 | 49 id_(buffer_id), |
49 ~BufferPoolProducerReservationReleaser() override { | 50 frame_feedback_id_(frame_feedback_id), |
50 buffer_pool_->RelinquishProducerReservation(buffer_id_); | 51 buffer_handle_(pool_->GetBufferHandle(buffer_id)) { |
| 52 DCHECK(pool_.get()); |
| 53 } |
| 54 int id() const override { return id_; } |
| 55 int frame_feedback_id() const override { return frame_feedback_id_; } |
| 56 gfx::Size dimensions() const override { return buffer_handle_->dimensions(); } |
| 57 size_t mapped_size() const override { return buffer_handle_->mapped_size(); } |
| 58 void* data(int plane) override { return buffer_handle_->data(plane); } |
| 59 #if defined(OS_POSIX) && !defined(OS_MACOSX) |
| 60 base::FileDescriptor AsPlatformFile() override { |
| 61 return buffer_handle_->AsPlatformFile(); |
| 62 } |
| 63 #endif |
| 64 bool IsBackedByVideoFrame() const override { |
| 65 return buffer_handle_->IsBackedByVideoFrame(); |
| 66 } |
| 67 scoped_refptr<VideoFrame> GetVideoFrame() override { |
| 68 return buffer_handle_->GetVideoFrame(); |
51 } | 69 } |
52 | 70 |
53 private: | 71 private: |
54 const scoped_refptr<VideoCaptureBufferPool> buffer_pool_; | 72 ~AutoReleaseBuffer() override { pool_->RelinquishProducerReservation(id_); } |
55 const int buffer_id_; | |
56 }; | |
57 | 73 |
58 class BufferPoolBufferHandleProvider | 74 const scoped_refptr<VideoCaptureBufferPool> pool_; |
59 : public VideoCaptureDevice::Client::Buffer::HandleProvider { | 75 const int id_; |
60 public: | 76 const int frame_feedback_id_; |
61 BufferPoolBufferHandleProvider( | 77 const std::unique_ptr<VideoCaptureBufferHandle> buffer_handle_; |
62 scoped_refptr<VideoCaptureBufferPool> buffer_pool, | |
63 int buffer_id) | |
64 : buffer_pool_(std::move(buffer_pool)), buffer_id_(buffer_id) {} | |
65 | |
66 // Implementation of HandleProvider: | |
67 mojo::ScopedSharedBufferHandle GetHandleForInterProcessTransit() override { | |
68 return buffer_pool_->GetHandleForInterProcessTransit(buffer_id_); | |
69 } | |
70 std::unique_ptr<VideoCaptureBufferHandle> GetHandleForInProcessAccess() | |
71 override { | |
72 return buffer_pool_->GetHandleForInProcessAccess(buffer_id_); | |
73 } | |
74 | |
75 private: | |
76 const scoped_refptr<VideoCaptureBufferPool> buffer_pool_; | |
77 const int buffer_id_; | |
78 }; | 78 }; |
79 | 79 |
80 VideoCaptureDeviceClient::VideoCaptureDeviceClient( | 80 VideoCaptureDeviceClient::VideoCaptureDeviceClient( |
81 std::unique_ptr<VideoFrameReceiver> receiver, | 81 std::unique_ptr<VideoFrameReceiver> receiver, |
82 scoped_refptr<VideoCaptureBufferPool> buffer_pool, | 82 scoped_refptr<VideoCaptureBufferPool> buffer_pool, |
83 const VideoCaptureJpegDecoderFactoryCB& jpeg_decoder_factory) | 83 const VideoCaptureJpegDecoderFactoryCB& jpeg_decoder_factory) |
84 : receiver_(std::move(receiver)), | 84 : receiver_(std::move(receiver)), |
85 jpeg_decoder_factory_callback_(jpeg_decoder_factory), | 85 jpeg_decoder_factory_callback_(jpeg_decoder_factory), |
86 external_jpeg_decoder_initialized_(false), | 86 external_jpeg_decoder_initialized_(false), |
87 buffer_pool_(std::move(buffer_pool)), | 87 buffer_pool_(std::move(buffer_pool)), |
88 last_captured_pixel_format_(media::PIXEL_FORMAT_UNKNOWN) {} | 88 last_captured_pixel_format_(media::PIXEL_FORMAT_UNKNOWN) {} |
89 | 89 |
90 VideoCaptureDeviceClient::~VideoCaptureDeviceClient() { | 90 VideoCaptureDeviceClient::~VideoCaptureDeviceClient() { |
91 // This should be on the platform auxiliary thread since | 91 // This should be on the platform auxiliary thread since |
92 // |external_jpeg_decoder_| needs to be destructed on the same thread as | 92 // |external_jpeg_decoder_| needs to be destructed on the same thread as |
93 // OnIncomingCapturedData. | 93 // OnIncomingCapturedData. |
94 } | 94 } |
95 | 95 |
96 // static | |
97 VideoCaptureDevice::Client::Buffer VideoCaptureDeviceClient::MakeBufferStruct( | |
98 scoped_refptr<VideoCaptureBufferPool> buffer_pool, | |
99 int buffer_id, | |
100 int frame_feedback_id) { | |
101 return Buffer( | |
102 buffer_id, frame_feedback_id, | |
103 base::MakeUnique<BufferPoolBufferHandleProvider>(buffer_pool, buffer_id), | |
104 base::MakeUnique<BufferPoolProducerReservationReleaser>(buffer_pool, | |
105 buffer_id)); | |
106 } | |
107 | |
108 void VideoCaptureDeviceClient::OnIncomingCapturedData( | 96 void VideoCaptureDeviceClient::OnIncomingCapturedData( |
109 const uint8_t* data, | 97 const uint8_t* data, |
110 int length, | 98 int length, |
111 const VideoCaptureFormat& format, | 99 const VideoCaptureFormat& format, |
112 int rotation, | 100 int rotation, |
113 base::TimeTicks reference_time, | 101 base::TimeTicks reference_time, |
114 base::TimeDelta timestamp, | 102 base::TimeDelta timestamp, |
115 int frame_feedback_id) { | 103 int frame_feedback_id) { |
116 TRACE_EVENT0("video", "VideoCaptureDeviceClient::OnIncomingCapturedData"); | 104 TRACE_EVENT0("video", "VideoCaptureDeviceClient::OnIncomingCapturedData"); |
117 DCHECK_EQ(media::PIXEL_STORAGE_CPU, format.pixel_storage); | 105 DCHECK_EQ(media::PIXEL_STORAGE_CPU, format.pixel_storage); |
(...skipping 35 matching lines...) |
153 << rotation; | 141 << rotation; |
154 libyuv::RotationMode rotation_mode = libyuv::kRotate0; | 142 libyuv::RotationMode rotation_mode = libyuv::kRotate0; |
155 if (rotation == 90) | 143 if (rotation == 90) |
156 rotation_mode = libyuv::kRotate90; | 144 rotation_mode = libyuv::kRotate90; |
157 else if (rotation == 180) | 145 else if (rotation == 180) |
158 rotation_mode = libyuv::kRotate180; | 146 rotation_mode = libyuv::kRotate180; |
159 else if (rotation == 270) | 147 else if (rotation == 270) |
160 rotation_mode = libyuv::kRotate270; | 148 rotation_mode = libyuv::kRotate270; |
161 | 149 |
162 const gfx::Size dimensions(destination_width, destination_height); | 150 const gfx::Size dimensions(destination_width, destination_height); |
163 Buffer buffer = | 151 uint8_t *y_plane_data, *u_plane_data, *v_plane_data; |
164 ReserveOutputBuffer(dimensions, media::PIXEL_FORMAT_I420, | 152 std::unique_ptr<Buffer> buffer(ReserveI420OutputBuffer( |
165 media::PIXEL_STORAGE_CPU, frame_feedback_id); | 153 dimensions, media::PIXEL_STORAGE_CPU, frame_feedback_id, &y_plane_data, |
| 154 &u_plane_data, &v_plane_data)); |
166 #if DCHECK_IS_ON() | 155 #if DCHECK_IS_ON() |
167 dropped_frame_counter_ = buffer.is_valid() ? 0 : dropped_frame_counter_ + 1; | 156 dropped_frame_counter_ = buffer.get() ? 0 : dropped_frame_counter_ + 1; |
168 if (dropped_frame_counter_ >= kMaxDroppedFrames) | 157 if (dropped_frame_counter_ >= kMaxDroppedFrames) |
169 OnError(FROM_HERE, "Too many frames dropped"); | 158 OnError(FROM_HERE, "Too many frames dropped"); |
170 #endif | 159 #endif |
171 // Failed to reserve I420 output buffer, so drop the frame. | 160 // Failed to reserve I420 output buffer, so drop the frame. |
172 if (!buffer.is_valid()) | 161 if (!buffer.get()) |
173 return; | 162 return; |
174 | 163 |
175 auto buffer_access = buffer.handle_provider()->GetHandleForInProcessAccess(); | |
176 uint8_t *y_plane_data, *u_plane_data, *v_plane_data; | |
177 InitializeI420PlanePointers(dimensions, buffer_access->data(), &y_plane_data, | |
178 &u_plane_data, &v_plane_data); | |
179 | |
180 const int yplane_stride = dimensions.width(); | 164 const int yplane_stride = dimensions.width(); |
181 const int uv_plane_stride = yplane_stride / 2; | 165 const int uv_plane_stride = yplane_stride / 2; |
182 int crop_x = 0; | 166 int crop_x = 0; |
183 int crop_y = 0; | 167 int crop_y = 0; |
184 libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY; | 168 libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY; |
185 | 169 |
186 bool flip = false; | 170 bool flip = false; |
187 switch (format.pixel_format) { | 171 switch (format.pixel_format) { |
188 case media::PIXEL_FORMAT_UNKNOWN: // Color format not set. | 172 case media::PIXEL_FORMAT_UNKNOWN: // Color format not set. |
189 break; | 173 break; |
(...skipping 86 matching lines...) |
276 return; | 260 return; |
277 } | 261 } |
278 | 262 |
279 const VideoCaptureFormat output_format = | 263 const VideoCaptureFormat output_format = |
280 VideoCaptureFormat(dimensions, format.frame_rate, | 264 VideoCaptureFormat(dimensions, format.frame_rate, |
281 media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU); | 265 media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU); |
282 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, | 266 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, |
283 timestamp); | 267 timestamp); |
284 } | 268 } |
285 | 269 |
286 media::VideoCaptureDevice::Client::Buffer | 270 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> |
287 VideoCaptureDeviceClient::ReserveOutputBuffer( | 271 VideoCaptureDeviceClient::ReserveOutputBuffer( |
288 const gfx::Size& frame_size, | 272 const gfx::Size& frame_size, |
289 media::VideoPixelFormat pixel_format, | 273 media::VideoPixelFormat pixel_format, |
290 media::VideoPixelStorage pixel_storage, | 274 media::VideoPixelStorage pixel_storage, |
291 int frame_feedback_id) { | 275 int frame_feedback_id) { |
292 DCHECK_GT(frame_size.width(), 0); | 276 DCHECK_GT(frame_size.width(), 0); |
293 DCHECK_GT(frame_size.height(), 0); | 277 DCHECK_GT(frame_size.height(), 0); |
294 DCHECK(IsFormatSupported(pixel_format)); | 278 DCHECK(IsFormatSupported(pixel_format)); |
295 | 279 |
| 280 // TODO(mcasas): For PIXEL_STORAGE_GPUMEMORYBUFFER, find a way to indicate if |
| 281 // it's a ShMem GMB or a DmaBuf GMB. |
296 int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId; | 282 int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId; |
297 const int buffer_id = | 283 const int buffer_id = |
298 buffer_pool_->ReserveForProducer(frame_size, pixel_format, pixel_storage, | 284 buffer_pool_->ReserveForProducer(frame_size, pixel_format, pixel_storage, |
299 frame_feedback_id, &buffer_id_to_drop); | 285 frame_feedback_id, &buffer_id_to_drop); |
300 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) | 286 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) |
301 receiver_->OnBufferDestroyed(buffer_id_to_drop); | 287 receiver_->OnBufferDestroyed(buffer_id_to_drop); |
302 if (buffer_id == VideoCaptureBufferPool::kInvalidId) | 288 if (buffer_id == VideoCaptureBufferPool::kInvalidId) |
303 return Buffer(); | 289 return nullptr; |
304 return MakeBufferStruct(buffer_pool_, buffer_id, frame_feedback_id); | 290 return base::WrapUnique<Buffer>( |
| 291 new AutoReleaseBuffer(buffer_pool_, buffer_id, frame_feedback_id)); |
305 } | 292 } |
306 | 293 |
307 void VideoCaptureDeviceClient::OnIncomingCapturedBuffer( | 294 void VideoCaptureDeviceClient::OnIncomingCapturedBuffer( |
308 Buffer buffer, | 295 std::unique_ptr<Buffer> buffer, |
309 const VideoCaptureFormat& format, | 296 const VideoCaptureFormat& format, |
310 base::TimeTicks reference_time, | 297 base::TimeTicks reference_time, |
311 base::TimeDelta timestamp) { | 298 base::TimeDelta timestamp) { |
312 OnIncomingCapturedBufferExt(std::move(buffer), format, reference_time, | 299 OnIncomingCapturedBufferExt(std::move(buffer), format, reference_time, |
313 timestamp, gfx::Rect(format.frame_size), | 300 timestamp, gfx::Rect(format.frame_size), |
314 VideoFrameMetadata()); | 301 VideoFrameMetadata()); |
315 } | 302 } |
316 | 303 |
317 void VideoCaptureDeviceClient::OnIncomingCapturedBufferExt( | 304 void VideoCaptureDeviceClient::OnIncomingCapturedBufferExt( |
318 Buffer buffer, | 305 std::unique_ptr<Buffer> buffer, |
319 const VideoCaptureFormat& format, | 306 const VideoCaptureFormat& format, |
320 base::TimeTicks reference_time, | 307 base::TimeTicks reference_time, |
321 base::TimeDelta timestamp, | 308 base::TimeDelta timestamp, |
322 gfx::Rect visible_rect, | 309 gfx::Rect visible_rect, |
323 const VideoFrameMetadata& additional_metadata) { | 310 const VideoFrameMetadata& additional_metadata) { |
324 auto buffer_mojo_handle = | 311 const int buffer_id = buffer->id(); |
325 buffer_pool_->GetHandleForInterProcessTransit(buffer.id()); | 312 |
| 313 auto buffer_mojo_handle = buffer_pool_->GetHandleForTransit(buffer_id); |
326 base::SharedMemoryHandle memory_handle; | 314 base::SharedMemoryHandle memory_handle; |
327 size_t memory_size = 0; | 315 size_t memory_size = 0; |
328 bool read_only_flag = false; | 316 bool read_only_flag = false; |
329 const MojoResult unwrap_result_code = mojo::UnwrapSharedMemoryHandle( | 317 const MojoResult unwrap_result_code = mojo::UnwrapSharedMemoryHandle( |
330 std::move(buffer_mojo_handle), &memory_handle, &memory_size, | 318 std::move(buffer_mojo_handle), &memory_handle, &memory_size, |
331 &read_only_flag); | 319 &read_only_flag); |
332 DCHECK_EQ(MOJO_RESULT_OK, unwrap_result_code); | 320 DCHECK_EQ(MOJO_RESULT_OK, unwrap_result_code); |
333 | 321 |
334 auto buffer_access = buffer.handle_provider()->GetHandleForInProcessAccess(); | |
335 scoped_refptr<media::VideoFrame> frame = | 322 scoped_refptr<media::VideoFrame> frame = |
336 media::VideoFrame::WrapExternalSharedMemory( | 323 media::VideoFrame::WrapExternalSharedMemory( |
337 format.pixel_format, // format | 324 format.pixel_format, // format |
338 format.frame_size, // coded_size | 325 format.frame_size, // coded_size |
339 visible_rect, // visible_rect | 326 visible_rect, // visible_rect |
340 format.frame_size, // natural_size | 327 format.frame_size, // natural_size |
341 buffer_access->data(), // data | 328 static_cast<uint8_t*>(buffer->data()), // data |
342 buffer_access->mapped_size(), // data_size | 329 buffer->mapped_size(), // data_size |
343 memory_handle, // handle | 330 memory_handle, // handle |
344 0, // shared_memory_offset | 331 0, // shared_memory_offset |
345 timestamp); // timestamp | 332 timestamp); // timestamp |
346 frame->metadata()->MergeMetadataFrom(&additional_metadata); | 333 frame->metadata()->MergeMetadataFrom(&additional_metadata); |
347 frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE, | 334 frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE, |
348 format.frame_rate); | 335 format.frame_rate); |
349 frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME, | 336 frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME, |
350 reference_time); | 337 reference_time); |
351 | 338 |
352 receiver_->OnIncomingCapturedVideoFrame(std::move(buffer), std::move(frame)); | 339 receiver_->OnIncomingCapturedVideoFrame(std::move(buffer), std::move(frame)); |
353 } | 340 } |
354 | 341 |
355 media::VideoCaptureDevice::Client::Buffer | 342 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> |
356 VideoCaptureDeviceClient::ResurrectLastOutputBuffer( | 343 VideoCaptureDeviceClient::ResurrectLastOutputBuffer( |
357 const gfx::Size& dimensions, | 344 const gfx::Size& dimensions, |
358 media::VideoPixelFormat format, | 345 media::VideoPixelFormat format, |
359 media::VideoPixelStorage storage, | 346 media::VideoPixelStorage storage, |
360 int new_frame_feedback_id) { | 347 int new_frame_feedback_id) { |
361 const int buffer_id = | 348 const int buffer_id = |
362 buffer_pool_->ResurrectLastForProducer(dimensions, format, storage); | 349 buffer_pool_->ResurrectLastForProducer(dimensions, format, storage); |
363 if (buffer_id == VideoCaptureBufferPool::kInvalidId) | 350 if (buffer_id == VideoCaptureBufferPool::kInvalidId) |
364 return Buffer(); | 351 return nullptr; |
365 return MakeBufferStruct(buffer_pool_, buffer_id, new_frame_feedback_id); | 352 return base::WrapUnique<Buffer>( |
| 353 new AutoReleaseBuffer(buffer_pool_, buffer_id, new_frame_feedback_id)); |
366 } | 354 } |
367 | 355 |
368 void VideoCaptureDeviceClient::OnError( | 356 void VideoCaptureDeviceClient::OnError( |
369 const tracked_objects::Location& from_here, | 357 const tracked_objects::Location& from_here, |
370 const std::string& reason) { | 358 const std::string& reason) { |
371 const std::string log_message = base::StringPrintf( | 359 const std::string log_message = base::StringPrintf( |
372 "error@ %s, %s, OS message: %s", from_here.ToString().c_str(), | 360 "error@ %s, %s, OS message: %s", from_here.ToString().c_str(), |
373 reason.c_str(), | 361 reason.c_str(), |
374 logging::SystemErrorCodeToString(logging::GetLastSystemErrorCode()) | 362 logging::SystemErrorCodeToString(logging::GetLastSystemErrorCode()) |
375 .c_str()); | 363 .c_str()); |
376 DLOG(ERROR) << log_message; | 364 DLOG(ERROR) << log_message; |
377 OnLog(log_message); | 365 OnLog(log_message); |
378 receiver_->OnError(); | 366 receiver_->OnError(); |
379 } | 367 } |
380 | 368 |
381 void VideoCaptureDeviceClient::OnLog(const std::string& message) { | 369 void VideoCaptureDeviceClient::OnLog(const std::string& message) { |
382 receiver_->OnLog(message); | 370 receiver_->OnLog(message); |
383 } | 371 } |
384 | 372 |
385 double VideoCaptureDeviceClient::GetBufferPoolUtilization() const { | 373 double VideoCaptureDeviceClient::GetBufferPoolUtilization() const { |
386 return buffer_pool_->GetBufferPoolUtilization(); | 374 return buffer_pool_->GetBufferPoolUtilization(); |
387 } | 375 } |
388 | 376 |
389 void VideoCaptureDeviceClient::InitializeI420PlanePointers( | 377 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> |
| 378 VideoCaptureDeviceClient::ReserveI420OutputBuffer( |
390 const gfx::Size& dimensions, | 379 const gfx::Size& dimensions, |
391 uint8_t* const data, | 380 media::VideoPixelStorage storage, |
| 381 int frame_feedback_id, |
392 uint8_t** y_plane_data, | 382 uint8_t** y_plane_data, |
393 uint8_t** u_plane_data, | 383 uint8_t** u_plane_data, |
394 uint8_t** v_plane_data) { | 384 uint8_t** v_plane_data) { |
| 385 DCHECK(storage == media::PIXEL_STORAGE_CPU); |
395 DCHECK(dimensions.height()); | 386 DCHECK(dimensions.height()); |
396 DCHECK(dimensions.width()); | 387 DCHECK(dimensions.width()); |
397 | 388 |
398 const media::VideoPixelFormat format = media::PIXEL_FORMAT_I420; | 389 const media::VideoPixelFormat format = media::PIXEL_FORMAT_I420; |
| 390 std::unique_ptr<Buffer> buffer(ReserveOutputBuffer( |
| 391 dimensions, media::PIXEL_FORMAT_I420, storage, frame_feedback_id)); |
| 392 if (!buffer) |
| 393 return std::unique_ptr<Buffer>(); |
399 // TODO(emircan): See http://crbug.com/521068, move this pointer | 394 // TODO(emircan): See http://crbug.com/521068, move this pointer |
400 // arithmetic inside Buffer::data() when this bug is resolved. | 395 // arithmetic inside Buffer::data() when this bug is resolved. |
401 *y_plane_data = data; | 396 *y_plane_data = reinterpret_cast<uint8_t*>(buffer->data()); |
402 *u_plane_data = | 397 *u_plane_data = |
403 *y_plane_data + | 398 *y_plane_data + |
404 VideoFrame::PlaneSize(format, VideoFrame::kYPlane, dimensions).GetArea(); | 399 VideoFrame::PlaneSize(format, VideoFrame::kYPlane, dimensions).GetArea(); |
405 *v_plane_data = | 400 *v_plane_data = |
406 *u_plane_data + | 401 *u_plane_data + |
407 VideoFrame::PlaneSize(format, VideoFrame::kUPlane, dimensions).GetArea(); | 402 VideoFrame::PlaneSize(format, VideoFrame::kUPlane, dimensions).GetArea(); |
| 403 return buffer; |
408 } | 404 } |
409 | 405 |
410 void VideoCaptureDeviceClient::OnIncomingCapturedY16Data( | 406 void VideoCaptureDeviceClient::OnIncomingCapturedY16Data( |
411 const uint8_t* data, | 407 const uint8_t* data, |
412 int length, | 408 int length, |
413 const VideoCaptureFormat& format, | 409 const VideoCaptureFormat& format, |
414 base::TimeTicks reference_time, | 410 base::TimeTicks reference_time, |
415 base::TimeDelta timestamp, | 411 base::TimeDelta timestamp, |
416 int frame_feedback_id) { | 412 int frame_feedback_id) { |
417 Buffer buffer = | 413 std::unique_ptr<Buffer> buffer( |
418 ReserveOutputBuffer(format.frame_size, media::PIXEL_FORMAT_Y16, | 414 ReserveOutputBuffer(format.frame_size, media::PIXEL_FORMAT_Y16, |
419 media::PIXEL_STORAGE_CPU, frame_feedback_id); | 415 media::PIXEL_STORAGE_CPU, frame_feedback_id)); |
420 // The input |length| can be greater than the required buffer size because of | 416 // The input |length| can be greater than the required buffer size because of |
421 // paddings and/or alignments, but it cannot be smaller. | 417 // paddings and/or alignments, but it cannot be smaller. |
422 DCHECK_GE(static_cast<size_t>(length), format.ImageAllocationSize()); | 418 DCHECK_GE(static_cast<size_t>(length), format.ImageAllocationSize()); |
423 #if DCHECK_IS_ON() | 419 #if DCHECK_IS_ON() |
424 dropped_frame_counter_ = buffer.is_valid() ? 0 : dropped_frame_counter_ + 1; | 420 dropped_frame_counter_ = buffer.get() ? 0 : dropped_frame_counter_ + 1; |
425 if (dropped_frame_counter_ >= kMaxDroppedFrames) | 421 if (dropped_frame_counter_ >= kMaxDroppedFrames) |
426 OnError(FROM_HERE, "Too many frames dropped"); | 422 OnError(FROM_HERE, "Too many frames dropped"); |
427 #endif | 423 #endif |
428 // Failed to reserve output buffer, so drop the frame. | 424 // Failed to reserve output buffer, so drop the frame. |
429 if (!buffer.is_valid()) | 425 if (!buffer.get()) |
430 return; | 426 return; |
431 auto buffer_access = buffer.handle_provider()->GetHandleForInProcessAccess(); | 427 memcpy(buffer->data(), data, length); |
432 memcpy(buffer_access->data(), data, length); | |
433 const VideoCaptureFormat output_format = | 428 const VideoCaptureFormat output_format = |
434 VideoCaptureFormat(format.frame_size, format.frame_rate, | 429 VideoCaptureFormat(format.frame_size, format.frame_rate, |
435 media::PIXEL_FORMAT_Y16, media::PIXEL_STORAGE_CPU); | 430 media::PIXEL_FORMAT_Y16, media::PIXEL_STORAGE_CPU); |
436 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, | 431 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, |
437 timestamp); | 432 timestamp); |
438 } | 433 } |
439 | 434 |
440 } // namespace media | 435 } // namespace media |
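The central change in the NEW column is buffer ownership: ReserveOutputBuffer() now returns a std::unique_ptr<Buffer> backed by AutoReleaseBuffer, whose destructor calls RelinquishProducerReservation(), so the pool slot is freed automatically once the last holder drops the buffer. Below is a minimal, self-contained sketch of that RAII pattern; it is not Chromium code, and SimpleBufferPool and PooledBuffer are hypothetical stand-ins for VideoCaptureBufferPool and AutoReleaseBuffer.

#include <cstdio>
#include <memory>
#include <vector>

// Hypothetical pool: tracks which buffer ids are reserved by a producer.
class SimpleBufferPool {
 public:
  explicit SimpleBufferPool(int count) : in_use_(count, false) {}

  // Returns a free buffer id, or -1 if every buffer is currently reserved.
  int ReserveForProducer() {
    for (size_t i = 0; i < in_use_.size(); ++i) {
      if (!in_use_[i]) {
        in_use_[i] = true;
        return static_cast<int>(i);
      }
    }
    return -1;
  }

  void RelinquishProducerReservation(int id) { in_use_[id] = false; }

 private:
  std::vector<bool> in_use_;
};

// Analogue of AutoReleaseBuffer: remembers the pool and buffer id, and gives
// the reservation back when the object is destroyed.
class PooledBuffer {
 public:
  PooledBuffer(SimpleBufferPool* pool, int id) : pool_(pool), id_(id) {}
  ~PooledBuffer() { pool_->RelinquishProducerReservation(id_); }
  int id() const { return id_; }

 private:
  SimpleBufferPool* const pool_;
  const int id_;
};

// Analogue of VideoCaptureDeviceClient::ReserveOutputBuffer(): a null return
// means the caller drops the frame, as OnIncomingCapturedData() does above.
std::unique_ptr<PooledBuffer> ReserveOutputBuffer(SimpleBufferPool* pool) {
  const int id = pool->ReserveForProducer();
  if (id == -1)
    return nullptr;
  return std::unique_ptr<PooledBuffer>(new PooledBuffer(pool, id));
}

int main() {
  SimpleBufferPool pool(1);
  {
    std::unique_ptr<PooledBuffer> buffer = ReserveOutputBuffer(&pool);
    std::printf("reserved id %d\n", buffer->id());
    std::printf("second reserve: %s\n",
                ReserveOutputBuffer(&pool) ? "ok" : "pool exhausted");
  }  // ~PooledBuffer relinquishes the reservation here.
  std::printf("after release: %s\n",
              ReserveOutputBuffer(&pool) ? "ok" : "pool exhausted");
  return 0;
}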
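ReserveI420OutputBuffer() (NEW column, lines 377-404) lays the three I420 planes out in one contiguous allocation and derives the U and V pointers by offsetting past the preceding plane areas. A short sketch of that arithmetic, assuming even width and height as PIXEL_FORMAT_I420 requires (SplitI420 is a hypothetical helper, not part of the patch):

#include <cstddef>
#include <cstdint>

struct I420Planes {
  uint8_t* y;
  uint8_t* u;
  uint8_t* v;
};

// |data| must point at width * height * 3 / 2 bytes: a Y plane of
// width * height bytes followed by U and V planes of (width/2) * (height/2)
// bytes each, matching the pointer math in ReserveI420OutputBuffer().
I420Planes SplitI420(uint8_t* data, int width, int height) {
  const size_t y_area = static_cast<size_t>(width) * height;
  const size_t uv_area = y_area / 4;
  I420Planes planes;
  planes.y = data;
  planes.u = planes.y + y_area;
  planes.v = planes.u + uv_area;
  return planes;
}

int main() {
  // 4x4 I420 frame: 16 Y bytes + 4 U bytes + 4 V bytes.
  uint8_t frame[4 * 4 * 3 / 2] = {};
  I420Planes planes = SplitI420(frame, 4, 4);
  return (planes.u - planes.y == 16 && planes.v - planes.u == 4) ? 0 : 1;
}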