OLD | NEW |
---|---|
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/capture/video/video_capture_device_client.h" | 5 #include "media/capture/video/video_capture_device_client.h" |
6 | 6 |
7 #include <algorithm> | 7 #include <algorithm> |
8 #include <utility> | 8 #include <utility> |
9 | 9 |
10 #include "base/bind.h" | 10 #include "base/bind.h" |
11 #include "base/command_line.h" | 11 #include "base/command_line.h" |
12 #include "base/location.h" | 12 #include "base/location.h" |
13 #include "base/memory/ptr_util.h" | 13 #include "base/memory/ptr_util.h" |
14 #include "base/strings/stringprintf.h" | 14 #include "base/strings/stringprintf.h" |
15 #include "base/trace_event/trace_event.h" | 15 #include "base/trace_event/trace_event.h" |
16 #include "build/build_config.h" | 16 #include "build/build_config.h" |
17 #include "media/base/bind_to_current_loop.h" | 17 #include "media/base/bind_to_current_loop.h" |
18 #include "media/base/video_capture_types.h" | 18 #include "media/base/video_capture_types.h" |
19 #include "media/base/video_frame.h" | 19 #include "media/base/video_frame.h" |
20 #include "media/capture/video/video_capture_buffer_handle.h" | 20 #include "media/capture/video/video_capture_buffer_handle.h" |
21 #include "media/capture/video/video_capture_buffer_pool.h" | 21 #include "media/capture/video/video_capture_buffer_pool.h" |
22 #include "media/capture/video/video_capture_jpeg_decoder.h" | 22 #include "media/capture/video/video_capture_jpeg_decoder.h" |
23 #include "media/capture/video/video_frame_receiver.h" | 23 #include "media/capture/video/video_frame_receiver.h" |
24 #include "third_party/libyuv/include/libyuv.h" | 24 #include "third_party/libyuv/include/libyuv.h" |
25 | 25 |
26 using media::VideoCaptureFormat; | 26 using media::VideoCaptureFormat; |
27 using media::VideoFrame; | 27 using media::VideoFrame; |
28 using media::VideoFrameMetadata; | 28 using media::VideoFrameMetadata; |
29 | 29 |
30 namespace { | |
31 | |
32 bool IsFormatSupported(media::VideoPixelFormat pixel_format) { | |
33 return (pixel_format == media::PIXEL_FORMAT_I420 || | |
34 pixel_format == media::PIXEL_FORMAT_Y16); | |
35 } | |
36 } | |
37 | |
30 namespace media { | 38 namespace media { |
31 | 39 |
32 // Class combining a Client::Buffer interface implementation and a pool buffer | 40 // Class combining a Client::Buffer interface implementation and a pool buffer |
33 // implementation to guarantee proper cleanup on destruction on our side. | 41 // implementation to guarantee proper cleanup on destruction on our side. |
34 class AutoReleaseBuffer : public media::VideoCaptureDevice::Client::Buffer { | 42 class AutoReleaseBuffer : public media::VideoCaptureDevice::Client::Buffer { |
35 public: | 43 public: |
36 AutoReleaseBuffer(const scoped_refptr<VideoCaptureBufferPool>& pool, | 44 AutoReleaseBuffer(const scoped_refptr<VideoCaptureBufferPool>& pool, |
37 int buffer_id) | 45 int buffer_id) |
38 : id_(buffer_id), | 46 : id_(buffer_id), |
39 pool_(pool), | 47 pool_(pool), |
(...skipping 59 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
99 !external_jpeg_decoder_initialized_) { | 107 !external_jpeg_decoder_initialized_) { |
100 external_jpeg_decoder_initialized_ = true; | 108 external_jpeg_decoder_initialized_ = true; |
101 external_jpeg_decoder_ = jpeg_decoder_factory_callback_.Run(); | 109 external_jpeg_decoder_ = jpeg_decoder_factory_callback_.Run(); |
102 external_jpeg_decoder_->Initialize(); | 110 external_jpeg_decoder_->Initialize(); |
103 } | 111 } |
104 } | 112 } |
105 | 113 |
106 if (!frame_format.IsValid()) | 114 if (!frame_format.IsValid()) |
107 return; | 115 return; |
108 | 116 |
117 // The input |length| can be greater than the required buffer size because of | |
118 // paddings and/or alignments, but it cannot be smaller. | |
119 DCHECK_GE(static_cast<size_t>(length), frame_format.ImageAllocationSize()); | |
mcasas
2016/10/27 00:44:42
This line is duplicated in l.244.
aleksandar.stojiljkovic
2016/10/27 21:27:22
Done.
| |
120 | |
121 const bool convert_to_I420 = | |
122 frame_format.pixel_format != media::PIXEL_FORMAT_Y16; | |
mcasas
2016/10/27 00:44:42
There's too many special-cases for Y16 and
anyway
aleksandar.stojiljkovic
2016/10/27 21:27:22
Done.
| |
123 | |
109 // |chopped_{width,height}| and |new_unrotated_{width,height}| are the lowest | 124 // |chopped_{width,height}| and |new_unrotated_{width,height}| are the lowest |
110 // bit decomposition of {width, height}, grabbing the odd and even parts. | 125 // bit decomposition of {width, height}, grabbing the odd and even parts. |
111 const int chopped_width = frame_format.frame_size.width() & 1; | 126 const int chopped_width = frame_format.frame_size.width() & 1; |
112 const int chopped_height = frame_format.frame_size.height() & 1; | 127 const int chopped_height = frame_format.frame_size.height() & 1; |
113 const int new_unrotated_width = frame_format.frame_size.width() & ~1; | 128 const int new_unrotated_width = convert_to_I420 |
114 const int new_unrotated_height = frame_format.frame_size.height() & ~1; | 129 ? (frame_format.frame_size.width() & ~1) |
130 : frame_format.frame_size.width(); | |
131 const int new_unrotated_height = convert_to_I420 | |
132 ? (frame_format.frame_size.height() & ~1) | |
133 : frame_format.frame_size.height(); | |
115 | 134 |
116 int destination_width = new_unrotated_width; | 135 int destination_width = new_unrotated_width; |
117 int destination_height = new_unrotated_height; | 136 int destination_height = new_unrotated_height; |
118 if (rotation == 90 || rotation == 270) | 137 if (rotation == 90 || rotation == 270) |
119 std::swap(destination_width, destination_height); | 138 std::swap(destination_width, destination_height); |
120 | 139 |
121 DCHECK_EQ(0, rotation % 90) << " Rotation must be a multiple of 90, now: " | 140 DCHECK_EQ(0, rotation % 90) << " Rotation must be a multiple of 90, now: " |
122 << rotation; | 141 << rotation; |
123 libyuv::RotationMode rotation_mode = libyuv::kRotate0; | 142 libyuv::RotationMode rotation_mode = libyuv::kRotate0; |
124 if (rotation == 90) | 143 if (rotation == 90) |
125 rotation_mode = libyuv::kRotate90; | 144 rotation_mode = libyuv::kRotate90; |
126 else if (rotation == 180) | 145 else if (rotation == 180) |
127 rotation_mode = libyuv::kRotate180; | 146 rotation_mode = libyuv::kRotate180; |
128 else if (rotation == 270) | 147 else if (rotation == 270) |
129 rotation_mode = libyuv::kRotate270; | 148 rotation_mode = libyuv::kRotate270; |
130 | 149 |
131 const gfx::Size dimensions(destination_width, destination_height); | 150 const gfx::Size dimensions(destination_width, destination_height); |
132 uint8_t *y_plane_data, *u_plane_data, *v_plane_data; | 151 uint8_t* y_plane_data = nullptr; |
152 uint8_t* u_plane_data = nullptr; | |
153 uint8_t* v_plane_data = nullptr; | |
154 | |
133 std::unique_ptr<Buffer> buffer( | 155 std::unique_ptr<Buffer> buffer( |
134 ReserveI420OutputBuffer(dimensions, media::PIXEL_STORAGE_CPU, | 156 convert_to_I420 |
135 &y_plane_data, &u_plane_data, &v_plane_data)); | 157 ? ReserveI420OutputBuffer(dimensions, media::PIXEL_STORAGE_CPU, |
158 &y_plane_data, &u_plane_data, &v_plane_data) | |
159 : ReserveOutputBuffer(dimensions, frame_format.pixel_format, | |
160 media::PIXEL_STORAGE_CPU)); | |
136 #if DCHECK_IS_ON() | 161 #if DCHECK_IS_ON() |
137 dropped_frame_counter_ = buffer.get() ? 0 : dropped_frame_counter_ + 1; | 162 dropped_frame_counter_ = buffer.get() ? 0 : dropped_frame_counter_ + 1; |
138 if (dropped_frame_counter_ >= kMaxDroppedFrames) | 163 if (dropped_frame_counter_ >= kMaxDroppedFrames) |
139 OnError(FROM_HERE, "Too many frames dropped"); | 164 OnError(FROM_HERE, "Too many frames dropped"); |
140 #endif | 165 #endif |
141 // Failed to reserve I420 output buffer, so drop the frame. | 166 // Failed to reserve output buffer, so drop the frame. |
142 if (!buffer.get()) | 167 if (!buffer.get()) |
143 return; | 168 return; |
144 | 169 |
145 const int yplane_stride = dimensions.width(); | 170 const int yplane_stride = dimensions.width(); |
146 const int uv_plane_stride = yplane_stride / 2; | 171 const int uv_plane_stride = yplane_stride / 2; |
147 int crop_x = 0; | 172 int crop_x = 0; |
148 int crop_y = 0; | 173 int crop_y = 0; |
149 libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY; | 174 libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY; |
150 | 175 |
151 bool flip = false; | 176 bool flip = false; |
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
201 // platforms. | 226 // platforms. |
202 #if defined(OS_WIN) | 227 #if defined(OS_WIN) |
203 flip = true; | 228 flip = true; |
204 #endif | 229 #endif |
205 case media::PIXEL_FORMAT_ARGB: | 230 case media::PIXEL_FORMAT_ARGB: |
206 origin_colorspace = libyuv::FOURCC_ARGB; | 231 origin_colorspace = libyuv::FOURCC_ARGB; |
207 break; | 232 break; |
208 case media::PIXEL_FORMAT_MJPEG: | 233 case media::PIXEL_FORMAT_MJPEG: |
209 origin_colorspace = libyuv::FOURCC_MJPG; | 234 origin_colorspace = libyuv::FOURCC_MJPG; |
210 break; | 235 break; |
236 case media::PIXEL_FORMAT_Y16: | |
237 break; | |
211 default: | 238 default: |
212 NOTREACHED(); | 239 NOTREACHED(); |
213 } | 240 } |
214 | 241 |
215 // The input |length| can be greater than the required buffer size because of | 242 // The input |length| can be greater than the required buffer size because of |
216 // paddings and/or alignments, but it cannot be smaller. | 243 // paddings and/or alignments, but it cannot be smaller. |
217 DCHECK_GE(static_cast<size_t>(length), frame_format.ImageAllocationSize()); | 244 DCHECK_GE(static_cast<size_t>(length), frame_format.ImageAllocationSize()); |
218 | 245 |
219 if (external_jpeg_decoder_) { | 246 if (external_jpeg_decoder_) { |
220 const VideoCaptureJpegDecoder::STATUS status = | 247 const VideoCaptureJpegDecoder::STATUS status = |
221 external_jpeg_decoder_->GetStatus(); | 248 external_jpeg_decoder_->GetStatus(); |
222 if (status == VideoCaptureJpegDecoder::FAILED) { | 249 if (status == VideoCaptureJpegDecoder::FAILED) { |
223 external_jpeg_decoder_.reset(); | 250 external_jpeg_decoder_.reset(); |
224 } else if (status == VideoCaptureJpegDecoder::INIT_PASSED && | 251 } else if (status == VideoCaptureJpegDecoder::INIT_PASSED && |
225 frame_format.pixel_format == media::PIXEL_FORMAT_MJPEG && | 252 frame_format.pixel_format == media::PIXEL_FORMAT_MJPEG && |
226 rotation == 0 && !flip) { | 253 rotation == 0 && !flip) { |
227 external_jpeg_decoder_->DecodeCapturedData(data, length, frame_format, | 254 external_jpeg_decoder_->DecodeCapturedData(data, length, frame_format, |
228 reference_time, timestamp, | 255 reference_time, timestamp, |
229 std::move(buffer)); | 256 std::move(buffer)); |
230 return; | 257 return; |
231 } | 258 } |
232 } | 259 } |
233 | 260 |
234 if (libyuv::ConvertToI420(data, length, y_plane_data, yplane_stride, | 261 VideoPixelFormat pixel_format = frame_format.pixel_format; |
235 u_plane_data, uv_plane_stride, v_plane_data, | 262 if (convert_to_I420) { |
236 uv_plane_stride, crop_x, crop_y, | 263 pixel_format = media::PIXEL_FORMAT_I420; |
237 frame_format.frame_size.width(), | 264 if (libyuv::ConvertToI420( |
238 (flip ? -1 : 1) * frame_format.frame_size.height(), | 265 data, length, y_plane_data, yplane_stride, u_plane_data, |
239 new_unrotated_width, new_unrotated_height, | 266 uv_plane_stride, v_plane_data, uv_plane_stride, crop_x, crop_y, |
240 rotation_mode, origin_colorspace) != 0) { | 267 frame_format.frame_size.width(), |
241 DLOG(WARNING) << "Failed to convert buffer's pixel format to I420 from " | 268 (flip ? -1 : 1) * frame_format.frame_size.height(), |
242 << media::VideoPixelFormatToString(frame_format.pixel_format); | 269 new_unrotated_width, new_unrotated_height, rotation_mode, |
243 return; | 270 origin_colorspace) != 0) { |
271 DLOG(WARNING) << "Failed to convert buffer's pixel format to I420 from " | |
272 << media::VideoPixelFormatToString( | |
273 frame_format.pixel_format); | |
274 return; | |
275 } | |
276 } else { | |
277 memcpy(buffer->data(), data, length); | |
244 } | 278 } |
245 | 279 |
246 const VideoCaptureFormat output_format = | 280 const VideoCaptureFormat output_format = |
247 VideoCaptureFormat(dimensions, frame_format.frame_rate, | 281 VideoCaptureFormat(dimensions, frame_format.frame_rate, pixel_format, |
248 media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU); | 282 media::PIXEL_STORAGE_CPU); |
249 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, | 283 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, |
250 timestamp); | 284 timestamp); |
251 } | 285 } |
252 | 286 |
253 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> | 287 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> |
254 VideoCaptureDeviceClient::ReserveOutputBuffer( | 288 VideoCaptureDeviceClient::ReserveOutputBuffer( |
255 const gfx::Size& frame_size, | 289 const gfx::Size& frame_size, |
256 media::VideoPixelFormat pixel_format, | 290 media::VideoPixelFormat pixel_format, |
257 media::VideoPixelStorage pixel_storage) { | 291 media::VideoPixelStorage pixel_storage) { |
258 DCHECK_GT(frame_size.width(), 0); | 292 DCHECK_GT(frame_size.width(), 0); |
259 DCHECK_GT(frame_size.height(), 0); | 293 DCHECK_GT(frame_size.height(), 0); |
260 // Currently, only I420 pixel format is supported. | 294 DCHECK(IsFormatSupported(pixel_format)); |
261 DCHECK_EQ(media::PIXEL_FORMAT_I420, pixel_format); | |
262 | 295 |
263 // TODO(mcasas): For PIXEL_STORAGE_GPUMEMORYBUFFER, find a way to indicate if | 296 // TODO(mcasas): For PIXEL_STORAGE_GPUMEMORYBUFFER, find a way to indicate if |
264 // it's a ShMem GMB or a DmaBuf GMB. | 297 // it's a ShMem GMB or a DmaBuf GMB. |
265 int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId; | 298 int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId; |
266 const int buffer_id = buffer_pool_->ReserveForProducer( | 299 const int buffer_id = buffer_pool_->ReserveForProducer( |
267 frame_size, pixel_format, pixel_storage, &buffer_id_to_drop); | 300 frame_size, pixel_format, pixel_storage, &buffer_id_to_drop); |
268 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) | 301 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) |
269 receiver_->OnBufferDestroyed(buffer_id_to_drop); | 302 receiver_->OnBufferDestroyed(buffer_id_to_drop); |
270 if (buffer_id == VideoCaptureBufferPool::kInvalidId) | 303 if (buffer_id == VideoCaptureBufferPool::kInvalidId) |
271 return nullptr; | 304 return nullptr; |
272 return base::WrapUnique<Buffer>( | 305 return base::WrapUnique<Buffer>( |
273 new AutoReleaseBuffer(buffer_pool_, buffer_id)); | 306 new AutoReleaseBuffer(buffer_pool_, buffer_id)); |
274 } | 307 } |
275 | 308 |
276 void VideoCaptureDeviceClient::OnIncomingCapturedBuffer( | 309 void VideoCaptureDeviceClient::OnIncomingCapturedBuffer( |
277 std::unique_ptr<Buffer> buffer, | 310 std::unique_ptr<Buffer> buffer, |
278 const VideoCaptureFormat& frame_format, | 311 const VideoCaptureFormat& frame_format, |
279 base::TimeTicks reference_time, | 312 base::TimeTicks reference_time, |
280 base::TimeDelta timestamp) { | 313 base::TimeDelta timestamp) { |
281 // Currently, only I420 pixel format is supported. | 314 DCHECK(IsFormatSupported(frame_format.pixel_format)); |
282 DCHECK_EQ(media::PIXEL_FORMAT_I420, frame_format.pixel_format); | |
283 DCHECK_EQ(media::PIXEL_STORAGE_CPU, frame_format.pixel_storage); | 315 DCHECK_EQ(media::PIXEL_STORAGE_CPU, frame_format.pixel_storage); |
284 | 316 |
285 scoped_refptr<VideoFrame> frame; | 317 scoped_refptr<VideoFrame> frame; |
286 if (buffer->IsBackedByVideoFrame()) { | 318 if (buffer->IsBackedByVideoFrame()) { |
287 frame = buffer->GetVideoFrame(); | 319 frame = buffer->GetVideoFrame(); |
288 frame->set_timestamp(timestamp); | 320 frame->set_timestamp(timestamp); |
289 } else { | 321 } else { |
290 frame = VideoFrame::WrapExternalSharedMemory( | 322 frame = VideoFrame::WrapExternalSharedMemory( |
291 media::PIXEL_FORMAT_I420, frame_format.frame_size, | 323 frame_format.pixel_format, frame_format.frame_size, |
292 gfx::Rect(frame_format.frame_size), frame_format.frame_size, | 324 gfx::Rect(frame_format.frame_size), frame_format.frame_size, |
293 reinterpret_cast<uint8_t*>(buffer->data()), | 325 reinterpret_cast<uint8_t*>(buffer->data()), |
294 VideoFrame::AllocationSize(media::PIXEL_FORMAT_I420, | 326 VideoFrame::AllocationSize(frame_format.pixel_format, |
295 frame_format.frame_size), | 327 frame_format.frame_size), |
296 base::SharedMemory::NULLHandle(), 0u, timestamp); | 328 base::SharedMemory::NULLHandle(), 0u, timestamp); |
297 } | 329 } |
298 if (!frame) | 330 if (!frame) |
299 return; | 331 return; |
300 frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE, | 332 frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE, |
301 frame_format.frame_rate); | 333 frame_format.frame_rate); |
302 frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME, | 334 frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME, |
303 reference_time); | 335 reference_time); |
304 OnIncomingCapturedVideoFrame(std::move(buffer), frame); | 336 OnIncomingCapturedVideoFrame(std::move(buffer), frame); |
(...skipping 61 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
366 *u_plane_data = | 398 *u_plane_data = |
367 *y_plane_data + | 399 *y_plane_data + |
368 VideoFrame::PlaneSize(format, VideoFrame::kYPlane, dimensions).GetArea(); | 400 VideoFrame::PlaneSize(format, VideoFrame::kYPlane, dimensions).GetArea(); |
369 *v_plane_data = | 401 *v_plane_data = |
370 *u_plane_data + | 402 *u_plane_data + |
371 VideoFrame::PlaneSize(format, VideoFrame::kUPlane, dimensions).GetArea(); | 403 VideoFrame::PlaneSize(format, VideoFrame::kUPlane, dimensions).GetArea(); |
372 return buffer; | 404 return buffer; |
373 } | 405 } |
374 | 406 |
375 } // namespace media | 407 } // namespace media |
OLD | NEW |