OLD | NEW |
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/browser/renderer_host/media/video_capture_device_client.h" | 5 #include "content/browser/renderer_host/media/video_capture_device_client.h" |
6 | 6 |
7 #include <algorithm> | 7 #include <algorithm> |
8 #include <utility> | 8 #include <utility> |
9 | 9 |
10 #include "base/bind.h" | 10 #include "base/bind.h" |
(...skipping 10 matching lines...)
21 #include "media/base/bind_to_current_loop.h" | 21 #include "media/base/bind_to_current_loop.h" |
22 #include "media/base/media_switches.h" | 22 #include "media/base/media_switches.h" |
23 #include "media/base/video_capture_types.h" | 23 #include "media/base/video_capture_types.h" |
24 #include "media/base/video_frame.h" | 24 #include "media/base/video_frame.h" |
25 #include "third_party/libyuv/include/libyuv.h" | 25 #include "third_party/libyuv/include/libyuv.h" |
26 | 26 |
27 using media::VideoCaptureFormat; | 27 using media::VideoCaptureFormat; |
28 using media::VideoFrame; | 28 using media::VideoFrame; |
29 using media::VideoFrameMetadata; | 29 using media::VideoFrameMetadata; |
30 | 30 |
| 31 namespace { |
| 32 |
| 33 bool isFormatSupported(media::VideoPixelFormat pixel_format) { |
| 34 // Currently, only I420, Y8 and Y16 pixel formats are supported. |
| 35 return (pixel_format == media::PIXEL_FORMAT_I420 || |
| 36 pixel_format == media::PIXEL_FORMAT_Y8 || |
| 37 pixel_format == media::PIXEL_FORMAT_Y16); |
| 38 } |
| 39 } |
| 40 |
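For orientation (my summary, not part of the CL): I420 carries an 8-bit luma plane plus two 2x2-subsampled chroma planes, while Y8 and Y16 are single-plane 8-bit and 16-bit formats. A rough sketch of the resulting buffer sizes follows, with an illustrative helper name; media::VideoFrame::AllocationSize() remains the real source of truth.

// Illustrative helper, not in the CL: rough packed sizes per supported format.
size_t RoughFrameBytes(media::VideoPixelFormat format, const gfx::Size& size) {
  const size_t pixels = static_cast<size_t>(size.width()) * size.height();
  switch (format) {
    case media::PIXEL_FORMAT_I420:
      return pixels + 2 * (pixels / 4);  // Y plane plus 2x2-subsampled U and V.
    case media::PIXEL_FORMAT_Y8:
      return pixels;                     // One byte per pixel, single plane.
    case media::PIXEL_FORMAT_Y16:
      return pixels * 2;                 // Two bytes per pixel, single plane.
    default:
      return 0;
  }
}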
31 namespace content { | 41 namespace content { |
32 | 42 |
33 // Class combining a Client::Buffer interface implementation and a pool buffer | 43 // Class combining a Client::Buffer interface implementation and a pool buffer |
34 // implementation to guarantee proper cleanup on destruction on our side. | 44 // implementation to guarantee proper cleanup on destruction on our side. |
35 class AutoReleaseBuffer : public media::VideoCaptureDevice::Client::Buffer { | 45 class AutoReleaseBuffer : public media::VideoCaptureDevice::Client::Buffer { |
36 public: | 46 public: |
37 AutoReleaseBuffer(const scoped_refptr<VideoCaptureBufferPool>& pool, | 47 AutoReleaseBuffer(const scoped_refptr<VideoCaptureBufferPool>& pool, |
38 int buffer_id) | 48 int buffer_id) |
39 : id_(buffer_id), | 49 : id_(buffer_id), |
40 pool_(pool), | 50 pool_(pool), |
(...skipping 60 matching lines...)
101 external_jpeg_decoder_.reset(new VideoCaptureGpuJpegDecoder(base::Bind( | 111 external_jpeg_decoder_.reset(new VideoCaptureGpuJpegDecoder(base::Bind( |
102 &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread, | 112 &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread, |
103 controller_))); | 113 controller_))); |
104 external_jpeg_decoder_->Initialize(); | 114 external_jpeg_decoder_->Initialize(); |
105 } | 115 } |
106 } | 116 } |
107 | 117 |
108 if (!frame_format.IsValid()) | 118 if (!frame_format.IsValid()) |
109 return; | 119 return; |
110 | 120 |
| 121 // The input |length| can be greater than the required buffer size because of |
| 122 // paddings and/or alignments, but it cannot be smaller. |
| 123 DCHECK_GE(static_cast<size_t>(length), frame_format.ImageAllocationSize()); |
| 124 |
| 125 const bool useFullSize = |
| 126 frame_format.pixel_format == media::PIXEL_FORMAT_Y8 || |
| 127 frame_format.pixel_format == media::PIXEL_FORMAT_Y16; |
| 128 |
111 // |chopped_{width,height}| and |new_unrotated_{width,height}| are the lowest | 129 // |chopped_{width,height}| and |new_unrotated_{width,height}| are the lowest |
112 // bit decomposition of {width, height}, grabbing the odd and even parts. | 130 // bit decomposition of {width, height}, grabbing the odd and even parts. |
113 const int chopped_width = frame_format.frame_size.width() & 1; | 131 const int chopped_width = frame_format.frame_size.width() & 1; |
114 const int chopped_height = frame_format.frame_size.height() & 1; | 132 const int chopped_height = frame_format.frame_size.height() & 1; |
115 const int new_unrotated_width = frame_format.frame_size.width() & ~1; | 133 const int new_unrotated_width = (useFullSize) |
116 const int new_unrotated_height = frame_format.frame_size.height() & ~1; | 134 ? frame_format.frame_size.width() |
| 135 : (frame_format.frame_size.width() & ~1); |
| 136 const int new_unrotated_height = |
| 137 (useFullSize) ? frame_format.frame_size.height() |
| 138 : (frame_format.frame_size.height() & ~1); |
117 | 139 |
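A worked example of the decomposition above (numbers are illustrative, not from the CL): for a 641x481 source the I420 path drops the odd column and row, while the Y8/Y16 path keeps the full size because there is no 2x2-subsampled chroma plane forcing even dimensions.

// Illustrative only: 641x481 source.
constexpr int kWidth = 641;
constexpr int kHeight = 481;
static_assert((kWidth & 1) == 1, "odd column is chopped on the I420 path");
static_assert((kWidth & ~1) == 640, "even part becomes the I420 width");
static_assert((kHeight & ~1) == 480, "even part becomes the I420 height");
// With PIXEL_FORMAT_Y8/Y16 (useFullSize == true) the frame stays 641x481.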
118 int destination_width = new_unrotated_width; | 140 int destination_width = new_unrotated_width; |
119 int destination_height = new_unrotated_height; | 141 int destination_height = new_unrotated_height; |
120 if (rotation == 90 || rotation == 270) | 142 if (rotation == 90 || rotation == 270) |
121 std::swap(destination_width, destination_height); | 143 std::swap(destination_width, destination_height); |
122 | 144 |
123 DCHECK_EQ(0, rotation % 90) | 145 DCHECK_EQ(0, rotation % 90) |
124 << " Rotation must be a multiple of 90, now: " << rotation; | 146 << " Rotation must be a multiple of 90, now: " << rotation; |
125 libyuv::RotationMode rotation_mode = libyuv::kRotate0; | 147 libyuv::RotationMode rotation_mode = libyuv::kRotate0; |
126 if (rotation == 90) | 148 if (rotation == 90) |
127 rotation_mode = libyuv::kRotate90; | 149 rotation_mode = libyuv::kRotate90; |
128 else if (rotation == 180) | 150 else if (rotation == 180) |
129 rotation_mode = libyuv::kRotate180; | 151 rotation_mode = libyuv::kRotate180; |
130 else if (rotation == 270) | 152 else if (rotation == 270) |
131 rotation_mode = libyuv::kRotate270; | 153 rotation_mode = libyuv::kRotate270; |
132 | 154 |
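The if/else chain above can be read as the following mapping (a sketch only, not a suggested change; rotation has already been checked to be a multiple of 90):

// Sketch: equivalent degrees-to-RotationMode mapping.
libyuv::RotationMode ToRotationMode(int rotation_degrees) {
  switch (rotation_degrees) {
    case 90:  return libyuv::kRotate90;   // Destination width/height swap.
    case 180: return libyuv::kRotate180;  // Dimensions unchanged.
    case 270: return libyuv::kRotate270;  // Destination width/height swap.
    default:  return libyuv::kRotate0;    // rotation == 0.
  }
}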
133 const gfx::Size dimensions(destination_width, destination_height); | 155 const gfx::Size dimensions(destination_width, destination_height); |
134 const media::VideoPixelStorage output_pixel_storage = | 156 const media::VideoPixelStorage output_pixel_storage = |
135 use_gpu_memory_buffers_ ? media::PIXEL_STORAGE_GPUMEMORYBUFFER | 157 use_gpu_memory_buffers_ ? media::PIXEL_STORAGE_GPUMEMORYBUFFER |
136 : media::PIXEL_STORAGE_CPU; | 158 : media::PIXEL_STORAGE_CPU; |
137 uint8_t *y_plane_data, *u_plane_data, *v_plane_data; | 159 uint8_t *y_plane_data, *u_plane_data, *v_plane_data; |
| 160 |
| 161 if (frame_format.pixel_format == media::PIXEL_FORMAT_Y8 || |
| 162 frame_format.pixel_format == media::PIXEL_FORMAT_Y16) { |
| 163 std::unique_ptr<Buffer> buffer(ReserveOutputBuffer( |
| 164 dimensions, frame_format.pixel_format, output_pixel_storage)); |
| 165 if (!buffer.get()) { |
| 166 DLOG(WARNING) << "Failed to reserve output buffer:" |
| 167 << media::VideoPixelFormatToString( |
| 168 frame_format.pixel_format); |
| 169 return; |
| 170 } |
| 171 memcpy(buffer->data(), data, length); |
| 172 const VideoCaptureFormat output_format = |
| 173 VideoCaptureFormat(dimensions, frame_format.frame_rate, |
| 174 frame_format.pixel_format, output_pixel_storage); |
| 175 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, |
| 176 timestamp); |
| 177 return; |
| 178 } |
| 179 |
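A hedged usage sketch of the new fast path (the device, frame rate, and helper name below are illustrative; the Client::OnIncomingCapturedData() parameter list is assumed to match the one this file implements): a depth camera handing over packed 16-bit samples lands in the memcpy branch above without any libyuv conversion.

// Illustrative caller only.
void DeliverDepthFrame(media::VideoCaptureDevice::Client* client,
                       const uint8_t* packed_y16,
                       const gfx::Size& size) {
  const media::VideoCaptureFormat format(size, 30.0f, media::PIXEL_FORMAT_Y16);
  const int length = size.GetArea() * 2;  // Two bytes per Y16 sample.
  client->OnIncomingCapturedData(packed_y16, length, format, 0 /* rotation */,
                                 base::TimeTicks::Now(), base::TimeDelta());
}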
138 std::unique_ptr<Buffer> buffer( | 180 std::unique_ptr<Buffer> buffer( |
139 ReserveI420OutputBuffer(dimensions, output_pixel_storage, &y_plane_data, | 181 ReserveI420OutputBuffer(dimensions, output_pixel_storage, &y_plane_data, |
140 &u_plane_data, &v_plane_data)); | 182 &u_plane_data, &v_plane_data)); |
141 if (!buffer.get()) { | 183 if (!buffer.get()) { |
142 DLOG(WARNING) << "Failed to reserve I420 output buffer."; | 184 DLOG(WARNING) << "Failed to reserve I420 output buffer."; |
143 return; | 185 return; |
144 } | 186 } |
145 | 187 |
146 const int yplane_stride = dimensions.width(); | 188 const int yplane_stride = dimensions.width(); |
147 const int uv_plane_stride = yplane_stride / 2; | 189 const int uv_plane_stride = yplane_stride / 2; |
(...skipping 58 matching lines...)
206 case media::PIXEL_FORMAT_ARGB: | 248 case media::PIXEL_FORMAT_ARGB: |
207 origin_colorspace = libyuv::FOURCC_ARGB; | 249 origin_colorspace = libyuv::FOURCC_ARGB; |
208 break; | 250 break; |
209 case media::PIXEL_FORMAT_MJPEG: | 251 case media::PIXEL_FORMAT_MJPEG: |
210 origin_colorspace = libyuv::FOURCC_MJPG; | 252 origin_colorspace = libyuv::FOURCC_MJPG; |
211 break; | 253 break; |
212 default: | 254 default: |
213 NOTREACHED(); | 255 NOTREACHED(); |
214 } | 256 } |
215 | 257 |
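The actual conversion call sits in the lines elided above; for orientation, here is a sketch of how the values computed so far would typically feed libyuv. This is my reconstruction under that assumption, not the CL's exact code, and the crop/flip handling is simplified.

// Sketch only: flip handling omitted.
libyuv::ConvertToI420(data, length,
                      y_plane_data, yplane_stride,
                      u_plane_data, uv_plane_stride,
                      v_plane_data, uv_plane_stride,
                      0 /* crop_x */, 0 /* crop_y */,
                      frame_format.frame_size.width(),
                      frame_format.frame_size.height(),
                      new_unrotated_width, new_unrotated_height,
                      rotation_mode, origin_colorspace);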
216 // The input |length| can be greater than the required buffer size because of | |
217 // paddings and/or alignments, but it cannot be smaller. | |
218 DCHECK_GE(static_cast<size_t>(length), frame_format.ImageAllocationSize()); | |
219 | |
220 if (external_jpeg_decoder_) { | 258 if (external_jpeg_decoder_) { |
221 const VideoCaptureGpuJpegDecoder::STATUS status = | 259 const VideoCaptureGpuJpegDecoder::STATUS status = |
222 external_jpeg_decoder_->GetStatus(); | 260 external_jpeg_decoder_->GetStatus(); |
223 if (status == VideoCaptureGpuJpegDecoder::FAILED) { | 261 if (status == VideoCaptureGpuJpegDecoder::FAILED) { |
224 external_jpeg_decoder_.reset(); | 262 external_jpeg_decoder_.reset(); |
225 } else if (status == VideoCaptureGpuJpegDecoder::INIT_PASSED && | 263 } else if (status == VideoCaptureGpuJpegDecoder::INIT_PASSED && |
226 frame_format.pixel_format == media::PIXEL_FORMAT_MJPEG && | 264 frame_format.pixel_format == media::PIXEL_FORMAT_MJPEG && |
227 rotation == 0 && !flip) { | 265 rotation == 0 && !flip) { |
228 external_jpeg_decoder_->DecodeCapturedData(data, length, frame_format, | 266 external_jpeg_decoder_->DecodeCapturedData(data, length, frame_format, |
229 reference_time, timestamp, | 267 reference_time, timestamp, |
(...skipping 30 matching lines...)
260 timestamp); | 298 timestamp); |
261 } | 299 } |
262 | 300 |
263 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> | 301 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> |
264 VideoCaptureDeviceClient::ReserveOutputBuffer( | 302 VideoCaptureDeviceClient::ReserveOutputBuffer( |
265 const gfx::Size& frame_size, | 303 const gfx::Size& frame_size, |
266 media::VideoPixelFormat pixel_format, | 304 media::VideoPixelFormat pixel_format, |
267 media::VideoPixelStorage pixel_storage) { | 305 media::VideoPixelStorage pixel_storage) { |
268 DCHECK_GT(frame_size.width(), 0); | 306 DCHECK_GT(frame_size.width(), 0); |
269 DCHECK_GT(frame_size.height(), 0); | 307 DCHECK_GT(frame_size.height(), 0); |
270 // Currently, only I420 pixel format is supported. | 308 DCHECK(isFormatSupported(pixel_format)); |
271 DCHECK_EQ(media::PIXEL_FORMAT_I420, pixel_format); | |
272 | 309 |
273 // TODO(mcasas): For PIXEL_STORAGE_GPUMEMORYBUFFER, find a way to indicate if | 310 // TODO(mcasas): For PIXEL_STORAGE_GPUMEMORYBUFFER, find a way to indicate if |
274 // it's a ShMem GMB or a DmaBuf GMB. | 311 // it's a ShMem GMB or a DmaBuf GMB. |
275 int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId; | 312 int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId; |
276 const int buffer_id = buffer_pool_->ReserveForProducer( | 313 const int buffer_id = buffer_pool_->ReserveForProducer( |
277 frame_size, pixel_format, pixel_storage, &buffer_id_to_drop); | 314 frame_size, pixel_format, pixel_storage, &buffer_id_to_drop); |
278 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) { | 315 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) { |
279 BrowserThread::PostTask(BrowserThread::IO, | 316 BrowserThread::PostTask(BrowserThread::IO, |
280 FROM_HERE, | 317 FROM_HERE, |
281 base::Bind(&VideoCaptureController::DoBufferDestroyedOnIOThread, | 318 base::Bind(&VideoCaptureController::DoBufferDestroyedOnIOThread, |
282 controller_, buffer_id_to_drop)); | 319 controller_, buffer_id_to_drop)); |
283 } | 320 } |
284 if (buffer_id == VideoCaptureBufferPool::kInvalidId) | 321 if (buffer_id == VideoCaptureBufferPool::kInvalidId) |
285 return nullptr; | 322 return nullptr; |
286 return base::WrapUnique<Buffer>( | 323 return base::WrapUnique<Buffer>( |
287 new AutoReleaseBuffer(buffer_pool_, buffer_id)); | 324 new AutoReleaseBuffer(buffer_pool_, buffer_id)); |
288 } | 325 } |
289 | 326 |
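A minimal producer-side sketch of the contract above (sizes, format, and the helper are illustrative): a null return means the pool could not supply a buffer, so the frame is dropped rather than blocking capture.

// Illustrative only; |source_pixels| stands in for a device-provided Y16 frame.
bool CopyY16ToPoolBuffer(media::VideoCaptureDevice::Client* client,
                         const uint8_t* source_pixels) {
  const gfx::Size size(640, 480);
  std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> buffer =
      client->ReserveOutputBuffer(size, media::PIXEL_FORMAT_Y16,
                                  media::PIXEL_STORAGE_CPU);
  if (!buffer)
    return false;  // Pool exhausted: drop this frame.
  memcpy(buffer->data(), source_pixels, size.GetArea() * 2);
  return true;
}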
290 void VideoCaptureDeviceClient::OnIncomingCapturedBuffer( | 327 void VideoCaptureDeviceClient::OnIncomingCapturedBuffer( |
291 std::unique_ptr<Buffer> buffer, | 328 std::unique_ptr<Buffer> buffer, |
292 const VideoCaptureFormat& frame_format, | 329 const VideoCaptureFormat& frame_format, |
293 base::TimeTicks reference_time, | 330 base::TimeTicks reference_time, |
294 base::TimeDelta timestamp) { | 331 base::TimeDelta timestamp) { |
295 // Currently, only I420 pixel format is supported. | 332 DCHECK(isFormatSupported(frame_format.pixel_format)); |
296 DCHECK_EQ(media::PIXEL_FORMAT_I420, frame_format.pixel_format); | |
297 | 333 |
298 scoped_refptr<VideoFrame> frame; | 334 scoped_refptr<VideoFrame> frame; |
299 switch (frame_format.pixel_storage) { | 335 switch (frame_format.pixel_storage) { |
300 case media::PIXEL_STORAGE_GPUMEMORYBUFFER: { | 336 case media::PIXEL_STORAGE_GPUMEMORYBUFFER: { |
| 337 // TODO(astojilj) Check Y8 and Y16 support. |
| 338 DCHECK_EQ(media::PIXEL_FORMAT_I420, frame_format.pixel_format); |
301 // Create a VideoFrame to set the correct storage_type and pixel_format. | 339 // Create a VideoFrame to set the correct storage_type and pixel_format. |
302 gfx::GpuMemoryBufferHandle handle; | 340 gfx::GpuMemoryBufferHandle handle; |
303 frame = VideoFrame::WrapExternalYuvGpuMemoryBuffers( | 341 frame = VideoFrame::WrapExternalYuvGpuMemoryBuffers( |
304 media::PIXEL_FORMAT_I420, frame_format.frame_size, | 342 media::PIXEL_FORMAT_I420, frame_format.frame_size, |
305 gfx::Rect(frame_format.frame_size), frame_format.frame_size, 0, 0, 0, | 343 gfx::Rect(frame_format.frame_size), frame_format.frame_size, 0, 0, 0, |
306 reinterpret_cast<uint8_t*>(buffer->data(media::VideoFrame::kYPlane)), | 344 reinterpret_cast<uint8_t*>(buffer->data(media::VideoFrame::kYPlane)), |
307 reinterpret_cast<uint8_t*>(buffer->data(media::VideoFrame::kUPlane)), | 345 reinterpret_cast<uint8_t*>(buffer->data(media::VideoFrame::kUPlane)), |
308 reinterpret_cast<uint8_t*>(buffer->data(media::VideoFrame::kVPlane)), | 346 reinterpret_cast<uint8_t*>(buffer->data(media::VideoFrame::kVPlane)), |
309 handle, handle, handle, timestamp); | 347 handle, handle, handle, timestamp); |
310 break; | 348 break; |
311 } | 349 } |
312 case media::PIXEL_STORAGE_CPU: | 350 case media::PIXEL_STORAGE_CPU: |
313 frame = VideoFrame::WrapExternalSharedMemory( | 351 frame = VideoFrame::WrapExternalSharedMemory( |
314 media::PIXEL_FORMAT_I420, frame_format.frame_size, | 352 frame_format.pixel_format, frame_format.frame_size, |
315 gfx::Rect(frame_format.frame_size), frame_format.frame_size, | 353 gfx::Rect(frame_format.frame_size), frame_format.frame_size, |
316 reinterpret_cast<uint8_t*>(buffer->data()), | 354 reinterpret_cast<uint8_t*>(buffer->data()), |
317 VideoFrame::AllocationSize(media::PIXEL_FORMAT_I420, | 355 VideoFrame::AllocationSize(frame_format.pixel_format, |
318 frame_format.frame_size), | 356 frame_format.frame_size), |
319 base::SharedMemory::NULLHandle(), 0u, timestamp); | 357 base::SharedMemory::NULLHandle(), 0u, timestamp); |
320 break; | 358 break; |
321 } | 359 } |
322 if (!frame) | 360 if (!frame) |
323 return; | 361 return; |
324 frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE, | 362 frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE, |
325 frame_format.frame_rate); | 363 frame_format.frame_rate); |
326 frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME, | 364 frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME, |
327 reference_time); | 365 reference_time); |
(...skipping 89 matching lines...)
417 reinterpret_cast<uint8_t*>(buffer->data(VideoFrame::kUPlane)); | 455 reinterpret_cast<uint8_t*>(buffer->data(VideoFrame::kUPlane)); |
418 *v_plane_data = | 456 *v_plane_data = |
419 reinterpret_cast<uint8_t*>(buffer->data(VideoFrame::kVPlane)); | 457 reinterpret_cast<uint8_t*>(buffer->data(VideoFrame::kVPlane)); |
420 return buffer; | 458 return buffer; |
421 } | 459 } |
422 NOTREACHED(); | 460 NOTREACHED(); |
423 return std::unique_ptr<Buffer>(); | 461 return std::unique_ptr<Buffer>(); |
424 } | 462 } |
425 | 463 |
426 } // namespace content | 464 } // namespace content |