OLD | NEW |
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/browser/renderer_host/media/video_capture_device_client.h" | 5 #include "content/browser/renderer_host/media/video_capture_device_client.h" |
6 | 6 |
7 #include <algorithm> | 7 #include <algorithm> |
8 #include <utility> | 8 #include <utility> |
9 | 9 |
10 #include "base/bind.h" | 10 #include "base/bind.h" |
(...skipping 90 matching lines...)
101 external_jpeg_decoder_.reset(new VideoCaptureGpuJpegDecoder(base::Bind( | 101 external_jpeg_decoder_.reset(new VideoCaptureGpuJpegDecoder(base::Bind( |
102 &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread, | 102 &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread, |
103 controller_))); | 103 controller_))); |
104 external_jpeg_decoder_->Initialize(); | 104 external_jpeg_decoder_->Initialize(); |
105 } | 105 } |
106 } | 106 } |
107 | 107 |
108 if (!frame_format.IsValid()) | 108 if (!frame_format.IsValid()) |
109 return; | 109 return; |
110 | 110 |
| 111 // The input |length| can be greater than the required buffer size because of |
| 112 // paddings and/or alignments, but it cannot be smaller. |
| 113 DCHECK_GE(static_cast<size_t>(length), frame_format.ImageAllocationSize()); |
| 114 |
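(Aside, not part of this CL: a minimal sketch of why |length| can exceed the packed size. A capture driver may pad each plane row to an aligned stride, so the payload it reports is at least, and often more than, what frame_format.ImageAllocationSize() requires. The helper names below are illustrative only.)

  #include <cstddef>

  // Packed I420: full-resolution Y plane plus quarter-resolution U and V planes.
  size_t PackedI420Size(int width, int height) {
    return static_cast<size_t>(width) * height +
           2 * ((width / 2) * (height / 2));
  }

  // The same frame with every row padded to a power-of-two |alignment| stride.
  size_t PaddedI420Size(int width, int height, int alignment) {
    auto align = [alignment](int v) { return (v + alignment - 1) & ~(alignment - 1); };
    return static_cast<size_t>(align(width)) * height +
           2 * (align(width / 2) * (height / 2));
  }

  // PackedI420Size(500, 480) == 360000, PaddedI420Size(500, 480, 64) == 368640:
  // the driver-reported |length| can legitimately be larger than the packed
  // size, hence DCHECK_GE rather than DCHECK_EQ.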
111 // |chopped_{width,height}| and |new_unrotated_{width,height}| are the lowest | 115 // |chopped_{width,height}| and |new_unrotated_{width,height}| are the lowest |
112 // bit decomposition of {width, height}, grabbing the odd and even parts. | 116 // bit decomposition of {width, height}, grabbing the odd and even parts. |
113 const int chopped_width = frame_format.frame_size.width() & 1; | 117 const int chopped_width = frame_format.frame_size.width() & 1; |
114 const int chopped_height = frame_format.frame_size.height() & 1; | 118 const int chopped_height = frame_format.frame_size.height() & 1; |
115 const int new_unrotated_width = frame_format.frame_size.width() & ~1; | 119 const int new_unrotated_width = frame_format.frame_size.width() & ~1; |
116 const int new_unrotated_height = frame_format.frame_size.height() & ~1; | 120 const int new_unrotated_height = frame_format.frame_size.height() & ~1; |
117 | 121 |
118 int destination_width = new_unrotated_width; | 122 int destination_width = new_unrotated_width; |
119 int destination_height = new_unrotated_height; | 123 int destination_height = new_unrotated_height; |
120 if (rotation == 90 || rotation == 270) | 124 if (rotation == 90 || rotation == 270) |
121 std::swap(destination_width, destination_height); | 125 std::swap(destination_width, destination_height); |
122 | 126 |
123 DCHECK_EQ(0, rotation % 90) | 127 DCHECK_EQ(0, rotation % 90) |
124 << " Rotation must be a multiple of 90, now: " << rotation; | 128 << " Rotation must be a multiple of 90, now: " << rotation; |
125 libyuv::RotationMode rotation_mode = libyuv::kRotate0; | 129 libyuv::RotationMode rotation_mode = libyuv::kRotate0; |
126 if (rotation == 90) | 130 if (rotation == 90) |
127 rotation_mode = libyuv::kRotate90; | 131 rotation_mode = libyuv::kRotate90; |
128 else if (rotation == 180) | 132 else if (rotation == 180) |
129 rotation_mode = libyuv::kRotate180; | 133 rotation_mode = libyuv::kRotate180; |
130 else if (rotation == 270) | 134 else if (rotation == 270) |
131 rotation_mode = libyuv::kRotate270; | 135 rotation_mode = libyuv::kRotate270; |
132 | 136 |
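(Aside, not part of this CL: a worked example of the dimension handling above, assuming a hypothetical 641x481 source rotated by 90 degrees. The function name is illustrative only.)

  #include <utility>  // std::swap

  // Illustration only: odd dimensions lose their lowest bit, and a 90/270
  // degree rotation transposes the reserved output size.
  void ExampleDimensionHandling() {
    const int chopped_width = 641 & 1;   // 1 -> the rightmost column is dropped.
    const int chopped_height = 481 & 1;  // 1 -> the bottom row is dropped.
    int destination_width = 641 & ~1;    // 640
    int destination_height = 481 & ~1;   // 480
    std::swap(destination_width, destination_height);  // rotation == 90: 480x640.
  }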
133 const gfx::Size dimensions(destination_width, destination_height); | 137 const gfx::Size dimensions(destination_width, destination_height); |
134 const media::VideoPixelStorage output_pixel_storage = | 138 const media::VideoPixelStorage output_pixel_storage = |
135 use_gpu_memory_buffers_ ? media::PIXEL_STORAGE_GPUMEMORYBUFFER | 139 use_gpu_memory_buffers_ ? media::PIXEL_STORAGE_GPUMEMORYBUFFER |
136 : media::PIXEL_STORAGE_CPU; | 140 : media::PIXEL_STORAGE_CPU; |
137 uint8_t *y_plane_data, *u_plane_data, *v_plane_data; | 141 uint8_t *y_plane_data, *u_plane_data, *v_plane_data; |
138 std::unique_ptr<Buffer> buffer( | 142 |
139 ReserveI420OutputBuffer(dimensions, output_pixel_storage, &y_plane_data, | 143 if (frame_format.pixel_format == media::PIXEL_FORMAT_Y16) { |
140 &u_plane_data, &v_plane_data)); | 144 std::unique_ptr<Buffer> buffer(ReserveOutputBuffer(dimensions, |
| 145 media::PIXEL_FORMAT_Y16, output_pixel_storage)); |
| 146 if (!buffer.get()) { |
| 147 DLOG(WARNING) << "Failed to reserve Y16 output buffer."; |
| 148 return; |
| 149 } |
| 150 memcpy(buffer->data(), data, length); |
| 151 const VideoCaptureFormat output_format = VideoCaptureFormat( |
| 152 dimensions, frame_format.frame_rate, |
| 153 media::PIXEL_FORMAT_Y16, output_pixel_storage); |
| 154 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, |
| 155 timestamp); |
| 156 return; |
| 157 } |
| 158 |
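(Aside, not part of this CL: the Y16 branch above copies |length| bytes verbatim into a buffer reserved for |dimensions|, which assumes a tightly packed 16-bit-per-pixel payload. A minimal sketch of that expectation; the helper name is illustrative.)

  #include <cstddef>

  // Tightly packed Y16: a single plane with two bytes per pixel.
  size_t PackedY16Size(int width, int height) {
    return static_cast<size_t>(width) * height * 2;
  }
  // e.g. PackedY16Size(640, 480) == 614400 bytes, the size the memcpy above
  // expects the reserved buffer to provide.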
| 159 std::unique_ptr<Buffer> buffer(ReserveI420OutputBuffer(dimensions, |
| 160 output_pixel_storage, &y_plane_data, &u_plane_data, &v_plane_data)); |
141 if (!buffer.get()) { | 161 if (!buffer.get()) { |
142 DLOG(WARNING) << "Failed to reserve I420 output buffer."; | 162 DLOG(WARNING) << "Failed to reserve I420 output buffer."; |
143 return; | 163 return; |
144 } | 164 } |
145 | 165 |
146 const int yplane_stride = dimensions.width(); | 166 const int yplane_stride = dimensions.width(); |
147 const int uv_plane_stride = yplane_stride / 2; | 167 const int uv_plane_stride = yplane_stride / 2; |
148 int crop_x = 0; | 168 int crop_x = 0; |
149 int crop_y = 0; | 169 int crop_y = 0; |
150 libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY; | 170 libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY; |
(...skipping 55 matching lines...)
206 case media::PIXEL_FORMAT_ARGB: | 226 case media::PIXEL_FORMAT_ARGB: |
207 origin_colorspace = libyuv::FOURCC_ARGB; | 227 origin_colorspace = libyuv::FOURCC_ARGB; |
208 break; | 228 break; |
209 case media::PIXEL_FORMAT_MJPEG: | 229 case media::PIXEL_FORMAT_MJPEG: |
210 origin_colorspace = libyuv::FOURCC_MJPG; | 230 origin_colorspace = libyuv::FOURCC_MJPG; |
211 break; | 231 break; |
212 default: | 232 default: |
213 NOTREACHED(); | 233 NOTREACHED(); |
214 } | 234 } |
215 | 235 |
216 // The input |length| can be greater than the required buffer size because of | |
217 // paddings and/or alignments, but it cannot be smaller. | |
218 DCHECK_GE(static_cast<size_t>(length), frame_format.ImageAllocationSize()); | |
219 | |
220 if (external_jpeg_decoder_) { | 236 if (external_jpeg_decoder_) { |
221 const VideoCaptureGpuJpegDecoder::STATUS status = | 237 const VideoCaptureGpuJpegDecoder::STATUS status = |
222 external_jpeg_decoder_->GetStatus(); | 238 external_jpeg_decoder_->GetStatus(); |
223 if (status == VideoCaptureGpuJpegDecoder::FAILED) { | 239 if (status == VideoCaptureGpuJpegDecoder::FAILED) { |
224 external_jpeg_decoder_.reset(); | 240 external_jpeg_decoder_.reset(); |
225 } else if (status == VideoCaptureGpuJpegDecoder::INIT_PASSED && | 241 } else if (status == VideoCaptureGpuJpegDecoder::INIT_PASSED && |
226 frame_format.pixel_format == media::PIXEL_FORMAT_MJPEG && | 242 frame_format.pixel_format == media::PIXEL_FORMAT_MJPEG && |
227 rotation == 0 && !flip) { | 243 rotation == 0 && !flip) { |
228 external_jpeg_decoder_->DecodeCapturedData(data, length, frame_format, | 244 external_jpeg_decoder_->DecodeCapturedData(data, length, frame_format, |
229 reference_time, timestamp, | 245 reference_time, timestamp, |
(...skipping 30 matching lines...)
260 timestamp); | 276 timestamp); |
261 } | 277 } |
262 | 278 |
263 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> | 279 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> |
264 VideoCaptureDeviceClient::ReserveOutputBuffer( | 280 VideoCaptureDeviceClient::ReserveOutputBuffer( |
265 const gfx::Size& frame_size, | 281 const gfx::Size& frame_size, |
266 media::VideoPixelFormat pixel_format, | 282 media::VideoPixelFormat pixel_format, |
267 media::VideoPixelStorage pixel_storage) { | 283 media::VideoPixelStorage pixel_storage) { |
268 DCHECK_GT(frame_size.width(), 0); | 284 DCHECK_GT(frame_size.width(), 0); |
269 DCHECK_GT(frame_size.height(), 0); | 285 DCHECK_GT(frame_size.height(), 0); |
270 // Currently, only I420 pixel format is supported. | 286 // Currently, only I420 and Y16 pixel formats are supported. |
271 DCHECK_EQ(media::PIXEL_FORMAT_I420, pixel_format); | 287 DCHECK(media::PIXEL_FORMAT_I420 == pixel_format || |
| 288 media::PIXEL_FORMAT_Y16 == pixel_format); |
272 | 289 |
273 // TODO(mcasas): For PIXEL_STORAGE_GPUMEMORYBUFFER, find a way to indicate if | 290 // TODO(mcasas): For PIXEL_STORAGE_GPUMEMORYBUFFER, find a way to indicate if |
274 // it's a ShMem GMB or a DmaBuf GMB. | 291 // it's a ShMem GMB or a DmaBuf GMB. |
275 int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId; | 292 int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId; |
276 const int buffer_id = buffer_pool_->ReserveForProducer( | 293 const int buffer_id = buffer_pool_->ReserveForProducer( |
277 frame_size, pixel_format, pixel_storage, &buffer_id_to_drop); | 294 frame_size, pixel_format, pixel_storage, &buffer_id_to_drop); |
278 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) { | 295 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) { |
279 BrowserThread::PostTask(BrowserThread::IO, | 296 BrowserThread::PostTask(BrowserThread::IO, |
280 FROM_HERE, | 297 FROM_HERE, |
281 base::Bind(&VideoCaptureController::DoBufferDestroyedOnIOThread, | 298 base::Bind(&VideoCaptureController::DoBufferDestroyedOnIOThread, |
282 controller_, buffer_id_to_drop)); | 299 controller_, buffer_id_to_drop)); |
283 } | 300 } |
284 if (buffer_id == VideoCaptureBufferPool::kInvalidId) | 301 if (buffer_id == VideoCaptureBufferPool::kInvalidId) |
285 return nullptr; | 302 return nullptr; |
286 return base::WrapUnique<Buffer>( | 303 return base::WrapUnique<Buffer>( |
287 new AutoReleaseBuffer(buffer_pool_, buffer_id)); | 304 new AutoReleaseBuffer(buffer_pool_, buffer_id)); |
288 } | 305 } |
289 | 306 |
290 void VideoCaptureDeviceClient::OnIncomingCapturedBuffer( | 307 void VideoCaptureDeviceClient::OnIncomingCapturedBuffer( |
291 std::unique_ptr<Buffer> buffer, | 308 std::unique_ptr<Buffer> buffer, |
292 const VideoCaptureFormat& frame_format, | 309 const VideoCaptureFormat& frame_format, |
293 base::TimeTicks reference_time, | 310 base::TimeTicks reference_time, |
294 base::TimeDelta timestamp) { | 311 base::TimeDelta timestamp) { |
295 // Currently, only I420 pixel format is supported. | 312 // Currently, only I420 and Y16 pixel formats are supported. |
296 DCHECK_EQ(media::PIXEL_FORMAT_I420, frame_format.pixel_format); | 313 DCHECK(media::PIXEL_FORMAT_I420 == frame_format.pixel_format || |
| 314 media::PIXEL_FORMAT_Y16 == frame_format.pixel_format); |
297 | 315 |
298 scoped_refptr<VideoFrame> frame; | 316 scoped_refptr<VideoFrame> frame; |
299 switch (frame_format.pixel_storage) { | 317 switch (frame_format.pixel_storage) { |
300 case media::PIXEL_STORAGE_GPUMEMORYBUFFER: { | 318 case media::PIXEL_STORAGE_GPUMEMORYBUFFER: { |
| 319 // TODO(astojilj): Check Y16 support. |
| 320 DCHECK_EQ(media::PIXEL_FORMAT_I420, frame_format.pixel_format); |
301 // Create a VideoFrame to set the correct storage_type and pixel_format. | 321 // Create a VideoFrame to set the correct storage_type and pixel_format. |
302 gfx::GpuMemoryBufferHandle handle; | 322 gfx::GpuMemoryBufferHandle handle; |
303 frame = VideoFrame::WrapExternalYuvGpuMemoryBuffers( | 323 frame = VideoFrame::WrapExternalYuvGpuMemoryBuffers( |
304 media::PIXEL_FORMAT_I420, frame_format.frame_size, | 324 media::PIXEL_FORMAT_I420, frame_format.frame_size, |
305 gfx::Rect(frame_format.frame_size), frame_format.frame_size, 0, 0, 0, | 325 gfx::Rect(frame_format.frame_size), frame_format.frame_size, 0, 0, 0, |
306 reinterpret_cast<uint8_t*>(buffer->data(media::VideoFrame::kYPlane)), | 326 reinterpret_cast<uint8_t*>(buffer->data(media::VideoFrame::kYPlane)), |
307 reinterpret_cast<uint8_t*>(buffer->data(media::VideoFrame::kUPlane)), | 327 reinterpret_cast<uint8_t*>(buffer->data(media::VideoFrame::kUPlane)), |
308 reinterpret_cast<uint8_t*>(buffer->data(media::VideoFrame::kVPlane)), | 328 reinterpret_cast<uint8_t*>(buffer->data(media::VideoFrame::kVPlane)), |
309 handle, handle, handle, timestamp); | 329 handle, handle, handle, timestamp); |
310 break; | 330 break; |
311 } | 331 } |
312 case media::PIXEL_STORAGE_CPU: | 332 case media::PIXEL_STORAGE_CPU: |
313 frame = VideoFrame::WrapExternalSharedMemory( | 333 frame = VideoFrame::WrapExternalSharedMemory( |
314 media::PIXEL_FORMAT_I420, frame_format.frame_size, | 334 frame_format.pixel_format, frame_format.frame_size, |
315 gfx::Rect(frame_format.frame_size), frame_format.frame_size, | 335 gfx::Rect(frame_format.frame_size), frame_format.frame_size, |
316 reinterpret_cast<uint8_t*>(buffer->data()), | 336 reinterpret_cast<uint8_t*>(buffer->data()), |
317 VideoFrame::AllocationSize(media::PIXEL_FORMAT_I420, | 337 VideoFrame::AllocationSize(frame_format.pixel_format, |
318 frame_format.frame_size), | 338 frame_format.frame_size), |
319 base::SharedMemory::NULLHandle(), 0u, timestamp); | 339 base::SharedMemory::NULLHandle(), 0u, timestamp); |
320 break; | 340 break; |
321 } | 341 } |
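(Aside, not part of this CL: a sketch of why the shared-memory path now passes frame_format.pixel_format instead of hard-coding I420. The two supported formats need differently sized mappings; the byte counts below are the tightly packed values one would expect VideoFrame::AllocationSize() to return for even dimensions.)

  #include <cstddef>

  // Illustration only, for a 640x480 frame:
  const size_t i420_bytes = 640 * 480 * 3 / 2;  // 460800: Y plane + quarter-size U and V.
  const size_t y16_bytes  = 640 * 480 * 2;      // 614400: one 16-bit plane.
  // Wrapping the buffer with the actual pixel format keeps the reported
  // allocation size consistent with what the capture path wrote into it.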
322 if (!frame) | 342 if (!frame) |
323 return; | 343 return; |
324 frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE, | 344 frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE, |
325 frame_format.frame_rate); | 345 frame_format.frame_rate); |
326 frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME, | 346 frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME, |
327 reference_time); | 347 reference_time); |
(...skipping 89 matching lines...)
417 reinterpret_cast<uint8_t*>(buffer->data(VideoFrame::kUPlane)); | 437 reinterpret_cast<uint8_t*>(buffer->data(VideoFrame::kUPlane)); |
418 *v_plane_data = | 438 *v_plane_data = |
419 reinterpret_cast<uint8_t*>(buffer->data(VideoFrame::kVPlane)); | 439 reinterpret_cast<uint8_t*>(buffer->data(VideoFrame::kVPlane)); |
420 return buffer; | 440 return buffer; |
421 } | 441 } |
422 NOTREACHED(); | 442 NOTREACHED(); |
423 return std::unique_ptr<Buffer>(); | 443 return std::unique_ptr<Buffer>(); |
424 } | 444 } |
425 | 445 |
426 } // namespace content | 446 } // namespace content |