OLD | NEW |
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/capture/video/video_capture_device_client.h" | 5 #include "media/capture/video/video_capture_device_client.h" |
6 | 6 |
7 #include <algorithm> | 7 #include <algorithm> |
8 #include <utility> | 8 #include <utility> |
9 | 9 |
10 #include "base/bind.h" | 10 #include "base/bind.h" |
(...skipping 77 matching lines...)
88 | 88 |
89 VideoCaptureDeviceClient::~VideoCaptureDeviceClient() { | 89 VideoCaptureDeviceClient::~VideoCaptureDeviceClient() { |
90 // This should be on the platform auxiliary thread since | 90 // This should be on the platform auxiliary thread since |
91 // |external_jpeg_decoder_| needs to be destructed on the same thread as | 91 // |external_jpeg_decoder_| needs to be destructed on the same thread as |
92 // OnIncomingCapturedData. | 92 // OnIncomingCapturedData. |
93 } | 93 } |
94 | 94 |
95 void VideoCaptureDeviceClient::OnIncomingCapturedData( | 95 void VideoCaptureDeviceClient::OnIncomingCapturedData( |
96 const uint8_t* data, | 96 const uint8_t* data, |
97 int length, | 97 int length, |
98 const VideoCaptureFormat& frame_format, | 98 const VideoCaptureFormat& format, |
99 int rotation, | 99 int rotation, |
100 base::TimeTicks reference_time, | 100 base::TimeTicks reference_time, |
101 base::TimeDelta timestamp, | 101 base::TimeDelta timestamp, |
102 int frame_feedback_id) { | 102 int frame_feedback_id) { |
103 TRACE_EVENT0("video", "VideoCaptureDeviceClient::OnIncomingCapturedData"); | 103 TRACE_EVENT0("video", "VideoCaptureDeviceClient::OnIncomingCapturedData"); |
104 DCHECK_EQ(media::PIXEL_STORAGE_CPU, frame_format.pixel_storage); | 104 DCHECK_EQ(media::PIXEL_STORAGE_CPU, format.pixel_storage); |
105 | 105 |
106 if (last_captured_pixel_format_ != frame_format.pixel_format) { | 106 if (last_captured_pixel_format_ != format.pixel_format) { |
107 OnLog("Pixel format: " + | 107 OnLog("Pixel format: " + |
108 media::VideoPixelFormatToString(frame_format.pixel_format)); | 108 media::VideoPixelFormatToString(format.pixel_format)); |
109 last_captured_pixel_format_ = frame_format.pixel_format; | 109 last_captured_pixel_format_ = format.pixel_format; |
110 | 110 |
111 if (frame_format.pixel_format == media::PIXEL_FORMAT_MJPEG && | 111 if (format.pixel_format == media::PIXEL_FORMAT_MJPEG && |
112 !external_jpeg_decoder_initialized_) { | 112 !external_jpeg_decoder_initialized_) { |
113 external_jpeg_decoder_initialized_ = true; | 113 external_jpeg_decoder_initialized_ = true; |
114 external_jpeg_decoder_ = jpeg_decoder_factory_callback_.Run(); | 114 external_jpeg_decoder_ = jpeg_decoder_factory_callback_.Run(); |
115 external_jpeg_decoder_->Initialize(); | 115 external_jpeg_decoder_->Initialize(); |
116 } | 116 } |
117 } | 117 } |
118 | 118 |
119 if (!frame_format.IsValid()) | 119 if (!format.IsValid()) |
120 return; | 120 return; |
121 | 121 |
122 if (frame_format.pixel_format == media::PIXEL_FORMAT_Y16) { | 122 if (format.pixel_format == media::PIXEL_FORMAT_Y16) { |
123 return OnIncomingCapturedY16Data(data, length, frame_format, reference_time, | 123 return OnIncomingCapturedY16Data(data, length, format, reference_time, |
124 timestamp, frame_feedback_id); | 124 timestamp, frame_feedback_id); |
125 } | 125 } |
126 | 126 |
127 // |chopped_{width,height}| and |new_unrotated_{width,height}| are the lowest | 127 // |chopped_{width,height}| and |new_unrotated_{width,height}| are the lowest |
128 // bit decomposition of {width, height}, grabbing the odd and even parts. | 128 // bit decomposition of {width, height}, grabbing the odd and even parts. |
129 const int chopped_width = frame_format.frame_size.width() & 1; | 129 const int chopped_width = format.frame_size.width() & 1; |
130 const int chopped_height = frame_format.frame_size.height() & 1; | 130 const int chopped_height = format.frame_size.height() & 1; |
131 const int new_unrotated_width = frame_format.frame_size.width() & ~1; | 131 const int new_unrotated_width = format.frame_size.width() & ~1; |
132 const int new_unrotated_height = frame_format.frame_size.height() & ~1; | 132 const int new_unrotated_height = format.frame_size.height() & ~1; |
133 | 133 |
134 int destination_width = new_unrotated_width; | 134 int destination_width = new_unrotated_width; |
135 int destination_height = new_unrotated_height; | 135 int destination_height = new_unrotated_height; |
136 if (rotation == 90 || rotation == 270) | 136 if (rotation == 90 || rotation == 270) |
137 std::swap(destination_width, destination_height); | 137 std::swap(destination_width, destination_height); |
138 | 138 |
139 DCHECK_EQ(0, rotation % 90) << " Rotation must be a multiple of 90, now: " | 139 DCHECK_EQ(0, rotation % 90) << " Rotation must be a multiple of 90, now: " |
140 << rotation; | 140 << rotation; |
141 libyuv::RotationMode rotation_mode = libyuv::kRotate0; | 141 libyuv::RotationMode rotation_mode = libyuv::kRotate0; |
142 if (rotation == 90) | 142 if (rotation == 90) |
(...skipping 17 matching lines...)
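A note on the dimension handling in the hunk above: |chopped_{width,height}| hold the odd bit of each dimension (the row/column that gets dropped) and |new_unrotated_{width,height}| hold the even part, while a rotation of 90 or 270 degrees swaps the destination extents. A minimal standalone sketch of that arithmetic, using a made-up 641x481 input that is not taken from this CL:

#include <iostream>
#include <utility>

int main() {
  const int width = 641, height = 481;  // hypothetical capture size
  const int rotation = 90;              // must be a multiple of 90

  const int chopped_width = width & 1;           // 1: odd column dropped
  const int chopped_height = height & 1;         // 1: odd row dropped
  const int new_unrotated_width = width & ~1;    // 640: largest even width
  const int new_unrotated_height = height & ~1;  // 480: largest even height

  int destination_width = new_unrotated_width;
  int destination_height = new_unrotated_height;
  if (rotation == 90 || rotation == 270)
    std::swap(destination_width, destination_height);

  std::cout << "chopped: " << chopped_width << "x" << chopped_height
            << ", destination: " << destination_width << "x"
            << destination_height << "\n";  // chopped: 1x1, destination: 480x640
  return 0;
}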
160 if (!buffer.get()) | 160 if (!buffer.get()) |
161 return; | 161 return; |
162 | 162 |
163 const int yplane_stride = dimensions.width(); | 163 const int yplane_stride = dimensions.width(); |
164 const int uv_plane_stride = yplane_stride / 2; | 164 const int uv_plane_stride = yplane_stride / 2; |
165 int crop_x = 0; | 165 int crop_x = 0; |
166 int crop_y = 0; | 166 int crop_y = 0; |
167 libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY; | 167 libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY; |
168 | 168 |
169 bool flip = false; | 169 bool flip = false; |
170 switch (frame_format.pixel_format) { | 170 switch (format.pixel_format) { |
171 case media::PIXEL_FORMAT_UNKNOWN: // Color format not set. | 171 case media::PIXEL_FORMAT_UNKNOWN: // Color format not set. |
172 break; | 172 break; |
173 case media::PIXEL_FORMAT_I420: | 173 case media::PIXEL_FORMAT_I420: |
174 DCHECK(!chopped_width && !chopped_height); | 174 DCHECK(!chopped_width && !chopped_height); |
175 origin_colorspace = libyuv::FOURCC_I420; | 175 origin_colorspace = libyuv::FOURCC_I420; |
176 break; | 176 break; |
177 case media::PIXEL_FORMAT_YV12: | 177 case media::PIXEL_FORMAT_YV12: |
178 DCHECK(!chopped_width && !chopped_height); | 178 DCHECK(!chopped_width && !chopped_height); |
179 origin_colorspace = libyuv::FOURCC_YV12; | 179 origin_colorspace = libyuv::FOURCC_YV12; |
180 break; | 180 break; |
(...skipping 44 matching lines...)
225 break; | 225 break; |
226 case media::PIXEL_FORMAT_MJPEG: | 226 case media::PIXEL_FORMAT_MJPEG: |
227 origin_colorspace = libyuv::FOURCC_MJPG; | 227 origin_colorspace = libyuv::FOURCC_MJPG; |
228 break; | 228 break; |
229 default: | 229 default: |
230 NOTREACHED(); | 230 NOTREACHED(); |
231 } | 231 } |
232 | 232 |
233 // The input |length| can be greater than the required buffer size because of | 233 // The input |length| can be greater than the required buffer size because of |
234 // paddings and/or alignments, but it cannot be smaller. | 234 // paddings and/or alignments, but it cannot be smaller. |
235 DCHECK_GE(static_cast<size_t>(length), frame_format.ImageAllocationSize()); | 235 DCHECK_GE(static_cast<size_t>(length), format.ImageAllocationSize()); |
236 | 236 |
237 if (external_jpeg_decoder_) { | 237 if (external_jpeg_decoder_) { |
238 const VideoCaptureJpegDecoder::STATUS status = | 238 const VideoCaptureJpegDecoder::STATUS status = |
239 external_jpeg_decoder_->GetStatus(); | 239 external_jpeg_decoder_->GetStatus(); |
240 if (status == VideoCaptureJpegDecoder::FAILED) { | 240 if (status == VideoCaptureJpegDecoder::FAILED) { |
241 external_jpeg_decoder_.reset(); | 241 external_jpeg_decoder_.reset(); |
242 } else if (status == VideoCaptureJpegDecoder::INIT_PASSED && | 242 } else if (status == VideoCaptureJpegDecoder::INIT_PASSED && |
243 frame_format.pixel_format == media::PIXEL_FORMAT_MJPEG && | 243 format.pixel_format == media::PIXEL_FORMAT_MJPEG && |
244 rotation == 0 && !flip) { | 244 rotation == 0 && !flip) { |
245 external_jpeg_decoder_->DecodeCapturedData(data, length, frame_format, | 245 external_jpeg_decoder_->DecodeCapturedData( |
246 reference_time, timestamp, | 246 data, length, format, reference_time, timestamp, std::move(buffer)); |
247 std::move(buffer)); | |
248 return; | 247 return; |
249 } | 248 } |
250 } | 249 } |
251 | 250 |
252 if (libyuv::ConvertToI420(data, length, y_plane_data, yplane_stride, | 251 if (libyuv::ConvertToI420( |
253 u_plane_data, uv_plane_stride, v_plane_data, | 252 data, length, y_plane_data, yplane_stride, u_plane_data, |
254 uv_plane_stride, crop_x, crop_y, | 253 uv_plane_stride, v_plane_data, uv_plane_stride, crop_x, crop_y, |
255 frame_format.frame_size.width(), | 254 format.frame_size.width(), |
256 (flip ? -1 : 1) * frame_format.frame_size.height(), | 255 (flip ? -1 : 1) * format.frame_size.height(), new_unrotated_width, |
257 new_unrotated_width, new_unrotated_height, | 256 new_unrotated_height, rotation_mode, origin_colorspace) != 0) { |
258 rotation_mode, origin_colorspace) != 0) { | |
259 DLOG(WARNING) << "Failed to convert buffer's pixel format to I420 from " | 257 DLOG(WARNING) << "Failed to convert buffer's pixel format to I420 from " |
260 << media::VideoPixelFormatToString(frame_format.pixel_format); | 258 << media::VideoPixelFormatToString(format.pixel_format); |
261 return; | 259 return; |
262 } | 260 } |
263 | 261 |
264 const VideoCaptureFormat output_format = | 262 const VideoCaptureFormat output_format = |
265 VideoCaptureFormat(dimensions, frame_format.frame_rate, | 263 VideoCaptureFormat(dimensions, format.frame_rate, |
266 media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU); | 264 media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU); |
267 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, | 265 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, |
268 timestamp); | 266 timestamp); |
269 } | 267 } |
270 | 268 |
271 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> | 269 std::unique_ptr<media::VideoCaptureDevice::Client::Buffer> |
272 VideoCaptureDeviceClient::ReserveOutputBuffer( | 270 VideoCaptureDeviceClient::ReserveOutputBuffer( |
273 const gfx::Size& frame_size, | 271 const gfx::Size& frame_size, |
274 media::VideoPixelFormat pixel_format, | 272 media::VideoPixelFormat pixel_format, |
275 media::VideoPixelStorage pixel_storage, | 273 media::VideoPixelStorage pixel_storage, |
(...skipping 11 matching lines...)
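The ConvertToI420 call in the hunk above expresses a vertical flip by negating the source height, i.e. (flip ? -1 : 1) * format.frame_size.height(). A standalone sketch of that sign convention for a single 8-bit plane, assuming nothing from libyuv itself (CopyPlaneMaybeFlipped is a hypothetical helper written only for illustration):

#include <cstdint>
#include <cstdlib>
#include <cstring>
#include <vector>

// Copies a width x |src_height| plane row by row; a negative |src_height|
// means the source is read bottom-up, producing a vertically flipped copy.
void CopyPlaneMaybeFlipped(const uint8_t* src, int src_stride, int src_height,
                           uint8_t* dst, int dst_stride, int width) {
  const bool flip = src_height < 0;
  const int height = std::abs(src_height);
  for (int y = 0; y < height; ++y) {
    const int src_row = flip ? (height - 1 - y) : y;
    std::memcpy(dst + y * dst_stride, src + src_row * src_stride, width);
  }
}

int main() {
  const int width = 4, height = 3;
  std::vector<uint8_t> src(width * height), dst(width * height);
  for (int i = 0; i < width * height; ++i)
    src[i] = static_cast<uint8_t>(i);
  CopyPlaneMaybeFlipped(src.data(), width, -height, dst.data(), width, width);
  // dst now holds the rows of src in reverse order (bottom row first).
  return 0;
}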
287 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) | 285 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) |
288 receiver_->OnBufferDestroyed(buffer_id_to_drop); | 286 receiver_->OnBufferDestroyed(buffer_id_to_drop); |
289 if (buffer_id == VideoCaptureBufferPool::kInvalidId) | 287 if (buffer_id == VideoCaptureBufferPool::kInvalidId) |
290 return nullptr; | 288 return nullptr; |
291 return base::WrapUnique<Buffer>( | 289 return base::WrapUnique<Buffer>( |
292 new AutoReleaseBuffer(buffer_pool_, buffer_id, frame_feedback_id)); | 290 new AutoReleaseBuffer(buffer_pool_, buffer_id, frame_feedback_id)); |
293 } | 291 } |
294 | 292 |
295 void VideoCaptureDeviceClient::OnIncomingCapturedBuffer( | 293 void VideoCaptureDeviceClient::OnIncomingCapturedBuffer( |
296 std::unique_ptr<Buffer> buffer, | 294 std::unique_ptr<Buffer> buffer, |
297 const VideoCaptureFormat& frame_format, | 295 const VideoCaptureFormat& format, |
298 base::TimeTicks reference_time, | 296 base::TimeTicks reference_time, |
299 base::TimeDelta timestamp) { | 297 base::TimeDelta timestamp) { |
300 DCHECK(IsFormatSupported(frame_format.pixel_format)); | 298 DCHECK(IsFormatSupported(format.pixel_format)); |
301 DCHECK_EQ(media::PIXEL_STORAGE_CPU, frame_format.pixel_storage); | 299 DCHECK_EQ(media::PIXEL_STORAGE_CPU, format.pixel_storage); |
302 | 300 |
303 scoped_refptr<VideoFrame> frame; | 301 scoped_refptr<VideoFrame> frame; |
304 if (buffer->IsBackedByVideoFrame()) { | 302 if (buffer->IsBackedByVideoFrame()) { |
305 frame = buffer->GetVideoFrame(); | 303 frame = buffer->GetVideoFrame(); |
306 frame->set_timestamp(timestamp); | 304 frame->set_timestamp(timestamp); |
307 } else { | 305 } else { |
308 frame = VideoFrame::WrapExternalSharedMemory( | 306 frame = VideoFrame::WrapExternalSharedMemory( |
309 frame_format.pixel_format, frame_format.frame_size, | 307 format.pixel_format, format.frame_size, gfx::Rect(format.frame_size), |
310 gfx::Rect(frame_format.frame_size), frame_format.frame_size, | 308 format.frame_size, reinterpret_cast<uint8_t*>(buffer->data()), |
311 reinterpret_cast<uint8_t*>(buffer->data()), | 309 VideoFrame::AllocationSize(format.pixel_format, format.frame_size), |
312 VideoFrame::AllocationSize(frame_format.pixel_format, | |
313 frame_format.frame_size), | |
314 base::SharedMemory::NULLHandle(), 0u, timestamp); | 310 base::SharedMemory::NULLHandle(), 0u, timestamp); |
315 } | 311 } |
316 if (!frame) | 312 if (!frame) |
317 return; | 313 return; |
318 frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE, | 314 frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE, |
319 frame_format.frame_rate); | 315 format.frame_rate); |
320 frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME, | 316 frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME, |
321 reference_time); | 317 reference_time); |
322 OnIncomingCapturedVideoFrame(std::move(buffer), std::move(frame)); | 318 OnIncomingCapturedVideoFrame(std::move(buffer), std::move(frame)); |
323 } | 319 } |
324 | 320 |
325 void VideoCaptureDeviceClient::OnIncomingCapturedVideoFrame( | 321 void VideoCaptureDeviceClient::OnIncomingCapturedVideoFrame( |
326 std::unique_ptr<Buffer> buffer, | 322 std::unique_ptr<Buffer> buffer, |
327 scoped_refptr<VideoFrame> frame) { | 323 scoped_refptr<VideoFrame> frame) { |
328 receiver_->OnIncomingCapturedVideoFrame(std::move(buffer), std::move(frame)); | 324 receiver_->OnIncomingCapturedVideoFrame(std::move(buffer), std::move(frame)); |
329 } | 325 } |
(...skipping 58 matching lines...)
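The next hunk finishes ReserveI420OutputBuffer by pointing |u_plane_data| and |v_plane_data| directly after the Y plane inside one contiguous allocation. A minimal sketch of the I420 layout that arithmetic assumes, with strides equal to width and width / 2 and a made-up 640x480 size:

#include <cstddef>
#include <iostream>

int main() {
  const int width = 640, height = 480;  // assumed even dimensions

  const size_t y_size = static_cast<size_t>(width) * height;             // 1 byte per luma sample
  const size_t uv_size = static_cast<size_t>(width / 2) * (height / 2);  // chroma subsampled 2x2

  const size_t u_offset = y_size;             // U plane follows Y
  const size_t v_offset = y_size + uv_size;   // V plane follows U
  const size_t total = y_size + 2 * uv_size;  // == width * height * 3 / 2

  std::cout << "U at byte " << u_offset << ", V at byte " << v_offset
            << ", total " << total << " bytes\n";
  return 0;
}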
388 VideoFrame::PlaneSize(format, VideoFrame::kYPlane, dimensions).GetArea(); | 384 VideoFrame::PlaneSize(format, VideoFrame::kYPlane, dimensions).GetArea(); |
389 *v_plane_data = | 385 *v_plane_data = |
390 *u_plane_data + | 386 *u_plane_data + |
391 VideoFrame::PlaneSize(format, VideoFrame::kUPlane, dimensions).GetArea(); | 387 VideoFrame::PlaneSize(format, VideoFrame::kUPlane, dimensions).GetArea(); |
392 return buffer; | 388 return buffer; |
393 } | 389 } |
394 | 390 |
395 void VideoCaptureDeviceClient::OnIncomingCapturedY16Data( | 391 void VideoCaptureDeviceClient::OnIncomingCapturedY16Data( |
396 const uint8_t* data, | 392 const uint8_t* data, |
397 int length, | 393 int length, |
398 const VideoCaptureFormat& frame_format, | 394 const VideoCaptureFormat& format, |
399 base::TimeTicks reference_time, | 395 base::TimeTicks reference_time, |
400 base::TimeDelta timestamp, | 396 base::TimeDelta timestamp, |
401 int frame_feedback_id) { | 397 int frame_feedback_id) { |
402 std::unique_ptr<Buffer> buffer( | 398 std::unique_ptr<Buffer> buffer( |
403 ReserveOutputBuffer(frame_format.frame_size, media::PIXEL_FORMAT_Y16, | 399 ReserveOutputBuffer(format.frame_size, media::PIXEL_FORMAT_Y16, |
404 media::PIXEL_STORAGE_CPU, frame_feedback_id)); | 400 media::PIXEL_STORAGE_CPU, frame_feedback_id)); |
405 // The input |length| can be greater than the required buffer size because of | 401 // The input |length| can be greater than the required buffer size because of |
406 // paddings and/or alignments, but it cannot be smaller. | 402 // paddings and/or alignments, but it cannot be smaller. |
407 DCHECK_GE(static_cast<size_t>(length), frame_format.ImageAllocationSize()); | 403 DCHECK_GE(static_cast<size_t>(length), format.ImageAllocationSize()); |
408 #if DCHECK_IS_ON() | 404 #if DCHECK_IS_ON() |
409 dropped_frame_counter_ = buffer.get() ? 0 : dropped_frame_counter_ + 1; | 405 dropped_frame_counter_ = buffer.get() ? 0 : dropped_frame_counter_ + 1; |
410 if (dropped_frame_counter_ >= kMaxDroppedFrames) | 406 if (dropped_frame_counter_ >= kMaxDroppedFrames) |
411 OnError(FROM_HERE, "Too many frames dropped"); | 407 OnError(FROM_HERE, "Too many frames dropped"); |
412 #endif | 408 #endif |
413 // Failed to reserve output buffer, so drop the frame. | 409 // Failed to reserve output buffer, so drop the frame. |
414 if (!buffer.get()) | 410 if (!buffer.get()) |
415 return; | 411 return; |
416 memcpy(buffer->data(), data, length); | 412 memcpy(buffer->data(), data, length); |
417 const VideoCaptureFormat output_format = | 413 const VideoCaptureFormat output_format = |
418 VideoCaptureFormat(frame_format.frame_size, frame_format.frame_rate, | 414 VideoCaptureFormat(format.frame_size, format.frame_rate, |
419 media::PIXEL_FORMAT_Y16, media::PIXEL_STORAGE_CPU); | 415 media::PIXEL_FORMAT_Y16, media::PIXEL_STORAGE_CPU); |
420 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, | 416 OnIncomingCapturedBuffer(std::move(buffer), output_format, reference_time, |
421 timestamp); | 417 timestamp); |
422 } | 418 } |
423 | 419 |
424 } // namespace media | 420 } // namespace media |
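In OnIncomingCapturedY16Data above, Y16 carries 2 bytes per pixel and no chroma planes, so the DCHECK_GE only asserts that the incoming buffer is at least that big; drivers may pad for alignment, which is why |length| is allowed to exceed the required size. A minimal sketch of that check, with a made-up 640x480 frame and 64 bytes of padding:

#include <cassert>
#include <cstddef>

int main() {
  const int width = 640, height = 480;
  const size_t required = static_cast<size_t>(width) * height * 2;  // Y16: 2 bytes/pixel

  const size_t incoming_length = required + 64;  // padded buffer from the driver
  assert(incoming_length >= required);           // may be larger, never smaller
  return 0;
}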