OLD | NEW |
---|---|
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/browser/renderer_host/media/video_capture_controller.h" | 5 #include "content/browser/renderer_host/media/video_capture_controller.h" |
6 | 6 |
7 #include <set> | 7 #include <set> |
8 | 8 |
9 #include "base/bind.h" | 9 #include "base/bind.h" |
10 #include "base/debug/trace_event.h" | 10 #include "base/debug/trace_event.h" |
11 #include "base/memory/scoped_ptr.h" | 11 #include "base/memory/scoped_ptr.h" |
12 #include "base/stl_util.h" | 12 #include "base/stl_util.h" |
13 #include "content/browser/renderer_host/media/media_stream_manager.h" | 13 #include "content/browser/renderer_host/media/media_stream_manager.h" |
14 #include "content/browser/renderer_host/media/video_capture_manager.h" | 14 #include "content/browser/renderer_host/media/video_capture_manager.h" |
15 #include "content/public/browser/browser_thread.h" | 15 #include "content/public/browser/browser_thread.h" |
16 #include "media/base/video_frame.h" | 16 #include "media/base/video_frame.h" |
17 #include "media/base/video_util.h" | 17 #include "media/base/video_util.h" |
18 #include "media/base/yuv_convert.h" | 18 #include "media/base/yuv_convert.h" |
19 | 19 |
20 #if !defined(OS_IOS) && !defined(OS_ANDROID) | 20 #if !defined(OS_IOS) && !defined(OS_ANDROID) |
21 #include "third_party/libyuv/include/libyuv.h" | 21 #include "third_party/libyuv/include/libyuv.h" |
22 #endif | 22 #endif |
23 | 23 |
24 namespace { | 24 namespace { |
25 | 25 |
26 #if defined(OS_IOS) || defined(OS_ANDROID) | |
26 // TODO(wjia): Support stride. | 27 // TODO(wjia): Support stride. |
27 void RotatePackedYV12Frame( | 28 void RotatePackedYV12Frame( |
28 const uint8* src, | 29 const uint8* src, |
29 uint8* dest_yplane, | 30 uint8* dest_yplane, |
30 uint8* dest_uplane, | 31 uint8* dest_uplane, |
31 uint8* dest_vplane, | 32 uint8* dest_vplane, |
32 int width, | 33 int width, |
33 int height, | 34 int height, |
34 int rotation, | 35 int rotation, |
35 bool flip_vert, | 36 bool flip_vert, |
36 bool flip_horiz) { | 37 bool flip_horiz) { |
37 media::RotatePlaneByPixels( | 38 media::RotatePlaneByPixels( |
38 src, dest_yplane, width, height, rotation, flip_vert, flip_horiz); | 39 src, dest_yplane, width, height, rotation, flip_vert, flip_horiz); |
39 int y_size = width * height; | 40 int y_size = width * height; |
40 src += y_size; | 41 src += y_size; |
41 media::RotatePlaneByPixels( | 42 media::RotatePlaneByPixels( |
42 src, dest_uplane, width/2, height/2, rotation, flip_vert, flip_horiz); | 43 src, dest_uplane, width/2, height/2, rotation, flip_vert, flip_horiz); |
43 src += y_size/4; | 44 src += y_size/4; |
44 media::RotatePlaneByPixels( | 45 media::RotatePlaneByPixels( |
45 src, dest_vplane, width/2, height/2, rotation, flip_vert, flip_horiz); | 46 src, dest_vplane, width/2, height/2, rotation, flip_vert, flip_horiz); |
46 } | 47 } |
48 #endif // #if defined(OS_IOS) || defined(OS_ANDROID) | |
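For context, RotatePackedYV12Frame above assumes a packed source in which a full-resolution Y plane is followed by two quarter-size chroma planes, which is why the source pointer advances by width * height and then by width * height / 4. Below is a minimal sketch of how a caller in this file might size an I420-style destination and derive the three plane pointers; the wrapper name and the std::vector destination are hypothetical and not part of this CL (would also require <vector>):

```cpp
// Illustrative sketch only, not part of the CL.
void RotateIntoPlanarBuffer(const uint8* packed_src,
                            int width, int height,
                            int rotation, bool flip_vert, bool flip_horiz,
                            std::vector<uint8>* dest) {
  const int y_size = width * height;   // full-resolution luma plane
  const int uv_size = y_size / 4;      // each chroma plane is quarter size
  dest->resize(y_size + 2 * uv_size);  // 12 bits per pixel in total
  uint8* dest_y = &(*dest)[0];
  uint8* dest_u = dest_y + y_size;
  uint8* dest_v = dest_u + uv_size;
  RotatePackedYV12Frame(packed_src, dest_y, dest_u, dest_v,
                        width, height, rotation, flip_vert, flip_horiz);
}
```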
47 | 49 |
48 } // namespace | 50 } // namespace |
49 | 51 |
50 namespace content { | 52 namespace content { |
51 | 53 |
52 // The number of buffers that VideoCaptureBufferPool should allocate. | 54 // The number of buffers that VideoCaptureBufferPool should allocate. |
53 static const int kNoOfBuffers = 3; | 55 static const int kNoOfBuffers = 3; |
54 | 56 |
55 struct VideoCaptureController::ControllerClient { | 57 struct VideoCaptureController::ControllerClient { |
56 ControllerClient( | 58 ControllerClient( |
(...skipping 194 matching lines...) | |
251 if (!buffer_pool_.get()) | 253 if (!buffer_pool_.get()) |
252 return NULL; | 254 return NULL; |
253 return buffer_pool_->ReserveI420VideoFrame(gfx::Size(frame_info_.width, | 255 return buffer_pool_->ReserveI420VideoFrame(gfx::Size(frame_info_.width, |
254 frame_info_.height), | 256 frame_info_.height), |
255 0); | 257 0); |
256 } | 258 } |
257 | 259 |
258 // Implements VideoCaptureDevice::EventHandler. | 260 // Implements VideoCaptureDevice::EventHandler. |
259 // OnIncomingCapturedFrame is called on the thread running the capture device. | 261 // OnIncomingCapturedFrame is called on the thread running the capture device. |
260 // I.e., the DirectShow thread on Windows and v4l2_thread on Linux. | 262 // I.e., the DirectShow thread on Windows and v4l2_thread on Linux. |
263 #if !defined(OS_IOS) && !defined(OS_ANDROID) | |
261 void VideoCaptureController::OnIncomingCapturedFrame( | 264 void VideoCaptureController::OnIncomingCapturedFrame( |
262 const uint8* data, | 265 const uint8* data, |
263 int length, | 266 int length, |
267 base::Time timestamp, | |
268 int rotation, | |
269 bool flip_vert, | |
270 bool flip_horiz) { | |
271 TRACE_EVENT0("video", "VideoCaptureController::OnIncomingCapturedFrame"); | |
272 | |
273 scoped_refptr<media::VideoFrame> dst; | |
274 { | |
275 base::AutoLock lock(buffer_pool_lock_); | |
276 if (!buffer_pool_.get()) | |
277 return; | |
278 dst = buffer_pool_->ReserveI420VideoFrame( | |
279 gfx::Size(frame_info_.width, frame_info_.height), rotation); | |
280 } | |
281 | |
282 if (!dst.get()) | |
283 return; | |
284 | |
285 uint8* yplane = dst->data(media::VideoFrame::kYPlane); | |
286 uint8* uplane = dst->data(media::VideoFrame::kUPlane); | |
287 uint8* vplane = dst->data(media::VideoFrame::kVPlane); | |
288 int yplane_stride = frame_info_.width; | |
289 int uv_plane_stride = (frame_info_.width + 1) / 2; | |
290 int crop_x = 0; | |
291 int crop_y = 0; | |
292 libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY; | |
293 // Assuming rotation happens first and flipping next, we can consolidate both | |
294 // vertical and horizontal flips together with rotation into two variables: | |
295 // new_rotation = (rotation + 180 * vertical_flip) modulo 360 | |
296 // new_vertical_flip = horizontal_flip XOR vertical_flip | |
297 int new_rotation_angle = (rotation + 180 * flip_vert) % 360; | |
298 libyuv::RotationMode rotation_mode = libyuv::kRotate0; | |
299 if (new_rotation_angle == 90) | |
300 rotation_mode = libyuv::kRotate90; | |
301 else if (new_rotation_angle == 180) | |
302 rotation_mode = libyuv::kRotate180; | |
303 else if (new_rotation_angle == 270) | |
304 rotation_mode = libyuv::kRotate270; | |
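To make the consolidation in the comment above concrete: rotation = 90 with flip_vert = true and flip_horiz = false gives new_rotation = (90 + 180) % 360 = 270, and the remaining flip is the XOR of the two flips. A hedged sketch of the same mapping as a standalone helper (the function name is hypothetical, not part of this CL):

```cpp
// Sketch only, not part of the CL: mirrors the rotation/flip consolidation
// performed inline above.
libyuv::RotationMode ConsolidateRotation(int rotation,
                                         bool flip_vert,
                                         bool flip_horiz,
                                         bool* remaining_vert_flip) {
  const int new_rotation = (rotation + (flip_vert ? 180 : 0)) % 360;
  *remaining_vert_flip = flip_vert ^ flip_horiz;  // XOR of the two flips
  switch (new_rotation) {
    case 90:  return libyuv::kRotate90;
    case 180: return libyuv::kRotate180;
    case 270: return libyuv::kRotate270;
    default:  return libyuv::kRotate0;
  }
}
```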
305 | |
306 switch (frame_info_.color) { | |
307 case media::VideoCaptureCapability::kColorUnknown: // Color format not set. | |
308 break; | |
309 case media::VideoCaptureCapability::kI420: | |
310 DCHECK(!chopped_width_ && !chopped_height_); | |
311 origin_colorspace = libyuv::FOURCC_I420; | |
312 break; | |
313 case media::VideoCaptureCapability::kYV12: | |
314 DCHECK(!chopped_width_ && !chopped_height_); | |
315 origin_colorspace = libyuv::FOURCC_YV12; | |
316 break; | |
317 case media::VideoCaptureCapability::kNV21: | |
318 DCHECK(!chopped_width_ && !chopped_height_); | |
319 origin_colorspace = libyuv::FOURCC_NV12; | |
320 break; | |
321 case media::VideoCaptureCapability::kYUY2: | |
322 DCHECK(!chopped_width_ && !chopped_height_); | |
323 origin_colorspace = libyuv::FOURCC_YUY2; | |
324 break; | |
325 case media::VideoCaptureCapability::kRGB24: | |
326 origin_colorspace = libyuv::FOURCC_RAW; | |
327 break; | |
328 case media::VideoCaptureCapability::kARGB: | |
329 origin_colorspace = libyuv::FOURCC_ARGB; | |
330 break; | |
331 case media::VideoCaptureCapability::kMJPEG: | |
332 origin_colorspace = libyuv::FOURCC_MJPG; | |
333 break; | |
334 default: | |
335 NOTREACHED(); | |
336 } | |
337 #if defined(OS_WIN) | |
338 // kRGB24 on Windows starts at the bottom line and has a negative stride. This | |
339 // is not supported by libyuv, so the media API is used instead. | |
340 // Rotation and flipping are not supported in this case. | |
341 if (frame_info_.color == media::VideoCaptureCapability::kRGB24) { | |
342 int rgb_stride = -3 * (frame_info_.width + chopped_width_); | |
343 const uint8* rgb_src = | |
344 data + 3 * (frame_info_.width + chopped_width_) * | |
345 (frame_info_.height - 1 + chopped_height_); | |
346 media::ConvertRGB24ToYUV(rgb_src, | |
347 yplane, | |
348 uplane, | |
349 vplane, | |
350 frame_info_.width, | |
351 frame_info_.height, | |
352 rgb_stride, | |
353 yplane_stride, | |
354 uv_plane_stride); | |
355 } | |
356 #else | |
357 if (false) {} | |
358 #endif | |
359 else { | |
wjia (left Chromium) 2013/09/06 17:27:36
"if (false)" looks a bit weird. How about the foll...
mcasas 2013/09/06 17:46:42
Yes I also thought about adding a flag for this pa...
360 libyuv::ConvertToI420( | |
361 data, | |
362 length, | |
363 yplane, | |
364 yplane_stride, | |
365 uplane, | |
366 uv_plane_stride, | |
367 vplane, | |
368 uv_plane_stride, | |
369 crop_x, | |
370 crop_y, | |
371 frame_info_.width, | |
372 frame_info_.height * (flip_vert ^ flip_horiz ? -1 : 1), | |
373 frame_info_.width, | |
374 frame_info_.height, | |
375 rotation_mode, | |
376 origin_colorspace); | |
377 } | |
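The OS_WIN kRGB24 branch above relies on the usual trick for bottom-up RGB bitmaps: point the source at the last stored row and pass a negative stride so every conversion step walks one row up the buffer. A minimal sketch of that arithmetic with a hypothetical wrapper and parameter names (only media::ConvertRGB24ToYUV comes from this file's includes):

```cpp
// Sketch only, not part of the CL: bottom-up RGB24 to I420 via negative stride.
void ConvertBottomUpRGB24ToI420(const uint8* data,
                                int width, int height,
                                int chopped_width, int chopped_height,
                                uint8* yplane, uint8* uplane, uint8* vplane) {
  const int bytes_per_row = 3 * (width + chopped_width);  // packed 24-bit RGB
  const uint8* rgb_src =
      data + bytes_per_row * (height - 1 + chopped_height);  // last stored row
  const int rgb_stride = -bytes_per_row;  // step one row *up* per output row
  media::ConvertRGB24ToYUV(rgb_src, yplane, uplane, vplane,
                           width, height, rgb_stride,
                           width /* y stride */, (width + 1) / 2 /* uv stride */);
}
```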
378 BrowserThread::PostTask( | |
379 BrowserThread::IO, | |
380 FROM_HERE, | |
381 base::Bind(&VideoCaptureController::DoIncomingCapturedFrameOnIOThread, | |
382 this, | |
383 dst, | |
384 timestamp)); | |
385 } | |
386 #else | |
387 void VideoCaptureController::OnIncomingCapturedFrame( | |
388 const uint8* data, | |
389 int length, | |
264 base::Time timestamp, | 390 base::Time timestamp, |
265 int rotation, | 391 int rotation, |
266 bool flip_vert, | 392 bool flip_vert, |
267 bool flip_horiz) { | 393 bool flip_horiz) { |
268 DCHECK(frame_info_.color == media::VideoCaptureCapability::kI420 || | 394 DCHECK(frame_info_.color == media::VideoCaptureCapability::kI420 || |
269 frame_info_.color == media::VideoCaptureCapability::kYV12 || | 395 frame_info_.color == media::VideoCaptureCapability::kYV12 || |
270 (rotation == 0 && !flip_vert && !flip_horiz)); | 396 (rotation == 0 && !flip_vert && !flip_horiz)); |
271 | 397 |
272 TRACE_EVENT0("video", "VideoCaptureController::OnIncomingCapturedFrame"); | 398 TRACE_EVENT0("video", "VideoCaptureController::OnIncomingCapturedFrame"); |
273 | 399 |
(...skipping 35 matching lines...) | |
309 media::ConvertNV21ToYUV(data, yplane, uplane, vplane, frame_info_.width, | 435 media::ConvertNV21ToYUV(data, yplane, uplane, vplane, frame_info_.width, |
310 frame_info_.height); | 436 frame_info_.height); |
311 break; | 437 break; |
312 case media::VideoCaptureCapability::kYUY2: | 438 case media::VideoCaptureCapability::kYUY2: |
313 DCHECK(!chopped_width_ && !chopped_height_); | 439 DCHECK(!chopped_width_ && !chopped_height_); |
314 if (frame_info_.width * frame_info_.height * 2 != length) { | 440 if (frame_info_.width * frame_info_.height * 2 != length) { |
315 // If |length| of |data| does not match the expected width and height, | 441 // If |length| of |data| does not match the expected width and height, |
316 // we can't convert the frame to I420. YUY2 is 2 bytes per pixel. | 442 // we can't convert the frame to I420. YUY2 is 2 bytes per pixel. |
317 break; | 443 break; |
318 } | 444 } |
319 | |
320 media::ConvertYUY2ToYUV(data, yplane, uplane, vplane, frame_info_.width, | 445 media::ConvertYUY2ToYUV(data, yplane, uplane, vplane, frame_info_.width, |
321 frame_info_.height); | 446 frame_info_.height); |
322 break; | 447 break; |
323 case media::VideoCaptureCapability::kRGB24: { | 448 case media::VideoCaptureCapability::kRGB24: { |
324 int ystride = frame_info_.width; | 449 int ystride = frame_info_.width; |
325 int uvstride = frame_info_.width / 2; | 450 int uvstride = frame_info_.width / 2; |
326 #if defined(OS_WIN) // RGB on Windows starts at the bottom line. | |
327 int rgb_stride = -3 * (frame_info_.width + chopped_width_); | |
328 const uint8* rgb_src = data + 3 * (frame_info_.width + chopped_width_) * | |
329 (frame_info_.height -1 + chopped_height_); | |
330 #else | |
331 int rgb_stride = 3 * (frame_info_.width + chopped_width_); | 451 int rgb_stride = 3 * (frame_info_.width + chopped_width_); |
332 const uint8* rgb_src = data; | 452 const uint8* rgb_src = data; |
333 #endif | |
334 media::ConvertRGB24ToYUV(rgb_src, yplane, uplane, vplane, | 453 media::ConvertRGB24ToYUV(rgb_src, yplane, uplane, vplane, |
335 frame_info_.width, frame_info_.height, | 454 frame_info_.width, frame_info_.height, |
336 rgb_stride, ystride, uvstride); | 455 rgb_stride, ystride, uvstride); |
337 break; | 456 break; |
338 } | 457 } |
339 case media::VideoCaptureCapability::kARGB: | 458 case media::VideoCaptureCapability::kARGB: |
340 media::ConvertRGB32ToYUV(data, yplane, uplane, vplane, frame_info_.width, | 459 media::ConvertRGB32ToYUV(data, yplane, uplane, vplane, frame_info_.width, |
341 frame_info_.height, | 460 frame_info_.height, |
342 (frame_info_.width + chopped_width_) * 4, | 461 (frame_info_.width + chopped_width_) * 4, |
343 frame_info_.width, frame_info_.width / 2); | 462 frame_info_.width, frame_info_.width / 2); |
344 break; | 463 break; |
345 #if !defined(OS_IOS) && !defined(OS_ANDROID) | |
346 case media::VideoCaptureCapability::kMJPEG: { | |
347 int yplane_stride = frame_info_.width; | |
348 int uv_plane_stride = (frame_info_.width + 1) / 2; | |
349 int crop_x = 0; | |
350 int crop_y = 0; | |
351 libyuv::ConvertToI420(data, length, yplane, yplane_stride, uplane, | |
352 uv_plane_stride, vplane, uv_plane_stride, crop_x, | |
353 crop_y, frame_info_.width, frame_info_.height, | |
354 frame_info_.width, frame_info_.height, | |
355 libyuv::kRotate0, libyuv::FOURCC_MJPG); | |
356 break; | |
357 } | |
358 #endif | |
359 default: | 464 default: |
360 NOTREACHED(); | 465 NOTREACHED(); |
361 } | 466 } |
362 | 467 |
363 BrowserThread::PostTask(BrowserThread::IO, | 468 BrowserThread::PostTask(BrowserThread::IO, |
364 FROM_HERE, | 469 FROM_HERE, |
365 base::Bind(&VideoCaptureController::DoIncomingCapturedFrameOnIOThread, | 470 base::Bind(&VideoCaptureController::DoIncomingCapturedFrameOnIOThread, |
366 this, dst, timestamp)); | 471 this, dst, timestamp)); |
367 } | 472 } |
473 #endif // #if !defined(OS_IOS) && !defined(OS_ANDROID) | |
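For reference, the size checks in the two code paths above (for example, the YUY2 branch expects width * height * 2 bytes) follow directly from the bits per pixel of each uncompressed format, ignoring the chopped_width_/chopped_height_ padding handled in the real code. A hedged sketch follows; the helper and the enum type name Format are assumptions, only the enumerator values appear in this file:

```cpp
// Illustrative only, not part of the CL. E.g. a 640x480 YUY2 frame is
// 640 * 480 * 2 = 614400 bytes.
int ExpectedFrameBytes(media::VideoCaptureCapability::Format format,
                       int width, int height) {
  switch (format) {
    case media::VideoCaptureCapability::kI420:
    case media::VideoCaptureCapability::kYV12:
    case media::VideoCaptureCapability::kNV21:
      return width * height * 3 / 2;  // 12 bpp planar YUV
    case media::VideoCaptureCapability::kYUY2:
      return width * height * 2;      // 16 bpp packed YUV
    case media::VideoCaptureCapability::kRGB24:
      return width * height * 3;      // 24 bpp packed RGB
    case media::VideoCaptureCapability::kARGB:
      return width * height * 4;      // 32 bpp packed ARGB
    default:
      return 0;                       // compressed (kMJPEG) or unknown
  }
}
```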
368 | 474 |
369 // OnIncomingCapturedVideoFrame is called on the thread running the capture device. | 475 // OnIncomingCapturedVideoFrame is called on the thread running the capture device. |
370 void VideoCaptureController::OnIncomingCapturedVideoFrame( | 476 void VideoCaptureController::OnIncomingCapturedVideoFrame( |
371 const scoped_refptr<media::VideoFrame>& frame, | 477 const scoped_refptr<media::VideoFrame>& frame, |
372 base::Time timestamp) { | 478 base::Time timestamp) { |
373 | 479 |
374 scoped_refptr<media::VideoFrame> target; | 480 scoped_refptr<media::VideoFrame> target; |
375 { | 481 { |
376 base::AutoLock lock(buffer_pool_lock_); | 482 base::AutoLock lock(buffer_pool_lock_); |
377 | 483 |
(...skipping 351 matching lines...) | |
729 controller_clients_.push_back((*client_it)); | 835 controller_clients_.push_back((*client_it)); |
730 pending_clients_.erase(client_it++); | 836 pending_clients_.erase(client_it++); |
731 } | 837 } |
732 // Request the manager to start the actual capture. | 838 // Request the manager to start the actual capture. |
733 video_capture_manager_->Start(current_params_, this); | 839 video_capture_manager_->Start(current_params_, this); |
734 state_ = VIDEO_CAPTURE_STATE_STARTED; | 840 state_ = VIDEO_CAPTURE_STATE_STARTED; |
735 device_in_use_ = true; | 841 device_in_use_ = true; |
736 } | 842 } |
737 | 843 |
738 } // namespace content | 844 } // namespace content |