OLD | NEW |
---|---|
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | 2 // found in the LICENSE file. |
4 | 3 |
5 #include "content/browser/renderer_host/media/video_capture_controller.h" | 4 #include "content/browser/renderer_host/media/video_capture_controller.h" |
6 | 5 |
7 #include <set> | 6 #include <set> |
8 | 7 |
9 #include "base/bind.h" | 8 #include "base/bind.h" |
10 #include "base/debug/trace_event.h" | 9 #include "base/debug/trace_event.h" |
11 #include "base/memory/scoped_ptr.h" | |
12 #include "base/stl_util.h" | 10 #include "base/stl_util.h" |
13 #include "content/browser/renderer_host/media/media_stream_manager.h" | 11 #include "content/browser/renderer_host/media/media_stream_manager.h" |
14 #include "content/browser/renderer_host/media/video_capture_manager.h" | 12 #include "content/browser/renderer_host/media/video_capture_manager.h" |
15 #include "content/public/browser/browser_thread.h" | 13 #include "content/public/browser/browser_thread.h" |
16 #include "media/base/video_frame.h" | 14 #include "media/base/video_frame.h" |
17 #include "media/base/video_util.h" | 15 #include "media/base/video_util.h" |
18 #include "media/base/yuv_convert.h" | 16 #include "media/base/yuv_convert.h" |
19 | 17 |
20 #if !defined(AVOID_LIBYUV_FOR_ANDROID_WEBVIEW) | 18 #if !defined(AVOID_LIBYUV_FOR_ANDROID_WEBVIEW) |
21 #include "third_party/libyuv/include/libyuv.h" | 19 #include "third_party/libyuv/include/libyuv.h" |
22 #endif | 20 #endif |
23 | 21 |
24 using media::VideoCaptureCapability; | 22 using media::VideoCaptureCapability; |
25 | 23 |
26 namespace content { | 24 namespace content { |
27 | 25 |
26 namespace { | |
27 | |
28 // The number of buffers that VideoCaptureBufferPool should allocate. | 28 // The number of buffers that VideoCaptureBufferPool should allocate. |
29 static const int kNoOfBuffers = 3; | 29 const int kNoOfBuffers = 3; |
30 | |
31 class PoolBuffer : public media::VideoCaptureDevice::Client::Buffer { | |
32 public: | |
33 PoolBuffer(const scoped_refptr<VideoCaptureBufferPool>& pool, | |
34 int buffer_id, | |
35 void* data, | |
36 size_t size) | |
37 : Buffer(buffer_id, data, size), pool_(pool) {} | |
wjia (left Chromium), 2013/11/19 01:47:35: The |pool| shouldn't be NULL pointer here. It's us
sheu, 2013/11/19 20:28:17: Done.
| |
38 | |
39 private: | |
40 virtual ~PoolBuffer() { pool_->RelinquishProducerReservation(id()); } | |
41 | |
42 const scoped_refptr<VideoCaptureBufferPool> pool_; | |
43 }; | |
44 | |
45 } // anonymous namespace | |
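The PoolBuffer helper above ties a pool reservation to the buffer's reference count: whichever side drops the last scoped_refptr triggers ~PoolBuffer(), which hands the slot back to the pool. A minimal sketch of that lifetime, using only the pool calls that appear later in this file (the literal frame size is illustrative):

// Sketch only: shows when RelinquishProducerReservation() actually runs.
void PoolBufferLifetimeSketch(
    const scoped_refptr<VideoCaptureBufferPool>& pool) {
  int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId;
  const int id = pool->ReserveForProducer(640 * 480 * 3 / 2,  // I420 bytes.
                                          &buffer_id_to_drop);
  if (id == VideoCaptureBufferPool::kInvalidId)
    return;  // Pool exhausted; the frame is dropped.
  void* data = NULL;
  size_t size = 0;
  pool->GetBufferInfo(id, &data, &size);
  {
    scoped_refptr<media::VideoCaptureDevice::Client::Buffer> buffer(
        new PoolBuffer(pool, id, data, size));
    // ... fill buffer->data() and pass it around by scoped_refptr ...
  }  // Last reference released: ~PoolBuffer() relinquishes the reservation.
}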
30 | 46 |
31 struct VideoCaptureController::ControllerClient { | 47 struct VideoCaptureController::ControllerClient { |
32 ControllerClient( | 48 ControllerClient( |
33 const VideoCaptureControllerID& id, | 49 const VideoCaptureControllerID& id, |
34 VideoCaptureControllerEventHandler* handler, | 50 VideoCaptureControllerEventHandler* handler, |
35 base::ProcessHandle render_process, | 51 base::ProcessHandle render_process, |
36 const media::VideoCaptureParams& params) | 52 const media::VideoCaptureParams& params) |
37 : controller_id(id), | 53 : controller_id(id), |
38 event_handler(handler), | 54 event_handler(handler), |
39 render_process_handle(render_process), | 55 render_process_handle(render_process), |
(...skipping 40 matching lines...)
80 // v4l2_thread on Linux, and the UI thread for tab capture. | 96 // v4l2_thread on Linux, and the UI thread for tab capture. |
81 class VideoCaptureController::VideoCaptureDeviceClient | 97 class VideoCaptureController::VideoCaptureDeviceClient |
82 : public media::VideoCaptureDevice::Client { | 98 : public media::VideoCaptureDevice::Client { |
83 public: | 99 public: |
84 explicit VideoCaptureDeviceClient( | 100 explicit VideoCaptureDeviceClient( |
85 const base::WeakPtr<VideoCaptureController>& controller, | 101 const base::WeakPtr<VideoCaptureController>& controller, |
86 const scoped_refptr<VideoCaptureBufferPool>& buffer_pool); | 102 const scoped_refptr<VideoCaptureBufferPool>& buffer_pool); |
87 virtual ~VideoCaptureDeviceClient(); | 103 virtual ~VideoCaptureDeviceClient(); |
88 | 104 |
89 // VideoCaptureDevice::Client implementation. | 105 // VideoCaptureDevice::Client implementation. |
90 virtual scoped_refptr<media::VideoFrame> ReserveOutputBuffer( | 106 virtual scoped_refptr<Buffer> ReserveOutputBuffer( |
107 media::VideoFrame::Format format, | |
91 const gfx::Size& size) OVERRIDE; | 108 const gfx::Size& size) OVERRIDE; |
92 virtual void OnIncomingCapturedFrame( | 109 virtual void OnIncomingCapturedFrame(const uint8* data, |
93 const uint8* data, | 110 int length, |
94 int length, | 111 base::Time timestamp, |
95 base::Time timestamp, | 112 int rotation, |
96 int rotation, | 113 bool flip_vert, |
97 bool flip_vert, | 114 bool flip_horiz, |
98 bool flip_horiz, | 115 const VideoCaptureCapability& frame_info) |
99 const VideoCaptureCapability& frame_info) OVERRIDE; | 116 OVERRIDE; |
100 virtual void OnIncomingCapturedVideoFrame( | 117 virtual void OnIncomingCapturedBuffer(const scoped_refptr<Buffer>& buffer, |
101 const scoped_refptr<media::VideoFrame>& frame, | 118 media::VideoFrame::Format format, |
102 base::Time timestamp, | 119 const gfx::Size& dimensions, |
103 int frame_rate) OVERRIDE; | 120 base::Time timestamp, |
121 int frame_rate) OVERRIDE; | |
104 virtual void OnError() OVERRIDE; | 122 virtual void OnError() OVERRIDE; |
105 | 123 |
106 private: | 124 private: |
107 scoped_refptr<media::VideoFrame> DoReserveI420VideoFrame( | 125 scoped_refptr<Buffer> DoReserveOutputBuffer(media::VideoFrame::Format format, |
108 const gfx::Size& size, | 126 const gfx::Size& dimensions, |
109 int rotation); | 127 int rotation); |
110 | 128 |
111 // The controller to which we post events. | 129 // The controller to which we post events. |
112 const base::WeakPtr<VideoCaptureController> controller_; | 130 const base::WeakPtr<VideoCaptureController> controller_; |
113 | 131 |
114 // The pool of shared-memory buffers used for capturing. | 132 // The pool of shared-memory buffers used for capturing. |
115 const scoped_refptr<VideoCaptureBufferPool> buffer_pool_; | 133 const scoped_refptr<VideoCaptureBufferPool> buffer_pool_; |
134 | |
135 // The set of buffers that have been used for rotated capturing. | |
136 std::set<int> rotated_buffers_; | |
116 }; | 137 }; |
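For orientation, the reworked Client interface gives a capture device two paths: packed frames go through OnIncomingCapturedFrame() and are converted below, while a device that already produces I420 reserves a buffer and returns it via OnIncomingCapturedBuffer(). A rough sketch of the second path, where FillI420() stands in for whatever device-specific copy actually happens:

// Illustration only; FillI420() is hypothetical device code.
void DeviceSideSketch(media::VideoCaptureDevice::Client* client,
                      const gfx::Size& dimensions,
                      int frame_rate) {
  scoped_refptr<media::VideoCaptureDevice::Client::Buffer> buffer =
      client->ReserveOutputBuffer(media::VideoFrame::I420, dimensions);
  if (!buffer)
    return;  // No free buffer in the pool: drop this frame.
  FillI420(buffer->data(), buffer->size());
  client->OnIncomingCapturedBuffer(buffer,
                                   media::VideoFrame::I420,
                                   dimensions,
                                   base::Time::Now(),
                                   frame_rate);
}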
117 | 138 |
118 VideoCaptureController::VideoCaptureController() | 139 VideoCaptureController::VideoCaptureController() |
119 : buffer_pool_(new VideoCaptureBufferPool(kNoOfBuffers)), | 140 : buffer_pool_(new VideoCaptureBufferPool(kNoOfBuffers)), |
120 state_(VIDEO_CAPTURE_STATE_STARTED), | 141 state_(VIDEO_CAPTURE_STATE_STARTED), |
121 weak_ptr_factory_(this) { | 142 weak_ptr_factory_(this) { |
122 } | 143 } |
123 | 144 |
124 VideoCaptureController::VideoCaptureDeviceClient::VideoCaptureDeviceClient( | 145 VideoCaptureController::VideoCaptureDeviceClient::VideoCaptureDeviceClient( |
125 const base::WeakPtr<VideoCaptureController>& controller, | 146 const base::WeakPtr<VideoCaptureController>& controller, |
126 const scoped_refptr<VideoCaptureBufferPool>& buffer_pool) | 147 const scoped_refptr<VideoCaptureBufferPool>& buffer_pool) |
127 : controller_(controller), | 148 : controller_(controller), buffer_pool_(buffer_pool) {} |
128 buffer_pool_(buffer_pool) {} | |
129 | 149 |
130 VideoCaptureController::VideoCaptureDeviceClient::~VideoCaptureDeviceClient() {} | 150 VideoCaptureController::VideoCaptureDeviceClient::~VideoCaptureDeviceClient() {} |
131 | 151 |
132 base::WeakPtr<VideoCaptureController> VideoCaptureController::GetWeakPtr() { | 152 base::WeakPtr<VideoCaptureController> VideoCaptureController::GetWeakPtr() { |
133 return weak_ptr_factory_.GetWeakPtr(); | 153 return weak_ptr_factory_.GetWeakPtr(); |
134 } | 154 } |
135 | 155 |
136 scoped_ptr<media::VideoCaptureDevice::Client> | 156 scoped_ptr<media::VideoCaptureDevice::Client> |
137 VideoCaptureController::NewDeviceClient() { | 157 VideoCaptureController::NewDeviceClient() { |
138 scoped_ptr<media::VideoCaptureDevice::Client> result( | 158 scoped_ptr<media::VideoCaptureDevice::Client> result( |
(...skipping 83 matching lines...)
222 // If this buffer is not held by this client, or this client doesn't exist | 242 // If this buffer is not held by this client, or this client doesn't exist |
223 // in controller, do nothing. | 243 // in controller, do nothing. |
224 if (!client || !client->active_buffers.erase(buffer_id)) { | 244 if (!client || !client->active_buffers.erase(buffer_id)) { |
225 NOTREACHED(); | 245 NOTREACHED(); |
226 return; | 246 return; |
227 } | 247 } |
228 | 248 |
229 buffer_pool_->RelinquishConsumerHold(buffer_id, 1); | 249 buffer_pool_->RelinquishConsumerHold(buffer_id, 1); |
230 } | 250 } |
231 | 251 |
232 scoped_refptr<media::VideoFrame> | 252 scoped_refptr<media::VideoCaptureDevice::Client::Buffer> |
233 VideoCaptureController::VideoCaptureDeviceClient::ReserveOutputBuffer( | 253 VideoCaptureController::VideoCaptureDeviceClient::ReserveOutputBuffer( |
254 media::VideoFrame::Format format, | |
234 const gfx::Size& size) { | 255 const gfx::Size& size) { |
235 return DoReserveI420VideoFrame(size, 0); | 256 return DoReserveOutputBuffer(format, size, 0); |
236 } | 257 } |
237 | 258 |
238 void VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedFrame( | 259 void VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedFrame( |
239 const uint8* data, | 260 const uint8* data, |
240 int length, | 261 int length, |
241 base::Time timestamp, | 262 base::Time timestamp, |
242 int rotation, | 263 int rotation, |
243 bool flip_vert, | 264 bool flip_vert, |
244 bool flip_horiz, | 265 bool flip_horiz, |
245 const VideoCaptureCapability& frame_info) { | 266 const VideoCaptureCapability& frame_info) { |
(...skipping 11 matching lines...)
257 | 278 |
258 if (frame_info.width & 1) { | 279 if (frame_info.width & 1) { |
259 --new_width; | 280 --new_width; |
260 chopped_width = 1; | 281 chopped_width = 1; |
261 } | 282 } |
262 if (frame_info.height & 1) { | 283 if (frame_info.height & 1) { |
263 --new_height; | 284 --new_height; |
264 chopped_height = 1; | 285 chopped_height = 1; |
265 } | 286 } |
266 | 287 |
267 scoped_refptr<media::VideoFrame> dst = DoReserveI420VideoFrame( | 288 const gfx::Size dimensions(new_width, new_height); |
268 gfx::Size(new_width, new_height), rotation); | 289 scoped_refptr<Buffer> buffer = |
290 DoReserveOutputBuffer(media::VideoFrame::I420, dimensions, rotation); | |
269 | 291 |
270 if (!dst.get()) | 292 if (!buffer) |
271 return; | 293 return; |
272 #if !defined(AVOID_LIBYUV_FOR_ANDROID_WEBVIEW) | 294 #if !defined(AVOID_LIBYUV_FOR_ANDROID_WEBVIEW) |
273 | 295 uint8* yplane = reinterpret_cast<uint8*>(buffer->data()); |
274 uint8* yplane = dst->data(media::VideoFrame::kYPlane); | 296 uint8* uplane = |
275 uint8* uplane = dst->data(media::VideoFrame::kUPlane); | 297 yplane + |
276 uint8* vplane = dst->data(media::VideoFrame::kVPlane); | 298 media::VideoFrame::PlaneAllocationSize( |
299 media::VideoFrame::I420, media::VideoFrame::kYPlane, dimensions); | |
300 uint8* vplane = | |
301 uplane + | |
302 media::VideoFrame::PlaneAllocationSize( | |
303 media::VideoFrame::I420, media::VideoFrame::kUPlane, dimensions); | |
277 int yplane_stride = new_width; | 304 int yplane_stride = new_width; |
278 int uv_plane_stride = (new_width + 1) / 2; | 305 int uv_plane_stride = new_width / 2; |
279 int crop_x = 0; | 306 int crop_x = 0; |
280 int crop_y = 0; | 307 int crop_y = 0; |
281 int destination_width = new_width; | 308 int destination_width = new_width; |
282 int destination_height = new_height; | 309 int destination_height = new_height; |
283 libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY; | 310 libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY; |
284 // Assuming rotation happens first and flips next, we can consolidate both | 311 // Assuming rotation happens first and flips next, we can consolidate both |
285 // vertical and horizontal flips together with rotation into two variables: | 312 // vertical and horizontal flips together with rotation into two variables: |
286 // new_rotation = (rotation + 180 * vertical_flip) modulo 360 | 313 // new_rotation = (rotation + 180 * vertical_flip) modulo 360 |
287 // new_vertical_flip = horizontal_flip XOR vertical_flip | 314 // new_vertical_flip = horizontal_flip XOR vertical_flip |
288 int new_rotation_angle = (rotation + 180 * flip_vert) % 360; | 315 int new_rotation_angle = (rotation + 180 * flip_vert) % 360; |
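Plugging concrete numbers into the two formulas in the comment above makes the consolidation easier to follow (a worked example, not part of the change):

// rotation = 90, flip_vert = true, flip_horiz = false:
//   new_rotation_angle = (90 + 180 * 1) % 360 = 270
//   new_vertical_flip  = false XOR true       = true
// i.e. the requested vertical flip becomes an extra 180 degrees of rotation
// plus a single remaining flip, applied via the negative height further down.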
(...skipping 46 matching lines...)
335 // kRGB24 on Windows starts at the bottom line and has a negative stride. This | 362 // kRGB24 on Windows starts at the bottom line and has a negative stride. This |
336 // is not supported by libyuv, so the media API is used instead. | 363 // is not supported by libyuv, so the media API is used instead. |
337 if (frame_info.color == media::PIXEL_FORMAT_RGB24) { | 364 if (frame_info.color == media::PIXEL_FORMAT_RGB24) { |
338 // Rotation and flipping is not supported in kRGB24 and OS_WIN case. | 365 // Rotation and flipping is not supported in kRGB24 and OS_WIN case. |
339 DCHECK(!rotation && !flip_vert && !flip_horiz); | 366 DCHECK(!rotation && !flip_vert && !flip_horiz); |
340 need_convert_rgb24_on_win = true; | 367 need_convert_rgb24_on_win = true; |
341 } | 368 } |
342 #endif | 369 #endif |
343 if (need_convert_rgb24_on_win) { | 370 if (need_convert_rgb24_on_win) { |
344 int rgb_stride = -3 * (new_width + chopped_width); | 371 int rgb_stride = -3 * (new_width + chopped_width); |
345 const uint8* rgb_src = | 372 const uint8* rgb_src = data + 3 * (new_width + chopped_width) * |
346 data + 3 * (new_width + chopped_width) * | 373 (new_height - 1 + chopped_height); |
347 (new_height - 1 + chopped_height); | |
348 media::ConvertRGB24ToYUV(rgb_src, | 374 media::ConvertRGB24ToYUV(rgb_src, |
349 yplane, | 375 yplane, |
350 uplane, | 376 uplane, |
351 vplane, | 377 vplane, |
352 new_width, | 378 new_width, |
353 new_height, | 379 new_height, |
354 rgb_stride, | 380 rgb_stride, |
355 yplane_stride, | 381 yplane_stride, |
356 uv_plane_stride); | 382 uv_plane_stride); |
357 } else { | 383 } else { |
358 if (new_rotation_angle==90 || new_rotation_angle==270){ | 384 if (new_rotation_angle==90 || new_rotation_angle==270){ |
359 // To be compatible with non-libyuv code in RotatePlaneByPixels, when | 385 // To be compatible with non-libyuv code in RotatePlaneByPixels, when |
360 // rotating by 90/270, only the maximum square portion located in the | 386 // rotating by 90/270, only the maximum square portion located in the |
361 // center of the image is rotated. F.i. 640x480 pixels, only the central | 387 // center of the image is rotated. F.i. 640x480 pixels, only the central |
362 // 480 pixels would be rotated and the leftmost and rightmost 80 columns | 388 // 480 pixels would be rotated and the leftmost and rightmost 80 columns |
363 // would be ignored. This process is called letterboxing. | 389 // would be ignored. This process is called letterboxing. |
364 int letterbox_thickness = abs(new_width - new_height) / 2; | 390 int letterbox_thickness = abs(new_width - new_height) / 2; |
365 if (destination_width > destination_height) { | 391 if (destination_width > destination_height) { |
366 yplane += letterbox_thickness; | 392 yplane += letterbox_thickness; |
367 uplane += letterbox_thickness / 2; | 393 uplane += letterbox_thickness / 2; |
368 vplane += letterbox_thickness / 2; | 394 vplane += letterbox_thickness / 2; |
369 destination_width = destination_height; | 395 destination_width = destination_height; |
370 } else { | 396 } else { |
371 yplane += letterbox_thickness * destination_width; | 397 yplane += letterbox_thickness * destination_width; |
372 uplane += (letterbox_thickness * destination_width) / 2; | 398 uplane += (letterbox_thickness * destination_width) / 2; |
373 vplane += (letterbox_thickness * destination_width) / 2; | 399 vplane += (letterbox_thickness * destination_width) / 2; |
374 destination_height = destination_width; | 400 destination_height = destination_width; |
375 } | 401 } |
376 } | 402 } |
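Working the letterboxing arithmetic above through the 640x480 case mentioned in the comment (illustration only):

//   letterbox_thickness = abs(640 - 480) / 2 = 80
//   destination_width (640) > destination_height (480), so:
//     yplane += 80;   uplane += 40;   vplane += 40;
//     destination_width = 480;
//   The rotated image is written as a centered 480x480 square, leaving the
//   80 leftmost and 80 rightmost columns of each row untouched.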
377 libyuv::ConvertToI420( | 403 libyuv::ConvertToI420(data, |
378 data, length, | 404 length, |
379 yplane, yplane_stride, | 405 yplane, |
380 uplane, uv_plane_stride, | 406 yplane_stride, |
381 vplane, uv_plane_stride, | 407 uplane, |
382 crop_x, crop_y, | 408 uv_plane_stride, |
383 new_width + chopped_width, | 409 vplane, |
384 new_height * (flip_vert ^ flip_horiz ? -1 : 1), | 410 uv_plane_stride, |
385 destination_width, | 411 crop_x, |
386 destination_height, | 412 crop_y, |
387 rotation_mode, | 413 new_width + chopped_width, |
388 origin_colorspace); | 414 new_height * (flip_vert ^ flip_horiz ? -1 : 1), |
415 destination_width, | |
416 destination_height, | |
417 rotation_mode, | |
418 origin_colorspace); | |
389 } | 419 } |
390 #else | 420 #else |
391 // Libyuv is not linked in for Android WebView builds, but video capture is | 421 // Libyuv is not linked in for Android WebView builds, but video capture is |
392 // not used in those builds either. Whenever libyuv is added in that build, | 422 // not used in those builds either. Whenever libyuv is added in that build, |
393 // address all these #ifdef parts, see http://crbug.com/299611 . | 423 // address all these #ifdef parts, see http://crbug.com/299611 . |
394 NOTREACHED(); | 424 NOTREACHED(); |
395 #endif // if !defined(AVOID_LIBYUV_FOR_ANDROID_WEBVIEW) | 425 #endif // if !defined(AVOID_LIBYUV_FOR_ANDROID_WEBVIEW) |
396 BrowserThread::PostTask( | 426 BrowserThread::PostTask( |
397 BrowserThread::IO, | 427 BrowserThread::IO, |
398 FROM_HERE, | 428 FROM_HERE, |
399 base::Bind(&VideoCaptureController::DoIncomingCapturedFrameOnIOThread, | 429 base::Bind( |
400 controller_, | 430 &VideoCaptureController::DoIncomingCapturedI420BufferOnIOThread, |
401 dst, | 431 controller_, |
402 frame_info.frame_rate, | 432 buffer, |
403 timestamp)); | 433 dimensions, |
434 frame_info.frame_rate, | |
435 timestamp)); | |
404 } | 436 } |
405 | 437 |
406 void | 438 void VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedBuffer( |
407 VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedVideoFrame( | 439 const scoped_refptr<Buffer>& buffer, |
408 const scoped_refptr<media::VideoFrame>& frame, | 440 media::VideoFrame::Format format, |
441 const gfx::Size& dimensions, | |
409 base::Time timestamp, | 442 base::Time timestamp, |
410 int frame_rate) { | 443 int frame_rate) { |
411 // If this is a frame that belongs to the buffer pool, we can forward it | 444 // The capture pipeline expects I420 for now. |
412 // directly to the IO thread and be done. | 445 DCHECK_EQ(format, media::VideoFrame::I420) |
413 if (buffer_pool_->RecognizeReservedBuffer( | 446 << "Non-I420 output buffer returned"; |
414 frame->shared_memory_handle()) >= 0) { | |
415 BrowserThread::PostTask(BrowserThread::IO, | |
416 FROM_HERE, | |
417 base::Bind(&VideoCaptureController::DoIncomingCapturedFrameOnIOThread, | |
418 controller_, frame, frame_rate, timestamp)); | |
419 return; | |
420 } | |
421 | 447 |
422 NOTREACHED() << "Frames should always belong to the buffer pool."; | 448 BrowserThread::PostTask( |
449 BrowserThread::IO, | |
450 FROM_HERE, | |
451 base::Bind( | |
452 &VideoCaptureController::DoIncomingCapturedI420BufferOnIOThread, | |
453 controller_, | |
454 buffer, | |
455 dimensions, | |
456 frame_rate, | |
457 timestamp)); | |
423 } | 458 } |
424 | 459 |
425 void VideoCaptureController::VideoCaptureDeviceClient::OnError() { | 460 void VideoCaptureController::VideoCaptureDeviceClient::OnError() { |
426 BrowserThread::PostTask(BrowserThread::IO, | 461 BrowserThread::PostTask(BrowserThread::IO, |
427 FROM_HERE, | 462 FROM_HERE, |
428 base::Bind(&VideoCaptureController::DoErrorOnIOThread, controller_)); | 463 base::Bind(&VideoCaptureController::DoErrorOnIOThread, controller_)); |
429 } | 464 } |
430 | 465 |
431 scoped_refptr<media::VideoFrame> | 466 scoped_refptr<media::VideoCaptureDevice::Client::Buffer> |
432 VideoCaptureController::VideoCaptureDeviceClient::DoReserveI420VideoFrame( | 467 VideoCaptureController::VideoCaptureDeviceClient::DoReserveOutputBuffer( |
433 const gfx::Size& size, | 468 media::VideoFrame::Format format, |
469 const gfx::Size& dimensions, | |
434 int rotation) { | 470 int rotation) { |
471 // The capture pipeline expects I420 for now. | |
472 DCHECK_EQ(format, media::VideoFrame::I420) | |
473 << "Non-I420 output buffer requested"; | |
474 | |
435 int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId; | 475 int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId; |
436 scoped_refptr<media::VideoFrame> frame = | 476 const size_t frame_bytes = |
437 buffer_pool_->ReserveI420VideoFrame(size, rotation, &buffer_id_to_drop); | 477 media::VideoFrame::AllocationSize(format, dimensions); |
478 | |
479 int buffer_id = | |
480 buffer_pool_->ReserveForProducer(frame_bytes, &buffer_id_to_drop); | |
481 if (buffer_id == VideoCaptureBufferPool::kInvalidId) | |
482 return NULL; | |
483 void* data; | |
484 size_t size; | |
485 buffer_pool_->GetBufferInfo(buffer_id, &data, &size); | |
486 | |
487 scoped_refptr<media::VideoCaptureDevice::Client::Buffer> output_buffer( | |
488 new PoolBuffer(buffer_pool_, buffer_id, data, size)); | |
489 | |
438 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) { | 490 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) { |
439 BrowserThread::PostTask(BrowserThread::IO, | 491 BrowserThread::PostTask(BrowserThread::IO, |
440 FROM_HERE, | 492 FROM_HERE, |
441 base::Bind(&VideoCaptureController::DoBufferDestroyedOnIOThread, | 493 base::Bind(&VideoCaptureController::DoBufferDestroyedOnIOThread, |
442 controller_, buffer_id_to_drop)); | 494 controller_, buffer_id_to_drop)); |
495 rotated_buffers_.erase(buffer_id_to_drop); | |
443 } | 496 } |
444 return frame; | 497 |
498 // If a 90/270 rotation is required, letterboxing will be required. If the | |
499 // returned frame has not been rotated before, then the letterbox borders will | |
500 // not yet have been cleared and we should clear them now. | |
501 if ((rotation % 180) == 0) { | |
502 rotated_buffers_.erase(buffer_id); | |
503 } else { | |
504 if (rotated_buffers_.insert(buffer_id).second) { | |
505 memset(output_buffer->data(), 0, output_buffer->size()); | |
506 rotated_buffers_.insert(buffer_id); | |
wjia (left Chromium), 2013/11/19 01:47:35: nit: no need to call insert again.
sheu, 2013/11/19 20:28:17: Done.
| |
508 } | |
509 | |
510 return output_buffer; | |
445 } | 511 } |
446 | 512 |
447 VideoCaptureController::~VideoCaptureController() { | 513 VideoCaptureController::~VideoCaptureController() { |
448 STLDeleteContainerPointers(controller_clients_.begin(), | 514 STLDeleteContainerPointers(controller_clients_.begin(), |
449 controller_clients_.end()); | 515 controller_clients_.end()); |
450 } | 516 } |
451 | 517 |
452 void VideoCaptureController::DoIncomingCapturedFrameOnIOThread( | 518 void VideoCaptureController::DoIncomingCapturedI420BufferOnIOThread( |
453 const scoped_refptr<media::VideoFrame>& reserved_frame, | 519 scoped_refptr<media::VideoCaptureDevice::Client::Buffer> buffer, |
520 const gfx::Size& dimensions, | |
454 int frame_rate, | 521 int frame_rate, |
455 base::Time timestamp) { | 522 base::Time timestamp) { |
456 DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); | 523 DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); |
457 | 524 DCHECK_NE(buffer->id(), VideoCaptureBufferPool::kInvalidId); |
458 int buffer_id = buffer_pool_->RecognizeReservedBuffer( | |
459 reserved_frame->shared_memory_handle()); | |
460 if (buffer_id < 0) { | |
461 NOTREACHED(); | |
462 return; | |
463 } | |
464 | 525 |
465 media::VideoCaptureFormat frame_format( | 526 media::VideoCaptureFormat frame_format( |
466 reserved_frame->coded_size().width(), | 527 dimensions.width(), |
467 reserved_frame->coded_size().height(), | 528 dimensions.height(), |
468 frame_rate, | 529 frame_rate, |
469 media::VariableResolutionVideoCaptureDevice); | 530 media::VariableResolutionVideoCaptureDevice); |
470 | 531 |
471 int count = 0; | 532 int count = 0; |
472 if (state_ == VIDEO_CAPTURE_STATE_STARTED) { | 533 if (state_ == VIDEO_CAPTURE_STATE_STARTED) { |
473 for (ControllerClients::iterator client_it = controller_clients_.begin(); | 534 for (ControllerClients::iterator client_it = controller_clients_.begin(); |
474 client_it != controller_clients_.end(); ++client_it) { | 535 client_it != controller_clients_.end(); ++client_it) { |
475 ControllerClient* client = *client_it; | 536 ControllerClient* client = *client_it; |
476 if (client->session_closed) | 537 if (client->session_closed) |
477 continue; | 538 continue; |
478 | 539 |
479 bool is_new_buffer = client->known_buffers.insert(buffer_id).second; | 540 bool is_new_buffer = client->known_buffers.insert(buffer->id()).second; |
480 if (is_new_buffer) { | 541 if (is_new_buffer) { |
481 // On the first use of a buffer on a client, share the memory handle. | 542 // On the first use of a buffer on a client, share the memory handle. |
482 size_t memory_size = 0; | 543 size_t memory_size = 0; |
483 base::SharedMemoryHandle remote_handle = buffer_pool_->ShareToProcess( | 544 base::SharedMemoryHandle remote_handle = buffer_pool_->ShareToProcess( |
484 buffer_id, client->render_process_handle, &memory_size); | 545 buffer->id(), client->render_process_handle, &memory_size); |
485 client->event_handler->OnBufferCreated(client->controller_id, | 546 client->event_handler->OnBufferCreated( |
486 remote_handle, | 547 client->controller_id, remote_handle, memory_size, buffer->id()); |
487 memory_size, | |
488 buffer_id); | |
489 } | 548 } |
490 | 549 |
491 client->event_handler->OnBufferReady(client->controller_id, | 550 client->event_handler->OnBufferReady( |
492 buffer_id, timestamp, | 551 client->controller_id, buffer->id(), timestamp, frame_format); |
493 frame_format); | 552 bool inserted = client->active_buffers.insert(buffer->id()).second; |
494 bool inserted = client->active_buffers.insert(buffer_id).second; | 553 DCHECK(inserted) << "Unexpected duplicate buffer: " << buffer->id(); |
495 DCHECK(inserted) << "Unexpected duplicate buffer: " << buffer_id; | |
496 count++; | 554 count++; |
497 } | 555 } |
498 } | 556 } |
499 | 557 |
500 buffer_pool_->HoldForConsumers(buffer_id, count); | 558 buffer_pool_->HoldForConsumers(buffer->id(), count); |
501 } | 559 } |
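The HoldForConsumers() / OnBufferReady() pairing above implies a simple contract on the consuming side: map each shared-memory handle once when it is first announced, reuse that mapping for every later OnBufferReady() carrying the same buffer id, and eventually return the id so ReturnBuffer() can release the consumer hold. A hypothetical consumer-side cache sketching that bookkeeping (this is not Chromium's actual renderer-side code; names are illustrative):

#include <map>
#include "base/memory/scoped_ptr.h"
#include "base/memory/shared_memory.h"
#include "base/stl_util.h"

// Hypothetical consumer-side bookkeeping for buffers shared by the controller.
class BufferMapSketch {
 public:
  ~BufferMapSketch() { STLDeleteValues(&buffers_); }

  // First announcement of a buffer: map the shared memory once.
  void OnBufferCreated(base::SharedMemoryHandle handle, size_t size, int id) {
    scoped_ptr<base::SharedMemory> shm(
        new base::SharedMemory(handle, true /* read_only */));
    if (shm->Map(size))
      buffers_[id] = shm.release();
  }

  // Every frame: the same small set of ids repeats as buffers recycle.
  void OnBufferReady(int id) {
    std::map<int, base::SharedMemory*>::iterator it = buffers_.find(id);
    if (it == buffers_.end())
      return;
    const void* i420_data = it->second->memory();
    // ... consume the I420 frame, then hand |id| back so the controller can
    // call buffer_pool_->RelinquishConsumerHold(id, 1). ...
    (void)i420_data;
  }

 private:
  std::map<int, base::SharedMemory*> buffers_;
};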
502 | 560 |
503 void VideoCaptureController::DoErrorOnIOThread() { | 561 void VideoCaptureController::DoErrorOnIOThread() { |
504 DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); | 562 DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); |
505 state_ = VIDEO_CAPTURE_STATE_ERROR; | 563 state_ = VIDEO_CAPTURE_STATE_ERROR; |
506 | 564 |
507 for (ControllerClients::iterator client_it = controller_clients_.begin(); | 565 for (ControllerClients::iterator client_it = controller_clients_.begin(); |
508 client_it != controller_clients_.end(); ++client_it) { | 566 client_it != controller_clients_.end(); ++client_it) { |
509 ControllerClient* client = *client_it; | 567 ControllerClient* client = *client_it; |
510 if (client->session_closed) | 568 if (client->session_closed) |
(...skipping 47 matching lines...)
558 } | 616 } |
559 return NULL; | 617 return NULL; |
560 } | 618 } |
561 | 619 |
562 int VideoCaptureController::GetClientCount() { | 620 int VideoCaptureController::GetClientCount() { |
563 DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); | 621 DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); |
564 return controller_clients_.size(); | 622 return controller_clients_.size(); |
565 } | 623 } |
566 | 624 |
567 } // namespace content | 625 } // namespace content |