OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | 2 // found in the LICENSE file. |
4 | 3 |
5 #include "content/browser/renderer_host/media/video_capture_controller.h" | 4 #include "content/browser/renderer_host/media/video_capture_controller.h" |
6 | 5 |
7 #include <set> | 6 #include <set> |
8 | 7 |
9 #include "base/bind.h" | 8 #include "base/bind.h" |
10 #include "base/debug/trace_event.h" | 9 #include "base/debug/trace_event.h" |
11 #include "base/memory/scoped_ptr.h" | |
12 #include "base/stl_util.h" | 10 #include "base/stl_util.h" |
13 #include "content/browser/renderer_host/media/media_stream_manager.h" | 11 #include "content/browser/renderer_host/media/media_stream_manager.h" |
14 #include "content/browser/renderer_host/media/video_capture_manager.h" | 12 #include "content/browser/renderer_host/media/video_capture_manager.h" |
15 #include "content/public/browser/browser_thread.h" | 13 #include "content/public/browser/browser_thread.h" |
16 #include "media/base/video_frame.h" | 14 #include "media/base/video_frame.h" |
17 #include "media/base/video_util.h" | 15 #include "media/base/video_util.h" |
18 #include "media/base/yuv_convert.h" | 16 #include "media/base/yuv_convert.h" |
19 | 17 |
20 #if !defined(AVOID_LIBYUV_FOR_ANDROID_WEBVIEW) | 18 #if !defined(AVOID_LIBYUV_FOR_ANDROID_WEBVIEW) |
21 #include "third_party/libyuv/include/libyuv.h" | 19 #include "third_party/libyuv/include/libyuv.h" |
22 #endif | 20 #endif |
23 | 21 |
24 using media::VideoCaptureCapability; | 22 using media::VideoCaptureCapability; |
25 | 23 |
26 namespace content { | 24 namespace content { |
27 | 25 |
| 26 namespace { |
| 27 |
28 // The number of buffers that VideoCaptureBufferPool should allocate. | 28 // The number of buffers that VideoCaptureBufferPool should allocate. |
29 static const int kNoOfBuffers = 3; | 29 const int kNoOfBuffers = 3; |
| 30 |
| 31 class PoolBuffer : public media::VideoCaptureDevice::Client::Buffer { |
| 32 public: |
| 33 PoolBuffer(const scoped_refptr<VideoCaptureBufferPool>& pool, |
| 34 int buffer_id, |
| 35 void* data, |
| 36 size_t size) |
| 37 : Buffer(buffer_id, data, size), pool_(pool) { |
| 38 DCHECK(pool_); |
| 39 } |
| 40 |
| 41 private: |
| 42 virtual ~PoolBuffer() { pool_->RelinquishProducerReservation(id()); } |
| 43 |
| 44 const scoped_refptr<VideoCaptureBufferPool> pool_; |
| 45 }; |
| 46 |
| 47 } // anonymous namespace |
30 | 48 |
31 struct VideoCaptureController::ControllerClient { | 49 struct VideoCaptureController::ControllerClient { |
32 ControllerClient( | 50 ControllerClient( |
33 const VideoCaptureControllerID& id, | 51 const VideoCaptureControllerID& id, |
34 VideoCaptureControllerEventHandler* handler, | 52 VideoCaptureControllerEventHandler* handler, |
35 base::ProcessHandle render_process, | 53 base::ProcessHandle render_process, |
36 const media::VideoCaptureParams& params) | 54 const media::VideoCaptureParams& params) |
37 : controller_id(id), | 55 : controller_id(id), |
38 event_handler(handler), | 56 event_handler(handler), |
39 render_process_handle(render_process), | 57 render_process_handle(render_process), |
(...skipping 40 matching lines...) |
80 // v4l2_thread on Linux, and the UI thread for tab capture. | 98 // v4l2_thread on Linux, and the UI thread for tab capture. |
81 class VideoCaptureController::VideoCaptureDeviceClient | 99 class VideoCaptureController::VideoCaptureDeviceClient |
82 : public media::VideoCaptureDevice::Client { | 100 : public media::VideoCaptureDevice::Client { |
83 public: | 101 public: |
84 explicit VideoCaptureDeviceClient( | 102 explicit VideoCaptureDeviceClient( |
85 const base::WeakPtr<VideoCaptureController>& controller, | 103 const base::WeakPtr<VideoCaptureController>& controller, |
86 const scoped_refptr<VideoCaptureBufferPool>& buffer_pool); | 104 const scoped_refptr<VideoCaptureBufferPool>& buffer_pool); |
87 virtual ~VideoCaptureDeviceClient(); | 105 virtual ~VideoCaptureDeviceClient(); |
88 | 106 |
89 // VideoCaptureDevice::Client implementation. | 107 // VideoCaptureDevice::Client implementation. |
90 virtual scoped_refptr<media::VideoFrame> ReserveOutputBuffer( | 108 virtual scoped_refptr<Buffer> ReserveOutputBuffer( |
| 109 media::VideoFrame::Format format, |
91 const gfx::Size& size) OVERRIDE; | 110 const gfx::Size& size) OVERRIDE; |
92 virtual void OnIncomingCapturedFrame( | 111 virtual void OnIncomingCapturedFrame(const uint8* data, |
93 const uint8* data, | 112 int length, |
94 int length, | 113 base::Time timestamp, |
95 base::Time timestamp, | 114 int rotation, |
96 int rotation, | 115 bool flip_vert, |
97 bool flip_vert, | 116 bool flip_horiz, |
98 bool flip_horiz, | 117 const VideoCaptureCapability& frame_info) |
99 const VideoCaptureCapability& frame_info) OVERRIDE; | 118 OVERRIDE; |
100 virtual void OnIncomingCapturedVideoFrame( | 119 virtual void OnIncomingCapturedBuffer(const scoped_refptr<Buffer>& buffer, |
101 const scoped_refptr<media::VideoFrame>& frame, | 120 media::VideoFrame::Format format, |
102 base::Time timestamp, | 121 const gfx::Size& dimensions, |
103 int frame_rate) OVERRIDE; | 122 base::Time timestamp, |
| 123 int frame_rate) OVERRIDE; |
104 virtual void OnError() OVERRIDE; | 124 virtual void OnError() OVERRIDE; |
105 | 125 |
106 private: | 126 private: |
107 scoped_refptr<media::VideoFrame> DoReserveI420VideoFrame( | 127 scoped_refptr<Buffer> DoReserveOutputBuffer(media::VideoFrame::Format format, |
108 const gfx::Size& size, | 128 const gfx::Size& dimensions, |
109 int rotation); | 129 int rotation); |
110 | 130 |
111 // The controller to which we post events. | 131 // The controller to which we post events. |
112 const base::WeakPtr<VideoCaptureController> controller_; | 132 const base::WeakPtr<VideoCaptureController> controller_; |
113 | 133 |
114 // The pool of shared-memory buffers used for capturing. | 134 // The pool of shared-memory buffers used for capturing. |
115 const scoped_refptr<VideoCaptureBufferPool> buffer_pool_; | 135 const scoped_refptr<VideoCaptureBufferPool> buffer_pool_; |
| 136 |
| 137 // The set of buffers that have been used for rotated capturing. |
| 138 std::set<int> rotated_buffers_; |
116 }; | 139 }; |
117 | 140 |
118 VideoCaptureController::VideoCaptureController() | 141 VideoCaptureController::VideoCaptureController() |
119 : buffer_pool_(new VideoCaptureBufferPool(kNoOfBuffers)), | 142 : buffer_pool_(new VideoCaptureBufferPool(kNoOfBuffers)), |
120 state_(VIDEO_CAPTURE_STATE_STARTED), | 143 state_(VIDEO_CAPTURE_STATE_STARTED), |
121 weak_ptr_factory_(this) { | 144 weak_ptr_factory_(this) { |
122 } | 145 } |
123 | 146 |
124 VideoCaptureController::VideoCaptureDeviceClient::VideoCaptureDeviceClient( | 147 VideoCaptureController::VideoCaptureDeviceClient::VideoCaptureDeviceClient( |
125 const base::WeakPtr<VideoCaptureController>& controller, | 148 const base::WeakPtr<VideoCaptureController>& controller, |
126 const scoped_refptr<VideoCaptureBufferPool>& buffer_pool) | 149 const scoped_refptr<VideoCaptureBufferPool>& buffer_pool) |
127 : controller_(controller), | 150 : controller_(controller), buffer_pool_(buffer_pool) {} |
128 buffer_pool_(buffer_pool) {} | |
129 | 151 |
130 VideoCaptureController::VideoCaptureDeviceClient::~VideoCaptureDeviceClient() {} | 152 VideoCaptureController::VideoCaptureDeviceClient::~VideoCaptureDeviceClient() {} |
131 | 153 |
132 base::WeakPtr<VideoCaptureController> VideoCaptureController::GetWeakPtr() { | 154 base::WeakPtr<VideoCaptureController> VideoCaptureController::GetWeakPtr() { |
133 return weak_ptr_factory_.GetWeakPtr(); | 155 return weak_ptr_factory_.GetWeakPtr(); |
134 } | 156 } |
135 | 157 |
136 scoped_ptr<media::VideoCaptureDevice::Client> | 158 scoped_ptr<media::VideoCaptureDevice::Client> |
137 VideoCaptureController::NewDeviceClient() { | 159 VideoCaptureController::NewDeviceClient() { |
138 scoped_ptr<media::VideoCaptureDevice::Client> result( | 160 scoped_ptr<media::VideoCaptureDevice::Client> result( |
(...skipping 83 matching lines...) |
222 // If this buffer is not held by this client, or this client doesn't exist | 244 // If this buffer is not held by this client, or this client doesn't exist |
223 // in controller, do nothing. | 245 // in controller, do nothing. |
224 if (!client || !client->active_buffers.erase(buffer_id)) { | 246 if (!client || !client->active_buffers.erase(buffer_id)) { |
225 NOTREACHED(); | 247 NOTREACHED(); |
226 return; | 248 return; |
227 } | 249 } |
228 | 250 |
229 buffer_pool_->RelinquishConsumerHold(buffer_id, 1); | 251 buffer_pool_->RelinquishConsumerHold(buffer_id, 1); |
230 } | 252 } |
231 | 253 |
232 scoped_refptr<media::VideoFrame> | 254 scoped_refptr<media::VideoCaptureDevice::Client::Buffer> |
233 VideoCaptureController::VideoCaptureDeviceClient::ReserveOutputBuffer( | 255 VideoCaptureController::VideoCaptureDeviceClient::ReserveOutputBuffer( |
| 256 media::VideoFrame::Format format, |
234 const gfx::Size& size) { | 257 const gfx::Size& size) { |
235 return DoReserveI420VideoFrame(size, 0); | 258 return DoReserveOutputBuffer(format, size, 0); |
236 } | 259 } |
237 | 260 |
238 void VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedFrame( | 261 void VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedFrame( |
239 const uint8* data, | 262 const uint8* data, |
240 int length, | 263 int length, |
241 base::Time timestamp, | 264 base::Time timestamp, |
242 int rotation, | 265 int rotation, |
243 bool flip_vert, | 266 bool flip_vert, |
244 bool flip_horiz, | 267 bool flip_horiz, |
245 const VideoCaptureCapability& frame_info) { | 268 const VideoCaptureCapability& frame_info) { |
(...skipping 11 matching lines...) |
257 | 280 |
258 if (frame_info.width & 1) { | 281 if (frame_info.width & 1) { |
259 --new_width; | 282 --new_width; |
260 chopped_width = 1; | 283 chopped_width = 1; |
261 } | 284 } |
262 if (frame_info.height & 1) { | 285 if (frame_info.height & 1) { |
263 --new_height; | 286 --new_height; |
264 chopped_height = 1; | 287 chopped_height = 1; |
265 } | 288 } |
266 | 289 |
267 scoped_refptr<media::VideoFrame> dst = DoReserveI420VideoFrame( | 290 const gfx::Size dimensions(new_width, new_height); |
268 gfx::Size(new_width, new_height), rotation); | 291 scoped_refptr<Buffer> buffer = |
| 292 DoReserveOutputBuffer(media::VideoFrame::I420, dimensions, rotation); |
269 | 293 |
270 if (!dst.get()) | 294 if (!buffer) |
271 return; | 295 return; |
272 #if !defined(AVOID_LIBYUV_FOR_ANDROID_WEBVIEW) | 296 #if !defined(AVOID_LIBYUV_FOR_ANDROID_WEBVIEW) |
273 | 297 uint8* yplane = reinterpret_cast<uint8*>(buffer->data()); |
274 uint8* yplane = dst->data(media::VideoFrame::kYPlane); | 298 uint8* uplane = |
275 uint8* uplane = dst->data(media::VideoFrame::kUPlane); | 299 yplane + |
276 uint8* vplane = dst->data(media::VideoFrame::kVPlane); | 300 media::VideoFrame::PlaneAllocationSize( |
| 301 media::VideoFrame::I420, media::VideoFrame::kYPlane, dimensions); |
| 302 uint8* vplane = |
| 303 uplane + |
| 304 media::VideoFrame::PlaneAllocationSize( |
| 305 media::VideoFrame::I420, media::VideoFrame::kUPlane, dimensions); |
277 int yplane_stride = new_width; | 306 int yplane_stride = new_width; |
278 int uv_plane_stride = (new_width + 1) / 2; | 307 int uv_plane_stride = new_width / 2; |
279 int crop_x = 0; | 308 int crop_x = 0; |
280 int crop_y = 0; | 309 int crop_y = 0; |
281 int destination_width = new_width; | 310 int destination_width = new_width; |
282 int destination_height = new_height; | 311 int destination_height = new_height; |
283 libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY; | 312 libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY; |
284 // Assuming rotation happens first and flips next, we can consolidate both | 313 // Assuming rotation happens first and flips next, we can consolidate both |
285 // vertical and horizontal flips together with rotation into two variables: | 314 // vertical and horizontal flips together with rotation into two variables: |
286 // new_rotation = (rotation + 180 * vertical_flip) modulo 360 | 315 // new_rotation = (rotation + 180 * vertical_flip) modulo 360 |
287 // new_vertical_flip = horizontal_flip XOR vertical_flip | 316 // new_vertical_flip = horizontal_flip XOR vertical_flip |
288 int new_rotation_angle = (rotation + 180 * flip_vert) % 360; | 317 int new_rotation_angle = (rotation + 180 * flip_vert) % 360; |
(...skipping 46 matching lines...) |
335 // kRGB24 on Windows start at the bottom line and has a negative stride. This | 364 // kRGB24 on Windows start at the bottom line and has a negative stride. This |
336 // is not supported by libyuv, so the media API is used instead. | 365 // is not supported by libyuv, so the media API is used instead. |
337 if (frame_info.color == media::PIXEL_FORMAT_RGB24) { | 366 if (frame_info.color == media::PIXEL_FORMAT_RGB24) { |
338 // Rotation and flipping is not supported in kRGB24 and OS_WIN case. | 367 // Rotation and flipping is not supported in kRGB24 and OS_WIN case. |
339 DCHECK(!rotation && !flip_vert && !flip_horiz); | 368 DCHECK(!rotation && !flip_vert && !flip_horiz); |
340 need_convert_rgb24_on_win = true; | 369 need_convert_rgb24_on_win = true; |
341 } | 370 } |
342 #endif | 371 #endif |
343 if (need_convert_rgb24_on_win) { | 372 if (need_convert_rgb24_on_win) { |
344 int rgb_stride = -3 * (new_width + chopped_width); | 373 int rgb_stride = -3 * (new_width + chopped_width); |
345 const uint8* rgb_src = | 374 const uint8* rgb_src = data + 3 * (new_width + chopped_width) * |
346 data + 3 * (new_width + chopped_width) * | 375 (new_height - 1 + chopped_height); |
347 (new_height - 1 + chopped_height); | |
348 media::ConvertRGB24ToYUV(rgb_src, | 376 media::ConvertRGB24ToYUV(rgb_src, |
349 yplane, | 377 yplane, |
350 uplane, | 378 uplane, |
351 vplane, | 379 vplane, |
352 new_width, | 380 new_width, |
353 new_height, | 381 new_height, |
354 rgb_stride, | 382 rgb_stride, |
355 yplane_stride, | 383 yplane_stride, |
356 uv_plane_stride); | 384 uv_plane_stride); |
357 } else { | 385 } else { |
358 if (new_rotation_angle==90 || new_rotation_angle==270){ | 386 if (new_rotation_angle==90 || new_rotation_angle==270){ |
359 // To be compatible with non-libyuv code in RotatePlaneByPixels, when | 387 // To be compatible with non-libyuv code in RotatePlaneByPixels, when |
360 // rotating by 90/270, only the maximum square portion located in the | 388 // rotating by 90/270, only the maximum square portion located in the |
361 // center of the image is rotated. F.i. 640x480 pixels, only the central | 389 // center of the image is rotated. F.i. 640x480 pixels, only the central |
362 // 480 pixels would be rotated and the leftmost and rightmost 80 columns | 390 // 480 pixels would be rotated and the leftmost and rightmost 80 columns |
363 // would be ignored. This process is called letterboxing. | 391 // would be ignored. This process is called letterboxing. |
364 int letterbox_thickness = abs(new_width - new_height) / 2; | 392 int letterbox_thickness = abs(new_width - new_height) / 2; |
365 if (destination_width > destination_height) { | 393 if (destination_width > destination_height) { |
366 yplane += letterbox_thickness; | 394 yplane += letterbox_thickness; |
367 uplane += letterbox_thickness / 2; | 395 uplane += letterbox_thickness / 2; |
368 vplane += letterbox_thickness / 2; | 396 vplane += letterbox_thickness / 2; |
369 destination_width = destination_height; | 397 destination_width = destination_height; |
370 } else { | 398 } else { |
371 yplane += letterbox_thickness * destination_width; | 399 yplane += letterbox_thickness * destination_width; |
372 uplane += (letterbox_thickness * destination_width) / 2; | 400 uplane += (letterbox_thickness * destination_width) / 2; |
373 vplane += (letterbox_thickness * destination_width) / 2; | 401 vplane += (letterbox_thickness * destination_width) / 2; |
374 destination_height = destination_width; | 402 destination_height = destination_width; |
375 } | 403 } |
376 } | 404 } |
377 libyuv::ConvertToI420( | 405 libyuv::ConvertToI420(data, |
378 data, length, | 406 length, |
379 yplane, yplane_stride, | 407 yplane, |
380 uplane, uv_plane_stride, | 408 yplane_stride, |
381 vplane, uv_plane_stride, | 409 uplane, |
382 crop_x, crop_y, | 410 uv_plane_stride, |
383 new_width + chopped_width, | 411 vplane, |
384 new_height * (flip_vert ^ flip_horiz ? -1 : 1), | 412 uv_plane_stride, |
385 destination_width, | 413 crop_x, |
386 destination_height, | 414 crop_y, |
387 rotation_mode, | 415 new_width + chopped_width, |
388 origin_colorspace); | 416 new_height * (flip_vert ^ flip_horiz ? -1 : 1), |
| 417 destination_width, |
| 418 destination_height, |
| 419 rotation_mode, |
| 420 origin_colorspace); |
389 } | 421 } |
390 #else | 422 #else |
391 // Libyuv is not linked in for Android WebView builds, but video capture is | 423 // Libyuv is not linked in for Android WebView builds, but video capture is |
392 // not used in those builds either. Whenever libyuv is added in that build, | 424 // not used in those builds either. Whenever libyuv is added in that build, |
393 // address all these #ifdef parts, see http://crbug.com/299611 . | 425 // address all these #ifdef parts, see http://crbug.com/299611 . |
394 NOTREACHED(); | 426 NOTREACHED(); |
395 #endif // if !defined(AVOID_LIBYUV_FOR_ANDROID_WEBVIEW) | 427 #endif // if !defined(AVOID_LIBYUV_FOR_ANDROID_WEBVIEW) |
396 BrowserThread::PostTask( | 428 BrowserThread::PostTask( |
397 BrowserThread::IO, | 429 BrowserThread::IO, |
398 FROM_HERE, | 430 FROM_HERE, |
399 base::Bind(&VideoCaptureController::DoIncomingCapturedFrameOnIOThread, | 431 base::Bind( |
400 controller_, | 432 &VideoCaptureController::DoIncomingCapturedI420BufferOnIOThread, |
401 dst, | 433 controller_, |
402 frame_info.frame_rate, | 434 buffer, |
403 timestamp)); | 435 dimensions, |
| 436 frame_info.frame_rate, |
| 437 timestamp)); |
404 } | 438 } |
405 | 439 |
406 void | 440 void VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedBuffer( |
407 VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedVideoFrame( | 441 const scoped_refptr<Buffer>& buffer, |
408 const scoped_refptr<media::VideoFrame>& frame, | 442 media::VideoFrame::Format format, |
| 443 const gfx::Size& dimensions, |
409 base::Time timestamp, | 444 base::Time timestamp, |
410 int frame_rate) { | 445 int frame_rate) { |
411 // If this is a frame that belongs to the buffer pool, we can forward it | 446 // The capture pipeline expects I420 for now. |
412 // directly to the IO thread and be done. | 447 DCHECK_EQ(format, media::VideoFrame::I420) |
413 if (buffer_pool_->RecognizeReservedBuffer( | 448 << "Non-I420 output buffer returned"; |
414 frame->shared_memory_handle()) >= 0) { | |
415 BrowserThread::PostTask(BrowserThread::IO, | |
416 FROM_HERE, | |
417 base::Bind(&VideoCaptureController::DoIncomingCapturedFrameOnIOThread, | |
418 controller_, frame, frame_rate, timestamp)); | |
419 return; | |
420 } | |
421 | 449 |
422 NOTREACHED() << "Frames should always belong to the buffer pool."; | 450 BrowserThread::PostTask( |
| 451 BrowserThread::IO, |
| 452 FROM_HERE, |
| 453 base::Bind( |
| 454 &VideoCaptureController::DoIncomingCapturedI420BufferOnIOThread, |
| 455 controller_, |
| 456 buffer, |
| 457 dimensions, |
| 458 frame_rate, |
| 459 timestamp)); |
423 } | 460 } |
424 | 461 |
425 void VideoCaptureController::VideoCaptureDeviceClient::OnError() { | 462 void VideoCaptureController::VideoCaptureDeviceClient::OnError() { |
426 BrowserThread::PostTask(BrowserThread::IO, | 463 BrowserThread::PostTask(BrowserThread::IO, |
427 FROM_HERE, | 464 FROM_HERE, |
428 base::Bind(&VideoCaptureController::DoErrorOnIOThread, controller_)); | 465 base::Bind(&VideoCaptureController::DoErrorOnIOThread, controller_)); |
429 } | 466 } |
430 | 467 |
431 scoped_refptr<media::VideoFrame> | 468 scoped_refptr<media::VideoCaptureDevice::Client::Buffer> |
432 VideoCaptureController::VideoCaptureDeviceClient::DoReserveI420VideoFrame( | 469 VideoCaptureController::VideoCaptureDeviceClient::DoReserveOutputBuffer( |
433 const gfx::Size& size, | 470 media::VideoFrame::Format format, |
| 471 const gfx::Size& dimensions, |
434 int rotation) { | 472 int rotation) { |
| 473 // The capture pipeline expects I420 for now. |
| 474 DCHECK_EQ(format, media::VideoFrame::I420) |
| 475 << "Non-I420 output buffer requested"; |
| 476 |
435 int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId; | 477 int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId; |
436 scoped_refptr<media::VideoFrame> frame = | 478 const size_t frame_bytes = |
437 buffer_pool_->ReserveI420VideoFrame(size, rotation, &buffer_id_to_drop); | 479 media::VideoFrame::AllocationSize(format, dimensions); |
| 480 |
| 481 int buffer_id = |
| 482 buffer_pool_->ReserveForProducer(frame_bytes, &buffer_id_to_drop); |
| 483 if (buffer_id == VideoCaptureBufferPool::kInvalidId) |
| 484 return NULL; |
| 485 void* data; |
| 486 size_t size; |
| 487 buffer_pool_->GetBufferInfo(buffer_id, &data, &size); |
| 488 |
| 489 scoped_refptr<media::VideoCaptureDevice::Client::Buffer> output_buffer( |
| 490 new PoolBuffer(buffer_pool_, buffer_id, data, size)); |
| 491 |
438 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) { | 492 if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) { |
439 BrowserThread::PostTask(BrowserThread::IO, | 493 BrowserThread::PostTask(BrowserThread::IO, |
440 FROM_HERE, | 494 FROM_HERE, |
441 base::Bind(&VideoCaptureController::DoBufferDestroyedOnIOThread, | 495 base::Bind(&VideoCaptureController::DoBufferDestroyedOnIOThread, |
442 controller_, buffer_id_to_drop)); | 496 controller_, buffer_id_to_drop)); |
| 497 rotated_buffers_.erase(buffer_id_to_drop); |
443 } | 498 } |
444 return frame; | 499 |
| 500 // If a 90/270 rotation is required, letterboxing will be required. If the |
| 501 // returned frame has not been rotated before, then the letterbox borders will |
| 502 // not yet have been cleared and we should clear them now. |
| 503 if ((rotation % 180) == 0) { |
| 504 rotated_buffers_.erase(buffer_id); |
| 505 } else { |
| 506 if (rotated_buffers_.insert(buffer_id).second) |
| 507 memset(output_buffer->data(), 0, output_buffer->size()); |
| 508 } |
| 509 |
| 510 return output_buffer; |
445 } | 511 } |
446 | 512 |
447 VideoCaptureController::~VideoCaptureController() { | 513 VideoCaptureController::~VideoCaptureController() { |
448 STLDeleteContainerPointers(controller_clients_.begin(), | 514 STLDeleteContainerPointers(controller_clients_.begin(), |
449 controller_clients_.end()); | 515 controller_clients_.end()); |
450 } | 516 } |
451 | 517 |
452 void VideoCaptureController::DoIncomingCapturedFrameOnIOThread( | 518 void VideoCaptureController::DoIncomingCapturedI420BufferOnIOThread( |
453 const scoped_refptr<media::VideoFrame>& reserved_frame, | 519 scoped_refptr<media::VideoCaptureDevice::Client::Buffer> buffer, |
| 520 const gfx::Size& dimensions, |
454 int frame_rate, | 521 int frame_rate, |
455 base::Time timestamp) { | 522 base::Time timestamp) { |
456 DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); | 523 DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); |
457 | 524 DCHECK_NE(buffer->id(), VideoCaptureBufferPool::kInvalidId); |
458 int buffer_id = buffer_pool_->RecognizeReservedBuffer( | |
459 reserved_frame->shared_memory_handle()); | |
460 if (buffer_id < 0) { | |
461 NOTREACHED(); | |
462 return; | |
463 } | |
464 | 525 |
465 media::VideoCaptureFormat frame_format( | 526 media::VideoCaptureFormat frame_format( |
466 reserved_frame->coded_size().width(), | 527 dimensions.width(), |
467 reserved_frame->coded_size().height(), | 528 dimensions.height(), |
468 frame_rate, | 529 frame_rate, |
469 media::VariableResolutionVideoCaptureDevice); | 530 media::VariableResolutionVideoCaptureDevice); |
470 | 531 |
471 int count = 0; | 532 int count = 0; |
472 if (state_ == VIDEO_CAPTURE_STATE_STARTED) { | 533 if (state_ == VIDEO_CAPTURE_STATE_STARTED) { |
473 for (ControllerClients::iterator client_it = controller_clients_.begin(); | 534 for (ControllerClients::iterator client_it = controller_clients_.begin(); |
474 client_it != controller_clients_.end(); ++client_it) { | 535 client_it != controller_clients_.end(); ++client_it) { |
475 ControllerClient* client = *client_it; | 536 ControllerClient* client = *client_it; |
476 if (client->session_closed) | 537 if (client->session_closed) |
477 continue; | 538 continue; |
478 | 539 |
479 bool is_new_buffer = client->known_buffers.insert(buffer_id).second; | 540 bool is_new_buffer = client->known_buffers.insert(buffer->id()).second; |
480 if (is_new_buffer) { | 541 if (is_new_buffer) { |
481 // On the first use of a buffer on a client, share the memory handle. | 542 // On the first use of a buffer on a client, share the memory handle. |
482 size_t memory_size = 0; | 543 size_t memory_size = 0; |
483 base::SharedMemoryHandle remote_handle = buffer_pool_->ShareToProcess( | 544 base::SharedMemoryHandle remote_handle = buffer_pool_->ShareToProcess( |
484 buffer_id, client->render_process_handle, &memory_size); | 545 buffer->id(), client->render_process_handle, &memory_size); |
485 client->event_handler->OnBufferCreated(client->controller_id, | 546 client->event_handler->OnBufferCreated( |
486 remote_handle, | 547 client->controller_id, remote_handle, memory_size, buffer->id()); |
487 memory_size, | |
488 buffer_id); | |
489 } | 548 } |
490 | 549 |
491 client->event_handler->OnBufferReady(client->controller_id, | 550 client->event_handler->OnBufferReady( |
492 buffer_id, timestamp, | 551 client->controller_id, buffer->id(), timestamp, frame_format); |
493 frame_format); | 552 bool inserted = client->active_buffers.insert(buffer->id()).second; |
494 bool inserted = client->active_buffers.insert(buffer_id).second; | 553 DCHECK(inserted) << "Unexpected duplicate buffer: " << buffer->id(); |
495 DCHECK(inserted) << "Unexpected duplicate buffer: " << buffer_id; | |
496 count++; | 554 count++; |
497 } | 555 } |
498 } | 556 } |
499 | 557 |
500 buffer_pool_->HoldForConsumers(buffer_id, count); | 558 buffer_pool_->HoldForConsumers(buffer->id(), count); |
501 } | 559 } |
502 | 560 |
503 void VideoCaptureController::DoErrorOnIOThread() { | 561 void VideoCaptureController::DoErrorOnIOThread() { |
504 DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); | 562 DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); |
505 state_ = VIDEO_CAPTURE_STATE_ERROR; | 563 state_ = VIDEO_CAPTURE_STATE_ERROR; |
506 | 564 |
507 for (ControllerClients::iterator client_it = controller_clients_.begin(); | 565 for (ControllerClients::iterator client_it = controller_clients_.begin(); |
508 client_it != controller_clients_.end(); ++client_it) { | 566 client_it != controller_clients_.end(); ++client_it) { |
509 ControllerClient* client = *client_it; | 567 ControllerClient* client = *client_it; |
510 if (client->session_closed) | 568 if (client->session_closed) |
(...skipping 47 matching lines...) |
558 } | 616 } |
559 return NULL; | 617 return NULL; |
560 } | 618 } |
561 | 619 |
562 int VideoCaptureController::GetClientCount() { | 620 int VideoCaptureController::GetClientCount() { |
563 DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); | 621 DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO)); |
564 return controller_clients_.size(); | 622 return controller_clients_.size(); |
565 } | 623 } |
566 | 624 |
567 } // namespace content | 625 } // namespace content |
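
For reference, a minimal standalone sketch of the rotation/flip consolidation described in the comment inside OnIncomingCapturedFrame above ("new_rotation = (rotation + 180 * vertical_flip) modulo 360", "new_vertical_flip = horizontal_flip XOR vertical_flip"). The struct and helper names here are illustrative only and are not part of the CL; the controller code applies the same arithmetic inline when choosing the libyuv rotation mode and the sign of the source height.

// Standalone sketch, not from the CL: fold a vertical flip into the rotation
// as an extra 180 degrees, and express whatever flip remains as a single
// residual vertical flip (horizontal XOR vertical), assuming rotation is
// applied first and flips second, as the CL comment states.
#include <cassert>

struct ConsolidatedTransform {
  int rotation_degrees;  // One of 0, 90, 180, 270.
  bool vertical_flip;    // Residual flip applied after rotation.
};

ConsolidatedTransform ConsolidateRotationAndFlips(int rotation,
                                                  bool flip_vert,
                                                  bool flip_horiz) {
  ConsolidatedTransform result;
  // new_rotation = (rotation + 180 * vertical_flip) modulo 360
  result.rotation_degrees = (rotation + (flip_vert ? 180 : 0)) % 360;
  // new_vertical_flip = horizontal_flip XOR vertical_flip
  result.vertical_flip = (flip_horiz != flip_vert);
  return result;
}

int main() {
  // Per the formula, a 90-degree rotation plus a vertical flip collapses to
  // a 270-degree rotation with one residual flip.
  ConsolidatedTransform t = ConsolidateRotationAndFlips(90, true, false);
  assert(t.rotation_degrees == 270);
  assert(t.vertical_flip);
  return 0;
}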