OLD | NEW |
1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/browser/media/capture/desktop_capture_device.h" | 5 #include "content/browser/media/capture/desktop_capture_device.h" |
6 | 6 |
7 #include "base/bind.h" | 7 #include "base/bind.h" |
8 #include "base/location.h" | 8 #include "base/location.h" |
9 #include "base/logging.h" | 9 #include "base/logging.h" |
10 #include "base/metrics/histogram.h" | 10 #include "base/metrics/histogram.h" |
11 #include "base/strings/string_number_conversions.h" | 11 #include "base/strings/string_number_conversions.h" |
12 #include "base/synchronization/lock.h" | 12 #include "base/synchronization/lock.h" |
13 #include "base/threading/thread.h" | 13 #include "base/threading/thread.h" |
14 #include "base/timer/timer.h" | 14 #include "base/timer/timer.h" |
| 15 #include "content/browser/media/capture/capture_resolution_chooser.h" |
15 #include "content/browser/media/capture/desktop_capture_device_uma_types.h" | 16 #include "content/browser/media/capture/desktop_capture_device_uma_types.h" |
16 #include "content/public/browser/browser_thread.h" | 17 #include "content/public/browser/browser_thread.h" |
17 #include "content/public/browser/desktop_media_id.h" | 18 #include "content/public/browser/desktop_media_id.h" |
18 #include "content/public/browser/power_save_blocker.h" | 19 #include "content/public/browser/power_save_blocker.h" |
19 #include "media/base/video_util.h" | 20 #include "media/base/video_util.h" |
20 #include "third_party/libyuv/include/libyuv/scale_argb.h" | 21 #include "third_party/libyuv/include/libyuv/scale_argb.h" |
21 #include "third_party/webrtc/modules/desktop_capture/cropping_window_capturer.h" | 22 #include "third_party/webrtc/modules/desktop_capture/cropping_window_capturer.h" |
22 #include "third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer
.h" | 23 #include "third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer
.h" |
23 #include "third_party/webrtc/modules/desktop_capture/desktop_capture_options.h" | 24 #include "third_party/webrtc/modules/desktop_capture/desktop_capture_options.h" |
24 #include "third_party/webrtc/modules/desktop_capture/desktop_capturer.h" | 25 #include "third_party/webrtc/modules/desktop_capture/desktop_capturer.h" |
(...skipping 40 matching lines...)
65 scoped_ptr<Client> client); | 66 scoped_ptr<Client> client); |
66 | 67 |
67 void SetNotificationWindowId(gfx::NativeViewId window_id); | 68 void SetNotificationWindowId(gfx::NativeViewId window_id); |
68 | 69 |
69 private: | 70 private: |
70 | 71 |
71 // webrtc::DesktopCapturer::Callback interface | 72 // webrtc::DesktopCapturer::Callback interface |
72 webrtc::SharedMemory* CreateSharedMemory(size_t size) override; | 73 webrtc::SharedMemory* CreateSharedMemory(size_t size) override; |
73 void OnCaptureCompleted(webrtc::DesktopFrame* frame) override; | 74 void OnCaptureCompleted(webrtc::DesktopFrame* frame) override; |
74 | 75 |
75 // Chooses new output properties based on the supplied source size and the | |
76 // properties requested to Allocate(), and dispatches OnFrameInfo[Changed] | |
77 // notifications. | |
78 void RefreshCaptureFormat(const webrtc::DesktopSize& frame_size); | |
79 | |
80 // Method that is scheduled on |task_runner_| to be called at regular intervals | 76 // Method that is scheduled on |task_runner_| to be called at regular intervals |
81 // to capture a frame. | 77 // to capture a frame. |
82 void OnCaptureTimer(); | 78 void OnCaptureTimer(); |
83 | 79 |
84 // Captures a frame and schedules timer for the next one. | 80 // Captures a frame and schedules timer for the next one. |
85 void CaptureFrameAndScheduleNext(); | 81 void CaptureFrameAndScheduleNext(); |
86 | 82 |
87 // Captures a single frame. | 83 // Captures a single frame. |
88 void DoCapture(); | 84 void DoCapture(); |
89 | 85 |
90 // Task runner used for capturing operations. | 86 // Task runner used for capturing operations. |
91 scoped_refptr<base::SingleThreadTaskRunner> task_runner_; | 87 scoped_refptr<base::SingleThreadTaskRunner> task_runner_; |
92 | 88 |
93 // The underlying DesktopCapturer instance used to capture frames. | 89 // The underlying DesktopCapturer instance used to capture frames. |
94 scoped_ptr<webrtc::DesktopCapturer> desktop_capturer_; | 90 scoped_ptr<webrtc::DesktopCapturer> desktop_capturer_; |
95 | 91 |
96 // The device client which proxies device events to the controller. Accessed | 92 // The device client which proxies device events to the controller. Accessed |
97 // on the task_runner_ thread. | 93 // on the task_runner_ thread. |
98 scoped_ptr<Client> client_; | 94 scoped_ptr<Client> client_; |
99 | 95 |
100 // Requested video capture format (width, height, frame rate, etc). | 96 // Requested video capture frame rate. |
101 media::VideoCaptureParams requested_params_; | 97 float requested_frame_rate_; |
102 | |
103 // Actual video capture format being generated. | |
104 media::VideoCaptureFormat capture_format_; | |
105 | 98 |
106 // Size of frame most recently captured from the source. | 99 // Size of frame most recently captured from the source. |
107 webrtc::DesktopSize previous_frame_size_; | 100 webrtc::DesktopSize previous_frame_size_; |
108 | 101 |
| 102 // Determines the size of frames to deliver to the |client_|. |
| 103 scoped_ptr<CaptureResolutionChooser> resolution_chooser_; |
| 104 |
109 // DesktopFrame into which captured frames are down-scaled and/or letterboxed, | 105 // DesktopFrame into which captured frames are down-scaled and/or letterboxed, |
110 // depending upon the caller's requested capture capabilities. If frames can | 106 // depending upon the caller's requested capture capabilities. If frames can |
111 // be returned to the caller directly then this is NULL. | 107 // be returned to the caller directly then this is NULL. |
112 scoped_ptr<webrtc::DesktopFrame> output_frame_; | 108 scoped_ptr<webrtc::DesktopFrame> output_frame_; |
113 | 109 |
114 // Sub-rectangle of |output_frame_| into which the source will be scaled | |
115 // and/or letterboxed. | |
116 webrtc::DesktopRect output_rect_; | |
117 | |
118 // Timer used to schedule the next frame capture. | 110 // Timer used to schedule the next frame capture. |
119 base::OneShotTimer<Core> capture_timer_; | 111 base::OneShotTimer<Core> capture_timer_; |
120 | 112 |
121 // True when waiting for |desktop_capturer_| to capture the current frame. | 113 // True when waiting for |desktop_capturer_| to capture the current frame. |
122 bool capture_in_progress_; | 114 bool capture_in_progress_; |
123 | 115 |
124 // True if the first capture call has returned. Used to log the first capture | 116 // True if the first capture call has returned. Used to log the first capture |
125 // result. | 117 // result. |
126 bool first_capture_returned_; | 118 bool first_capture_returned_; |
127 | 119 |
(...skipping 32 matching lines...)
160 const media::VideoCaptureParams& params, | 152 const media::VideoCaptureParams& params, |
161 scoped_ptr<Client> client) { | 153 scoped_ptr<Client> client) { |
162 DCHECK(task_runner_->BelongsToCurrentThread()); | 154 DCHECK(task_runner_->BelongsToCurrentThread()); |
163 DCHECK_GT(params.requested_format.frame_size.GetArea(), 0); | 155 DCHECK_GT(params.requested_format.frame_size.GetArea(), 0); |
164 DCHECK_GT(params.requested_format.frame_rate, 0); | 156 DCHECK_GT(params.requested_format.frame_rate, 0); |
165 DCHECK(desktop_capturer_); | 157 DCHECK(desktop_capturer_); |
166 DCHECK(client.get()); | 158 DCHECK(client.get()); |
167 DCHECK(!client_.get()); | 159 DCHECK(!client_.get()); |
168 | 160 |
169 client_ = client.Pass(); | 161 client_ = client.Pass(); |
170 requested_params_ = params; | 162 requested_frame_rate_ = params.requested_format.frame_rate; |
171 | 163 resolution_chooser_.reset(new CaptureResolutionChooser( |
172 capture_format_ = requested_params_.requested_format; | 164 params.requested_format.frame_size, |
173 | 165 params.resolution_change_policy)); |
174 // This capturer always outputs ARGB, non-interlaced. | |
175 capture_format_.pixel_format = media::PIXEL_FORMAT_ARGB; | |
176 | 166 |
177 power_save_blocker_.reset( | 167 power_save_blocker_.reset( |
178 PowerSaveBlocker::Create( | 168 PowerSaveBlocker::Create( |
179 PowerSaveBlocker::kPowerSaveBlockPreventDisplaySleep, | 169 PowerSaveBlocker::kPowerSaveBlockPreventDisplaySleep, |
180 PowerSaveBlocker::kReasonOther, | 170 PowerSaveBlocker::kReasonOther, |
181 "DesktopCaptureDevice is running").release()); | 171 "DesktopCaptureDevice is running").release()); |
182 | 172 |
183 desktop_capturer_->Start(this); | 173 desktop_capturer_->Start(this); |
184 | 174 |
185 CaptureFrameAndScheduleNext(); | 175 CaptureFrameAndScheduleNext(); |
(...skipping 45 matching lines...)
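To make the new flow concrete, here is a minimal sketch of how the CaptureResolutionChooser introduced in AllocateAndStart() above is driven. It assumes only the constructor and accessors that appear in this patch (SetSourceSize() and capture_size()); the helper name PickOutputSize is illustrative, not part of the change.

// Sketch only, not part of the patch.
#include "content/browser/media/capture/capture_resolution_chooser.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_geometry.h"
#include "ui/gfx/geometry/size.h"

namespace content {

// Hypothetical helper: feed the chooser the latest source size and read back
// the size at which frames should be delivered to the client.
gfx::Size PickOutputSize(CaptureResolutionChooser* chooser,
                         const webrtc::DesktopSize& source_size) {
  chooser->SetSourceSize(gfx::Size(source_size.width(), source_size.height()));
  // capture_size() honors the maximum frame size and the resolution-change
  // policy passed to the constructor in AllocateAndStart().
  return chooser->capture_size();
}

}  // namespace content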
231 // The two UMA_ blocks must each be put in their own scope, since each creates | 221 // The two UMA_ blocks must each be put in their own scope, since each creates |
232 // a static variable which expects a constant histogram name. | 222 // a static variable which expects a constant histogram name. |
233 if (capturer_type_ == DesktopMediaID::TYPE_SCREEN) { | 223 if (capturer_type_ == DesktopMediaID::TYPE_SCREEN) { |
234 UMA_HISTOGRAM_TIMES(kUmaScreenCaptureTime, capture_time); | 224 UMA_HISTOGRAM_TIMES(kUmaScreenCaptureTime, capture_time); |
235 } else { | 225 } else { |
236 UMA_HISTOGRAM_TIMES(kUmaWindowCaptureTime, capture_time); | 226 UMA_HISTOGRAM_TIMES(kUmaWindowCaptureTime, capture_time); |
237 } | 227 } |
238 | 228 |
239 scoped_ptr<webrtc::DesktopFrame> owned_frame(frame); | 229 scoped_ptr<webrtc::DesktopFrame> owned_frame(frame); |
240 | 230 |
| 231 // If the frame size has changed, drop the output frame (if any), and |
| 232 // determine the new output size. |
| 233 if (!previous_frame_size_.equals(frame->size())) { |
| 234 output_frame_.reset(); |
| 235 resolution_chooser_->SetSourceSize(gfx::Size(frame->size().width(), |
| 236 frame->size().height())); |
| 237 previous_frame_size_ = frame->size(); |
| 238 } |
| 239 // Align to 2x2 pixel boundaries, as required by OnIncomingCapturedData() so |
| 240 // it can convert the frame to I420 format. |
| 241 const webrtc::DesktopSize output_size( |
| 242 resolution_chooser_->capture_size().width() & ~1, |
| 243 resolution_chooser_->capture_size().height() & ~1); |
| 244 if (output_size.is_empty()) |
| 245 return; |
| 246 |
241 // On OSX we receive a 1x1 frame when the shared window is minimized. It | 247 // On OSX we receive a 1x1 frame when the shared window is minimized. It |
242 // cannot be subsampled to I420 and will be dropped downstream. So we replace | 248 // cannot be subsampled to I420 and will be dropped downstream. So we replace |
243 // it with a black frame to avoid the video appearing frozen at the last | 249 // it with a black frame to avoid the video appearing frozen at the last |
244 // frame. | 250 // frame. |
245 if (frame->size().width() == 1 || frame->size().height() == 1) { | 251 if (frame->size().width() == 1 || frame->size().height() == 1) { |
246 if (!black_frame_.get()) { | 252 if (!black_frame_.get()) { |
247 black_frame_.reset( | 253 black_frame_.reset(new webrtc::BasicDesktopFrame(output_size)); |
248 new webrtc::BasicDesktopFrame( | |
249 webrtc::DesktopSize(capture_format_.frame_size.width(), | |
250 capture_format_.frame_size.height()))); | |
251 memset(black_frame_->data(), | 254 memset(black_frame_->data(), |
252 0, | 255 0, |
253 black_frame_->stride() * black_frame_->size().height()); | 256 black_frame_->stride() * black_frame_->size().height()); |
254 } | 257 } |
255 owned_frame.reset(); | 258 owned_frame.reset(); |
256 frame = black_frame_.get(); | 259 frame = black_frame_.get(); |
257 } | 260 } |
258 | 261 |
259 // Handle initial frame size and size changes. | |
260 RefreshCaptureFormat(frame->size()); | |
261 | |
262 webrtc::DesktopSize output_size(capture_format_.frame_size.width(), | |
263 capture_format_.frame_size.height()); | |
264 size_t output_bytes = output_size.width() * output_size.height() * | 262 size_t output_bytes = output_size.width() * output_size.height() * |
265 webrtc::DesktopFrame::kBytesPerPixel; | 263 webrtc::DesktopFrame::kBytesPerPixel; |
266 const uint8_t* output_data = NULL; | 264 const uint8_t* output_data = NULL; |
267 | 265 |
268 if (!frame->size().equals(output_size)) { | 266 if (!frame->size().equals(output_size)) { |
269 // Down-scale and/or letterbox to the target format if the frame does not | 267 // Down-scale and/or letterbox to the target format if the frame does not |
270 // match the output size. | 268 // match the output size. |
271 | 269 |
272 // Allocate a buffer of the correct size to scale the frame into. | 270 // Allocate a buffer of the correct size to scale the frame into. |
273 // |output_frame_| is cleared whenever |output_rect_| changes, so we don't | 271 // |output_frame_| is cleared whenever the output size changes, so we don't |
274 // need to worry about clearing out stale pixel data in letterboxed areas. | 272 // need to worry about clearing out stale pixel data in letterboxed areas. |
275 if (!output_frame_) { | 273 if (!output_frame_) { |
276 output_frame_.reset(new webrtc::BasicDesktopFrame(output_size)); | 274 output_frame_.reset(new webrtc::BasicDesktopFrame(output_size)); |
277 memset(output_frame_->data(), 0, output_bytes); | 275 memset(output_frame_->data(), 0, output_bytes); |
278 } | 276 } |
279 DCHECK(output_frame_->size().equals(output_size)); | 277 DCHECK(output_frame_->size().equals(output_size)); |
280 | 278 |
281 // TODO(wez): Optimize this to scale only changed portions of the output, | 279 // TODO(wez): Optimize this to scale only changed portions of the output, |
282 // using ARGBScaleClip(). | 280 // using ARGBScaleClip(). |
| 281 const webrtc::DesktopRect output_rect = |
| 282 ComputeLetterboxRect(output_size, frame->size()); |
283 uint8_t* output_rect_data = output_frame_->data() + | 283 uint8_t* output_rect_data = output_frame_->data() + |
284 output_frame_->stride() * output_rect_.top() + | 284 output_frame_->stride() * output_rect.top() + |
285 webrtc::DesktopFrame::kBytesPerPixel * output_rect_.left(); | 285 webrtc::DesktopFrame::kBytesPerPixel * output_rect.left(); |
286 libyuv::ARGBScale(frame->data(), frame->stride(), | 286 libyuv::ARGBScale(frame->data(), frame->stride(), |
287 frame->size().width(), frame->size().height(), | 287 frame->size().width(), frame->size().height(), |
288 output_rect_data, output_frame_->stride(), | 288 output_rect_data, output_frame_->stride(), |
289 output_rect_.width(), output_rect_.height(), | 289 output_rect.width(), output_rect.height(), |
290 libyuv::kFilterBilinear); | 290 libyuv::kFilterBilinear); |
291 output_data = output_frame_->data(); | 291 output_data = output_frame_->data(); |
292 } else if (IsFrameUnpackedOrInverted(frame)) { | 292 } else if (IsFrameUnpackedOrInverted(frame)) { |
293 // If |frame| is not packed top-to-bottom then create a packed top-to-bottom | 293 // If |frame| is not packed top-to-bottom then create a packed top-to-bottom |
294 // copy. | 294 // copy. |
295 // This is required if the frame is inverted (see crbug.com/306876), or if | 295 // This is required if the frame is inverted (see crbug.com/306876), or if |
296 // |frame| is cropped from a larger frame (see crbug.com/437740). | 296 // |frame| is cropped from a larger frame (see crbug.com/437740). |
297 if (!output_frame_) { | 297 if (!output_frame_) { |
298 output_frame_.reset(new webrtc::BasicDesktopFrame(output_size)); | 298 output_frame_.reset(new webrtc::BasicDesktopFrame(output_size)); |
299 memset(output_frame_->data(), 0, output_bytes); | 299 memset(output_frame_->data(), 0, output_bytes); |
300 } | 300 } |
301 | 301 |
302 output_frame_->CopyPixelsFrom( | 302 output_frame_->CopyPixelsFrom( |
303 *frame, | 303 *frame, |
304 webrtc::DesktopVector(), | 304 webrtc::DesktopVector(), |
305 webrtc::DesktopRect::MakeSize(frame->size())); | 305 webrtc::DesktopRect::MakeSize(frame->size())); |
306 output_data = output_frame_->data(); | 306 output_data = output_frame_->data(); |
307 } else { | 307 } else { |
308 // If the captured frame matches the output size, we can return the pixel | 308 // If the captured frame matches the output size, we can return the pixel |
309 // data directly. | 309 // data directly. |
310 output_data = frame->data(); | 310 output_data = frame->data(); |
311 } | 311 } |
312 | 312 |
313 client_->OnIncomingCapturedData( | 313 client_->OnIncomingCapturedData( |
314 output_data, output_bytes, capture_format_, 0, base::TimeTicks::Now()); | 314 output_data, |
315 } | 315 output_bytes, |
316 | 316 media::VideoCaptureFormat(gfx::Size(output_size.width(), |
317 void DesktopCaptureDevice::Core::RefreshCaptureFormat( | 317 output_size.height()), |
318 const webrtc::DesktopSize& frame_size) { | 318 requested_frame_rate_, |
319 if (previous_frame_size_.equals(frame_size)) | 319 media::PIXEL_FORMAT_ARGB), |
320 return; | 320 0, |
321 | 321 base::TimeTicks::Now()); |
322 // Clear the output frame, if any, since it will either need resizing, or | |
323 // clearing of stale data in letterbox areas, anyway. | |
324 output_frame_.reset(); | |
325 | |
326 if (previous_frame_size_.is_empty() || | |
327 requested_params_.resolution_change_policy == | |
328 media::RESOLUTION_POLICY_ANY_WITHIN_LIMIT) { | |
329 // If this is the first frame, or the receiver supports variable resolution | |
330 // then determine the output size by treating the requested width & height | |
331 // as maxima. | |
332 if (frame_size.width() > | |
333 requested_params_.requested_format.frame_size.width() || | |
334 frame_size.height() > | |
335 requested_params_.requested_format.frame_size.height()) { | |
336 output_rect_ = ComputeLetterboxRect( | |
337 webrtc::DesktopSize( | |
338 requested_params_.requested_format.frame_size.width(), | |
339 requested_params_.requested_format.frame_size.height()), | |
340 frame_size); | |
341 output_rect_.Translate(-output_rect_.left(), -output_rect_.top()); | |
342 } else { | |
343 output_rect_ = webrtc::DesktopRect::MakeSize(frame_size); | |
344 } | |
345 capture_format_.frame_size.SetSize(output_rect_.width(), | |
346 output_rect_.height()); | |
347 } else { | |
348 // Otherwise the output frame size cannot change, so just scale and | |
349 // letterbox. | |
350 output_rect_ = ComputeLetterboxRect( | |
351 webrtc::DesktopSize(capture_format_.frame_size.width(), | |
352 capture_format_.frame_size.height()), | |
353 frame_size); | |
354 } | |
355 | |
356 previous_frame_size_ = frame_size; | |
357 } | 322 } |
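Two details of the OnCaptureCompleted() rewrite above are worth spelling out: the output size is rounded down to even dimensions because I420 subsamples chroma 2x2, and the scaled source is written at an offset inside the letterbox rectangle. Below is a hedged sketch of that arithmetic using only the webrtc::DesktopFrame API already used in this file; AlignDownToEven and LetterboxOrigin are illustrative names, not functions from the patch.

// Sketch only; it restates the size and pointer arithmetic used just before
// libyuv::ARGBScale() above.
#include <stdint.h>
#include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_geometry.h"

namespace {

// x & ~1 clears the low bit, rounding each dimension down to an even value,
// which I420 conversion requires.
webrtc::DesktopSize AlignDownToEven(const webrtc::DesktopSize& size) {
  return webrtc::DesktopSize(size.width() & ~1, size.height() & ~1);
}

// Address of the first pixel of the letterbox rectangle inside |output|,
// for a 32-bit ARGB frame (webrtc::DesktopFrame::kBytesPerPixel == 4).
uint8_t* LetterboxOrigin(webrtc::DesktopFrame* output,
                         const webrtc::DesktopRect& rect) {
  return output->data() + output->stride() * rect.top() +
         webrtc::DesktopFrame::kBytesPerPixel * rect.left();
}

}  // namespace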
358 | 323 |
359 void DesktopCaptureDevice::Core::OnCaptureTimer() { | 324 void DesktopCaptureDevice::Core::OnCaptureTimer() { |
360 DCHECK(task_runner_->BelongsToCurrentThread()); | 325 DCHECK(task_runner_->BelongsToCurrentThread()); |
361 | 326 |
362 if (!client_) | 327 if (!client_) |
363 return; | 328 return; |
364 | 329 |
365 CaptureFrameAndScheduleNext(); | 330 CaptureFrameAndScheduleNext(); |
366 } | 331 } |
367 | 332 |
368 void DesktopCaptureDevice::Core::CaptureFrameAndScheduleNext() { | 333 void DesktopCaptureDevice::Core::CaptureFrameAndScheduleNext() { |
369 DCHECK(task_runner_->BelongsToCurrentThread()); | 334 DCHECK(task_runner_->BelongsToCurrentThread()); |
370 | 335 |
371 base::TimeTicks started_time = base::TimeTicks::Now(); | 336 base::TimeTicks started_time = base::TimeTicks::Now(); |
372 DoCapture(); | 337 DoCapture(); |
373 base::TimeDelta last_capture_duration = base::TimeTicks::Now() - started_time; | 338 base::TimeDelta last_capture_duration = base::TimeTicks::Now() - started_time; |
374 | 339 |
375 // Limit frame-rate to reduce CPU consumption. | 340 // Limit frame-rate to reduce CPU consumption. |
376 base::TimeDelta capture_period = std::max( | 341 base::TimeDelta capture_period = std::max( |
377 (last_capture_duration * 100) / kMaximumCpuConsumptionPercentage, | 342 (last_capture_duration * 100) / kMaximumCpuConsumptionPercentage, |
378 base::TimeDelta::FromSeconds(1) / capture_format_.frame_rate); | 343 base::TimeDelta::FromMicroseconds(static_cast<int64>( |
| 344 1000000.0 / requested_frame_rate_ + 0.5 /* round to nearest int */))); |
379 | 345 |
380 // Schedule a task for the next frame. | 346 // Schedule a task for the next frame. |
381 capture_timer_.Start(FROM_HERE, capture_period - last_capture_duration, | 347 capture_timer_.Start(FROM_HERE, capture_period - last_capture_duration, |
382 this, &Core::OnCaptureTimer); | 348 this, &Core::OnCaptureTimer); |
383 } | 349 } |
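The capture period chosen above is the larger of two bounds: one derived from the requested frame rate, and one that keeps capture work at or below kMaximumCpuConsumptionPercentage of wall-clock time. A sketch of the same calculation in isolation follows; the 50% budget value and the name ComputeCapturePeriod are assumptions for illustration, not taken from this patch.

// Sketch of the scheduling math in CaptureFrameAndScheduleNext() above.
#include <algorithm>
#include "base/basictypes.h"
#include "base/time/time.h"

namespace {

const int kAssumedCpuBudgetPercent = 50;  // Stand-in for the real constant.

base::TimeDelta ComputeCapturePeriod(base::TimeDelta last_capture_duration,
                                     float requested_frame_rate) {
  // If the last capture took D, waiting D * 100 / budget between captures
  // keeps capture time at or below |budget| percent of wall-clock time.
  const base::TimeDelta cpu_period =
      (last_capture_duration * 100) / kAssumedCpuBudgetPercent;
  // Period implied by the requested frame rate, rounded to the nearest
  // microsecond, matching the expression in the patch.
  const base::TimeDelta rate_period = base::TimeDelta::FromMicroseconds(
      static_cast<int64>(1000000.0 / requested_frame_rate + 0.5));
  return std::max(cpu_period, rate_period);
}

}  // namespace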
384 | 350 |
385 void DesktopCaptureDevice::Core::DoCapture() { | 351 void DesktopCaptureDevice::Core::DoCapture() { |
386 DCHECK(task_runner_->BelongsToCurrentThread()); | 352 DCHECK(task_runner_->BelongsToCurrentThread()); |
387 DCHECK(!capture_in_progress_); | 353 DCHECK(!capture_in_progress_); |
388 | 354 |
(...skipping 99 matching lines...)
488 #else | 454 #else |
489 base::MessageLoop::Type thread_type = base::MessageLoop::TYPE_DEFAULT; | 455 base::MessageLoop::Type thread_type = base::MessageLoop::TYPE_DEFAULT; |
490 #endif | 456 #endif |
491 | 457 |
492 thread_.StartWithOptions(base::Thread::Options(thread_type, 0)); | 458 thread_.StartWithOptions(base::Thread::Options(thread_type, 0)); |
493 | 459 |
494 core_.reset(new Core(thread_.task_runner(), capturer.Pass(), type)); | 460 core_.reset(new Core(thread_.task_runner(), capturer.Pass(), type)); |
495 } | 461 } |
496 | 462 |
497 } // namespace content | 463 } // namespace content |