OLD | NEW |
---|---|
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "remoting/protocol/webrtc_video_capturer_adapter.h" | 5 #include "remoting/protocol/webrtc_video_capturer_adapter.h" |
6 | 6 |
7 #include <utility> | 7 #include <utility> |
8 | 8 |
9 #include "third_party/libjingle/source/talk/media/webrtc/webrtcvideoframe.h" | |
10 #include "third_party/libyuv/include/libyuv/convert.h" | |
9 #include "third_party/webrtc/modules/desktop_capture/desktop_frame.h" | 11 #include "third_party/webrtc/modules/desktop_capture/desktop_frame.h" |
10 | 12 |
11 namespace remoting { | 13 namespace remoting { |
14 namespace protocol { | |
12 | 15 |
13 // Number of frames to be captured per second. | 16 // Number of frames to be captured per second. |
14 const int kFramesPerSec = 30; | 17 const int kFramesPerSec = 30; |
15 | 18 |
16 WebrtcVideoCapturerAdapter::WebrtcVideoCapturerAdapter( | 19 WebrtcVideoCapturerAdapter::WebrtcVideoCapturerAdapter( |
17 scoped_ptr<webrtc::DesktopCapturer> capturer) | 20 scoped_ptr<webrtc::DesktopCapturer> capturer) |
18 : desktop_capturer_(std::move(capturer)) { | 21 : desktop_capturer_(std::move(capturer)) { |
19 DCHECK(desktop_capturer_); | 22 DCHECK(desktop_capturer_); |
20 | 23 |
21 thread_checker_.DetachFromThread(); | |
22 | |
23 // Disable video adaptation since we don't intend to use it. | 24 // Disable video adaptation since we don't intend to use it. |
24 set_enable_video_adapter(false); | 25 set_enable_video_adapter(false); |
25 } | 26 } |
26 | 27 |
27 WebrtcVideoCapturerAdapter::~WebrtcVideoCapturerAdapter() { | 28 WebrtcVideoCapturerAdapter::~WebrtcVideoCapturerAdapter() { |
28 DCHECK(!capture_timer_); | 29 DCHECK(!capture_timer_); |
29 } | 30 } |
30 | 31 |
31 webrtc::SharedMemory* WebrtcVideoCapturerAdapter::CreateSharedMemory( | |
32 size_t size) { | |
33 return nullptr; | |
34 } | |
35 | |
36 void WebrtcVideoCapturerAdapter::OnCaptureCompleted( | |
37 webrtc::DesktopFrame* frame) { | |
38 scoped_ptr<webrtc::DesktopFrame> owned_frame(frame); | |
39 | |
40 // Drop the owned_frame if there were no changes. | |
41 if (!owned_frame || owned_frame->updated_region().is_empty()) { | |
42 owned_frame.reset(); | |
43 return; | |
44 } | |
45 | |
46 // Convert the webrtc::DesktopFrame to a cricket::CapturedFrame. | |
47 cricket::CapturedFrame captured_frame; | |
48 captured_frame.width = owned_frame->size().width(); | |
49 captured_frame.height = owned_frame->size().height(); | |
50 base::TimeTicks current_time = base::TimeTicks::Now(); | |
51 captured_frame.time_stamp = | |
52 current_time.ToInternalValue() * base::Time::kNanosecondsPerMicrosecond; | |
53 captured_frame.data = owned_frame->data(); | |
54 | |
55 // The data_size attribute must be set. If multiple formats are supported, | |
56 // this should be set appropriately for each one. | |
57 captured_frame.data_size = | |
58 (captured_frame.width * webrtc::DesktopFrame::kBytesPerPixel * 8 + 7) / | |
59 8 * captured_frame.height; | |
60 captured_frame.fourcc = cricket::FOURCC_ARGB; | |
61 | |
62 SignalFrameCaptured(this, &captured_frame); | |
63 } | |
64 | |
65 bool WebrtcVideoCapturerAdapter::GetBestCaptureFormat( | 32 bool WebrtcVideoCapturerAdapter::GetBestCaptureFormat( |
66 const cricket::VideoFormat& desired, | 33 const cricket::VideoFormat& desired, |
67 cricket::VideoFormat* best_format) { | 34 cricket::VideoFormat* best_format) { |
68 DCHECK(thread_checker_.CalledOnValidThread()); | 35 DCHECK(thread_checker_.CalledOnValidThread()); |
69 | 36 |
70 // For now, just use the desired width and height. | 37 // The |capture_format| passed to Start() is always ignored, so just |
71 best_format->width = desired.width; | 38 // copy |desired| to |best_format|. |
72 best_format->height = desired.height; | 39 *best_format = desired; |
73 best_format->fourcc = cricket::FOURCC_ARGB; | |
74 best_format->interval = FPS_TO_INTERVAL(kFramesPerSec); | |
75 return true; | 40 return true; |
76 } | 41 } |
77 | 42 |
78 cricket::CaptureState WebrtcVideoCapturerAdapter::Start( | 43 cricket::CaptureState WebrtcVideoCapturerAdapter::Start( |
79 const cricket::VideoFormat& capture_format) { | 44 const cricket::VideoFormat& capture_format) { |
80 DCHECK(thread_checker_.CalledOnValidThread()); | 45 DCHECK(thread_checker_.CalledOnValidThread()); |
81 DCHECK(!capture_timer_); | 46 DCHECK(!capture_timer_); |
82 DCHECK_EQ(capture_format.fourcc, | |
83 (static_cast<uint32_t>(cricket::FOURCC_ARGB))); | |
84 | 47 |
85 if (!desktop_capturer_) { | 48 if (!desktop_capturer_) { |
86 VLOG(1) << "WebrtcVideoCapturerAdapter failed to start."; | 49 VLOG(1) << "WebrtcVideoCapturerAdapter failed to start."; |
87 return cricket::CS_FAILED; | 50 return cricket::CS_FAILED; |
88 } | 51 } |
89 | 52 |
90 // This is required to tell the cricket::VideoCapturer base class what the | |
91 // capture format will be. | |
92 SetCaptureFormat(&capture_format); | |
93 | |
94 desktop_capturer_->Start(this); | 53 desktop_capturer_->Start(this); |
95 | 54 |
96 capture_timer_.reset(new base::RepeatingTimer()); | 55 capture_timer_.reset(new base::RepeatingTimer()); |
97 capture_timer_->Start(FROM_HERE, | 56 capture_timer_->Start(FROM_HERE, |
98 base::TimeDelta::FromMicroseconds( | 57 base::TimeDelta::FromSeconds(1) / kFramesPerSec, this, |
99 GetCaptureFormat()->interval / | |
100 (base::Time::kNanosecondsPerMicrosecond)), | |
101 this, | |
102 &WebrtcVideoCapturerAdapter::CaptureNextFrame); | 58 &WebrtcVideoCapturerAdapter::CaptureNextFrame); |
103 | 59 |
104 return cricket::CS_RUNNING; | 60 return cricket::CS_RUNNING; |
105 } | 61 } |
106 | 62 |
107 // Similar to the base class implementation with some important differences: | 63 // Similar to the base class implementation with some important differences: |
108 // 1. Does not call either Stop() or Start(), as those would affect the state of | 64 // 1. Does not call either Stop() or Start(), as those would affect the state of |
109 // |desktop_capturer_|. | 65 // |desktop_capturer_|. |
110 // 2. Does not support unpausing after stopping the capturer. It is unclear | 66 // 2. Does not support unpausing after stopping the capturer. It is unclear |
111 // if that flow needs to be supported. | 67 // if that flow needs to be supported. |
(...skipping 51 matching lines...) | |
163 desktop_capturer_.reset(); | 119 desktop_capturer_.reset(); |
164 | 120 |
165 SetCaptureFormat(nullptr); | 121 SetCaptureFormat(nullptr); |
166 SetCaptureState(cricket::CS_STOPPED); | 122 SetCaptureState(cricket::CS_STOPPED); |
167 | 123 |
168 VLOG(1) << "WebrtcVideoCapturerAdapter stopped."; | 124 VLOG(1) << "WebrtcVideoCapturerAdapter stopped."; |
169 } | 125 } |
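The pause handling described in the comment above is mostly elided here ("skipping 51 matching lines"). Purely as an illustration of those two constraints, and not the elided code itself, a Pause() override along these lines (assuming the cricket::VideoCapturer::Pause(bool) signature and the members used elsewhere in this file) would suspend and resume |capture_timer_| without ever calling Stop() or Start(), and would refuse to unpause once the capturer has been stopped:

bool WebrtcVideoCapturerAdapter::Pause(bool pause) {
  DCHECK(thread_checker_.CalledOnValidThread());

  if (pause) {
    if (capture_state() == cricket::CS_PAUSED)
      return true;
    if (capture_state() != cricket::CS_RUNNING)
      return false;
    // Suspend the frame timer rather than calling Stop(), so the state of
    // |desktop_capturer_| is left untouched.
    capture_timer_->Stop();
    SetCaptureState(cricket::CS_PAUSED);
    return true;
  }

  if (capture_state() == cricket::CS_RUNNING)
    return true;
  // Unpausing is only supported from the paused state; once the capturer has
  // been stopped there is nothing to resume.
  if (capture_state() != cricket::CS_PAUSED || !capture_timer_)
    return false;
  capture_timer_->Start(FROM_HERE,
                        base::TimeDelta::FromSeconds(1) / kFramesPerSec, this,
                        &WebrtcVideoCapturerAdapter::CaptureNextFrame);
  SetCaptureState(cricket::CS_RUNNING);
  return true;
}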
170 | 126 |
171 bool WebrtcVideoCapturerAdapter::IsRunning() { | 127 bool WebrtcVideoCapturerAdapter::IsRunning() { |
172 DCHECK(thread_checker_.CalledOnValidThread()); | 128 DCHECK(thread_checker_.CalledOnValidThread()); |
173 | |
174 return capture_timer_->IsRunning(); | 129 return capture_timer_->IsRunning(); |
175 } | 130 } |
176 | 131 |
177 bool WebrtcVideoCapturerAdapter::IsScreencast() const { | 132 bool WebrtcVideoCapturerAdapter::IsScreencast() const { |
178 return true; | 133 return true; |
179 } | 134 } |
180 | 135 |
181 bool WebrtcVideoCapturerAdapter::GetPreferredFourccs( | 136 bool WebrtcVideoCapturerAdapter::GetPreferredFourccs( |
182 std::vector<uint32_t>* fourccs) { | 137 std::vector<uint32_t>* fourccs) { |
138 return false; | |
139 } | |
140 | |
141 webrtc::SharedMemory* WebrtcVideoCapturerAdapter::CreateSharedMemory( | |
142 size_t size) { | |
143 return nullptr; | |
144 } | |
145 | |
146 void WebrtcVideoCapturerAdapter::OnCaptureCompleted( | |
147 webrtc::DesktopFrame* frame) { | |
183 DCHECK(thread_checker_.CalledOnValidThread()); | 148 DCHECK(thread_checker_.CalledOnValidThread()); |
184 if (!fourccs) | 149 |
185 return false; | 150 DCHECK(capture_pending_); |
186 fourccs->push_back(cricket::FOURCC_ARGB); | 151 capture_pending_ = false; |
187 return true; | 152 |
153 scoped_ptr<webrtc::DesktopFrame> owned_frame(frame); | |
154 | |
155 // Drop the frame if there were no changes. | |
156 if (!owned_frame || owned_frame->updated_region().is_empty()) | |
157 return; | |
158 | |
159 size_t width = frame->size().width(); | |
160 size_t height = frame->size().height(); | |
161 if (!yuv_frame_ || yuv_frame_->GetWidth() != width || | |
162 yuv_frame_->GetHeight() != height) { | |
163 scoped_ptr<cricket::WebRtcVideoFrame> webrtc_frame( | |
164 new cricket::WebRtcVideoFrame()); | |
165 webrtc_frame->InitToEmptyBuffer(width, height, 1, 1, 0); | |
166 yuv_frame_ = std::move(webrtc_frame); | |
167 | |
168 // Set updated_region so the whole frame is converted to YUV below. | |
169 frame->mutable_updated_region()->SetRect( | |
170 webrtc::DesktopRect::MakeWH(width, height)); | |
171 } | |
172 | |
173 // TODO(sergeyu): This will copy the buffer if it's being used. Optimize it by | |
174 // keeping a queue of frames. | |
175 CHECK(yuv_frame_->MakeExclusive()); | |
176 | |
177 yuv_frame_->SetTimeStamp(base::TimeTicks::Now().ToInternalValue() * | |
178 base::Time::kNanosecondsPerMicrosecond); | |
179 | |
180 for (webrtc::DesktopRegion::Iterator i(frame->updated_region()); !i.IsAtEnd(); | |
181 i.Advance()) { | |
182 int left = i.rect().left(); | |
Jamie (2016/01/08 23:33:52): Nit: Unnecessary extra space after =
Sergey Ulanov (2016/01/09 01:33:29): Done.
183 int top = i.rect().top(); | |
184 int width = i.rect().width(); | |
185 int height = i.rect().height(); | |
186 | |
187 if (left % 2 == 1) { | |
188 --left; | |
189 ++width; | |
190 } | |
191 if (top % 2 == 1) { | |
192 --top; | |
193 ++height; | |
194 } | |
Jamie (2016/01/08 23:33:52): Is it only the top-left corner that must be on a 2-pixel boundary?
Sergey Ulanov (2016/01/09 01:33:29): ARGBToI420() supports odd width and height.
195 libyuv::ARGBToI420( | |
196 frame->data() + frame->stride() * top + | |
197 left * webrtc::DesktopFrame::kBytesPerPixel, | |
198 frame->stride(), | |
199 yuv_frame_->GetYPlane() + yuv_frame_->GetYPitch() * top + left, | |
200 yuv_frame_->GetYPitch(), | |
201 yuv_frame_->GetUPlane() + yuv_frame_->GetUPitch() * top / 2 + | |
202 left / 2, | |
203 yuv_frame_->GetUPitch(), | |
204 yuv_frame_->GetVPlane() + yuv_frame_->GetVPitch() * top / 2 + | |
205 left / 2, | |
206 yuv_frame_->GetVPitch(), width, height); | |
207 } | |
208 | |
209 SignalVideoFrame(this, yuv_frame_.get()); | |
188 } | 210 } |
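For reference, the review thread above asks why the dirty-rect origin is snapped to even coordinates. Below is a self-contained sketch of the same conversion written against plain byte buffers instead of cricket::WebRtcVideoFrame (the function name and parameters are illustrative, not part of this change): the I420 chroma planes are subsampled 2x2 and the U/V destination offsets divide the origin by two, so the origin must be even, while libyuv::ARGBToI420() itself accepts odd width and height.

#include <cstdint>

#include "third_party/libyuv/include/libyuv/convert.h"

// Convert one dirty rectangle of a 32-bit ARGB frame into pre-allocated
// I420 planes. |argb_stride| and the plane pitches are in bytes.
void ConvertDirtyRectToI420(const uint8_t* argb, int argb_stride,
                            uint8_t* y, int y_pitch,
                            uint8_t* u, int u_pitch,
                            uint8_t* v, int v_pitch,
                            int left, int top, int width, int height) {
  // Snap the origin down to even coordinates; the chroma offsets below would
  // otherwise land between 2x2-subsampled U/V samples. Width and height may
  // stay odd -- ARGBToI420() handles that.
  if (left % 2 == 1) {
    --left;
    ++width;
  }
  if (top % 2 == 1) {
    --top;
    ++height;
  }
  libyuv::ARGBToI420(
      argb + top * argb_stride + left * 4,  // 4 bytes per ARGB pixel.
      argb_stride,
      y + top * y_pitch + left, y_pitch,
      u + (top / 2) * u_pitch + left / 2, u_pitch,
      v + (top / 2) * v_pitch + left / 2, v_pitch,
      width, height);
}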
189 | 211 |
190 void WebrtcVideoCapturerAdapter::CaptureNextFrame() { | 212 void WebrtcVideoCapturerAdapter::CaptureNextFrame() { |
191 // If we are paused, then don't capture. | 213 DCHECK(thread_checker_.CalledOnValidThread()); |
192 if (!IsRunning()) | 214 |
215 if (capture_pending_) | |
193 return; | 216 return; |
194 | 217 capture_pending_ = true; |
195 desktop_capturer_->Capture(webrtc::DesktopRegion()); | 218 desktop_capturer_->Capture(webrtc::DesktopRegion()); |
196 } | 219 } |
197 | 220 |
221 } // namespace protocol | |
198 } // namespace remoting | 222 } // namespace remoting |