OLD | NEW |
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "remoting/protocol/webrtc_video_stream.h" | 5 #include "remoting/protocol/webrtc_video_stream.h" |
6 | 6 |
7 #include "base/logging.h" | 7 #include "base/logging.h" |
8 #include "base/single_thread_task_runner.h" | 8 #include "base/single_thread_task_runner.h" |
9 #include "base/task_runner_util.h" | 9 #include "base/task_runner_util.h" |
10 #include "base/threading/thread_task_runner_handle.h" | 10 #include "base/threading/thread_task_runner_handle.h" |
11 #include "remoting/base/constants.h" | 11 #include "remoting/base/constants.h" |
12 #include "remoting/codec/webrtc_video_encoder_vpx.h" | 12 #include "remoting/codec/webrtc_video_encoder_vpx.h" |
13 #include "remoting/protocol/frame_stats.h" | 13 #include "remoting/protocol/frame_stats.h" |
14 #include "remoting/protocol/host_video_stats_dispatcher.h" | 14 #include "remoting/protocol/host_video_stats_dispatcher.h" |
15 #include "remoting/protocol/webrtc_dummy_video_capturer.h" | 15 #include "remoting/protocol/webrtc_dummy_video_capturer.h" |
| 16 #include "remoting/protocol/webrtc_frame_scheduler_simple.h" |
16 #include "remoting/protocol/webrtc_transport.h" | 17 #include "remoting/protocol/webrtc_transport.h" |
17 #include "third_party/webrtc/api/mediastreaminterface.h" | 18 #include "third_party/webrtc/api/mediastreaminterface.h" |
18 #include "third_party/webrtc/api/peerconnectioninterface.h" | 19 #include "third_party/webrtc/api/peerconnectioninterface.h" |
19 #include "third_party/webrtc/api/test/fakeconstraints.h" | 20 #include "third_party/webrtc/api/test/fakeconstraints.h" |
20 #include "third_party/webrtc/media/base/videocapturer.h" | 21 #include "third_party/webrtc/media/base/videocapturer.h" |
21 | 22 |
22 namespace remoting { | 23 namespace remoting { |
23 namespace protocol { | 24 namespace protocol { |
24 | 25 |
25 namespace { | 26 namespace { |
(...skipping 101 matching lines...)
127 // Register for target bitrate notifications. | 128 // Register for target bitrate notifications. |
128 webrtc_transport_->video_encoder_factory()->SetTargetBitrateCallback( | 129 webrtc_transport_->video_encoder_factory()->SetTargetBitrateCallback( |
129 base::Bind(&PostTaskOnTaskRunnerWithParam<int>, | 130 base::Bind(&PostTaskOnTaskRunnerWithParam<int>, |
130 base::ThreadTaskRunnerHandle::Get(), | 131 base::ThreadTaskRunnerHandle::Get(), |
131 base::Bind(&WebrtcVideoStream::SetTargetBitrate, | 132 base::Bind(&WebrtcVideoStream::SetTargetBitrate, |
132 weak_factory_.GetWeakPtr()))); | 133 weak_factory_.GetWeakPtr()))); |
133 | 134 |
134 video_stats_dispatcher_.Init(webrtc_transport_->CreateOutgoingChannel( | 135 video_stats_dispatcher_.Init(webrtc_transport_->CreateOutgoingChannel( |
135 video_stats_dispatcher_.channel_name()), | 136 video_stats_dispatcher_.channel_name()), |
136 this); | 137 this); |
| 138 |
| 139 scheduler_.reset(new WebrtcFrameSchedulerSimple()); |
| 140 |
137 return true; | 141 return true; |
138 } | 142 } |
139 | 143 |
140 void WebrtcVideoStream::Pause(bool pause) { | 144 void WebrtcVideoStream::Pause(bool pause) { |
141 DCHECK(thread_checker_.CalledOnValidThread()); | 145 DCHECK(thread_checker_.CalledOnValidThread()); |
142 if (pause) { | 146 scheduler_->Pause(pause); |
143 capture_timer_.Stop(); | |
144 } else { | |
145 if (received_first_frame_request_) { | |
146 StartCaptureTimer(); | |
147 } | |
148 } | |
149 } | 147 } |
150 | 148 |
151 void WebrtcVideoStream::OnInputEventReceived(int64_t event_timestamp) { | 149 void WebrtcVideoStream::OnInputEventReceived(int64_t event_timestamp) { |
152 DCHECK(thread_checker_.CalledOnValidThread()); | 150 DCHECK(thread_checker_.CalledOnValidThread()); |
153 | 151 |
154 if (!next_frame_timestamps_) | 152 if (!next_frame_timestamps_) |
155 next_frame_timestamps_.reset(new FrameTimestamps()); | 153 next_frame_timestamps_.reset(new FrameTimestamps()); |
156 next_frame_timestamps_->input_event_client_timestamp = event_timestamp; | 154 next_frame_timestamps_->input_event_client_timestamp = event_timestamp; |
157 next_frame_timestamps_->input_event_received_time = base::TimeTicks::Now(); | 155 next_frame_timestamps_->input_event_received_time = base::TimeTicks::Now(); |
158 } | 156 } |
159 | 157 |
160 void WebrtcVideoStream::SetLosslessEncode(bool want_lossless) { | 158 void WebrtcVideoStream::SetLosslessEncode(bool want_lossless) { |
161 NOTIMPLEMENTED(); | 159 NOTIMPLEMENTED(); |
162 } | 160 } |
163 | 161 |
164 void WebrtcVideoStream::SetLosslessColor(bool want_lossless) { | 162 void WebrtcVideoStream::SetLosslessColor(bool want_lossless) { |
165 NOTIMPLEMENTED(); | 163 NOTIMPLEMENTED(); |
166 } | 164 } |
167 | 165 |
168 void WebrtcVideoStream::SetObserver(Observer* observer) { | 166 void WebrtcVideoStream::SetObserver(Observer* observer) { |
169 DCHECK(thread_checker_.CalledOnValidThread()); | 167 DCHECK(thread_checker_.CalledOnValidThread()); |
170 observer_ = observer; | 168 observer_ = observer; |
171 } | 169 } |
172 | 170 |
173 void WebrtcVideoStream::SetKeyFrameRequest() { | 171 void WebrtcVideoStream::SetKeyFrameRequest() { |
174 DCHECK(thread_checker_.CalledOnValidThread()); | 172 DCHECK(thread_checker_.CalledOnValidThread()); |
175 | 173 |
176 key_frame_request_ = true; | 174 scheduler_->SetKeyFrameRequest(); |
| 175 |
| 176 // Create capture scheduler when the first key frame request is received. |
177 if (!received_first_frame_request_) { | 177 if (!received_first_frame_request_) { |
178 received_first_frame_request_ = true; | 178 received_first_frame_request_ = true; |
179 StartCaptureTimer(); | 179 scheduler_->Start(base::Bind(&WebrtcVideoStream::CaptureNextFrame, |
180 base::ThreadTaskRunnerHandle::Get()->PostTask( | 180 base::Unretained(this))); |
181 FROM_HERE, base::Bind(&WebrtcVideoStream::StartCaptureTimer, | |
182 weak_factory_.GetWeakPtr())); | |
183 } | 181 } |
184 } | 182 } |
185 | 183 |
186 void WebrtcVideoStream::StartCaptureTimer() { | |
187 DCHECK(thread_checker_.CalledOnValidThread()); | |
188 capture_timer_.Start(FROM_HERE, base::TimeDelta::FromSeconds(1) / 30, this, | |
189 &WebrtcVideoStream::CaptureNextFrame); | |
190 } | |
191 | |
192 void WebrtcVideoStream::SetTargetBitrate(int target_bitrate_kbps) { | 184 void WebrtcVideoStream::SetTargetBitrate(int target_bitrate_kbps) { |
193 DCHECK(thread_checker_.CalledOnValidThread()); | 185 DCHECK(thread_checker_.CalledOnValidThread()); |
194 | 186 |
195 VLOG(1) << "Set Target bitrate " << target_bitrate_kbps; | 187 VLOG(1) << "Set Target bitrate " << target_bitrate_kbps; |
196 target_bitrate_kbps_ = target_bitrate_kbps; | 188 scheduler_->SetTargetBitrate(target_bitrate_kbps); |
197 } | |
198 | |
199 bool WebrtcVideoStream::ClearAndGetKeyFrameRequest() { | |
200 DCHECK(thread_checker_.CalledOnValidThread()); | |
201 | |
202 bool key_frame_request = key_frame_request_; | |
203 key_frame_request_ = false; | |
204 return key_frame_request; | |
205 } | 189 } |
206 | 190 |
207 void WebrtcVideoStream::OnCaptureResult( | 191 void WebrtcVideoStream::OnCaptureResult( |
208 webrtc::DesktopCapturer::Result result, | 192 webrtc::DesktopCapturer::Result result, |
209 std::unique_ptr<webrtc::DesktopFrame> frame) { | 193 std::unique_ptr<webrtc::DesktopFrame> frame) { |
210 DCHECK(thread_checker_.CalledOnValidThread()); | 194 DCHECK(thread_checker_.CalledOnValidThread()); |
211 DCHECK(capture_pending_); | |
212 capture_pending_ = false; | |
213 | |
214 | |
215 if (encode_pending_) { | |
216 // TODO(isheriff): consider queuing here | |
217 VLOG(1) << "Dropping captured frame since encoder is still busy"; | |
218 return; | |
219 } | |
220 | 195 |
221 // TODO(sergeyu): Handle ERROR_PERMANENT result here. | 196 // TODO(sergeyu): Handle ERROR_PERMANENT result here. |
222 | 197 |
223 webrtc::DesktopVector dpi = | 198 webrtc::DesktopVector dpi = |
224 frame->dpi().is_zero() ? webrtc::DesktopVector(kDefaultDpi, kDefaultDpi) | 199 frame->dpi().is_zero() ? webrtc::DesktopVector(kDefaultDpi, kDefaultDpi) |
225 : frame->dpi(); | 200 : frame->dpi(); |
226 | 201 |
227 if (!frame_size_.equals(frame->size()) || !frame_dpi_.equals(dpi)) { | 202 if (!frame_size_.equals(frame->size()) || !frame_dpi_.equals(dpi)) { |
228 frame_size_ = frame->size(); | 203 frame_size_ = frame->size(); |
229 frame_dpi_ = dpi; | 204 frame_dpi_ = dpi; |
230 if (observer_) | 205 if (observer_) |
231 observer_->OnVideoSizeChanged(this, frame_size_, frame_dpi_); | 206 observer_->OnVideoSizeChanged(this, frame_size_, frame_dpi_); |
232 } | 207 } |
233 | 208 |
234 captured_frame_timestamps_->capture_ended_time = base::TimeTicks::Now(); | 209 captured_frame_timestamps_->capture_ended_time = base::TimeTicks::Now(); |
235 captured_frame_timestamps_->capture_delay = | 210 captured_frame_timestamps_->capture_delay = |
236 base::TimeDelta::FromMilliseconds(frame->capture_time_ms()); | 211 base::TimeDelta::FromMilliseconds(frame->capture_time_ms()); |
237 | 212 |
238 encode_pending_ = true; | |
239 | |
240 // TODO(sergeyu): Currently frame_duration is always set to 1/15 of a second. | |
241 // Experiment with different values, and try changing it dynamically. | |
242 WebrtcVideoEncoder::FrameParams frame_params; | 213 WebrtcVideoEncoder::FrameParams frame_params; |
243 frame_params.bitrate_kbps = target_bitrate_kbps_; | 214 if (!scheduler_->GetEncoderFrameParams(*frame, &frame_params)) |
244 frame_params.duration = base::TimeDelta::FromSeconds(1) / 15; | 215 return; |
245 frame_params.key_frame = ClearAndGetKeyFrameRequest(); | |
246 | 216 |
247 base::PostTaskAndReplyWithResult( | 217 base::PostTaskAndReplyWithResult( |
248 encode_task_runner_.get(), FROM_HERE, | 218 encode_task_runner_.get(), FROM_HERE, |
249 base::Bind(&WebrtcVideoStream::EncodeFrame, encoder_.get(), | 219 base::Bind(&WebrtcVideoStream::EncodeFrame, encoder_.get(), |
250 base::Passed(std::move(frame)), frame_params, | 220 base::Passed(std::move(frame)), frame_params, |
251 base::Passed(std::move(captured_frame_timestamps_))), | 221 base::Passed(std::move(captured_frame_timestamps_))), |
252 base::Bind(&WebrtcVideoStream::OnFrameEncoded, | 222 base::Bind(&WebrtcVideoStream::OnFrameEncoded, |
253 weak_factory_.GetWeakPtr())); | 223 weak_factory_.GetWeakPtr())); |
254 } | 224 } |
255 | 225 |
256 void WebrtcVideoStream::OnChannelInitialized( | 226 void WebrtcVideoStream::OnChannelInitialized( |
257 ChannelDispatcherBase* channel_dispatcher) { | 227 ChannelDispatcherBase* channel_dispatcher) { |
258 DCHECK(&video_stats_dispatcher_ == channel_dispatcher); | 228 DCHECK(&video_stats_dispatcher_ == channel_dispatcher); |
259 } | 229 } |
260 void WebrtcVideoStream::OnChannelClosed( | 230 void WebrtcVideoStream::OnChannelClosed( |
261 ChannelDispatcherBase* channel_dispatcher) { | 231 ChannelDispatcherBase* channel_dispatcher) { |
262 DCHECK(&video_stats_dispatcher_ == channel_dispatcher); | 232 DCHECK(&video_stats_dispatcher_ == channel_dispatcher); |
263 LOG(WARNING) << "video_stats channel was closed."; | 233 LOG(WARNING) << "video_stats channel was closed."; |
264 } | 234 } |
265 | 235 |
266 void WebrtcVideoStream::CaptureNextFrame() { | 236 void WebrtcVideoStream::CaptureNextFrame() { |
267 DCHECK(thread_checker_.CalledOnValidThread()); | 237 DCHECK(thread_checker_.CalledOnValidThread()); |
268 | 238 |
269 if (capture_pending_ || encode_pending_) { | |
270 VLOG(1) << "Capture/encode still pending.."; | |
271 return; | |
272 } | |
273 | |
274 base::TimeTicks now = base::TimeTicks::Now(); | |
275 | |
276 capture_pending_ = true; | |
277 VLOG(1) << "Capture next frame after " | |
278 << (base::TimeTicks::Now() - last_capture_started_ticks_) | |
279 .InMilliseconds(); | |
280 last_capture_started_ticks_ = now; | |
281 | |
282 | |
283 // |next_frame_timestamps_| is not set if no input events were received since | 239 // |next_frame_timestamps_| is not set if no input events were received since |
284 // the previous frame. In that case create FrameTimestamps instance without | 240 // the previous frame. In that case create FrameTimestamps instance without |
285 // setting |input_event_client_timestamp| and |input_event_received_time|. | 241 // setting |input_event_client_timestamp| and |input_event_received_time|. |
286 if (!next_frame_timestamps_) | 242 if (!next_frame_timestamps_) |
287 next_frame_timestamps_.reset(new FrameTimestamps()); | 243 next_frame_timestamps_.reset(new FrameTimestamps()); |
288 | 244 |
289 captured_frame_timestamps_ = std::move(next_frame_timestamps_); | 245 captured_frame_timestamps_ = std::move(next_frame_timestamps_); |
290 captured_frame_timestamps_->capture_started_time = now; | 246 captured_frame_timestamps_->capture_started_time = base::TimeTicks::Now(); |
291 | 247 |
292 capturer_->Capture(webrtc::DesktopRegion()); | 248 capturer_->Capture(webrtc::DesktopRegion()); |
293 } | 249 } |
294 | 250 |
295 // static | 251 // static |
296 WebrtcVideoStream::EncodedFrameWithTimestamps WebrtcVideoStream::EncodeFrame( | 252 WebrtcVideoStream::EncodedFrameWithTimestamps WebrtcVideoStream::EncodeFrame( |
297 WebrtcVideoEncoder* encoder, | 253 WebrtcVideoEncoder* encoder, |
298 std::unique_ptr<webrtc::DesktopFrame> frame, | 254 std::unique_ptr<webrtc::DesktopFrame> frame, |
299 WebrtcVideoEncoder::FrameParams params, | 255 WebrtcVideoEncoder::FrameParams params, |
300 std::unique_ptr<WebrtcVideoStream::FrameTimestamps> timestamps) { | 256 std::unique_ptr<WebrtcVideoStream::FrameTimestamps> timestamps) { |
301 EncodedFrameWithTimestamps result; | 257 EncodedFrameWithTimestamps result; |
302 result.timestamps = std::move(timestamps); | 258 result.timestamps = std::move(timestamps); |
303 result.timestamps->encode_started_time = base::TimeTicks::Now(); | 259 result.timestamps->encode_started_time = base::TimeTicks::Now(); |
304 result.frame = encoder->Encode(*frame, params); | 260 result.frame = encoder->Encode(*frame, params); |
305 result.timestamps->encode_ended_time = base::TimeTicks::Now(); | 261 result.timestamps->encode_ended_time = base::TimeTicks::Now(); |
306 return result; | 262 return result; |
307 } | 263 } |
308 | 264 |
309 void WebrtcVideoStream::OnFrameEncoded(EncodedFrameWithTimestamps frame) { | 265 void WebrtcVideoStream::OnFrameEncoded(EncodedFrameWithTimestamps frame) { |
310 DCHECK(thread_checker_.CalledOnValidThread()); | 266 DCHECK(thread_checker_.CalledOnValidThread()); |
311 | 267 |
312 encode_pending_ = false; | 268 // Send the frame itself. |
313 | 269 webrtc::EncodedImageCallback::Result result = |
314 size_t frame_size = frame.frame ? frame.frame->data.size() : 0; | 270 webrtc_transport_->video_encoder_factory()->SendEncodedFrame( |
315 | 271 *frame.frame, frame.timestamps->capture_started_time); |
316 // Generate HostFrameStats. | 272 if (result.error != webrtc::EncodedImageCallback::Result::OK) { |
317 HostFrameStats stats; | 273 // TODO(sergeyu): Stop the stream. |
318 stats.frame_size = frame_size; | 274 LOG(ERROR) << "Failed to send video frame."; |
319 | 275 return; |
320 if (!frame.timestamps->input_event_received_time.is_null()) { | |
321 stats.capture_pending_delay = frame.timestamps->capture_started_time - | |
322 frame.timestamps->input_event_received_time; | |
323 stats.latest_event_timestamp = base::TimeTicks::FromInternalValue( | |
324 frame.timestamps->input_event_client_timestamp); | |
325 } | 276 } |
326 | 277 |
327 stats.capture_delay = frame.timestamps->capture_delay; | 278 scheduler_->OnFrameEncoded(*frame.frame, result); |
328 | |
329 // Total overhead time for IPC and threading when capturing frames. | |
330 stats.capture_overhead_delay = (frame.timestamps->capture_ended_time - | |
331 frame.timestamps->capture_started_time) - | |
332 stats.capture_delay; | |
333 | |
334 stats.encode_pending_delay = frame.timestamps->encode_started_time - | |
335 frame.timestamps->capture_ended_time; | |
336 | |
337 stats.encode_delay = frame.timestamps->encode_ended_time - | |
338 frame.timestamps->encode_started_time; | |
339 | |
340 // TODO(sergeyu): Figure out how to measure send_pending time with WebRTC and | |
341 // set it here. | |
342 stats.send_pending_delay = base::TimeDelta(); | |
343 | |
344 uint32_t frame_id = 0; | |
345 if (frame.frame) { | |
346 // Send the frame itself. | |
347 webrtc::EncodedImageCallback::Result result = | |
348 webrtc_transport_->video_encoder_factory()->SendEncodedFrame( | |
349 std::move(frame.frame), frame.timestamps->capture_started_time); | |
350 if (result.error != webrtc::EncodedImageCallback::Result::OK) { | |
351 // TODO(sergeyu): Stop the stream. | |
352 LOG(ERROR) << "Failed to send video frame."; | |
353 return; | |
354 } | |
355 frame_id = result.frame_id; | |
356 } | |
357 | 279 |
358 // Send FrameStats message. | 280 // Send FrameStats message. |
359 if (video_stats_dispatcher_.is_connected()) | 281 if (video_stats_dispatcher_.is_connected()) { |
360 video_stats_dispatcher_.OnVideoFrameStats(frame_id, stats); | 282 HostFrameStats stats; |
| 283 stats.frame_size = frame.frame->data.size(); |
361 | 284 |
362 // Simplistic adaptation of frame polling in the range 5 FPS to 30 FPS. | 285 if (!frame.timestamps->input_event_received_time.is_null()) { |
363 // TODO(sergeyu): Move this logic to a separate class. | 286 stats.capture_pending_delay = frame.timestamps->capture_started_time - |
364 float encoded_bits = frame_size * 8.0; | 287 frame.timestamps->input_event_received_time; |
365 uint32_t next_sched_ms = std::max( | 288 stats.latest_event_timestamp = base::TimeTicks::FromInternalValue( |
366 33, std::min(static_cast<int>(encoded_bits / target_bitrate_kbps_), 200)); | 289 frame.timestamps->input_event_client_timestamp); |
367 capture_timer_.Start(FROM_HERE, | 290 } |
368 base::TimeDelta::FromMilliseconds(next_sched_ms), this, | 291 |
369 &WebrtcVideoStream::CaptureNextFrame); | 292 stats.capture_delay = frame.timestamps->capture_delay; |
| 293 |
| 294 // Total overhead time for IPC and threading when capturing frames. |
| 295 stats.capture_overhead_delay = (frame.timestamps->capture_ended_time - |
| 296 frame.timestamps->capture_started_time) - |
| 297 stats.capture_delay; |
| 298 |
| 299 stats.encode_pending_delay = frame.timestamps->encode_started_time - |
| 300 frame.timestamps->capture_ended_time; |
| 301 |
| 302 stats.encode_delay = frame.timestamps->encode_ended_time - |
| 303 frame.timestamps->encode_started_time; |
| 304 |
| 305 // TODO(sergeyu): Figure out how to measure send_pending time with WebRTC |
| 306 // and set it here. |
| 307 stats.send_pending_delay = base::TimeDelta(); |
| 308 |
| 309 video_stats_dispatcher_.OnVideoFrameStats(result.frame_id, stats); |
| 310 } |
370 } | 311 } |
371 | 312 |
372 } // namespace protocol | 313 } // namespace protocol |
373 } // namespace remoting | 314 } // namespace remoting |
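
For readers skimming this CL: the capture-timer, key-frame, and bitrate bookkeeping removed above now lives behind the new WebrtcFrameSchedulerSimple. The sketch below is inferred purely from the call sites visible in this diff (Start, Pause, SetKeyFrameRequest, SetTargetBitrate, GetEncoderFrameParams, OnFrameEncoded); names and signatures are assumptions, and the actual header in remoting/protocol/webrtc_frame_scheduler_simple.h is authoritative.

// Hypothetical summary of the scheduler interface as exercised by this CL;
// not the real header, just what the call sites above imply.
class WebrtcFrameSchedulerSimple {
 public:
  // Starts scheduling; |capture_callback| is invoked whenever the scheduler
  // decides the next frame should be captured.
  void Start(const base::Closure& capture_callback);

  // Pauses or resumes frame capture.
  void Pause(bool pause);

  // Records that the next encoded frame should be a key frame.
  void SetKeyFrameRequest();

  // Updates the bandwidth estimate used to pace captures and size frames.
  void SetTargetBitrate(int target_bitrate_kbps);

  // Fills |params_out| (bitrate, duration, key-frame flag) for the captured
  // |frame|; returns false if the frame should be dropped instead of encoded.
  bool GetEncoderFrameParams(const webrtc::DesktopFrame& frame,
                             WebrtcVideoEncoder::FrameParams* params_out);

  // Lets the scheduler account for the encoded frame and the result of
  // handing it to the WebRTC encoder callback (e.g. for pacing the next
  // capture based on frame size and target bitrate).
  void OnFrameEncoded(const WebrtcVideoEncoder::EncodedFrame& frame,
                      const webrtc::EncodedImageCallback::Result& result);
};

With that shape in mind, the net effect of the CL reads cleanly: WebrtcVideoStream now only captures, encodes, and reports stats, while all pacing and adaptation decisions are delegated to the scheduler object created in Start() and driven from SetKeyFrameRequest(), SetTargetBitrate(), and OnFrameEncoded().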