OLD | NEW |
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "remoting/protocol/webrtc_video_stream.h" | 5 #include "remoting/protocol/webrtc_video_stream.h" |
6 | 6 |
7 #include "base/logging.h" | 7 #include "base/logging.h" |
8 #include "base/single_thread_task_runner.h" | 8 #include "base/single_thread_task_runner.h" |
9 #include "base/task_runner_util.h" | 9 #include "base/task_runner_util.h" |
10 #include "base/threading/thread_task_runner_handle.h" | 10 #include "base/threading/thread_task_runner_handle.h" |
11 #include "remoting/base/constants.h" | 11 #include "remoting/base/constants.h" |
12 #include "remoting/codec/webrtc_video_encoder_vpx.h" | 12 #include "remoting/codec/webrtc_video_encoder_vpx.h" |
13 #include "remoting/protocol/frame_stats.h" | 13 #include "remoting/protocol/frame_stats.h" |
14 #include "remoting/protocol/host_video_stats_dispatcher.h" | 14 #include "remoting/protocol/host_video_stats_dispatcher.h" |
15 #include "remoting/protocol/webrtc_dummy_video_capturer.h" | 15 #include "remoting/protocol/webrtc_dummy_video_capturer.h" |
16 #include "remoting/protocol/webrtc_frame_scheduler_simple.h" | 16 #include "remoting/protocol/webrtc_frame_scheduler_simple.h" |
17 #include "remoting/protocol/webrtc_transport.h" | 17 #include "remoting/protocol/webrtc_transport.h" |
18 #include "third_party/webrtc/api/mediastreaminterface.h" | 18 #include "third_party/webrtc/api/mediastreaminterface.h" |
19 #include "third_party/webrtc/api/peerconnectioninterface.h" | 19 #include "third_party/webrtc/api/peerconnectioninterface.h" |
20 #include "third_party/webrtc/api/test/fakeconstraints.h" | 20 #include "third_party/webrtc/api/test/fakeconstraints.h" |
21 #include "third_party/webrtc/media/base/videocapturer.h" | 21 #include "third_party/webrtc/media/base/videocapturer.h" |
22 | 22 |
23 namespace remoting { | 23 namespace remoting { |
24 namespace protocol { | 24 namespace protocol { |
25 | 25 |
26 const char kStreamLabel[] = "screen_stream"; | 26 const char kStreamLabel[] = "screen_stream"; |
27 const char kVideoLabel[] = "screen_video"; | 27 const char kVideoLabel[] = "screen_video"; |
28 | 28 |
29 struct WebrtcVideoStream::FrameTimestamps { | 29 struct WebrtcVideoStream::FrameTimestamps { |
30 // The following two fields are set only for one frame after each incoming | 30 // The following field is non-null only for one frame after each incoming |
31 // input event. |input_event_client_timestamp| is event timestamp | 31 // input event. |
32 // received from the client. |input_event_received_time| is local time when | 32 InputEventTimestamps input_event_timestamps; |
33 // the event was received. | |
34 int64_t input_event_client_timestamp = -1; | |
35 base::TimeTicks input_event_received_time; | |
36 | 33 |
37 base::TimeTicks capture_started_time; | 34 base::TimeTicks capture_started_time; |
38 base::TimeTicks capture_ended_time; | 35 base::TimeTicks capture_ended_time; |
39 base::TimeDelta capture_delay; | 36 base::TimeDelta capture_delay; |
40 base::TimeTicks encode_started_time; | 37 base::TimeTicks encode_started_time; |
41 base::TimeTicks encode_ended_time; | 38 base::TimeTicks encode_ended_time; |
42 }; | 39 }; |
43 | 40 |
44 struct WebrtcVideoStream::EncodedFrameWithTimestamps { | 41 struct WebrtcVideoStream::EncodedFrameWithTimestamps { |
45 std::unique_ptr<WebrtcVideoEncoder::EncodedFrame> frame; | 42 std::unique_ptr<WebrtcVideoEncoder::EncodedFrame> frame; |
(...skipping 61 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
107 scheduler_.reset(new WebrtcFrameSchedulerSimple()); | 104 scheduler_.reset(new WebrtcFrameSchedulerSimple()); |
108 scheduler_->Start( | 105 scheduler_->Start( |
109 webrtc_transport_->video_encoder_factory(), | 106 webrtc_transport_->video_encoder_factory(), |
110 base::Bind(&WebrtcVideoStream::CaptureNextFrame, base::Unretained(this))); | 107 base::Bind(&WebrtcVideoStream::CaptureNextFrame, base::Unretained(this))); |
111 | 108 |
112 video_stats_dispatcher_.Init(webrtc_transport_->CreateOutgoingChannel( | 109 video_stats_dispatcher_.Init(webrtc_transport_->CreateOutgoingChannel( |
113 video_stats_dispatcher_.channel_name()), | 110 video_stats_dispatcher_.channel_name()), |
114 this); | 111 this); |
115 } | 112 } |
116 | 113 |
| 114 void WebrtcVideoStream::SetEventTimestampsSource( |
| 115 scoped_refptr<InputEventTimestampsSource> event_timestamps_source) { |
| 116 event_timestamps_source_ = event_timestamps_source; |
| 117 } |
| 118 |
117 void WebrtcVideoStream::Pause(bool pause) { | 119 void WebrtcVideoStream::Pause(bool pause) { |
118 DCHECK(thread_checker_.CalledOnValidThread()); | 120 DCHECK(thread_checker_.CalledOnValidThread()); |
119 scheduler_->Pause(pause); | 121 scheduler_->Pause(pause); |
120 } | 122 } |
121 | 123 |
122 void WebrtcVideoStream::OnInputEventReceived(int64_t event_timestamp) { | |
123 DCHECK(thread_checker_.CalledOnValidThread()); | |
124 | |
125 if (!next_frame_timestamps_) | |
126 next_frame_timestamps_.reset(new FrameTimestamps()); | |
127 next_frame_timestamps_->input_event_client_timestamp = event_timestamp; | |
128 next_frame_timestamps_->input_event_received_time = base::TimeTicks::Now(); | |
129 } | |
130 | |
131 void WebrtcVideoStream::SetLosslessEncode(bool want_lossless) { | 124 void WebrtcVideoStream::SetLosslessEncode(bool want_lossless) { |
132 NOTIMPLEMENTED(); | 125 NOTIMPLEMENTED(); |
133 } | 126 } |
134 | 127 |
135 void WebrtcVideoStream::SetLosslessColor(bool want_lossless) { | 128 void WebrtcVideoStream::SetLosslessColor(bool want_lossless) { |
136 NOTIMPLEMENTED(); | 129 NOTIMPLEMENTED(); |
137 } | 130 } |
138 | 131 |
139 void WebrtcVideoStream::SetObserver(Observer* observer) { | 132 void WebrtcVideoStream::SetObserver(Observer* observer) { |
140 DCHECK(thread_checker_.CalledOnValidThread()); | 133 DCHECK(thread_checker_.CalledOnValidThread()); |
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
182 } | 175 } |
183 void WebrtcVideoStream::OnChannelClosed( | 176 void WebrtcVideoStream::OnChannelClosed( |
184 ChannelDispatcherBase* channel_dispatcher) { | 177 ChannelDispatcherBase* channel_dispatcher) { |
185 DCHECK(&video_stats_dispatcher_ == channel_dispatcher); | 178 DCHECK(&video_stats_dispatcher_ == channel_dispatcher); |
186 LOG(WARNING) << "video_stats channel was closed."; | 179 LOG(WARNING) << "video_stats channel was closed."; |
187 } | 180 } |
188 | 181 |
189 void WebrtcVideoStream::CaptureNextFrame() { | 182 void WebrtcVideoStream::CaptureNextFrame() { |
190 DCHECK(thread_checker_.CalledOnValidThread()); | 183 DCHECK(thread_checker_.CalledOnValidThread()); |
191 | 184 |
192 // |next_frame_timestamps_| is not set if no input events were received since | 185 captured_frame_timestamps_.reset(new FrameTimestamps()); |
193 // the previous frame. In that case create FrameTimestamps instance without | 186 captured_frame_timestamps_->capture_started_time = base::TimeTicks::Now(); |
194 // setting |input_event_client_timestamp| and |input_event_received_time|. | |
195 if (!next_frame_timestamps_) | |
196 next_frame_timestamps_.reset(new FrameTimestamps()); | |
197 | 187 |
198 captured_frame_timestamps_ = std::move(next_frame_timestamps_); | 188 if (event_timestamps_source_) { |
199 captured_frame_timestamps_->capture_started_time = base::TimeTicks::Now(); | 189 captured_frame_timestamps_->input_event_timestamps = |
| 190 event_timestamps_source_->TakeLastEventTimestamps(); |
| 191 } |
200 | 192 |
201 capturer_->Capture(webrtc::DesktopRegion()); | 193 capturer_->Capture(webrtc::DesktopRegion()); |
202 } | 194 } |
203 | 195 |
204 // static | 196 // static |
205 WebrtcVideoStream::EncodedFrameWithTimestamps WebrtcVideoStream::EncodeFrame( | 197 WebrtcVideoStream::EncodedFrameWithTimestamps WebrtcVideoStream::EncodeFrame( |
206 WebrtcVideoEncoder* encoder, | 198 WebrtcVideoEncoder* encoder, |
207 std::unique_ptr<webrtc::DesktopFrame> frame, | 199 std::unique_ptr<webrtc::DesktopFrame> frame, |
208 WebrtcVideoEncoder::FrameParams params, | 200 WebrtcVideoEncoder::FrameParams params, |
209 std::unique_ptr<WebrtcVideoStream::FrameTimestamps> timestamps) { | 201 std::unique_ptr<WebrtcVideoStream::FrameTimestamps> timestamps) { |
(...skipping 15 matching lines...) Expand all Loading... |
225 // TODO(sergeyu): Stop the stream. | 217 // TODO(sergeyu): Stop the stream. |
226 LOG(ERROR) << "Failed to send video frame."; | 218 LOG(ERROR) << "Failed to send video frame."; |
227 return; | 219 return; |
228 } | 220 } |
229 | 221 |
230 // Send FrameStats message. | 222 // Send FrameStats message. |
231 if (video_stats_dispatcher_.is_connected()) { | 223 if (video_stats_dispatcher_.is_connected()) { |
232 HostFrameStats stats; | 224 HostFrameStats stats; |
233 stats.frame_size = frame.frame->data.size(); | 225 stats.frame_size = frame.frame->data.size(); |
234 | 226 |
235 if (!frame.timestamps->input_event_received_time.is_null()) { | 227 if (!frame.timestamps->input_event_timestamps.is_null()) { |
236 stats.capture_pending_delay = frame.timestamps->capture_started_time - | 228 stats.capture_pending_delay = |
237 frame.timestamps->input_event_received_time; | 229 frame.timestamps->capture_started_time - |
238 stats.latest_event_timestamp = base::TimeTicks::FromInternalValue( | 230 frame.timestamps->input_event_timestamps.host_timestamp; |
239 frame.timestamps->input_event_client_timestamp); | 231 stats.latest_event_timestamp = |
 | 232 frame.timestamps->input_event_timestamps.client_timestamp; |
240 } | 233 } |
241 | 234 |
242 stats.capture_delay = frame.timestamps->capture_delay; | 235 stats.capture_delay = frame.timestamps->capture_delay; |
243 | 236 |
244 // Total overhead time for IPC and threading when capturing frames. | 237 // Total overhead time for IPC and threading when capturing frames. |
245 stats.capture_overhead_delay = (frame.timestamps->capture_ended_time - | 238 stats.capture_overhead_delay = (frame.timestamps->capture_ended_time - |
246 frame.timestamps->capture_started_time) - | 239 frame.timestamps->capture_started_time) - |
247 stats.capture_delay; | 240 stats.capture_delay; |
248 | 241 |
249 stats.encode_pending_delay = frame.timestamps->encode_started_time - | 242 stats.encode_pending_delay = frame.timestamps->encode_started_time - |
250 frame.timestamps->capture_ended_time; | 243 frame.timestamps->capture_ended_time; |
251 | 244 |
252 stats.encode_delay = frame.timestamps->encode_ended_time - | 245 stats.encode_delay = frame.timestamps->encode_ended_time - |
253 frame.timestamps->encode_started_time; | 246 frame.timestamps->encode_started_time; |
254 | 247 |
255 // TODO(sergeyu): Figure out how to measure send_pending time with WebRTC | 248 // TODO(sergeyu): Figure out how to measure send_pending time with WebRTC |
256 // and set it here. | 249 // and set it here. |
257 stats.send_pending_delay = base::TimeDelta(); | 250 stats.send_pending_delay = base::TimeDelta(); |
258 | 251 |
259 video_stats_dispatcher_.OnVideoFrameStats(result.frame_id, stats); | 252 video_stats_dispatcher_.OnVideoFrameStats(result.frame_id, stats); |
260 } | 253 } |
261 } | 254 } |
262 | 255 |
263 } // namespace protocol | 256 } // namespace protocol |
264 } // namespace remoting | 257 } // namespace remoting |
OLD | NEW |