OLD | NEW |
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "remoting/protocol/webrtc_video_stream.h" | 5 #include "remoting/protocol/webrtc_video_stream.h" |
6 | 6 |
7 #include "base/logging.h" | 7 #include "base/logging.h" |
8 #include "base/single_thread_task_runner.h" | 8 #include "base/single_thread_task_runner.h" |
9 #include "base/task_runner_util.h" | 9 #include "base/task_runner_util.h" |
10 #include "base/threading/thread_task_runner_handle.h" | 10 #include "base/threading/thread_task_runner_handle.h" |
11 #include "remoting/base/constants.h" | 11 #include "remoting/base/constants.h" |
12 #include "remoting/codec/webrtc_video_encoder_vpx.h" | 12 #include "remoting/codec/webrtc_video_encoder_vpx.h" |
13 #include "remoting/protocol/frame_stats.h" | 13 #include "remoting/protocol/frame_stats.h" |
14 #include "remoting/protocol/host_video_stats_dispatcher.h" | 14 #include "remoting/protocol/host_video_stats_dispatcher.h" |
15 #include "remoting/protocol/webrtc_dummy_video_capturer.h" | 15 #include "remoting/protocol/webrtc_dummy_video_capturer.h" |
16 #include "remoting/protocol/webrtc_frame_scheduler_simple.h" | 16 #include "remoting/protocol/webrtc_frame_scheduler_simple.h" |
17 #include "remoting/protocol/webrtc_transport.h" | 17 #include "remoting/protocol/webrtc_transport.h" |
18 #include "third_party/webrtc/api/mediastreaminterface.h" | 18 #include "third_party/webrtc/api/mediastreaminterface.h" |
19 #include "third_party/webrtc/api/peerconnectioninterface.h" | 19 #include "third_party/webrtc/api/peerconnectioninterface.h" |
20 #include "third_party/webrtc/api/test/fakeconstraints.h" | 20 #include "third_party/webrtc/api/test/fakeconstraints.h" |
21 #include "third_party/webrtc/media/base/videocapturer.h" | 21 #include "third_party/webrtc/media/base/videocapturer.h" |
22 | 22 |
23 namespace remoting { | 23 namespace remoting { |
24 namespace protocol { | 24 namespace protocol { |
25 | 25 |
26 const char kStreamLabel[] = "screen_stream"; | 26 const char kStreamLabel[] = "screen_stream"; |
27 const char kVideoLabel[] = "screen_video"; | 27 const char kVideoLabel[] = "screen_video"; |
28 | 28 |
29 struct WebrtcVideoStream::FrameStats { | 29 struct WebrtcVideoStream::FrameTimestamps { |
30 // The following field is not null only for one frame after each incoming | 30 // The following field is not null only for one frame after each incoming |
31 // input event. | 31 // input event. |
32 InputEventTimestamps input_event_timestamps; | 32 InputEventTimestamps input_event_timestamps; |
33 | 33 |
34 base::TimeTicks capture_started_time; | 34 base::TimeTicks capture_started_time; |
35 base::TimeTicks capture_ended_time; | 35 base::TimeTicks capture_ended_time; |
36 base::TimeDelta capture_delay; | 36 base::TimeDelta capture_delay; |
37 base::TimeTicks encode_started_time; | 37 base::TimeTicks encode_started_time; |
38 base::TimeTicks encode_ended_time; | 38 base::TimeTicks encode_ended_time; |
39 | |
40 uint32_t capturer_id = 0; | |
41 }; | 39 }; |
42 | 40 |
43 struct WebrtcVideoStream::EncodedFrameWithStats { | 41 struct WebrtcVideoStream::EncodedFrameWithTimestamps { |
44 std::unique_ptr<WebrtcVideoEncoder::EncodedFrame> frame; | 42 std::unique_ptr<WebrtcVideoEncoder::EncodedFrame> frame; |
45 std::unique_ptr<FrameStats> stats; | 43 std::unique_ptr<FrameTimestamps> timestamps; |
46 }; | 44 }; |
47 | 45 |
48 WebrtcVideoStream::WebrtcVideoStream() | 46 WebrtcVideoStream::WebrtcVideoStream() |
49 : video_stats_dispatcher_(kStreamLabel), weak_factory_(this) {} | 47 : video_stats_dispatcher_(kStreamLabel), weak_factory_(this) {} |
50 | 48 |
51 WebrtcVideoStream::~WebrtcVideoStream() { | 49 WebrtcVideoStream::~WebrtcVideoStream() { |
52 if (stream_) { | 50 if (stream_) { |
53 for (const auto& track : stream_->GetVideoTracks()) { | 51 for (const auto& track : stream_->GetVideoTracks()) { |
54 stream_->RemoveTrack(track.get()); | 52 stream_->RemoveTrack(track.get()); |
55 } | 53 } |
(...skipping 78 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
134 void WebrtcVideoStream::SetObserver(Observer* observer) { | 132 void WebrtcVideoStream::SetObserver(Observer* observer) { |
135 DCHECK(thread_checker_.CalledOnValidThread()); | 133 DCHECK(thread_checker_.CalledOnValidThread()); |
136 observer_ = observer; | 134 observer_ = observer; |
137 } | 135 } |
138 | 136 |
139 void WebrtcVideoStream::OnCaptureResult( | 137 void WebrtcVideoStream::OnCaptureResult( |
140 webrtc::DesktopCapturer::Result result, | 138 webrtc::DesktopCapturer::Result result, |
141 std::unique_ptr<webrtc::DesktopFrame> frame) { | 139 std::unique_ptr<webrtc::DesktopFrame> frame) { |
142 DCHECK(thread_checker_.CalledOnValidThread()); | 140 DCHECK(thread_checker_.CalledOnValidThread()); |
143 | 141 |
144 captured_frame_stats_->capture_ended_time = base::TimeTicks::Now(); | 142 captured_frame_timestamps_->capture_ended_time = base::TimeTicks::Now(); |
145 captured_frame_stats_->capture_delay = | 143 captured_frame_timestamps_->capture_delay = |
146 base::TimeDelta::FromMilliseconds(frame ? frame->capture_time_ms() : 0); | 144 base::TimeDelta::FromMilliseconds(frame ? frame->capture_time_ms() : 0); |
147 | 145 |
148 WebrtcVideoEncoder::FrameParams frame_params; | 146 WebrtcVideoEncoder::FrameParams frame_params; |
149 if (!scheduler_->OnFrameCaptured(frame.get(), &frame_params)) { | 147 if (!scheduler_->OnFrameCaptured(frame.get(), &frame_params)) { |
150 return; | 148 return; |
151 } | 149 } |
152 | 150 |
153 // TODO(sergeyu): Handle ERROR_PERMANENT result here. | 151 // TODO(sergeyu): Handle ERROR_PERMANENT result here. |
154 if (frame) { | 152 if (frame) { |
155 webrtc::DesktopVector dpi = | 153 webrtc::DesktopVector dpi = |
156 frame->dpi().is_zero() ? webrtc::DesktopVector(kDefaultDpi, kDefaultDpi) | 154 frame->dpi().is_zero() ? webrtc::DesktopVector(kDefaultDpi, kDefaultDpi) |
157 : frame->dpi(); | 155 : frame->dpi(); |
158 | 156 |
159 if (!frame_size_.equals(frame->size()) || !frame_dpi_.equals(dpi)) { | 157 if (!frame_size_.equals(frame->size()) || !frame_dpi_.equals(dpi)) { |
160 frame_size_ = frame->size(); | 158 frame_size_ = frame->size(); |
161 frame_dpi_ = dpi; | 159 frame_dpi_ = dpi; |
162 if (observer_) | 160 if (observer_) |
163 observer_->OnVideoSizeChanged(this, frame_size_, frame_dpi_); | 161 observer_->OnVideoSizeChanged(this, frame_size_, frame_dpi_); |
164 } | 162 } |
165 } | 163 } |
166 | 164 |
167 base::PostTaskAndReplyWithResult( | 165 base::PostTaskAndReplyWithResult( |
168 encode_task_runner_.get(), FROM_HERE, | 166 encode_task_runner_.get(), FROM_HERE, |
169 base::Bind(&WebrtcVideoStream::EncodeFrame, encoder_.get(), | 167 base::Bind(&WebrtcVideoStream::EncodeFrame, encoder_.get(), |
170 base::Passed(std::move(frame)), frame_params, | 168 base::Passed(std::move(frame)), frame_params, |
171 base::Passed(std::move(captured_frame_stats_))), | 169 base::Passed(std::move(captured_frame_timestamps_))), |
172 base::Bind(&WebrtcVideoStream::OnFrameEncoded, | 170 base::Bind(&WebrtcVideoStream::OnFrameEncoded, |
173 weak_factory_.GetWeakPtr())); | 171 weak_factory_.GetWeakPtr())); |
174 } | 172 } |
175 | 173 |
176 void WebrtcVideoStream::OnChannelInitialized( | 174 void WebrtcVideoStream::OnChannelInitialized( |
177 ChannelDispatcherBase* channel_dispatcher) { | 175 ChannelDispatcherBase* channel_dispatcher) { |
178 DCHECK(&video_stats_dispatcher_ == channel_dispatcher); | 176 DCHECK(&video_stats_dispatcher_ == channel_dispatcher); |
179 } | 177 } |
180 void WebrtcVideoStream::OnChannelClosed( | 178 void WebrtcVideoStream::OnChannelClosed( |
181 ChannelDispatcherBase* channel_dispatcher) { | 179 ChannelDispatcherBase* channel_dispatcher) { |
182 DCHECK(&video_stats_dispatcher_ == channel_dispatcher); | 180 DCHECK(&video_stats_dispatcher_ == channel_dispatcher); |
183 LOG(WARNING) << "video_stats channel was closed."; | 181 LOG(WARNING) << "video_stats channel was closed."; |
184 } | 182 } |
185 | 183 |
186 void WebrtcVideoStream::CaptureNextFrame() { | 184 void WebrtcVideoStream::CaptureNextFrame() { |
187 DCHECK(thread_checker_.CalledOnValidThread()); | 185 DCHECK(thread_checker_.CalledOnValidThread()); |
188 | 186 |
189 captured_frame_stats_.reset(new FrameStats()); | 187 captured_frame_timestamps_.reset(new FrameTimestamps()); |
190 captured_frame_stats_->capture_started_time = base::TimeTicks::Now(); | 188 captured_frame_timestamps_->capture_started_time = base::TimeTicks::Now(); |
191 captured_frame_stats_->input_event_timestamps = | 189 captured_frame_timestamps_->input_event_timestamps = |
192 event_timestamps_source_->TakeLastEventTimestamps(); | 190 event_timestamps_source_->TakeLastEventTimestamps(); |
193 | 191 |
194 capturer_->CaptureFrame(); | 192 capturer_->CaptureFrame(); |
195 } | 193 } |
196 | 194 |
197 // static | 195 // static |
198 WebrtcVideoStream::EncodedFrameWithStats WebrtcVideoStream::EncodeFrame( | 196 WebrtcVideoStream::EncodedFrameWithTimestamps WebrtcVideoStream::EncodeFrame( |
199 WebrtcVideoEncoder* encoder, | 197 WebrtcVideoEncoder* encoder, |
200 std::unique_ptr<webrtc::DesktopFrame> frame, | 198 std::unique_ptr<webrtc::DesktopFrame> frame, |
201 WebrtcVideoEncoder::FrameParams params, | 199 WebrtcVideoEncoder::FrameParams params, |
202 std::unique_ptr<WebrtcVideoStream::FrameStats> stats) { | 200 std::unique_ptr<WebrtcVideoStream::FrameTimestamps> timestamps) { |
203 EncodedFrameWithStats result; | 201 EncodedFrameWithTimestamps result; |
204 result.stats = std::move(stats); | 202 result.timestamps = std::move(timestamps); |
205 result.stats->encode_started_time = base::TimeTicks::Now(); | 203 result.timestamps->encode_started_time = base::TimeTicks::Now(); |
206 result.frame = encoder->Encode(frame.get(), params); | 204 result.frame = encoder->Encode(frame.get(), params); |
207 result.stats->encode_ended_time = base::TimeTicks::Now(); | 205 result.timestamps->encode_ended_time = base::TimeTicks::Now(); |
208 result.stats->capturer_id = frame->capturer_id(); | |
209 return result; | 206 return result; |
210 } | 207 } |
211 | 208 |
212 void WebrtcVideoStream::OnFrameEncoded(EncodedFrameWithStats frame) { | 209 void WebrtcVideoStream::OnFrameEncoded(EncodedFrameWithTimestamps frame) { |
213 DCHECK(thread_checker_.CalledOnValidThread()); | 210 DCHECK(thread_checker_.CalledOnValidThread()); |
214 | 211 |
215 HostFrameStats stats; | 212 HostFrameStats stats; |
216 scheduler_->OnFrameEncoded(frame.frame.get(), &stats); | 213 scheduler_->OnFrameEncoded(frame.frame.get(), &stats); |
217 | 214 |
218 if (!frame.frame) { | 215 if (!frame.frame) { |
219 return; | 216 return; |
220 } | 217 } |
221 | 218 |
222 webrtc::EncodedImageCallback::Result result = | 219 webrtc::EncodedImageCallback::Result result = |
223 webrtc_transport_->video_encoder_factory()->SendEncodedFrame( | 220 webrtc_transport_->video_encoder_factory()->SendEncodedFrame( |
224 *frame.frame, frame.stats->capture_started_time); | 221 *frame.frame, frame.timestamps->capture_started_time); |
225 if (result.error != webrtc::EncodedImageCallback::Result::OK) { | 222 if (result.error != webrtc::EncodedImageCallback::Result::OK) { |
226 // TODO(sergeyu): Stop the stream. | 223 // TODO(sergeyu): Stop the stream. |
227 LOG(ERROR) << "Failed to send video frame."; | 224 LOG(ERROR) << "Failed to send video frame."; |
228 return; | 225 return; |
229 } | 226 } |
230 | 227 |
231 // Send FrameStats message. | 228 // Send FrameStats message. |
232 if (video_stats_dispatcher_.is_connected()) { | 229 if (video_stats_dispatcher_.is_connected()) { |
233 stats.frame_size = frame.frame ? frame.frame->data.size() : 0; | 230 stats.frame_size = frame.frame ? frame.frame->data.size() : 0; |
234 | 231 |
235 if (!frame.stats->input_event_timestamps.is_null()) { | 232 if (!frame.timestamps->input_event_timestamps.is_null()) { |
236 stats.capture_pending_delay = | 233 stats.capture_pending_delay = |
237 frame.stats->capture_started_time - | 234 frame.timestamps->capture_started_time - |
238 frame.stats->input_event_timestamps.host_timestamp; | 235 frame.timestamps->input_event_timestamps.host_timestamp; |
239 stats.latest_event_timestamp = | 236 stats.latest_event_timestamp = |
240 frame.stats->input_event_timestamps.client_timestamp; | 237 frame.timestamps->input_event_timestamps.client_timestamp; |
241 } | 238 } |
242 | 239 |
243 stats.capture_delay = frame.stats->capture_delay; | 240 stats.capture_delay = frame.timestamps->capture_delay; |
244 | 241 |
245 // Total overhead time for IPC and threading when capturing frames. | 242 // Total overhead time for IPC and threading when capturing frames. |
246 stats.capture_overhead_delay = (frame.stats->capture_ended_time - | 243 stats.capture_overhead_delay = (frame.timestamps->capture_ended_time - |
247 frame.stats->capture_started_time) - | 244 frame.timestamps->capture_started_time) - |
248 stats.capture_delay; | 245 stats.capture_delay; |
249 | 246 |
250 stats.encode_pending_delay = frame.stats->encode_started_time - | 247 stats.encode_pending_delay = frame.timestamps->encode_started_time - |
251 frame.stats->capture_ended_time; | 248 frame.timestamps->capture_ended_time; |
252 | 249 |
253 stats.encode_delay = frame.stats->encode_ended_time - | 250 stats.encode_delay = frame.timestamps->encode_ended_time - |
254 frame.stats->encode_started_time; | 251 frame.timestamps->encode_started_time; |
255 | |
256 stats.capturer_id = frame.stats->capturer_id; | |
257 | 252 |
258 video_stats_dispatcher_.OnVideoFrameStats(result.frame_id, stats); | 253 video_stats_dispatcher_.OnVideoFrameStats(result.frame_id, stats); |
259 } | 254 } |
260 } | 255 } |
261 | 256 |
262 } // namespace protocol | 257 } // namespace protocol |
263 } // namespace remoting | 258 } // namespace remoting |
OLD | NEW |