OLD | NEW |
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "remoting/protocol/webrtc_video_stream.h" | 5 #include "remoting/protocol/webrtc_video_stream.h" |
6 | 6 |
7 #include "base/logging.h" | 7 #include "base/logging.h" |
8 #include "base/single_thread_task_runner.h" | 8 #include "base/single_thread_task_runner.h" |
9 #include "base/task_runner_util.h" | 9 #include "base/task_runner_util.h" |
10 #include "base/threading/thread_task_runner_handle.h" | 10 #include "base/threading/thread_task_runner_handle.h" |
11 #include "remoting/base/constants.h" | 11 #include "remoting/base/constants.h" |
12 #include "remoting/codec/webrtc_video_encoder_vpx.h" | 12 #include "remoting/codec/webrtc_video_encoder_vpx.h" |
13 #include "remoting/protocol/frame_stats.h" | 13 #include "remoting/protocol/frame_stats.h" |
14 #include "remoting/protocol/host_video_stats_dispatcher.h" | 14 #include "remoting/protocol/host_video_stats_dispatcher.h" |
15 #include "remoting/protocol/webrtc_dummy_video_capturer.h" | 15 #include "remoting/protocol/webrtc_dummy_video_capturer.h" |
16 #include "remoting/protocol/webrtc_frame_scheduler_simple.h" | 16 #include "remoting/protocol/webrtc_frame_scheduler_simple.h" |
17 #include "remoting/protocol/webrtc_transport.h" | 17 #include "remoting/protocol/webrtc_transport.h" |
18 #include "third_party/webrtc/api/mediastreaminterface.h" | 18 #include "third_party/webrtc/api/mediastreaminterface.h" |
19 #include "third_party/webrtc/api/peerconnectioninterface.h" | 19 #include "third_party/webrtc/api/peerconnectioninterface.h" |
20 #include "third_party/webrtc/api/test/fakeconstraints.h" | 20 #include "third_party/webrtc/api/test/fakeconstraints.h" |
21 #include "third_party/webrtc/media/base/videocapturer.h" | 21 #include "third_party/webrtc/media/base/videocapturer.h" |
22 | 22 |
23 namespace remoting { | 23 namespace remoting { |
24 namespace protocol { | 24 namespace protocol { |
25 | 25 |
26 const char kStreamLabel[] = "screen_stream"; | 26 const char kStreamLabel[] = "screen_stream"; |
27 const char kVideoLabel[] = "screen_video"; | 27 const char kVideoLabel[] = "screen_video"; |
28 | 28 |
29 struct WebrtcVideoStream::FrameTimestamps { | 29 struct WebrtcVideoStream::FrameStats { |
30 // The following field is non-null only for one frame after each incoming | 30 // The following field is non-null only for one frame after each incoming |
31 // input event. | 31 // input event. |
32 InputEventTimestamps input_event_timestamps; | 32 InputEventTimestamps input_event_timestamps; |
33 | 33 |
34 base::TimeTicks capture_started_time; | 34 base::TimeTicks capture_started_time; |
35 base::TimeTicks capture_ended_time; | 35 base::TimeTicks capture_ended_time; |
36 base::TimeDelta capture_delay; | 36 base::TimeDelta capture_delay; |
37 base::TimeTicks encode_started_time; | 37 base::TimeTicks encode_started_time; |
38 base::TimeTicks encode_ended_time; | 38 base::TimeTicks encode_ended_time; |
| 39 |
| 40 uint32_t capturer_id = 0; |
39 }; | 41 }; |
40 | 42 |
41 struct WebrtcVideoStream::EncodedFrameWithTimestamps { | 43 struct WebrtcVideoStream::EncodedFrameWithStats { |
42 std::unique_ptr<WebrtcVideoEncoder::EncodedFrame> frame; | 44 std::unique_ptr<WebrtcVideoEncoder::EncodedFrame> frame; |
43 std::unique_ptr<FrameTimestamps> timestamps; | 45 std::unique_ptr<FrameStats> stats; |
44 }; | 46 }; |
45 | 47 |
46 WebrtcVideoStream::WebrtcVideoStream() | 48 WebrtcVideoStream::WebrtcVideoStream() |
47 : video_stats_dispatcher_(kStreamLabel), weak_factory_(this) {} | 49 : video_stats_dispatcher_(kStreamLabel), weak_factory_(this) {} |
48 | 50 |
49 WebrtcVideoStream::~WebrtcVideoStream() { | 51 WebrtcVideoStream::~WebrtcVideoStream() { |
50 if (stream_) { | 52 if (stream_) { |
51 for (const auto& track : stream_->GetVideoTracks()) { | 53 for (const auto& track : stream_->GetVideoTracks()) { |
52 stream_->RemoveTrack(track.get()); | 54 stream_->RemoveTrack(track.get()); |
53 } | 55 } |
(...skipping 78 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
132 void WebrtcVideoStream::SetObserver(Observer* observer) { | 134 void WebrtcVideoStream::SetObserver(Observer* observer) { |
133 DCHECK(thread_checker_.CalledOnValidThread()); | 135 DCHECK(thread_checker_.CalledOnValidThread()); |
134 observer_ = observer; | 136 observer_ = observer; |
135 } | 137 } |
136 | 138 |
137 void WebrtcVideoStream::OnCaptureResult( | 139 void WebrtcVideoStream::OnCaptureResult( |
138 webrtc::DesktopCapturer::Result result, | 140 webrtc::DesktopCapturer::Result result, |
139 std::unique_ptr<webrtc::DesktopFrame> frame) { | 141 std::unique_ptr<webrtc::DesktopFrame> frame) { |
140 DCHECK(thread_checker_.CalledOnValidThread()); | 142 DCHECK(thread_checker_.CalledOnValidThread()); |
141 | 143 |
142 captured_frame_timestamps_->capture_ended_time = base::TimeTicks::Now(); | 144 captured_frame_stats_->capture_ended_time = base::TimeTicks::Now(); |
143 captured_frame_timestamps_->capture_delay = | 145 captured_frame_stats_->capture_delay = |
144 base::TimeDelta::FromMilliseconds(frame ? frame->capture_time_ms() : 0); | 146 base::TimeDelta::FromMilliseconds(frame ? frame->capture_time_ms() : 0); |
145 | 147 |
146 WebrtcVideoEncoder::FrameParams frame_params; | 148 WebrtcVideoEncoder::FrameParams frame_params; |
147 if (!scheduler_->OnFrameCaptured(frame.get(), &frame_params)) { | 149 if (!scheduler_->OnFrameCaptured(frame.get(), &frame_params)) { |
148 return; | 150 return; |
149 } | 151 } |
150 | 152 |
151 // TODO(sergeyu): Handle ERROR_PERMANENT result here. | 153 // TODO(sergeyu): Handle ERROR_PERMANENT result here. |
152 if (frame) { | 154 if (frame) { |
153 webrtc::DesktopVector dpi = | 155 webrtc::DesktopVector dpi = |
154 frame->dpi().is_zero() ? webrtc::DesktopVector(kDefaultDpi, kDefaultDpi) | 156 frame->dpi().is_zero() ? webrtc::DesktopVector(kDefaultDpi, kDefaultDpi) |
155 : frame->dpi(); | 157 : frame->dpi(); |
156 | 158 |
157 if (!frame_size_.equals(frame->size()) || !frame_dpi_.equals(dpi)) { | 159 if (!frame_size_.equals(frame->size()) || !frame_dpi_.equals(dpi)) { |
158 frame_size_ = frame->size(); | 160 frame_size_ = frame->size(); |
159 frame_dpi_ = dpi; | 161 frame_dpi_ = dpi; |
160 if (observer_) | 162 if (observer_) |
161 observer_->OnVideoSizeChanged(this, frame_size_, frame_dpi_); | 163 observer_->OnVideoSizeChanged(this, frame_size_, frame_dpi_); |
162 } | 164 } |
163 } | 165 } |
164 | 166 |
165 base::PostTaskAndReplyWithResult( | 167 base::PostTaskAndReplyWithResult( |
166 encode_task_runner_.get(), FROM_HERE, | 168 encode_task_runner_.get(), FROM_HERE, |
167 base::Bind(&WebrtcVideoStream::EncodeFrame, encoder_.get(), | 169 base::Bind(&WebrtcVideoStream::EncodeFrame, encoder_.get(), |
168 base::Passed(std::move(frame)), frame_params, | 170 base::Passed(std::move(frame)), frame_params, |
169 base::Passed(std::move(captured_frame_timestamps_))), | 171 base::Passed(std::move(captured_frame_stats_))), |
170 base::Bind(&WebrtcVideoStream::OnFrameEncoded, | 172 base::Bind(&WebrtcVideoStream::OnFrameEncoded, |
171 weak_factory_.GetWeakPtr())); | 173 weak_factory_.GetWeakPtr())); |
172 } | 174 } |
173 | 175 |
174 void WebrtcVideoStream::OnChannelInitialized( | 176 void WebrtcVideoStream::OnChannelInitialized( |
175 ChannelDispatcherBase* channel_dispatcher) { | 177 ChannelDispatcherBase* channel_dispatcher) { |
176 DCHECK(&video_stats_dispatcher_ == channel_dispatcher); | 178 DCHECK(&video_stats_dispatcher_ == channel_dispatcher); |
177 } | 179 } |
178 void WebrtcVideoStream::OnChannelClosed( | 180 void WebrtcVideoStream::OnChannelClosed( |
179 ChannelDispatcherBase* channel_dispatcher) { | 181 ChannelDispatcherBase* channel_dispatcher) { |
180 DCHECK(&video_stats_dispatcher_ == channel_dispatcher); | 182 DCHECK(&video_stats_dispatcher_ == channel_dispatcher); |
181 LOG(WARNING) << "video_stats channel was closed."; | 183 LOG(WARNING) << "video_stats channel was closed."; |
182 } | 184 } |
183 | 185 |
184 void WebrtcVideoStream::CaptureNextFrame() { | 186 void WebrtcVideoStream::CaptureNextFrame() { |
185 DCHECK(thread_checker_.CalledOnValidThread()); | 187 DCHECK(thread_checker_.CalledOnValidThread()); |
186 | 188 |
187 captured_frame_timestamps_.reset(new FrameTimestamps()); | 189 captured_frame_stats_.reset(new FrameStats()); |
188 captured_frame_timestamps_->capture_started_time = base::TimeTicks::Now(); | 190 captured_frame_stats_->capture_started_time = base::TimeTicks::Now(); |
189 captured_frame_timestamps_->input_event_timestamps = | 191 captured_frame_stats_->input_event_timestamps = |
190 event_timestamps_source_->TakeLastEventTimestamps(); | 192 event_timestamps_source_->TakeLastEventTimestamps(); |
191 | 193 |
192 capturer_->CaptureFrame(); | 194 capturer_->CaptureFrame(); |
193 } | 195 } |
194 | 196 |
195 // static | 197 // static |
196 WebrtcVideoStream::EncodedFrameWithTimestamps WebrtcVideoStream::EncodeFrame( | 198 WebrtcVideoStream::EncodedFrameWithStats WebrtcVideoStream::EncodeFrame( |
197 WebrtcVideoEncoder* encoder, | 199 WebrtcVideoEncoder* encoder, |
198 std::unique_ptr<webrtc::DesktopFrame> frame, | 200 std::unique_ptr<webrtc::DesktopFrame> frame, |
199 WebrtcVideoEncoder::FrameParams params, | 201 WebrtcVideoEncoder::FrameParams params, |
200 std::unique_ptr<WebrtcVideoStream::FrameTimestamps> timestamps) { | 202 std::unique_ptr<WebrtcVideoStream::FrameStats> stats) { |
201 EncodedFrameWithTimestamps result; | 203 EncodedFrameWithStats result; |
202 result.timestamps = std::move(timestamps); | 204 result.stats = std::move(stats); |
203 result.timestamps->encode_started_time = base::TimeTicks::Now(); | 205 result.stats->encode_started_time = base::TimeTicks::Now(); |
204 result.frame = encoder->Encode(frame.get(), params); | 206 result.frame = encoder->Encode(frame.get(), params); |
205 result.timestamps->encode_ended_time = base::TimeTicks::Now(); | 207 result.stats->encode_ended_time = base::TimeTicks::Now(); |
| 208 if (frame) { |
| 209 result.stats->capturer_id = frame->capturer_id(); |
| 210 } |
206 return result; | 211 return result; |
207 } | 212 } |
208 | 213 |
209 void WebrtcVideoStream::OnFrameEncoded(EncodedFrameWithTimestamps frame) { | 214 void WebrtcVideoStream::OnFrameEncoded(EncodedFrameWithStats frame) { |
210 DCHECK(thread_checker_.CalledOnValidThread()); | 215 DCHECK(thread_checker_.CalledOnValidThread()); |
211 | 216 |
212 HostFrameStats stats; | 217 HostFrameStats stats; |
213 scheduler_->OnFrameEncoded(frame.frame.get(), &stats); | 218 scheduler_->OnFrameEncoded(frame.frame.get(), &stats); |
214 | 219 |
215 if (!frame.frame) { | 220 if (!frame.frame) { |
216 return; | 221 return; |
217 } | 222 } |
218 | 223 |
219 webrtc::EncodedImageCallback::Result result = | 224 webrtc::EncodedImageCallback::Result result = |
220 webrtc_transport_->video_encoder_factory()->SendEncodedFrame( | 225 webrtc_transport_->video_encoder_factory()->SendEncodedFrame( |
221 *frame.frame, frame.timestamps->capture_started_time); | 226 *frame.frame, frame.stats->capture_started_time); |
222 if (result.error != webrtc::EncodedImageCallback::Result::OK) { | 227 if (result.error != webrtc::EncodedImageCallback::Result::OK) { |
223 // TODO(sergeyu): Stop the stream. | 228 // TODO(sergeyu): Stop the stream. |
224 LOG(ERROR) << "Failed to send video frame."; | 229 LOG(ERROR) << "Failed to send video frame."; |
225 return; | 230 return; |
226 } | 231 } |
227 | 232 |
228 // Send FrameStats message. | 233 // Send FrameStats message. |
229 if (video_stats_dispatcher_.is_connected()) { | 234 if (video_stats_dispatcher_.is_connected()) { |
230 stats.frame_size = frame.frame ? frame.frame->data.size() : 0; | 235 stats.frame_size = frame.frame ? frame.frame->data.size() : 0; |
231 | 236 |
232 if (!frame.timestamps->input_event_timestamps.is_null()) { | 237 if (!frame.stats->input_event_timestamps.is_null()) { |
233 stats.capture_pending_delay = | 238 stats.capture_pending_delay = |
234 frame.timestamps->capture_started_time - | 239 frame.stats->capture_started_time - |
235 frame.timestamps->input_event_timestamps.host_timestamp; | 240 frame.stats->input_event_timestamps.host_timestamp; |
236 stats.latest_event_timestamp = | 241 stats.latest_event_timestamp = |
237 frame.timestamps->input_event_timestamps.client_timestamp; | 242 frame.stats->input_event_timestamps.client_timestamp; |
238 } | 243 } |
239 | 244 |
240 stats.capture_delay = frame.timestamps->capture_delay; | 245 stats.capture_delay = frame.stats->capture_delay; |
241 | 246 |
242 // Total overhead time for IPC and threading when capturing frames. | 247 // Total overhead time for IPC and threading when capturing frames. |
243 stats.capture_overhead_delay = (frame.timestamps->capture_ended_time - | 248 stats.capture_overhead_delay = |
244 frame.timestamps->capture_started_time) - | 249 (frame.stats->capture_ended_time - frame.stats->capture_started_time) - |
245 stats.capture_delay; | 250 stats.capture_delay; |
246 | 251 |
247 stats.encode_pending_delay = frame.timestamps->encode_started_time - | 252 stats.encode_pending_delay = |
248 frame.timestamps->capture_ended_time; | 253 frame.stats->encode_started_time - frame.stats->capture_ended_time; |
249 | 254 |
250 stats.encode_delay = frame.timestamps->encode_ended_time - | 255 stats.encode_delay = |
251 frame.timestamps->encode_started_time; | 256 frame.stats->encode_ended_time - frame.stats->encode_started_time; |
| 257 |
| 258 stats.capturer_id = frame.stats->capturer_id; |
252 | 259 |
253 video_stats_dispatcher_.OnVideoFrameStats(result.frame_id, stats); | 260 video_stats_dispatcher_.OnVideoFrameStats(result.frame_id, stats); |
254 } | 261 } |
255 } | 262 } |
256 | 263 |
257 } // namespace protocol | 264 } // namespace protocol |
258 } // namespace remoting | 265 } // namespace remoting |
OLD | NEW |