Chromium Code Reviews

Side by Side Diff: remoting/protocol/webrtc_video_stream.cc

Issue 2200273003: Enable video stats reporting when using WebRTC (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: TODO Created 4 years, 4 months ago
1 // Copyright 2015 The Chromium Authors. All rights reserved. 1 // Copyright 2015 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "remoting/protocol/webrtc_video_stream.h" 5 #include "remoting/protocol/webrtc_video_stream.h"
6 6
7 #include "base/logging.h" 7 #include "base/logging.h"
8 #include "base/single_thread_task_runner.h" 8 #include "base/single_thread_task_runner.h"
9 #include "base/task_runner_util.h" 9 #include "base/task_runner_util.h"
10 #include "base/threading/thread_task_runner_handle.h" 10 #include "base/threading/thread_task_runner_handle.h"
11 #include "remoting/base/constants.h" 11 #include "remoting/base/constants.h"
12 #include "remoting/proto/video.pb.h" 12 #include "remoting/proto/video.pb.h"
13 #include "remoting/protocol/frame_stats.h"
14 #include "remoting/protocol/host_video_stats_dispatcher.h"
13 #include "remoting/protocol/webrtc_dummy_video_capturer.h" 15 #include "remoting/protocol/webrtc_dummy_video_capturer.h"
14 #include "remoting/protocol/webrtc_transport.h" 16 #include "remoting/protocol/webrtc_transport.h"
15 #include "third_party/webrtc/api/mediastreaminterface.h" 17 #include "third_party/webrtc/api/mediastreaminterface.h"
16 #include "third_party/webrtc/api/peerconnectioninterface.h" 18 #include "third_party/webrtc/api/peerconnectioninterface.h"
17 #include "third_party/webrtc/api/test/fakeconstraints.h" 19 #include "third_party/webrtc/api/test/fakeconstraints.h"
18 #include "third_party/webrtc/media/base/videocapturer.h" 20 #include "third_party/webrtc/media/base/videocapturer.h"
19 21
20 namespace remoting { 22 namespace remoting {
21 namespace protocol { 23 namespace protocol {
22 24
23 namespace { 25 namespace {
24 26
25 // Task running on the encoder thread to encode the |frame|.
26 std::unique_ptr<VideoPacket> EncodeFrame(
27 VideoEncoder* encoder,
28 std::unique_ptr<webrtc::DesktopFrame> frame,
29 uint32_t target_bitrate_kbps,
30 bool key_frame_request,
31 int64_t capture_time_ms) {
32 uint32_t flags = 0;
33 if (key_frame_request)
34 flags |= VideoEncoder::REQUEST_KEY_FRAME;
35
36 base::TimeTicks current = base::TimeTicks::Now();
37 encoder->UpdateTargetBitrate(target_bitrate_kbps);
38 std::unique_ptr<VideoPacket> packet = encoder->Encode(*frame, flags);
39 if (!packet)
40 return nullptr;
41 // TODO(isheriff): Note that while VideoPacket capture time is supposed
42 // to be capture duration, we (ab)use it for capture timestamp here. This
43 // will go away when we move away from VideoPacket.
44 packet->set_capture_time_ms(capture_time_ms);
45
46 VLOG(1) << "Encode duration "
47 << (base::TimeTicks::Now() - current).InMilliseconds()
48 << " payload size " << packet->data().size();
49 return packet;
50 }
51
52 void PostTaskOnTaskRunner( 27 void PostTaskOnTaskRunner(
53 scoped_refptr<base::SingleThreadTaskRunner> task_runner, 28 scoped_refptr<base::SingleThreadTaskRunner> task_runner,
54 const base::Closure& task) { 29 const base::Closure& task) {
55 task_runner->PostTask(FROM_HERE, task); 30 task_runner->PostTask(FROM_HERE, task);
56 } 31 }
57 32
58 template <typename ParamType> 33 template <typename ParamType>
59 void PostTaskOnTaskRunnerWithParam( 34 void PostTaskOnTaskRunnerWithParam(
60 scoped_refptr<base::SingleThreadTaskRunner> task_runner, 35 scoped_refptr<base::SingleThreadTaskRunner> task_runner,
61 const base::Callback<void(ParamType param)>& task, 36 const base::Callback<void(ParamType param)>& task,
62 ParamType param) { 37 ParamType param) {
63 task_runner->PostTask(FROM_HERE, base::Bind(task, param)); 38 task_runner->PostTask(FROM_HERE, base::Bind(task, param));
64 } 39 }
65 40
66 } // namespace 41 } // namespace
67 42
68 const char kStreamLabel[] = "screen_stream"; 43 const char kStreamLabel[] = "screen_stream";
69 const char kVideoLabel[] = "screen_video"; 44 const char kVideoLabel[] = "screen_video";
70 45
46 struct WebrtcVideoStream::FrameTimestamps {
47 // The following two fields are set only for one frame after each incoming
48 // input event. |input_event_client_timestamp| is the event timestamp
49 // received from the client. |input_event_received_time| is the local time when
50 // the event was received.
51 int64_t input_event_client_timestamp = -1;
52 base::TimeTicks input_event_received_time;
53
54 base::TimeTicks capture_started_time;
55 base::TimeTicks capture_ended_time;
56 base::TimeDelta capture_delay;
57 base::TimeTicks encode_started_time;
58 base::TimeTicks encode_ended_time;
59 base::TimeTicks can_send_time;
Irfan 2016/08/09 17:00:37 what is this for?
Sergey Ulanov 2016/08/10 18:07:44 It wasn't used, removed now.
60 };
61
62 struct WebrtcVideoStream::PacketWithTimestamps {
63 std::unique_ptr<VideoPacket> packet;
64 std::unique_ptr<FrameTimestamps> timestamps;
Irfan 2016/08/09 17:00:37 It would be nice to have consistency in the naming…
Sergey Ulanov 2016/08/10 18:07:45 packet = encoded frame. Calling it VideoFrame would…
65 };
66
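(For readers following the threads above and below about what these timestamps are for: a minimal sketch, assuming only base/time/time.h and the HostFrameStats struct from remoting/protocol/frame_stats.h, of how the fields recorded here become the per-frame delays that OnFrameEncoded() reports. FrameTimestampsSketch and FillDelaysSketch are hypothetical names used only for this illustration; they are not part of the CL.)

#include "base/time/time.h"
#include "remoting/protocol/frame_stats.h"

// Mirrors WebrtcVideoStream::FrameTimestamps. Fields are filled in as a frame
// moves through the pipeline:
//   CaptureNextFrame()  -> capture_started_time (plus the input_event_* fields,
//                          latched from the last OnInputEventReceived() call)
//   OnCaptureResult()   -> capture_ended_time, capture_delay
//   EncodeFrame()       -> encode_started_time, encode_ended_time
struct FrameTimestampsSketch {
  base::TimeTicks input_event_received_time;  // null if no input event arrived
  base::TimeTicks capture_started_time;
  base::TimeTicks capture_ended_time;
  base::TimeDelta capture_delay;              // from frame->capture_time_ms()
  base::TimeTicks encode_started_time;
  base::TimeTicks encode_ended_time;
};

// Same arithmetic as OnFrameEncoded() below.
void FillDelaysSketch(const FrameTimestampsSketch& t,
                      remoting::protocol::HostFrameStats* stats) {
  if (!t.input_event_received_time.is_null()) {
    // Input event received -> capture started.
    stats->capture_pending_delay =
        t.capture_started_time - t.input_event_received_time;
  }
  stats->capture_delay = t.capture_delay;
  // Wall-clock time spent around the capture, minus the capturer's own
  // reported capture time.
  stats->capture_overhead_delay =
      (t.capture_ended_time - t.capture_started_time) - t.capture_delay;
  stats->encode_pending_delay = t.encode_started_time - t.capture_ended_time;
  stats->encode_delay = t.encode_ended_time - t.encode_started_time;
  // send_pending_delay cannot be measured yet (see the TODO in OnFrameEncoded()).
  stats->send_pending_delay = base::TimeDelta();
}
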
71 WebrtcVideoStream::WebrtcVideoStream() 67 WebrtcVideoStream::WebrtcVideoStream()
72 : main_task_runner_(base::ThreadTaskRunnerHandle::Get()), 68 : video_stats_dispatcher_(kStreamLabel), weak_factory_(this) {}
73 weak_factory_(this) {}
74 69
75 WebrtcVideoStream::~WebrtcVideoStream() { 70 WebrtcVideoStream::~WebrtcVideoStream() {
76 if (stream_) { 71 if (stream_) {
77 for (const auto& track : stream_->GetVideoTracks()) { 72 for (const auto& track : stream_->GetVideoTracks()) {
78 track->GetSource()->Stop(); 73 track->GetSource()->Stop();
79 stream_->RemoveTrack(track.get()); 74 stream_->RemoveTrack(track.get());
80 } 75 }
81 peer_connection_->RemoveStream(stream_.get()); 76 peer_connection_->RemoveStream(stream_.get());
82 } 77 }
83 encode_task_runner_->DeleteSoon(FROM_HERE, encoder_.release()); 78 encode_task_runner_->DeleteSoon(FROM_HERE, encoder_.release());
(...skipping 13 matching lines...)
97 scoped_refptr<webrtc::PeerConnectionFactoryInterface> peer_connection_factory( 92 scoped_refptr<webrtc::PeerConnectionFactoryInterface> peer_connection_factory(
98 webrtc_transport->peer_connection_factory()); 93 webrtc_transport->peer_connection_factory());
99 peer_connection_ = webrtc_transport->peer_connection(); 94 peer_connection_ = webrtc_transport->peer_connection();
100 DCHECK(peer_connection_factory); 95 DCHECK(peer_connection_factory);
101 DCHECK(peer_connection_); 96 DCHECK(peer_connection_);
102 97
103 encode_task_runner_ = encode_task_runner; 98 encode_task_runner_ = encode_task_runner;
104 capturer_ = std::move(desktop_capturer); 99 capturer_ = std::move(desktop_capturer);
105 webrtc_transport_ = webrtc_transport; 100 webrtc_transport_ = webrtc_transport;
106 encoder_ = std::move(video_encoder); 101 encoder_ = std::move(video_encoder);
107 capture_timer_.reset(new base::RepeatingTimer());
108
109 capturer_->Start(this); 102 capturer_->Start(this);
110 103
111 // Set video stream constraints. 104 // Set video stream constraints.
112 webrtc::FakeConstraints video_constraints; 105 webrtc::FakeConstraints video_constraints;
113 video_constraints.AddMandatory( 106 video_constraints.AddMandatory(
114 webrtc::MediaConstraintsInterface::kMinFrameRate, 5); 107 webrtc::MediaConstraintsInterface::kMinFrameRate, 5);
115 108
116 rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> src = 109 rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> src =
117 peer_connection_factory->CreateVideoSource(new WebrtcDummyVideoCapturer(), 110 peer_connection_factory->CreateVideoSource(new WebrtcDummyVideoCapturer(),
118 &video_constraints); 111 &video_constraints);
119 rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track = 112 rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track =
120 peer_connection_factory->CreateVideoTrack(kVideoLabel, src); 113 peer_connection_factory->CreateVideoTrack(kVideoLabel, src);
121 114
122 stream_ = peer_connection_factory->CreateLocalMediaStream(kStreamLabel); 115 stream_ = peer_connection_factory->CreateLocalMediaStream(kStreamLabel);
123 116
124 if (!stream_->AddTrack(video_track.get()) || 117 if (!stream_->AddTrack(video_track.get()) ||
125 !peer_connection_->AddStream(stream_.get())) { 118 !peer_connection_->AddStream(stream_.get())) {
126 stream_ = nullptr; 119 stream_ = nullptr;
127 peer_connection_ = nullptr; 120 peer_connection_ = nullptr;
128 return false; 121 return false;
129 } 122 }
130 123
131 // Register for PLI requests. 124 // Register for PLI requests.
132 webrtc_transport_->video_encoder_factory()->SetKeyFrameRequestCallback( 125 webrtc_transport_->video_encoder_factory()->SetKeyFrameRequestCallback(
133 base::Bind(&PostTaskOnTaskRunner, main_task_runner_, 126 base::Bind(&PostTaskOnTaskRunner, base::ThreadTaskRunnerHandle::Get(),
134 base::Bind(&WebrtcVideoStream::SetKeyFrameRequest, 127 base::Bind(&WebrtcVideoStream::SetKeyFrameRequest,
135 weak_factory_.GetWeakPtr()))); 128 weak_factory_.GetWeakPtr())));
136 129
137 // Register for target bitrate notifications. 130 // Register for target bitrate notifications.
138 webrtc_transport_->video_encoder_factory()->SetTargetBitrateCallback( 131 webrtc_transport_->video_encoder_factory()->SetTargetBitrateCallback(
139 base::Bind(&PostTaskOnTaskRunnerWithParam<int>, main_task_runner_, 132 base::Bind(&PostTaskOnTaskRunnerWithParam<int>,
133 base::ThreadTaskRunnerHandle::Get(),
140 base::Bind(&WebrtcVideoStream::SetTargetBitrate, 134 base::Bind(&WebrtcVideoStream::SetTargetBitrate,
141 weak_factory_.GetWeakPtr()))); 135 weak_factory_.GetWeakPtr())));
142 136
137 video_stats_dispatcher_.Init(webrtc_transport_->CreateOutgoingChannel(
138 video_stats_dispatcher_.channel_name()),
139 this);
143 return true; 140 return true;
144 } 141 }
145 142
146 void WebrtcVideoStream::Pause(bool pause) { 143 void WebrtcVideoStream::Pause(bool pause) {
147 DCHECK(thread_checker_.CalledOnValidThread()); 144 DCHECK(thread_checker_.CalledOnValidThread());
148 if (pause) { 145 if (pause) {
149 capture_timer_->Stop(); 146 capture_timer_.Stop();
150 } else { 147 } else {
151 if (received_first_frame_request_) { 148 if (received_first_frame_request_) {
152 StartCaptureTimer(); 149 StartCaptureTimer();
153 } 150 }
154 } 151 }
155 } 152 }
156 153
157 void WebrtcVideoStream::OnInputEventReceived(int64_t event_timestamp) { 154 void WebrtcVideoStream::OnInputEventReceived(int64_t event_timestamp) {
158 NOTIMPLEMENTED(); 155 DCHECK(thread_checker_.CalledOnValidThread());
156
157 if (!next_frame_timestamps_)
158 next_frame_timestamps_.reset(new FrameTimestamps());
159 next_frame_timestamps_->input_event_client_timestamp = event_timestamp;
160 next_frame_timestamps_->input_event_received_time = base::TimeTicks::Now();
159 } 161 }
160 162
161 void WebrtcVideoStream::SetLosslessEncode(bool want_lossless) { 163 void WebrtcVideoStream::SetLosslessEncode(bool want_lossless) {
162 NOTIMPLEMENTED(); 164 NOTIMPLEMENTED();
163 } 165 }
164 166
165 void WebrtcVideoStream::SetLosslessColor(bool want_lossless) { 167 void WebrtcVideoStream::SetLosslessColor(bool want_lossless) {
166 NOTIMPLEMENTED(); 168 NOTIMPLEMENTED();
167 } 169 }
168 170
169 void WebrtcVideoStream::SetObserver(Observer* observer) { 171 void WebrtcVideoStream::SetObserver(Observer* observer) {
170 DCHECK(thread_checker_.CalledOnValidThread()); 172 DCHECK(thread_checker_.CalledOnValidThread());
171 observer_ = observer; 173 observer_ = observer;
172 } 174 }
173 175
174 void WebrtcVideoStream::SetKeyFrameRequest() { 176 void WebrtcVideoStream::SetKeyFrameRequest() {
175 DCHECK(thread_checker_.CalledOnValidThread()); 177 DCHECK(thread_checker_.CalledOnValidThread());
176 178
177 key_frame_request_ = true; 179 key_frame_request_ = true;
178 if (!received_first_frame_request_) { 180 if (!received_first_frame_request_) {
179 received_first_frame_request_ = true; 181 received_first_frame_request_ = true;
180 StartCaptureTimer(); 182 StartCaptureTimer();
181 main_task_runner_->PostTask( 183 base::ThreadTaskRunnerHandle::Get()->PostTask(
182 FROM_HERE, base::Bind(&WebrtcVideoStream::StartCaptureTimer, 184 FROM_HERE, base::Bind(&WebrtcVideoStream::StartCaptureTimer,
183 weak_factory_.GetWeakPtr())); 185 weak_factory_.GetWeakPtr()));
184 } 186 }
185 } 187 }
186 188
187 void WebrtcVideoStream::StartCaptureTimer() { 189 void WebrtcVideoStream::StartCaptureTimer() {
188 DCHECK(thread_checker_.CalledOnValidThread()); 190 DCHECK(thread_checker_.CalledOnValidThread());
189 capture_timer_->Start(FROM_HERE, base::TimeDelta::FromSeconds(1) / 30, this, 191 capture_timer_.Start(FROM_HERE, base::TimeDelta::FromSeconds(1) / 30, this,
190 &WebrtcVideoStream::CaptureNextFrame); 192 &WebrtcVideoStream::CaptureNextFrame);
191 } 193 }
192 194
193 void WebrtcVideoStream::SetTargetBitrate(int target_bitrate_kbps) { 195 void WebrtcVideoStream::SetTargetBitrate(int target_bitrate_kbps) {
194 DCHECK(thread_checker_.CalledOnValidThread()); 196 DCHECK(thread_checker_.CalledOnValidThread());
195 197
196 VLOG(1) << "Set Target bitrate " << target_bitrate_kbps; 198 VLOG(1) << "Set Target bitrate " << target_bitrate_kbps;
197 target_bitrate_kbps_ = target_bitrate_kbps; 199 target_bitrate_kbps_ = target_bitrate_kbps;
198 } 200 }
199 201
200 bool WebrtcVideoStream::ClearAndGetKeyFrameRequest() { 202 bool WebrtcVideoStream::ClearAndGetKeyFrameRequest() {
201 DCHECK(thread_checker_.CalledOnValidThread()); 203 DCHECK(thread_checker_.CalledOnValidThread());
202 204
203 bool key_frame_request = key_frame_request_; 205 bool key_frame_request = key_frame_request_;
204 key_frame_request_ = false; 206 key_frame_request_ = false;
205 return key_frame_request; 207 return key_frame_request;
206 } 208 }
207 209
208 void WebrtcVideoStream::OnCaptureResult( 210 void WebrtcVideoStream::OnCaptureResult(
209 webrtc::DesktopCapturer::Result result, 211 webrtc::DesktopCapturer::Result result,
210 std::unique_ptr<webrtc::DesktopFrame> frame) { 212 std::unique_ptr<webrtc::DesktopFrame> frame) {
211 DCHECK(thread_checker_.CalledOnValidThread()); 213 DCHECK(thread_checker_.CalledOnValidThread());
214 DCHECK(capture_pending_);
215 capture_pending_ = false;
212 216
213 base::TimeTicks captured_ticks = base::TimeTicks::Now();
214 int64_t capture_timestamp_ms =
215 (captured_ticks - base::TimeTicks()).InMilliseconds();
216 capture_pending_ = false;
217 217
218 if (encode_pending_) { 218 if (encode_pending_) {
219 // TODO(isheriff): consider queuing here 219 // TODO(isheriff): consider queuing here
220 VLOG(1) << "Dropping captured frame since encoder is still busy"; 220 VLOG(1) << "Dropping captured frame since encoder is still busy";
221 return; 221 return;
222 } 222 }
223 223
224 // TODO(sergeyu): Handle ERROR_PERMANENT result here. 224 // TODO(sergeyu): Handle ERROR_PERMANENT result here.
225 225
226 webrtc::DesktopVector dpi = 226 webrtc::DesktopVector dpi =
227 frame->dpi().is_zero() ? webrtc::DesktopVector(kDefaultDpi, kDefaultDpi) 227 frame->dpi().is_zero() ? webrtc::DesktopVector(kDefaultDpi, kDefaultDpi)
228 : frame->dpi(); 228 : frame->dpi();
229 229
230 if (!frame_size_.equals(frame->size()) || !frame_dpi_.equals(dpi)) { 230 if (!frame_size_.equals(frame->size()) || !frame_dpi_.equals(dpi)) {
231 frame_size_ = frame->size(); 231 frame_size_ = frame->size();
232 frame_dpi_ = dpi; 232 frame_dpi_ = dpi;
233 if (observer_) 233 if (observer_)
234 observer_->OnVideoSizeChanged(this, frame_size_, frame_dpi_); 234 observer_->OnVideoSizeChanged(this, frame_size_, frame_dpi_);
235 } 235 }
236
237 captured_frame_timestamps_->capture_ended_time = base::TimeTicks::Now();
238 captured_frame_timestamps_->capture_delay =
239 base::TimeDelta::FromMilliseconds(frame->capture_time_ms());
240
236 encode_pending_ = true; 241 encode_pending_ = true;
237 base::PostTaskAndReplyWithResult( 242 base::PostTaskAndReplyWithResult(
238 encode_task_runner_.get(), FROM_HERE, 243 encode_task_runner_.get(), FROM_HERE,
239 base::Bind(&EncodeFrame, encoder_.get(), base::Passed(std::move(frame)), 244 base::Bind(&WebrtcVideoStream::EncodeFrame, encoder_.get(),
240 target_bitrate_kbps_, ClearAndGetKeyFrameRequest(), 245 base::Passed(std::move(frame)),
241 capture_timestamp_ms), 246 base::Passed(std::move(captured_frame_timestamps_)),
247 target_bitrate_kbps_, ClearAndGetKeyFrameRequest()),
242 base::Bind(&WebrtcVideoStream::OnFrameEncoded, 248 base::Bind(&WebrtcVideoStream::OnFrameEncoded,
243 weak_factory_.GetWeakPtr())); 249 weak_factory_.GetWeakPtr()));
244 } 250 }
245 251
252 void WebrtcVideoStream::OnChannelInitialized(
253 ChannelDispatcherBase* channel_dispatcher) {
254 DCHECK(&video_stats_dispatcher_ == channel_dispatcher);
Irfan 2016/08/09 17:00:37 Are these callbacks helpful? It sounds like we al…
Sergey Ulanov 2016/08/10 18:07:45 OnChannelClosed() is useful if only to log the warning…
255 }
256 void WebrtcVideoStream::OnChannelClosed(
257 ChannelDispatcherBase* channel_dispatcher) {
258 DCHECK(&video_stats_dispatcher_ == channel_dispatcher);
259 LOG(WARNING) << "video_stats channel was closed.";
260 }
261
246 void WebrtcVideoStream::CaptureNextFrame() { 262 void WebrtcVideoStream::CaptureNextFrame() {
247 DCHECK(thread_checker_.CalledOnValidThread()); 263 DCHECK(thread_checker_.CalledOnValidThread());
248 264
249 if (capture_pending_ || encode_pending_) { 265 if (capture_pending_ || encode_pending_) {
250 VLOG(1) << "Capture/encode still pending.."; 266 VLOG(1) << "Capture/encode still pending..";
251 return; 267 return;
252 } 268 }
253 269
270 base::TimeTicks now = base::TimeTicks::Now();
271
254 capture_pending_ = true; 272 capture_pending_ = true;
255 VLOG(1) << "Capture next frame after " 273 VLOG(1) << "Capture next frame after "
256 << (base::TimeTicks::Now() - last_capture_started_ticks_) 274 << (base::TimeTicks::Now() - last_capture_started_ticks_)
257 .InMilliseconds(); 275 .InMilliseconds();
258 last_capture_started_ticks_ = base::TimeTicks::Now(); 276 last_capture_started_ticks_ = now;
277
278
279 // |next_frame_timestamps_| is not set if no input events were received since
280 // the previous frame. In that case, create a FrameTimestamps instance without
281 // setting |input_event_client_timestamp| and |input_event_received_time|.
282 if (!next_frame_timestamps_)
283 next_frame_timestamps_.reset(new FrameTimestamps());
284
285 captured_frame_timestamps_ = std::move(next_frame_timestamps_);
286 captured_frame_timestamps_->capture_started_time = now;
287
259 capturer_->Capture(webrtc::DesktopRegion()); 288 capturer_->Capture(webrtc::DesktopRegion());
260 } 289 }
261 290
262 void WebrtcVideoStream::OnFrameEncoded(std::unique_ptr<VideoPacket> packet) { 291 // static
292 WebrtcVideoStream::PacketWithTimestamps WebrtcVideoStream::EncodeFrame(
293 VideoEncoder* encoder,
294 std::unique_ptr<webrtc::DesktopFrame> frame,
295 std::unique_ptr<WebrtcVideoStream::FrameTimestamps> timestamps,
296 uint32_t target_bitrate_kbps,
297 bool key_frame_request) {
298 PacketWithTimestamps result;
299 result.timestamps = std::move(timestamps);
300 result.timestamps->encode_started_time = base::TimeTicks::Now();
301
302 encoder->UpdateTargetBitrate(target_bitrate_kbps);
303 result.packet = encoder->Encode(
304 *frame, key_frame_request ? VideoEncoder::REQUEST_KEY_FRAME : 0);
305
306 result.timestamps->encode_ended_time = base::TimeTicks::Now();
307
308 return result;
309 }
310
311 void WebrtcVideoStream::OnFrameEncoded(PacketWithTimestamps packet) {
263 DCHECK(thread_checker_.CalledOnValidThread()); 312 DCHECK(thread_checker_.CalledOnValidThread());
264 313
265 encode_pending_ = false; 314 encode_pending_ = false;
266 if (!packet) 315
267 return; 316 size_t packet_size = packet.packet ? packet.packet->data().size() : 0;
268 base::TimeTicks current = base::TimeTicks::Now(); 317
269 float encoded_bits = packet->data().size() * 8.0; 318 // Generate HostFrameStats.
319 HostFrameStats stats;
320 stats.frame_size = packet_size;
321 stats.latest_event_timestamp = base::TimeTicks::FromInternalValue(
322 packet.timestamps->input_event_client_timestamp);
323
324 if (!packet.timestamps->input_event_received_time.is_null()) {
325 stats.capture_pending_delay = packet.timestamps->capture_started_time -
Irfan 2016/08/09 17:00:37 update capture_pending_delay when there is no input…
Sergey Ulanov 2016/08/10 18:07:45 capture_pending_delay is defined as the delay between…
326 packet.timestamps->input_event_received_time;
327 stats.latest_event_timestamp = base::TimeTicks::FromInternalValue(
Irfan 2016/08/09 17:00:37 This is duplicated above
Sergey Ulanov 2016/08/10 18:07:45 Removed it from above.
328 packet.timestamps->input_event_client_timestamp);
329 }
330
331 stats.capture_delay = packet.timestamps->capture_delay;
332 stats.capture_overhead_delay = (packet.timestamps->capture_ended_time -
Irfan 2016/08/09 17:00:37 Will be helpful to comment what capture_overhead_delay…
Sergey Ulanov 2016/08/10 18:07:45 Done.
333 packet.timestamps->capture_started_time) -
334 stats.capture_delay;
335
336 stats.encode_pending_delay = packet.timestamps->encode_started_time -
337 packet.timestamps->capture_ended_time;
338
339 stats.encode_delay = packet.timestamps->encode_ended_time -
340 packet.timestamps->encode_started_time;
341
342 // TODO(sergeyu): Figure out how to measure send_pending time with WebRTC and
343 // set it here.
344 stats.send_pending_delay = base::TimeDelta();
Irfan 2016/08/09 17:00:37 what is send_pending_delay? How long webrtc waits…
Sergey Ulanov 2016/08/10 18:07:45 It may wait if there are other frames in the queue.
345
346 uint32_t frame_id = 0;
347 if (packet.packet) {
348 // Send the frame itself.
349 webrtc::EncodedImageCallback::Result result =
350 webrtc_transport_->video_encoder_factory()->SendEncodedFrame(
351 std::move(packet.packet), packet.timestamps->capture_started_time);
352 if (result.error != webrtc::EncodedImageCallback::Result::OK) {
353 // TODO(sergeyu): Stop the stream.
354 LOG(ERROR) << "Failed to send video frame.";
355 return;
356 }
357 frame_id = result.frame_id;
358 }
359
360 // Send FrameStats message.
361 if (video_stats_dispatcher_.is_connected())
362 video_stats_dispatcher_.OnVideoFrameStats(frame_id, stats);
270 363
271 // Simplistic adaptation of frame polling in the range 5 FPS to 30 FPS. 364 // Simplistic adaptation of frame polling in the range 5 FPS to 30 FPS.
365 // TODO(sergeyu): Move this logic to a separate class.
366 float encoded_bits = packet_size * 8.0;
272 uint32_t next_sched_ms = std::max( 367 uint32_t next_sched_ms = std::max(
273 33, std::min(static_cast<int>(encoded_bits / target_bitrate_kbps_), 200)); 368 33, std::min(static_cast<int>(encoded_bits / target_bitrate_kbps_), 200));
274 if (webrtc_transport_->video_encoder_factory()->SendEncodedFrame( 369 capture_timer_.Start(FROM_HERE,
275 std::move(packet)) >= 0) { 370 base::TimeDelta::FromMilliseconds(next_sched_ms), this,
276 VLOG(1) << "Send duration " 371 &WebrtcVideoStream::CaptureNextFrame);
277 << (base::TimeTicks::Now() - current).InMilliseconds()
278 << ", next sched " << next_sched_ms;
279 } else {
280 LOG(ERROR) << "SendEncodedFrame() failed";
281 }
282 capture_timer_->Start(FROM_HERE,
283 base::TimeDelta::FromMilliseconds(next_sched_ms), this,
284 &WebrtcVideoStream::CaptureNextFrame);
285 } 372 }
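(A quick sanity check of the "5 FPS to 30 FPS" comment above: encoded_bits / target_bitrate_kbps_ divides bits by bits-per-millisecond, i.e. it is roughly the time needed to transmit the encoded frame at the target bitrate, clamped to [33, 200] ms. A minimal standalone sketch of the same arithmetic; NextScheduleMs is a hypothetical name used only for this illustration and is not part of the CL.)

#include <algorithm>
#include <cstddef>
#include <cstdint>

// bits / kbps == milliseconds, so this is "time to send the encoded frame at
// the target bitrate", clamped to 33 ms (~30 FPS) ... 200 ms (5 FPS).
uint32_t NextScheduleMs(size_t packet_size_bytes, int target_bitrate_kbps) {
  float encoded_bits = packet_size_bytes * 8.0f;
  return std::max(
      33, std::min(static_cast<int>(encoded_bits / target_bitrate_kbps), 200));
}

// Example: NextScheduleMs(25000, 2000) == 100 (25 KB frame at 2 Mbps),
//          NextScheduleMs(1000, 2000)  == 33  (small frame, capped at ~30 FPS),
//          NextScheduleMs(50000, 1000) == 200 (large frame / low bitrate, 5 FPS).
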
286 373
287 } // namespace protocol 374 } // namespace protocol
288 } // namespace remoting 375 } // namespace remoting