Chromium Code Reviews

Unified Diff: remoting/protocol/webrtc_video_stream.cc

Issue 2616213002: Fix WebrtcVideoStream to handle failed capture requests. (Closed)
Patch Set: . Created 3 years, 11 months ago
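For orientation: the "failed capture request" this change handles is the case where the webrtc::DesktopCapturer invokes its callback with an error result and a null frame, so OnCaptureResult() can no longer assume |frame| is valid. A minimal sketch of that calling convention follows; FakeCapturer and GrabFrame() are purely illustrative names, not part of this CL or of any real capturer implementation:

  // Sketch only: how a DesktopCapturer implementation reports success vs.
  // failure. On failure |frame| is null, which is exactly the case the
  // OnCaptureResult() implementation in this diff must tolerate.
  void FakeCapturer::CaptureFrame() {
    std::unique_ptr<webrtc::DesktopFrame> frame = GrabFrame();  // may fail
    if (!frame) {
      callback_->OnCaptureResult(
          webrtc::DesktopCapturer::Result::ERROR_TEMPORARY, nullptr);
      return;
    }
    callback_->OnCaptureResult(webrtc::DesktopCapturer::Result::SUCCESS,
                               std::move(frame));
  }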
 // Copyright 2015 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #include "remoting/protocol/webrtc_video_stream.h"
 
 #include "base/logging.h"
 #include "base/single_thread_task_runner.h"
 #include "base/task_runner_util.h"
 #include "base/threading/thread_task_runner_handle.h"
(...skipping 121 matching lines...)
 void WebrtcVideoStream::SetObserver(Observer* observer) {
   DCHECK(thread_checker_.CalledOnValidThread());
   observer_ = observer;
 }
 
 void WebrtcVideoStream::OnCaptureResult(
     webrtc::DesktopCapturer::Result result,
     std::unique_ptr<webrtc::DesktopFrame> frame) {
   DCHECK(thread_checker_.CalledOnValidThread());
 
-  // TODO(sergeyu): Handle ERROR_PERMANENT result here.
-
-  webrtc::DesktopVector dpi =
-      frame->dpi().is_zero() ? webrtc::DesktopVector(kDefaultDpi, kDefaultDpi)
-                             : frame->dpi();
-
-  if (!frame_size_.equals(frame->size()) || !frame_dpi_.equals(dpi)) {
-    frame_size_ = frame->size();
-    frame_dpi_ = dpi;
-    if (observer_)
-      observer_->OnVideoSizeChanged(this, frame_size_, frame_dpi_);
-  }
-
-  captured_frame_timestamps_->capture_ended_time = base::TimeTicks::Now();
-  captured_frame_timestamps_->capture_delay =
-      base::TimeDelta::FromMilliseconds(frame->capture_time_ms());
-
-  WebrtcVideoEncoder::FrameParams frame_params;
-  if (!scheduler_->GetEncoderFrameParams(*frame, &frame_params))
-    return;
+  captured_frame_timestamps_->capture_ended_time = base::TimeTicks::Now();
+  captured_frame_timestamps_->capture_delay =
+      base::TimeDelta::FromMilliseconds(frame ? frame->capture_time_ms() : 0);
+
+  WebrtcVideoEncoder::FrameParams frame_params;
+  if (!scheduler_->OnFrameCaptured(frame.get(), &frame_params)) {
+    return;
+  }
+
+  // TODO(sergeyu): Handle ERROR_PERMANENT result here.
+  if (frame) {
+    webrtc::DesktopVector dpi =
+        frame->dpi().is_zero() ? webrtc::DesktopVector(kDefaultDpi, kDefaultDpi)
+                               : frame->dpi();
+
+    if (!frame_size_.equals(frame->size()) || !frame_dpi_.equals(dpi)) {
+      frame_size_ = frame->size();
+      frame_dpi_ = dpi;
+      if (observer_)
+        observer_->OnVideoSizeChanged(this, frame_size_, frame_dpi_);
+    }
+  } else {
+    // Save event timestamps to be used for the next frame.
+    next_frame_input_event_timestamps_ =
+        captured_frame_timestamps_->input_event_timestamps;
+    captured_frame_timestamps_->input_event_timestamps = InputEventTimestamps();
+  }
 
   base::PostTaskAndReplyWithResult(
       encode_task_runner_.get(), FROM_HERE,
       base::Bind(&WebrtcVideoStream::EncodeFrame, encoder_.get(),
                  base::Passed(std::move(frame)), frame_params,
                  base::Passed(std::move(captured_frame_timestamps_))),
       base::Bind(&WebrtcVideoStream::OnFrameEncoded,
                  weak_factory_.GetWeakPtr()));
 }
 
 void WebrtcVideoStream::OnChannelInitialized(
     ChannelDispatcherBase* channel_dispatcher) {
   DCHECK(&video_stats_dispatcher_ == channel_dispatcher);
 }
 void WebrtcVideoStream::OnChannelClosed(
     ChannelDispatcherBase* channel_dispatcher) {
   DCHECK(&video_stats_dispatcher_ == channel_dispatcher);
   LOG(WARNING) << "video_stats channel was closed.";
 }
 
 void WebrtcVideoStream::CaptureNextFrame() {
   DCHECK(thread_checker_.CalledOnValidThread());
 
   captured_frame_timestamps_.reset(new FrameTimestamps());
   captured_frame_timestamps_->capture_started_time = base::TimeTicks::Now();
 
-  if (event_timestamps_source_) {
+  if (!next_frame_input_event_timestamps_.is_null()) {
+    captured_frame_timestamps_->input_event_timestamps =
+        next_frame_input_event_timestamps_;
+    next_frame_input_event_timestamps_ = InputEventTimestamps();
+  } else if (event_timestamps_source_) {
     captured_frame_timestamps_->input_event_timestamps =
         event_timestamps_source_->TakeLastEventTimestamps();
   }
 
   capturer_->CaptureFrame();
 }
 
 // static
 WebrtcVideoStream::EncodedFrameWithTimestamps WebrtcVideoStream::EncodeFrame(
     WebrtcVideoEncoder* encoder,
     std::unique_ptr<webrtc::DesktopFrame> frame,
     WebrtcVideoEncoder::FrameParams params,
     std::unique_ptr<WebrtcVideoStream::FrameTimestamps> timestamps) {
   EncodedFrameWithTimestamps result;
   result.timestamps = std::move(timestamps);
   result.timestamps->encode_started_time = base::TimeTicks::Now();
-  result.frame = encoder->Encode(*frame, params);
+  result.frame = encoder->Encode(frame.get(), params);
   result.timestamps->encode_ended_time = base::TimeTicks::Now();
   return result;
 }
 
 void WebrtcVideoStream::OnFrameEncoded(EncodedFrameWithTimestamps frame) {
   DCHECK(thread_checker_.CalledOnValidThread());
 
+  HostFrameStats stats;
+  scheduler_->OnFrameEncoded(frame.frame.get(), &stats);
+
+  if (!frame.frame) {
+    return;
+  }
+
   webrtc::EncodedImageCallback::Result result =
       webrtc_transport_->video_encoder_factory()->SendEncodedFrame(
           *frame.frame, frame.timestamps->capture_started_time);
   if (result.error != webrtc::EncodedImageCallback::Result::OK) {
     // TODO(sergeyu): Stop the stream.
     LOG(ERROR) << "Failed to send video frame.";
     return;
   }
 
-  HostFrameStats stats;
-  scheduler_->OnFrameEncoded(*frame.frame, result, &stats);
-
   // Send FrameStats message.
   if (video_stats_dispatcher_.is_connected()) {
-    stats.frame_size = frame.frame->data.size();
+    stats.frame_size = frame.frame ? frame.frame->data.size() : 0;
 
     if (!frame.timestamps->input_event_timestamps.is_null()) {
       stats.capture_pending_delay =
           frame.timestamps->capture_started_time -
           frame.timestamps->input_event_timestamps.host_timestamp;
       stats.latest_event_timestamp =
           frame.timestamps->input_event_timestamps.client_timestamp;
     }
 
     stats.capture_delay = frame.timestamps->capture_delay;
 
     // Total overhead time for IPC and threading when capturing frames.
     stats.capture_overhead_delay = (frame.timestamps->capture_ended_time -
                                     frame.timestamps->capture_started_time) -
                                    stats.capture_delay;
 
     stats.encode_pending_delay = frame.timestamps->encode_started_time -
                                  frame.timestamps->capture_ended_time;
 
     stats.encode_delay = frame.timestamps->encode_ended_time -
                          frame.timestamps->encode_started_time;
 
     video_stats_dispatcher_.OnVideoFrameStats(result.frame_id, stats);
   }
 }
 
 }  // namespace protocol
 }  // namespace remoting
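The rewritten call sites above imply that the frame scheduler now accepts pointers that may be null, so it can account for failed captures and dropped frames when pacing. The following sketch of those declarations is inferred from the call sites in this diff only, not copied from remoting/protocol/webrtc_frame_scheduler.h:

  // Inferred sketch, not the real header. |frame| may be null when capture
  // failed (OnFrameCaptured) or when the encoder produced no output
  // (OnFrameEncoded). OnFrameCaptured() returns false if the frame should be
  // dropped instead of being sent to the encoder.
  virtual bool OnFrameCaptured(const webrtc::DesktopFrame* frame,
                               WebrtcVideoEncoder::FrameParams* params_out) = 0;
  virtual void OnFrameEncoded(const WebrtcVideoEncoder::EncodedFrame* frame,
                              HostFrameStats* frame_stats) = 0;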