| OLD | NEW |
| 1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "remoting/protocol/webrtc_video_stream.h" | 5 #include "remoting/protocol/webrtc_video_stream.h" |
| 6 | 6 |
| 7 #include "base/logging.h" | 7 #include "base/logging.h" |
| 8 #include "base/single_thread_task_runner.h" | 8 #include "base/single_thread_task_runner.h" |
| 9 #include "base/task_runner_util.h" | 9 #include "base/task_runner_util.h" |
| 10 #include "base/threading/thread_task_runner_handle.h" | 10 #include "base/threading/thread_task_runner_handle.h" |
| (...skipping 27 matching lines...) Expand all Loading... |
| 38 ParamType param) { | 38 ParamType param) { |
| 39 task_runner->PostTask(FROM_HERE, base::Bind(task, param)); | 39 task_runner->PostTask(FROM_HERE, base::Bind(task, param)); |
| 40 } | 40 } |
| 41 | 41 |
| 42 } // namespace | 42 } // namespace |
| 43 | 43 |
| 44 const char kStreamLabel[] = "screen_stream"; | 44 const char kStreamLabel[] = "screen_stream"; |
| 45 const char kVideoLabel[] = "screen_video"; | 45 const char kVideoLabel[] = "screen_video"; |
| 46 | 46 |
| 47 struct WebrtcVideoStream::FrameTimestamps { | 47 struct WebrtcVideoStream::FrameTimestamps { |
| 48 // The following two fields are set only for one frame after each incoming | 48 // The following field is non-null only for one frame after each incoming |
| 49 // input event. |input_event_client_timestamp| is event timestamp | 49 // input event. |
| 50 // received from the client. |input_event_received_time| is local time when | 50 InputEventTimestamps input_event_timestamps; |
| 51 // the event was received. | |
| 52 int64_t input_event_client_timestamp = -1; | |
| 53 base::TimeTicks input_event_received_time; | |
| 54 | 51 |
| 55 base::TimeTicks capture_started_time; | 52 base::TimeTicks capture_started_time; |
| 56 base::TimeTicks capture_ended_time; | 53 base::TimeTicks capture_ended_time; |
| 57 base::TimeDelta capture_delay; | 54 base::TimeDelta capture_delay; |
| 58 base::TimeTicks encode_started_time; | 55 base::TimeTicks encode_started_time; |
| 59 base::TimeTicks encode_ended_time; | 56 base::TimeTicks encode_ended_time; |
| 60 }; | 57 }; |
| 61 | 58 |
| 62 struct WebrtcVideoStream::EncodedFrameWithTimestamps { | 59 struct WebrtcVideoStream::EncodedFrameWithTimestamps { |
| 63 std::unique_ptr<WebrtcVideoEncoder::EncodedFrame> frame; | 60 std::unique_ptr<WebrtcVideoEncoder::EncodedFrame> frame; |
| (...skipping 71 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 135 base::Bind(&WebrtcVideoStream::SetTargetBitrate, | 132 base::Bind(&WebrtcVideoStream::SetTargetBitrate, |
| 136 weak_factory_.GetWeakPtr()))); | 133 weak_factory_.GetWeakPtr()))); |
| 137 | 134 |
| 138 video_stats_dispatcher_.Init(webrtc_transport_->CreateOutgoingChannel( | 135 video_stats_dispatcher_.Init(webrtc_transport_->CreateOutgoingChannel( |
| 139 video_stats_dispatcher_.channel_name()), | 136 video_stats_dispatcher_.channel_name()), |
| 140 this); | 137 this); |
| 141 | 138 |
| 142 scheduler_.reset(new WebrtcFrameSchedulerSimple()); | 139 scheduler_.reset(new WebrtcFrameSchedulerSimple()); |
| 143 } | 140 } |
| 144 | 141 |
| 142 void WebrtcVideoStream::SetEventTimestampSource( |
| 143 scoped_refptr<InputEventTimestampSource> event_timestamp_source) { |
| 144 event_timestamp_source_ = event_timestamp_source; |
| 145 } |
| 146 |
| 145 void WebrtcVideoStream::Pause(bool pause) { | 147 void WebrtcVideoStream::Pause(bool pause) { |
| 146 DCHECK(thread_checker_.CalledOnValidThread()); | 148 DCHECK(thread_checker_.CalledOnValidThread()); |
| 147 scheduler_->Pause(pause); | 149 scheduler_->Pause(pause); |
| 148 } | 150 } |
| 149 | 151 |
| 150 void WebrtcVideoStream::OnInputEventReceived(int64_t event_timestamp) { | |
| 151 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 152 | |
| 153 if (!next_frame_timestamps_) | |
| 154 next_frame_timestamps_.reset(new FrameTimestamps()); | |
| 155 next_frame_timestamps_->input_event_client_timestamp = event_timestamp; | |
| 156 next_frame_timestamps_->input_event_received_time = base::TimeTicks::Now(); | |
| 157 } | |
| 158 | |
| 159 void WebrtcVideoStream::SetLosslessEncode(bool want_lossless) { | 152 void WebrtcVideoStream::SetLosslessEncode(bool want_lossless) { |
| 160 NOTIMPLEMENTED(); | 153 NOTIMPLEMENTED(); |
| 161 } | 154 } |
| 162 | 155 |
| 163 void WebrtcVideoStream::SetLosslessColor(bool want_lossless) { | 156 void WebrtcVideoStream::SetLosslessColor(bool want_lossless) { |
| 164 NOTIMPLEMENTED(); | 157 NOTIMPLEMENTED(); |
| 165 } | 158 } |
| 166 | 159 |
| 167 void WebrtcVideoStream::SetObserver(Observer* observer) { | 160 void WebrtcVideoStream::SetObserver(Observer* observer) { |
| 168 DCHECK(thread_checker_.CalledOnValidThread()); | 161 DCHECK(thread_checker_.CalledOnValidThread()); |
| (...skipping 61 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 230 } | 223 } |
| 231 void WebrtcVideoStream::OnChannelClosed( | 224 void WebrtcVideoStream::OnChannelClosed( |
| 232 ChannelDispatcherBase* channel_dispatcher) { | 225 ChannelDispatcherBase* channel_dispatcher) { |
| 233 DCHECK(&video_stats_dispatcher_ == channel_dispatcher); | 226 DCHECK(&video_stats_dispatcher_ == channel_dispatcher); |
| 234 LOG(WARNING) << "video_stats channel was closed."; | 227 LOG(WARNING) << "video_stats channel was closed."; |
| 235 } | 228 } |
| 236 | 229 |
| 237 void WebrtcVideoStream::CaptureNextFrame() { | 230 void WebrtcVideoStream::CaptureNextFrame() { |
| 238 DCHECK(thread_checker_.CalledOnValidThread()); | 231 DCHECK(thread_checker_.CalledOnValidThread()); |
| 239 | 232 |
| 240 // |next_frame_timestamps_| is not set if no input events were received since | 233 captured_frame_timestamps_.reset(new FrameTimestamps()); |
| 241 // the previous frame. In that case create FrameTimestamps instance without | 234 captured_frame_timestamps_->capture_started_time = base::TimeTicks::Now(); |
| 242 // setting |input_event_client_timestamp| and |input_event_received_time|. | |
| 243 if (!next_frame_timestamps_) | |
| 244 next_frame_timestamps_.reset(new FrameTimestamps()); | |
| 245 | 235 |
| 246 captured_frame_timestamps_ = std::move(next_frame_timestamps_); | 236 if (event_timestamp_source_) { |
| 247 captured_frame_timestamps_->capture_started_time = base::TimeTicks::Now(); | 237 captured_frame_timestamps_->input_event_timestamps = |
| 238 event_timestamp_source_->GetLastEventTimestamps(); |
| 239 } |
| 248 | 240 |
| 249 capturer_->Capture(webrtc::DesktopRegion()); | 241 capturer_->Capture(webrtc::DesktopRegion()); |
| 250 } | 242 } |
| 251 | 243 |
| 252 // static | 244 // static |
| 253 WebrtcVideoStream::EncodedFrameWithTimestamps WebrtcVideoStream::EncodeFrame( | 245 WebrtcVideoStream::EncodedFrameWithTimestamps WebrtcVideoStream::EncodeFrame( |
| 254 WebrtcVideoEncoder* encoder, | 246 WebrtcVideoEncoder* encoder, |
| 255 std::unique_ptr<webrtc::DesktopFrame> frame, | 247 std::unique_ptr<webrtc::DesktopFrame> frame, |
| 256 WebrtcVideoEncoder::FrameParams params, | 248 WebrtcVideoEncoder::FrameParams params, |
| 257 std::unique_ptr<WebrtcVideoStream::FrameTimestamps> timestamps) { | 249 std::unique_ptr<WebrtcVideoStream::FrameTimestamps> timestamps) { |
| (...skipping 18 matching lines...) Expand all Loading... |
| 276 return; | 268 return; |
| 277 } | 269 } |
| 278 | 270 |
| 279 scheduler_->OnFrameEncoded(*frame.frame, result); | 271 scheduler_->OnFrameEncoded(*frame.frame, result); |
| 280 | 272 |
| 281 // Send FrameStats message. | 273 // Send FrameStats message. |
| 282 if (video_stats_dispatcher_.is_connected()) { | 274 if (video_stats_dispatcher_.is_connected()) { |
| 283 HostFrameStats stats; | 275 HostFrameStats stats; |
| 284 stats.frame_size = frame.frame->data.size(); | 276 stats.frame_size = frame.frame->data.size(); |
| 285 | 277 |
| 286 if (!frame.timestamps->input_event_received_time.is_null()) { | 278 if (!frame.timestamps->input_event_timestamps.is_null()) { |
| 287 stats.capture_pending_delay = frame.timestamps->capture_started_time - | 279 stats.capture_pending_delay = |
| 288 frame.timestamps->input_event_received_time; | 280 frame.timestamps->capture_started_time - |
| 289 stats.latest_event_timestamp = base::TimeTicks::FromInternalValue( | 281 frame.timestamps->input_event_timestamps.host_timestamp; |
| 290 frame.timestamps->input_event_client_timestamp); | 282 stats.latest_event_timestamp = |
|  | 283 frame.timestamps->input_event_timestamps.client_timestamp; |
| 291 } | 284 } |
| 292 | 285 |
| 293 stats.capture_delay = frame.timestamps->capture_delay; | 286 stats.capture_delay = frame.timestamps->capture_delay; |
| 294 | 287 |
| 295 // Total overhead time for IPC and threading when capturing frames. | 288 // Total overhead time for IPC and threading when capturing frames. |
| 296 stats.capture_overhead_delay = (frame.timestamps->capture_ended_time - | 289 stats.capture_overhead_delay = (frame.timestamps->capture_ended_time - |
| 297 frame.timestamps->capture_started_time) - | 290 frame.timestamps->capture_started_time) - |
| 298 stats.capture_delay; | 291 stats.capture_delay; |
| 299 | 292 |
| 300 stats.encode_pending_delay = frame.timestamps->encode_started_time - | 293 stats.encode_pending_delay = frame.timestamps->encode_started_time - |
| 301 frame.timestamps->capture_ended_time; | 294 frame.timestamps->capture_ended_time; |
| 302 | 295 |
| 303 stats.encode_delay = frame.timestamps->encode_ended_time - | 296 stats.encode_delay = frame.timestamps->encode_ended_time - |
| 304 frame.timestamps->encode_started_time; | 297 frame.timestamps->encode_started_time; |
| 305 | 298 |
| 306 // TODO(sergeyu): Figure out how to measure send_pending time with WebRTC | 299 // TODO(sergeyu): Figure out how to measure send_pending time with WebRTC |
| 307 // and set it here. | 300 // and set it here. |
| 308 stats.send_pending_delay = base::TimeDelta(); | 301 stats.send_pending_delay = base::TimeDelta(); |
| 309 | 302 |
| 310 video_stats_dispatcher_.OnVideoFrameStats(result.frame_id, stats); | 303 video_stats_dispatcher_.OnVideoFrameStats(result.frame_id, stats); |
| 311 } | 304 } |
| 312 } | 305 } |
| 313 | 306 |
| 314 } // namespace protocol | 307 } // namespace protocol |
| 315 } // namespace remoting | 308 } // namespace remoting |
| OLD | NEW |