Chromium Code Reviews
| Index: remoting/protocol/webrtc_video_stream.cc |
| diff --git a/remoting/protocol/webrtc_video_stream.cc b/remoting/protocol/webrtc_video_stream.cc |
| index dc24ecb934cc3994088a94ec6781fa3ab231393b..a0d702d574bb435a138711de2f0bf33d100a4eac 100644 |
| --- a/remoting/protocol/webrtc_video_stream.cc |
| +++ b/remoting/protocol/webrtc_video_stream.cc |
| @@ -10,6 +10,8 @@ |
| #include "base/threading/thread_task_runner_handle.h" |
| #include "remoting/base/constants.h" |
| #include "remoting/proto/video.pb.h" |
| +#include "remoting/protocol/frame_stats.h" |
| +#include "remoting/protocol/host_video_stats_dispatcher.h" |
| #include "remoting/protocol/webrtc_dummy_video_capturer.h" |
| #include "remoting/protocol/webrtc_transport.h" |
| #include "third_party/webrtc/api/mediastreaminterface.h" |
| @@ -22,33 +24,6 @@ namespace protocol { |
| namespace { |
| -// Task running on the encoder thread to encode the |frame|. |
| -std::unique_ptr<VideoPacket> EncodeFrame( |
| - VideoEncoder* encoder, |
| - std::unique_ptr<webrtc::DesktopFrame> frame, |
| - uint32_t target_bitrate_kbps, |
| - bool key_frame_request, |
| - int64_t capture_time_ms) { |
| - uint32_t flags = 0; |
| - if (key_frame_request) |
| - flags |= VideoEncoder::REQUEST_KEY_FRAME; |
| - |
| - base::TimeTicks current = base::TimeTicks::Now(); |
| - encoder->UpdateTargetBitrate(target_bitrate_kbps); |
| - std::unique_ptr<VideoPacket> packet = encoder->Encode(*frame, flags); |
| - if (!packet) |
| - return nullptr; |
| - // TODO(isheriff): Note that while VideoPacket capture time is supposed |
| - // to be capture duration, we (ab)use it for capture timestamp here. This |
| - // will go away when we move away from VideoPacket. |
| - packet->set_capture_time_ms(capture_time_ms); |
| - |
| - VLOG(1) << "Encode duration " |
| - << (base::TimeTicks::Now() - current).InMilliseconds() |
| - << " payload size " << packet->data().size(); |
| - return packet; |
| -} |
| - |
| void PostTaskOnTaskRunner( |
| scoped_refptr<base::SingleThreadTaskRunner> task_runner, |
| const base::Closure& task) { |
| @@ -68,9 +43,29 @@ void PostTaskOnTaskRunnerWithParam( |
| const char kStreamLabel[] = "screen_stream"; |
| const char kVideoLabel[] = "screen_video"; |
| +struct WebrtcVideoStream::FrameTimestamps { |
| + // The following two fields are set only for one frame after each incoming |
| + // input event. |input_event_client_timestamp| is event timestamp |
| + // received from the client. |input_event_received_time| is local time when |
| + // the event was received. |
| + int64_t input_event_client_timestamp = -1; |
| + base::TimeTicks input_event_received_time; |
| + |
| + base::TimeTicks capture_started_time; |
| + base::TimeTicks capture_ended_time; |
| + base::TimeDelta capture_delay; |
| + base::TimeTicks encode_started_time; |
| + base::TimeTicks encode_ended_time; |
| + base::TimeTicks can_send_time; |
|
Irfan
2016/08/09 17:00:37
what is this for ?
Sergey Ulanov
2016/08/10 18:07:44
It wasn't used, removed now.
|
| +}; |
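The field comments above describe a latch-and-consume pattern: the input-event fields are stamped when an event arrives and travel with only the next captured frame. Below is a minimal stand-alone sketch of that pattern, with std::chrono standing in for base::TimeTicks and every class/function name hypothetical; the CL's actual flow lives in OnInputEventReceived() and CaptureNextFrame() further down.

```cpp
// Minimal sketch of the latch-and-consume pattern used by
// OnInputEventReceived() / CaptureNextFrame(). std::chrono stands in for
// base::TimeTicks; all names here are hypothetical.
#include <chrono>
#include <cstdint>
#include <iostream>
#include <memory>
#include <utility>

using Clock = std::chrono::steady_clock;

struct FrameTimestamps {
  // Set only for the first frame captured after an input event.
  int64_t input_event_client_timestamp = -1;
  Clock::time_point input_event_received_time;

  Clock::time_point capture_started_time;
};

class TimestampLatch {
 public:
  // Called whenever an input event arrives from the client.
  void OnInputEvent(int64_t client_timestamp) {
    if (!next_)
      next_.reset(new FrameTimestamps());
    next_->input_event_client_timestamp = client_timestamp;
    next_->input_event_received_time = Clock::now();
  }

  // Called when a capture starts; consumes the latched timestamps, or
  // creates a fresh FrameTimestamps if no input event was received since
  // the previous frame.
  std::unique_ptr<FrameTimestamps> BeginCapture() {
    if (!next_)
      next_.reset(new FrameTimestamps());
    next_->capture_started_time = Clock::now();
    return std::move(next_);
  }

 private:
  std::unique_ptr<FrameTimestamps> next_;
};

int main() {
  TimestampLatch latch;
  latch.OnInputEvent(/*client_timestamp=*/12345);

  // The first frame after the event carries the input-event fields...
  auto first = latch.BeginCapture();
  std::cout << "first frame event ts: "
            << first->input_event_client_timestamp << "\n";  // 12345

  // ...subsequent frames do not (the timestamp keeps its default, -1).
  auto second = latch.BeginCapture();
  std::cout << "second frame event ts: "
            << second->input_event_client_timestamp << "\n";  // -1
}
```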
| + |
| +struct WebrtcVideoStream::PacketWithTimestamps { |
| + std::unique_ptr<VideoPacket> packet; |
| + std::unique_ptr<FrameTimestamps> timestamps; |
|
Irfan
2016/08/09 17:00:37
It would be nice to have a consistency on the naming
Sergey Ulanov
2016/08/10 18:07:45
packet = encoded frame
Calling it VideoFrame would
|
| +}; |
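Both members are std::unique_ptr, so PacketWithTimestamps is move-only; EncodeFrame() below returns it by value from the encode task runner and base::PostTaskAndReplyWithResult moves it back to the calling thread. A Chromium-free sketch of that hand-off, using std::async/std::future in place of the task runners; the stub types and FakeEncode() are hypothetical:

```cpp
// Sketch of returning a move-only result from a worker thread, mirroring how
// EncodeFrame() hands a PacketWithTimestamps back to the posting thread.
// std::async/std::future stand in for the Chromium task runners.
#include <future>
#include <iostream>
#include <memory>
#include <string>
#include <utility>

struct VideoPacket {
  std::string data;
};

struct FrameTimestamps {
  // Encode timing would be filled in on the worker thread.
};

struct PacketWithTimestamps {
  std::unique_ptr<VideoPacket> packet;
  std::unique_ptr<FrameTimestamps> timestamps;
};

// Runs on the "encode thread"; returns the move-only aggregate by value.
PacketWithTimestamps FakeEncode(std::unique_ptr<FrameTimestamps> timestamps) {
  PacketWithTimestamps result;
  result.timestamps = std::move(timestamps);
  result.packet.reset(new VideoPacket{"encoded-bytes"});
  return result;  // moved, not copied
}

int main() {
  auto timestamps = std::make_unique<FrameTimestamps>();
  std::future<PacketWithTimestamps> pending =
      std::async(std::launch::async, &FakeEncode, std::move(timestamps));

  // Back on the "main thread": take ownership of the encoded frame.
  PacketWithTimestamps ready = pending.get();
  std::cout << "payload size: " << ready.packet->data.size() << "\n";
}
```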
| + |
| WebrtcVideoStream::WebrtcVideoStream() |
| - : main_task_runner_(base::ThreadTaskRunnerHandle::Get()), |
| - weak_factory_(this) {} |
| + : video_stats_dispatcher_(kStreamLabel), weak_factory_(this) {} |
| WebrtcVideoStream::~WebrtcVideoStream() { |
| if (stream_) { |
| @@ -104,8 +99,6 @@ bool WebrtcVideoStream::Start( |
| capturer_ = std::move(desktop_capturer); |
| webrtc_transport_ = webrtc_transport; |
| encoder_ = std::move(video_encoder); |
| - capture_timer_.reset(new base::RepeatingTimer()); |
| - |
| capturer_->Start(this); |
| // Set video stream constraints. |
| @@ -130,23 +123,27 @@ bool WebrtcVideoStream::Start( |
| // Register for PLI requests. |
| webrtc_transport_->video_encoder_factory()->SetKeyFrameRequestCallback( |
| - base::Bind(&PostTaskOnTaskRunner, main_task_runner_, |
| + base::Bind(&PostTaskOnTaskRunner, base::ThreadTaskRunnerHandle::Get(), |
| base::Bind(&WebrtcVideoStream::SetKeyFrameRequest, |
| weak_factory_.GetWeakPtr()))); |
| // Register for target bitrate notifications. |
| webrtc_transport_->video_encoder_factory()->SetTargetBitrateCallback( |
| - base::Bind(&PostTaskOnTaskRunnerWithParam<int>, main_task_runner_, |
| + base::Bind(&PostTaskOnTaskRunnerWithParam<int>, |
| + base::ThreadTaskRunnerHandle::Get(), |
| base::Bind(&WebrtcVideoStream::SetTargetBitrate, |
| weak_factory_.GetWeakPtr()))); |
| + video_stats_dispatcher_.Init(webrtc_transport_->CreateOutgoingChannel( |
| + video_stats_dispatcher_.channel_name()), |
| + this); |
| return true; |
| } |
| void WebrtcVideoStream::Pause(bool pause) { |
| DCHECK(thread_checker_.CalledOnValidThread()); |
| if (pause) { |
| - capture_timer_->Stop(); |
| + capture_timer_.Stop(); |
| } else { |
| if (received_first_frame_request_) { |
| StartCaptureTimer(); |
| @@ -155,7 +152,12 @@ void WebrtcVideoStream::Pause(bool pause) { |
| } |
| void WebrtcVideoStream::OnInputEventReceived(int64_t event_timestamp) { |
| - NOTIMPLEMENTED(); |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| + |
| + if (!next_frame_timestamps_) |
| + next_frame_timestamps_.reset(new FrameTimestamps()); |
| + next_frame_timestamps_->input_event_client_timestamp = event_timestamp; |
| + next_frame_timestamps_->input_event_received_time = base::TimeTicks::Now(); |
| } |
| void WebrtcVideoStream::SetLosslessEncode(bool want_lossless) { |
| @@ -178,7 +180,7 @@ void WebrtcVideoStream::SetKeyFrameRequest() { |
| if (!received_first_frame_request_) { |
| received_first_frame_request_ = true; |
| StartCaptureTimer(); |
| - main_task_runner_->PostTask( |
| + base::ThreadTaskRunnerHandle::Get()->PostTask( |
| FROM_HERE, base::Bind(&WebrtcVideoStream::StartCaptureTimer, |
| weak_factory_.GetWeakPtr())); |
| } |
| @@ -186,8 +188,8 @@ void WebrtcVideoStream::SetKeyFrameRequest() { |
| void WebrtcVideoStream::StartCaptureTimer() { |
| DCHECK(thread_checker_.CalledOnValidThread()); |
| - capture_timer_->Start(FROM_HERE, base::TimeDelta::FromSeconds(1) / 30, this, |
| - &WebrtcVideoStream::CaptureNextFrame); |
| + capture_timer_.Start(FROM_HERE, base::TimeDelta::FromSeconds(1) / 30, this, |
| + &WebrtcVideoStream::CaptureNextFrame); |
| } |
| void WebrtcVideoStream::SetTargetBitrate(int target_bitrate_kbps) { |
| @@ -209,12 +211,10 @@ void WebrtcVideoStream::OnCaptureResult( |
| webrtc::DesktopCapturer::Result result, |
| std::unique_ptr<webrtc::DesktopFrame> frame) { |
| DCHECK(thread_checker_.CalledOnValidThread()); |
| - |
| - base::TimeTicks captured_ticks = base::TimeTicks::Now(); |
| - int64_t capture_timestamp_ms = |
| - (captured_ticks - base::TimeTicks()).InMilliseconds(); |
| + DCHECK(capture_pending_); |
| capture_pending_ = false; |
| + |
| if (encode_pending_) { |
| // TODO(isheriff): consider queuing here |
| VLOG(1) << "Dropping captured frame since encoder is still busy"; |
| @@ -233,16 +233,32 @@ void WebrtcVideoStream::OnCaptureResult( |
| if (observer_) |
| observer_->OnVideoSizeChanged(this, frame_size_, frame_dpi_); |
| } |
| + |
| + captured_frame_timestamps_->capture_ended_time = base::TimeTicks::Now(); |
| + captured_frame_timestamps_->capture_delay = |
| + base::TimeDelta::FromMilliseconds(frame->capture_time_ms()); |
| + |
| encode_pending_ = true; |
| base::PostTaskAndReplyWithResult( |
| encode_task_runner_.get(), FROM_HERE, |
| - base::Bind(&EncodeFrame, encoder_.get(), base::Passed(std::move(frame)), |
| - target_bitrate_kbps_, ClearAndGetKeyFrameRequest(), |
| - capture_timestamp_ms), |
| + base::Bind(&WebrtcVideoStream::EncodeFrame, encoder_.get(), |
| + base::Passed(std::move(frame)), |
| + base::Passed(std::move(captured_frame_timestamps_)), |
| + target_bitrate_kbps_, ClearAndGetKeyFrameRequest()), |
| base::Bind(&WebrtcVideoStream::OnFrameEncoded, |
| weak_factory_.GetWeakPtr())); |
| } |
| +void WebrtcVideoStream::OnChannelInitialized( |
| + ChannelDispatcherBase* channel_dispatcher) { |
| + DCHECK(&video_stats_dispatcher_ == channel_dispatcher); |
|
Irfan
2016/08/09 17:00:37
Are these callbacks helpful ? It sounds like we al
Sergey Ulanov
2016/08/10 18:07:45
OnChannelClosed() is useful if only to log the warning
|
| +} |
| +void WebrtcVideoStream::OnChannelClosed( |
| + ChannelDispatcherBase* channel_dispatcher) { |
| + DCHECK(&video_stats_dispatcher_ == channel_dispatcher); |
| + LOG(WARNING) << "video_stats channel was closed."; |
| +} |
| + |
| void WebrtcVideoStream::CaptureNextFrame() { |
| DCHECK(thread_checker_.CalledOnValidThread()); |
| @@ -251,37 +267,108 @@ void WebrtcVideoStream::CaptureNextFrame() { |
| return; |
| } |
| + base::TimeTicks now = base::TimeTicks::Now(); |
| + |
| capture_pending_ = true; |
| VLOG(1) << "Capture next frame after " |
| << (base::TimeTicks::Now() - last_capture_started_ticks_) |
| .InMilliseconds(); |
| - last_capture_started_ticks_ = base::TimeTicks::Now(); |
| + last_capture_started_ticks_ = now; |
| + |
| + |
| + // |next_frame_timestamps_| is not set if no input events were received since |
| + // the previous frame. In that case create FrameTimestamps instance without |
| + // setting |input_event_client_timestamp| and |input_event_received_time|. |
| + if (!next_frame_timestamps_) |
| + next_frame_timestamps_.reset(new FrameTimestamps()); |
| + |
| + captured_frame_timestamps_ = std::move(next_frame_timestamps_); |
| + captured_frame_timestamps_->capture_started_time = now; |
| + |
| capturer_->Capture(webrtc::DesktopRegion()); |
| } |
| -void WebrtcVideoStream::OnFrameEncoded(std::unique_ptr<VideoPacket> packet) { |
| +// static |
| +WebrtcVideoStream::PacketWithTimestamps WebrtcVideoStream::EncodeFrame( |
| + VideoEncoder* encoder, |
| + std::unique_ptr<webrtc::DesktopFrame> frame, |
| + std::unique_ptr<WebrtcVideoStream::FrameTimestamps> timestamps, |
| + uint32_t target_bitrate_kbps, |
| + bool key_frame_request) { |
| + PacketWithTimestamps result; |
| + result.timestamps = std::move(timestamps); |
| + result.timestamps->encode_started_time = base::TimeTicks::Now(); |
| + |
| + encoder->UpdateTargetBitrate(target_bitrate_kbps); |
| + result.packet = encoder->Encode( |
| + *frame, key_frame_request ? VideoEncoder::REQUEST_KEY_FRAME : 0); |
| + |
| + result.timestamps->encode_ended_time = base::TimeTicks::Now(); |
| + |
| + return result; |
| +} |
| + |
| +void WebrtcVideoStream::OnFrameEncoded(PacketWithTimestamps packet) { |
| DCHECK(thread_checker_.CalledOnValidThread()); |
| encode_pending_ = false; |
| - if (!packet) |
| - return; |
| - base::TimeTicks current = base::TimeTicks::Now(); |
| - float encoded_bits = packet->data().size() * 8.0; |
| + |
| + size_t packet_size = packet.packet ? packet.packet->data().size() : 0; |
| + |
| + // Generate HostFrameStats. |
| + HostFrameStats stats; |
| + stats.frame_size = packet_size; |
| + stats.latest_event_timestamp = base::TimeTicks::FromInternalValue( |
| + packet.timestamps->input_event_client_timestamp); |
| + |
| + if (!packet.timestamps->input_event_received_time.is_null()) { |
| + stats.capture_pending_delay = packet.timestamps->capture_started_time - |
|
Irfan
2016/08/09 17:00:37
update capture_pending_delay when there is no input event?
Sergey Ulanov
2016/08/10 18:07:45
capture_pending_delay is defined as the delay between receiving the input event and the start of the frame capture.
|
| + packet.timestamps->input_event_received_time; |
| + stats.latest_event_timestamp = base::TimeTicks::FromInternalValue( |
|
Irfan
2016/08/09 17:00:37
This is duplicated above
Sergey Ulanov
2016/08/10 18:07:45
Removed it from above.
|
| + packet.timestamps->input_event_client_timestamp); |
| + } |
| + |
| + stats.capture_delay = packet.timestamps->capture_delay; |
| + stats.capture_overhead_delay = (packet.timestamps->capture_ended_time - |
|
Irfan
2016/08/09 17:00:37
Will be helpful to comment what capture_overhead_delay means.
Sergey Ulanov
2016/08/10 18:07:45
Done.
|
| + packet.timestamps->capture_started_time) - |
| + stats.capture_delay; |
| + |
| + stats.encode_pending_delay = packet.timestamps->encode_started_time - |
| + packet.timestamps->capture_ended_time; |
| + |
| + stats.encode_delay = packet.timestamps->encode_ended_time - |
| + packet.timestamps->encode_started_time; |
| + |
| + // TODO(sergeyu): Figure out how to measure send_pending time with WebRTC and |
| + // set it here. |
| + stats.send_pending_delay = base::TimeDelta(); |
|
Irfan
2016/08/09 17:00:37
what is send_pending_delay ? How long webrtc waits
Sergey Ulanov
2016/08/10 18:07:45
It may wait if there are other frames in the queue.
|
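To make the delay definitions discussed above concrete, here is the same arithmetic as in OnFrameEncoded() applied to one made-up frame timeline; std::chrono stands in for base::TimeTicks/TimeDelta and every number is illustrative only.

```cpp
// Worked example of the HostFrameStats delay arithmetic from OnFrameEncoded().
// The timeline values are illustrative only.
#include <chrono>
#include <iostream>

using Ms = std::chrono::milliseconds;

int main() {
  // Hypothetical timeline for one frame (milliseconds since the input event
  // was received on the host).
  const Ms input_event_received_time{0};
  const Ms capture_started_time{5};   // next capture tick fires
  const Ms capture_ended_time{22};    // capturer returned the frame
  const Ms capture_delay{12};         // frame->capture_time_ms()
  const Ms encode_started_time{25};   // encode task picked up on the worker
  const Ms encode_ended_time{40};     // encoder finished

  const Ms capture_pending_delay =
      capture_started_time - input_event_received_time;          // 5 ms
  const Ms capture_overhead_delay =
      (capture_ended_time - capture_started_time) - capture_delay;  // 17 - 12 = 5 ms
  const Ms encode_pending_delay =
      encode_started_time - capture_ended_time;                  // 3 ms
  const Ms encode_delay = encode_ended_time - encode_started_time;  // 15 ms

  std::cout << "capture_pending_delay:  " << capture_pending_delay.count() << " ms\n"
            << "capture_delay:          " << capture_delay.count() << " ms\n"
            << "capture_overhead_delay: " << capture_overhead_delay.count() << " ms\n"
            << "encode_pending_delay:   " << encode_pending_delay.count() << " ms\n"
            << "encode_delay:           " << encode_delay.count() << " ms\n";
  // send_pending_delay stays zero in this CL (see the TODO above).
}
```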
| + |
| + uint32_t frame_id = 0; |
| + if (packet.packet) { |
| + // Send the frame itself. |
| + webrtc::EncodedImageCallback::Result result = |
| + webrtc_transport_->video_encoder_factory()->SendEncodedFrame( |
| + std::move(packet.packet), packet.timestamps->capture_started_time); |
| + if (result.error != webrtc::EncodedImageCallback::Result::OK) { |
| + // TODO(sergeyu): Stop the stream. |
| + LOG(ERROR) << "Failed to send video frame."; |
| + return; |
| + } |
| + frame_id = result.frame_id; |
| + } |
| + |
| + // Send FrameStats message. |
| + if (video_stats_dispatcher_.is_connected()) |
| + video_stats_dispatcher_.OnVideoFrameStats(frame_id, stats); |
| // Simplistic adaptation of frame polling in the range 5 FPS to 30 FPS. |
| + // TODO(sergeyu): Move this logic to a separate class. |
| + float encoded_bits = packet_size * 8.0; |
| uint32_t next_sched_ms = std::max( |
| 33, std::min(static_cast<int>(encoded_bits / target_bitrate_kbps_), 200)); |
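Since |encoded_bits| is in bits and |target_bitrate_kbps_| is kbit/s (i.e. bits per millisecond), the quotient approximates the frame's transmission time in milliseconds, clamped to [33, 200] ms, which is roughly 30 FPS down to 5 FPS. A small stand-alone sketch of that clamp, with made-up frame sizes and bitrate:

```cpp
// Sketch of the polling-interval formula above: bits / (kbit/s) == ms,
// clamped to 33-200 ms (about 30 FPS down to 5 FPS). Frame sizes and the
// bitrate below are illustrative only.
#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <iostream>

uint32_t NextScheduleMs(size_t packet_size_bytes, int target_bitrate_kbps) {
  float encoded_bits = packet_size_bytes * 8.0f;
  return std::max(
      33, std::min(static_cast<int>(encoded_bits / target_bitrate_kbps), 200));
}

int main() {
  const int kBitrateKbps = 2000;
  std::cout << NextScheduleMs(2000, kBitrateKbps) << "\n";    // 16000/2000 = 8   -> 33 ms (30 FPS)
  std::cout << NextScheduleMs(12500, kBitrateKbps) << "\n";   // 100000/2000 = 50 -> 50 ms (20 FPS)
  std::cout << NextScheduleMs(125000, kBitrateKbps) << "\n";  // 1e6/2000 = 500   -> 200 ms (5 FPS)
}
```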
| - if (webrtc_transport_->video_encoder_factory()->SendEncodedFrame( |
| - std::move(packet)) >= 0) { |
| - VLOG(1) << "Send duration " |
| - << (base::TimeTicks::Now() - current).InMilliseconds() |
| - << ", next sched " << next_sched_ms; |
| - } else { |
| - LOG(ERROR) << "SendEncodedFrame() failed"; |
| - } |
| - capture_timer_->Start(FROM_HERE, |
| - base::TimeDelta::FromMilliseconds(next_sched_ms), this, |
| - &WebrtcVideoStream::CaptureNextFrame); |
| + capture_timer_.Start(FROM_HERE, |
| + base::TimeDelta::FromMilliseconds(next_sched_ms), this, |
| + &WebrtcVideoStream::CaptureNextFrame); |
| } |
| } // namespace protocol |