| Index: remoting/host/video_scheduler.cc
|
| diff --git a/remoting/host/video_scheduler.cc b/remoting/host/video_scheduler.cc
|
| index e2a3f945533aec44f68f2d58c1860f07494e90a5..ea57e8a3cad7e8f0956891cd01b8081a10788486 100644
|
| --- a/remoting/host/video_scheduler.cc
|
| +++ b/remoting/host/video_scheduler.cc
|
| @@ -11,14 +11,13 @@
|
| #include "base/logging.h"
|
| #include "base/memory/scoped_ptr.h"
|
| #include "base/message_loop/message_loop_proxy.h"
|
| -#include "base/stl_util.h"
|
| -#include "base/sys_info.h"
|
| +#include "base/task_runner_util.h"
|
| #include "base/time/time.h"
|
| +#include "remoting/host/capture_scheduler.h"
|
| #include "remoting/proto/control.pb.h"
|
| #include "remoting/proto/internal.pb.h"
|
| #include "remoting/proto/video.pb.h"
|
| #include "remoting/protocol/cursor_shape_stub.h"
|
| -#include "remoting/protocol/message_decoder.h"
|
| #include "remoting/protocol/video_stub.h"
|
| #include "third_party/webrtc/modules/desktop_capture/desktop_capturer.h"
|
| #include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
|
| @@ -26,9 +25,23 @@
|
|
|
| namespace remoting {
|
|
|
| -// Maximum number of frames that can be processed simultaneously.
|
| -// TODO(hclam): Move this value to CaptureScheduler.
|
| -static const int kMaxPendingFrames = 2;
|
| +namespace {
|
| +
|
| +// Helper used to encode frames on the encode thread.
|
| +//
|
| +// TODO(sergeyu): This function doesn't do much besides calling
|
| +// VideoEncoder::Encode(). It's only needed to handle empty frames properly,
|
| +// and that logic can be moved into the VideoEncoder implementations.
|
| +scoped_ptr<VideoPacket> EncodeFrame(VideoEncoder* encoder,
|
| + scoped_ptr<webrtc::DesktopFrame> frame) {
|
| + // If there is nothing to encode, return an empty packet.
|
| + if (!frame || frame->updated_region().is_empty())
|
| + return make_scoped_ptr(new VideoPacket());
|
| +
|
| + return encoder->Encode(*frame);
|
| +}
|
| +
|
| +} // namespace
|
|
|
| // Interval between empty keep-alive frames. These frames are sent only when the
|
| // stream is paused or inactive for some other reason (e.g. when blocked on
|
| @@ -60,10 +73,6 @@ VideoScheduler::VideoScheduler(
|
| encoder_(encoder.Pass()),
|
| cursor_stub_(cursor_stub),
|
| video_stub_(video_stub),
|
| - pending_frames_(0),
|
| - capture_pending_(false),
|
| - did_skip_frame_(false),
|
| - is_paused_(false),
|
| latest_event_timestamp_(0) {
|
| DCHECK(network_task_runner_->BelongsToCurrentThread());
|
| DCHECK(capturer_);
|
| @@ -75,78 +84,17 @@ VideoScheduler::VideoScheduler(
|
|
|
| // Public methods --------------------------------------------------------------
|
|
|
| -webrtc::SharedMemory* VideoScheduler::CreateSharedMemory(size_t size) {
|
| - return nullptr;
|
| -}
|
| -
|
| -void VideoScheduler::OnCaptureCompleted(webrtc::DesktopFrame* frame) {
|
| - DCHECK(capture_task_runner_->BelongsToCurrentThread());
|
| -
|
| - capture_pending_ = false;
|
| -
|
| - scoped_ptr<webrtc::DesktopFrame> owned_frame(frame);
|
| -
|
| - if (owned_frame) {
|
| - scheduler_.RecordCaptureTime(
|
| - base::TimeDelta::FromMilliseconds(owned_frame->capture_time_ms()));
|
| - }
|
| -
|
| - // Even when |frame| is nullptr we still need to post it to the encode thread
|
| - // to make sure frames are freed in the same order they are received and
|
| - // that we don't start capturing frame n+2 before frame n is freed.
|
| - encode_task_runner_->PostTask(
|
| - FROM_HERE, base::Bind(&VideoScheduler::EncodeFrame, this,
|
| - base::Passed(&owned_frame), latest_event_timestamp_,
|
| - base::TimeTicks::Now()));
|
| -
|
| - // If a frame was skipped, try to capture it again.
|
| - if (did_skip_frame_) {
|
| - capture_task_runner_->PostTask(
|
| - FROM_HERE, base::Bind(&VideoScheduler::CaptureNextFrame, this));
|
| - }
|
| -}
|
| -
|
| -void VideoScheduler::OnMouseCursor(webrtc::MouseCursor* cursor) {
|
| - DCHECK(capture_task_runner_->BelongsToCurrentThread());
|
| -
|
| - scoped_ptr<webrtc::MouseCursor> owned_cursor(cursor);
|
| -
|
| - // Do nothing if the scheduler is being stopped.
|
| - if (!capturer_)
|
| - return;
|
| -
|
| - scoped_ptr<protocol::CursorShapeInfo> cursor_proto(
|
| - new protocol::CursorShapeInfo());
|
| - cursor_proto->set_width(cursor->image()->size().width());
|
| - cursor_proto->set_height(cursor->image()->size().height());
|
| - cursor_proto->set_hotspot_x(cursor->hotspot().x());
|
| - cursor_proto->set_hotspot_y(cursor->hotspot().y());
|
| -
|
| - cursor_proto->set_data(std::string());
|
| - uint8_t* current_row = cursor->image()->data();
|
| - for (int y = 0; y < cursor->image()->size().height(); ++y) {
|
| - cursor_proto->mutable_data()->append(
|
| - current_row,
|
| - current_row + cursor->image()->size().width() *
|
| - webrtc::DesktopFrame::kBytesPerPixel);
|
| - current_row += cursor->image()->stride();
|
| - }
|
| -
|
| - network_task_runner_->PostTask(
|
| - FROM_HERE, base::Bind(&VideoScheduler::SendCursorShape, this,
|
| - base::Passed(&cursor_proto)));
|
| -}
|
| -
|
| -void VideoScheduler::OnMouseCursorPosition(
|
| - webrtc::MouseCursorMonitor::CursorState state,
|
| - const webrtc::DesktopVector& position) {
|
| - // We're not subscribing to mouse position changes.
|
| - NOTREACHED();
|
| -}
|
| -
|
| void VideoScheduler::Start() {
|
| DCHECK(network_task_runner_->BelongsToCurrentThread());
|
|
|
| + keep_alive_timer_.reset(new base::DelayTimer<VideoScheduler>(
|
| + FROM_HERE, base::TimeDelta::FromMilliseconds(kKeepAlivePacketIntervalMs),
|
| + this, &VideoScheduler::SendKeepAlivePacket));
|
| +
|
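| + // |capture_scheduler_| runs on the network thread; it is expected to invoke
|
| + // the CaptureNextFrame() callback bound below whenever the next frame should
|
| + // be captured.
|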
| + capture_scheduler_.reset(new CaptureScheduler(
|
| + base::Bind(&VideoScheduler::CaptureNextFrame, this)));
|
| + capture_scheduler_->Start();
|
| +
|
| capture_task_runner_->PostTask(
|
| FROM_HERE, base::Bind(&VideoScheduler::StartOnCaptureThread, this));
|
| }
|
| @@ -158,6 +106,7 @@ void VideoScheduler::Stop() {
|
| cursor_stub_ = nullptr;
|
| video_stub_ = nullptr;
|
|
|
| + capture_scheduler_.reset();
|
| keep_alive_timer_.reset();
|
|
|
| capture_task_runner_->PostTask(
|
| @@ -165,56 +114,31 @@ void VideoScheduler::Stop() {
|
| }
|
|
|
| void VideoScheduler::Pause(bool pause) {
|
| - if (!capture_task_runner_->BelongsToCurrentThread()) {
|
| - DCHECK(network_task_runner_->BelongsToCurrentThread());
|
| - capture_task_runner_->PostTask(
|
| - FROM_HERE, base::Bind(&VideoScheduler::Pause, this, pause));
|
| - return;
|
| - }
|
| -
|
| - if (is_paused_ != pause) {
|
| - is_paused_ = pause;
|
| + DCHECK(network_task_runner_->BelongsToCurrentThread());
|
|
|
| - // Restart captures if we're resuming and there are none scheduled.
|
| - if (!is_paused_ && capture_timer_ && !capture_timer_->IsRunning())
|
| - CaptureNextFrame();
|
| - }
|
| + capture_scheduler_->Pause(pause);
|
| }
|
|
|
| void VideoScheduler::SetLatestEventTimestamp(int64 latest_event_timestamp) {
|
| - if (!capture_task_runner_->BelongsToCurrentThread()) {
|
| - DCHECK(network_task_runner_->BelongsToCurrentThread());
|
| - capture_task_runner_->PostTask(
|
| - FROM_HERE, base::Bind(&VideoScheduler::SetLatestEventTimestamp,
|
| - this, latest_event_timestamp));
|
| - return;
|
| - }
|
| + DCHECK(network_task_runner_->BelongsToCurrentThread());
|
|
|
| latest_event_timestamp_ = latest_event_timestamp;
|
| }
|
|
|
| void VideoScheduler::SetLosslessEncode(bool want_lossless) {
|
| - if (!encode_task_runner_->BelongsToCurrentThread()) {
|
| - DCHECK(network_task_runner_->BelongsToCurrentThread());
|
| - encode_task_runner_->PostTask(
|
| - FROM_HERE, base::Bind(&VideoScheduler::SetLosslessEncode,
|
| - this, want_lossless));
|
| - return;
|
| - }
|
| + DCHECK(network_task_runner_->BelongsToCurrentThread());
|
|
|
| - encoder_->SetLosslessEncode(want_lossless);
|
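| + // base::Unretained() is used here (and in SetLosslessColor() below) on the
|
| + // assumption that |encoder_| outlives any tasks still queued on the encode
|
| + // thread.
|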
| + encode_task_runner_->PostTask(
|
| + FROM_HERE, base::Bind(&VideoEncoder::SetLosslessEncode,
|
| + base::Unretained(encoder_.get()), want_lossless));
|
| }
|
|
|
| void VideoScheduler::SetLosslessColor(bool want_lossless) {
|
| - if (!encode_task_runner_->BelongsToCurrentThread()) {
|
| - DCHECK(network_task_runner_->BelongsToCurrentThread());
|
| - encode_task_runner_->PostTask(
|
| - FROM_HERE, base::Bind(&VideoScheduler::SetLosslessColor,
|
| - this, want_lossless));
|
| - return;
|
| - }
|
| + DCHECK(network_task_runner_->BelongsToCurrentThread());
|
|
|
| - encoder_->SetLosslessColor(want_lossless);
|
| + encode_task_runner_->PostTask(
|
| + FROM_HERE, base::Bind(&VideoEncoder::SetLosslessColor,
|
| + base::Unretained(encoder_.get()), want_lossless));
|
| }
|
|
|
| // Private methods -----------------------------------------------------------
|
| @@ -228,99 +152,122 @@ VideoScheduler::~VideoScheduler() {
|
|
|
| // Capturer thread -------------------------------------------------------------
|
|
|
| -void VideoScheduler::StartOnCaptureThread() {
|
| +webrtc::SharedMemory* VideoScheduler::CreateSharedMemory(size_t size) {
|
| + return nullptr;
|
| +}
|
| +
|
| +void VideoScheduler::OnCaptureCompleted(webrtc::DesktopFrame* frame) {
|
| DCHECK(capture_task_runner_->BelongsToCurrentThread());
|
| - DCHECK(!capture_timer_);
|
|
|
| - // Start mouse cursor monitor.
|
| - mouse_cursor_monitor_->Init(this, webrtc::MouseCursorMonitor::SHAPE_ONLY);
|
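| + // Pass ownership of |frame| (which may be nullptr) to EncodeAndSendFrame()
|
| + // on the network thread.
|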
| + network_task_runner_->PostTask(
|
| + FROM_HERE, base::Bind(&VideoScheduler::EncodeAndSendFrame, this,
|
| + base::Passed(make_scoped_ptr(frame))));
|
| +}
|
|
|
| - // Start the capturer.
|
| - capturer_->Start(this);
|
| +void VideoScheduler::OnMouseCursor(webrtc::MouseCursor* cursor) {
|
| + DCHECK(capture_task_runner_->BelongsToCurrentThread());
|
|
|
| - capture_timer_.reset(new base::OneShotTimer<VideoScheduler>());
|
| - keep_alive_timer_.reset(new base::DelayTimer<VideoScheduler>(
|
| - FROM_HERE, base::TimeDelta::FromMilliseconds(kKeepAlivePacketIntervalMs),
|
| - this, &VideoScheduler::SendKeepAlivePacket));
|
| + scoped_ptr<webrtc::MouseCursor> owned_cursor(cursor);
|
|
|
| - // Capture first frame immediately.
|
| - CaptureNextFrame();
|
| -}
|
| + scoped_ptr<protocol::CursorShapeInfo> cursor_proto(
|
| + new protocol::CursorShapeInfo());
|
| + cursor_proto->set_width(cursor->image()->size().width());
|
| + cursor_proto->set_height(cursor->image()->size().height());
|
| + cursor_proto->set_hotspot_x(cursor->hotspot().x());
|
| + cursor_proto->set_hotspot_y(cursor->hotspot().y());
|
|
|
| -void VideoScheduler::StopOnCaptureThread() {
|
| - DCHECK(capture_task_runner_->BelongsToCurrentThread());
|
| + cursor_proto->set_data(std::string());
|
| + uint8_t* current_row = cursor->image()->data();
|
| + for (int y = 0; y < cursor->image()->size().height(); ++y) {
|
| + cursor_proto->mutable_data()->append(
|
| + current_row,
|
| + current_row + cursor->image()->size().width() *
|
| + webrtc::DesktopFrame::kBytesPerPixel);
|
| + current_row += cursor->image()->stride();
|
| + }
|
|
|
| - // This doesn't deleted already captured frames, so encoder can keep using the
|
| - // frames that were captured previously.
|
| - capturer_.reset();
|
| + network_task_runner_->PostTask(
|
| + FROM_HERE, base::Bind(&VideoScheduler::SendCursorShape, this,
|
| + base::Passed(&cursor_proto)));
|
| +}
|
|
|
| - // |capture_timer_| must be destroyed on the thread on which it is used.
|
| - capture_timer_.reset();
|
| +void VideoScheduler::OnMouseCursorPosition(
|
| + webrtc::MouseCursorMonitor::CursorState state,
|
| + const webrtc::DesktopVector& position) {
|
| + // We're not subscribing to mouse position changes.
|
| + NOTREACHED();
|
| }
|
|
|
| -void VideoScheduler::ScheduleNextCapture() {
|
| +void VideoScheduler::StartOnCaptureThread() {
|
| DCHECK(capture_task_runner_->BelongsToCurrentThread());
|
|
|
| - capture_timer_->Start(FROM_HERE,
|
| - scheduler_.NextCaptureDelay(),
|
| - this,
|
| - &VideoScheduler::CaptureNextFrame);
|
| + mouse_cursor_monitor_->Init(this, webrtc::MouseCursorMonitor::SHAPE_ONLY);
|
| + capturer_->Start(this);
|
| }
|
|
|
| -void VideoScheduler::CaptureNextFrame() {
|
| +void VideoScheduler::StopOnCaptureThread() {
|
| DCHECK(capture_task_runner_->BelongsToCurrentThread());
|
|
|
| - // If we are stopping (|capturer_| is nullptr), or paused, then don't capture.
|
| - if (!capturer_ || is_paused_)
|
| - return;
|
| -
|
| - // Make sure we have at most two outstanding recordings. We can simply return
|
| - // if we can't make a capture now, the next capture will be started by the
|
| - // end of an encode operation.
|
| - if (pending_frames_ >= kMaxPendingFrames || capture_pending_) {
|
| - did_skip_frame_ = true;
|
| - return;
|
| - }
|
| -
|
| - did_skip_frame_ = false;
|
| -
|
| - // At this point we are going to perform one capture so save the current time.
|
| - pending_frames_++;
|
| - DCHECK_LE(pending_frames_, kMaxPendingFrames);
|
| + // This doesn't delete already-captured frames, so the encoder can keep using
|
| + // the frames that were captured previously.
|
| + capturer_.reset();
|
|
|
| - // Before doing a capture schedule for the next one.
|
| - ScheduleNextCapture();
|
| + mouse_cursor_monitor_.reset();
|
| +}
|
|
|
| - capture_pending_ = true;
|
| +void VideoScheduler::CaptureNextFrameOnCaptureThread() {
|
| + DCHECK(capture_task_runner_->BelongsToCurrentThread());
|
|
|
| - // Capture the mouse shape.
|
| + // Capture the mouse shape first and then the screen content.
|
| mouse_cursor_monitor_->Capture();
|
| -
|
| - // And finally perform one capture.
|
| capturer_->Capture(webrtc::DesktopRegion());
|
| }
|
|
|
| -void VideoScheduler::FrameCaptureCompleted() {
|
| - DCHECK(capture_task_runner_->BelongsToCurrentThread());
|
| +// Network thread --------------------------------------------------------------
|
|
|
| - // Decrement the pending capture count.
|
| - pending_frames_--;
|
| - DCHECK_GE(pending_frames_, 0);
|
| +void VideoScheduler::CaptureNextFrame() {
|
| + DCHECK(network_task_runner_->BelongsToCurrentThread());
|
|
|
| - // If we've skipped a frame capture because too we had too many captures
|
| - // pending then schedule one now.
|
| - if (did_skip_frame_)
|
| - CaptureNextFrame();
|
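| + // Invoked by |capture_scheduler_| on the network thread; the actual capture
|
| + // is posted to the capture thread.
|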
| + capture_task_runner_->PostTask(
|
| + FROM_HERE,
|
| + base::Bind(&VideoScheduler::CaptureNextFrameOnCaptureThread, this));
|
| }
|
|
|
| -// Network thread --------------------------------------------------------------
|
| +void VideoScheduler::EncodeAndSendFrame(
|
| + scoped_ptr<webrtc::DesktopFrame> frame) {
|
| + DCHECK(network_task_runner_->BelongsToCurrentThread());
|
|
|
| -void VideoScheduler::SendVideoPacket(scoped_ptr<VideoPacket> packet) {
|
| + if (!video_stub_)
|
| + return;
|
| +
|
| + capture_scheduler_->OnCaptureCompleted();
|
| +
|
| + // Even when |frame| is nullptr we still need to post it to the encode thread
|
| + // to make sure frames are freed in the same order they are received and
|
| + // that we don't start capturing frame n+2 before frame n is freed.
|
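| + // EncodeFrame() runs on the encode thread; the resulting packet is delivered
|
| + // back to SendEncodedFrame() on this (network) thread.
|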
| + base::PostTaskAndReplyWithResult(
|
| + encode_task_runner_.get(), FROM_HERE,
|
| + base::Bind(&EncodeFrame, encoder_.get(), base::Passed(&frame)),
|
| + base::Bind(&VideoScheduler::SendEncodedFrame, this,
|
| + latest_event_timestamp_, base::TimeTicks::Now()));
|
| +}
|
| +
|
| +void VideoScheduler::SendEncodedFrame(int64 latest_event_timestamp,
|
| + base::TimeTicks timestamp,
|
| + scoped_ptr<VideoPacket> packet) {
|
| DCHECK(network_task_runner_->BelongsToCurrentThread());
|
|
|
| if (!video_stub_)
|
| return;
|
|
|
| + if (g_enable_timestamps)
|
| + packet->set_timestamp(timestamp.ToInternalValue());
|
| +
|
| + packet->set_latest_event_timestamp(latest_event_timestamp);
|
| +
|
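| + // Report the encode time to the scheduler (this replaces the old
|
| + // scheduler_.RecordEncodeTime() call that ran on the encode thread).
|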
| + capture_scheduler_->OnFrameEncoded(
|
| + base::TimeDelta::FromMilliseconds(packet->encode_time_ms()));
|
| +
|
| video_stub_->ProcessVideoPacket(
|
| packet.Pass(), base::Bind(&VideoScheduler::OnVideoPacketSent, this));
|
| }
|
| @@ -331,18 +278,13 @@ void VideoScheduler::OnVideoPacketSent() {
|
| if (!video_stub_)
|
| return;
|
|
|
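| + // Notify the scheduler that the frame went out; this replaces the old
|
| + // FrameCaptureCompleted() round-trip to the capture thread.
|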
| + capture_scheduler_->OnFrameSent();
|
| keep_alive_timer_->Reset();
|
| -
|
| - capture_task_runner_->PostTask(
|
| - FROM_HERE, base::Bind(&VideoScheduler::FrameCaptureCompleted, this));
|
| }
|
|
|
| void VideoScheduler::SendKeepAlivePacket() {
|
| DCHECK(network_task_runner_->BelongsToCurrentThread());
|
|
|
| - if (!video_stub_)
|
| - return;
|
| -
|
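| + // |keep_alive_timer_| is reset together with |video_stub_| in Stop(), so
|
| + // |video_stub_| is assumed to still be valid whenever this fires.
|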
| video_stub_->ProcessVideoPacket(
|
| make_scoped_ptr(new VideoPacket()),
|
| base::Bind(&VideoScheduler::OnKeepAlivePacketSent, this));
|
| @@ -365,43 +307,4 @@ void VideoScheduler::SendCursorShape(
|
| cursor_stub_->SetCursorShape(*cursor_shape);
|
| }
|
|
|
| -// Encoder thread --------------------------------------------------------------
|
| -
|
| -void VideoScheduler::EncodeFrame(
|
| - scoped_ptr<webrtc::DesktopFrame> frame,
|
| - int64 latest_event_timestamp,
|
| - base::TimeTicks timestamp) {
|
| - DCHECK(encode_task_runner_->BelongsToCurrentThread());
|
| -
|
| - // If there is nothing to encode then send an empty packet.
|
| - if (!frame || frame->updated_region().is_empty()) {
|
| - capture_task_runner_->DeleteSoon(FROM_HERE, frame.release());
|
| - scoped_ptr<VideoPacket> packet(new VideoPacket());
|
| - packet->set_latest_event_timestamp(latest_event_timestamp);
|
| - network_task_runner_->PostTask(
|
| - FROM_HERE,
|
| - base::Bind(
|
| - &VideoScheduler::SendVideoPacket, this, base::Passed(&packet)));
|
| - return;
|
| - }
|
| -
|
| - scoped_ptr<VideoPacket> packet = encoder_->Encode(*frame);
|
| - packet->set_latest_event_timestamp(latest_event_timestamp);
|
| -
|
| - if (g_enable_timestamps) {
|
| - packet->set_timestamp(timestamp.ToInternalValue());
|
| - }
|
| -
|
| - // Destroy the frame before sending |packet| because SendVideoPacket() may
|
| - // trigger another frame to be captured, and the screen capturer expects the
|
| - // old frame to be freed by then.
|
| - frame.reset();
|
| -
|
| - scheduler_.RecordEncodeTime(
|
| - base::TimeDelta::FromMilliseconds(packet->encode_time_ms()));
|
| - network_task_runner_->PostTask(
|
| - FROM_HERE, base::Bind(&VideoScheduler::SendVideoPacket, this,
|
| - base::Passed(&packet)));
|
| -}
|
| -
|
| } // namespace remoting
|
|
|