| Index: remoting/protocol/webrtc_video_capturer_adapter.cc
|
| diff --git a/remoting/protocol/webrtc_video_capturer_adapter.cc b/remoting/protocol/webrtc_video_capturer_adapter.cc
|
| index 5952183711d2f7058cc96809fe9a4b594c0eede0..a9ee54e8408214144701f24b00613a7e29bc6564 100644
|
| --- a/remoting/protocol/webrtc_video_capturer_adapter.cc
|
| +++ b/remoting/protocol/webrtc_video_capturer_adapter.cc
|
| @@ -6,9 +6,12 @@
|
|
|
| #include <utility>
|
|
|
| +#include "third_party/libjingle/source/talk/media/webrtc/webrtcvideoframe.h"
|
| +#include "third_party/libyuv/include/libyuv/convert.h"
|
| #include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
|
|
|
| namespace remoting {
|
| +namespace protocol {
|
|
|
| // Number of frames to be captured per second.
|
| const int kFramesPerSec = 30;
|
| @@ -18,8 +21,6 @@ WebrtcVideoCapturerAdapter::WebrtcVideoCapturerAdapter(
|
| : desktop_capturer_(std::move(capturer)) {
|
| DCHECK(desktop_capturer_);
|
|
|
| - thread_checker_.DetachFromThread();
|
| -
|
| // Disable video adaptation since we don't intend to use it.
|
| set_enable_video_adapter(false);
|
| }
|
| @@ -28,50 +29,14 @@ WebrtcVideoCapturerAdapter::~WebrtcVideoCapturerAdapter() {
|
| DCHECK(!capture_timer_);
|
| }
|
|
|
| -webrtc::SharedMemory* WebrtcVideoCapturerAdapter::CreateSharedMemory(
|
| - size_t size) {
|
| - return nullptr;
|
| -}
|
| -
|
| -void WebrtcVideoCapturerAdapter::OnCaptureCompleted(
|
| - webrtc::DesktopFrame* frame) {
|
| - scoped_ptr<webrtc::DesktopFrame> owned_frame(frame);
|
| -
|
| - // Drop the owned_frame if there were no changes.
|
| - if (!owned_frame || owned_frame->updated_region().is_empty()) {
|
| - owned_frame.reset();
|
| - return;
|
| - }
|
| -
|
| - // Convert the webrtc::DesktopFrame to a cricket::CapturedFrame.
|
| - cricket::CapturedFrame captured_frame;
|
| - captured_frame.width = owned_frame->size().width();
|
| - captured_frame.height = owned_frame->size().height();
|
| - base::TimeTicks current_time = base::TimeTicks::Now();
|
| - captured_frame.time_stamp =
|
| - current_time.ToInternalValue() * base::Time::kNanosecondsPerMicrosecond;
|
| - captured_frame.data = owned_frame->data();
|
| -
|
| - // The data_size attribute must be set. If multiple formats are supported,
|
| - // this should be set appropriately for each one.
|
| - captured_frame.data_size =
|
| - (captured_frame.width * webrtc::DesktopFrame::kBytesPerPixel * 8 + 7) /
|
| - 8 * captured_frame.height;
|
| - captured_frame.fourcc = cricket::FOURCC_ARGB;
|
| -
|
| - SignalFrameCaptured(this, &captured_frame);
|
| -}
|
| -
|
| bool WebrtcVideoCapturerAdapter::GetBestCaptureFormat(
|
| const cricket::VideoFormat& desired,
|
| cricket::VideoFormat* best_format) {
|
| DCHECK(thread_checker_.CalledOnValidThread());
|
|
|
| - // For now, just used the desired width and height.
|
| - best_format->width = desired.width;
|
| - best_format->height = desired.height;
|
| - best_format->fourcc = cricket::FOURCC_ARGB;
|
| - best_format->interval = FPS_TO_INTERVAL(kFramesPerSec);
|
| + // The |capture_format| passed to Start() is always ignored, so simply
|
| + // copy |desired| into |best_format|.
|
| + *best_format = desired;
|
| return true;
|
| }
|
|
|
| @@ -79,26 +44,17 @@ cricket::CaptureState WebrtcVideoCapturerAdapter::Start(
|
| const cricket::VideoFormat& capture_format) {
|
| DCHECK(thread_checker_.CalledOnValidThread());
|
| DCHECK(!capture_timer_);
|
| - DCHECK_EQ(capture_format.fourcc,
|
| - (static_cast<uint32_t>(cricket::FOURCC_ARGB)));
|
|
|
| if (!desktop_capturer_) {
|
| VLOG(1) << "WebrtcVideoCapturerAdapter failed to start.";
|
| return cricket::CS_FAILED;
|
| }
|
|
|
| - // This is required to tell the cricket::VideoCapturer base class what the
|
| - // capture format will be.
|
| - SetCaptureFormat(&capture_format);
|
| -
|
| desktop_capturer_->Start(this);
|
|
|
| capture_timer_.reset(new base::RepeatingTimer());
|
| capture_timer_->Start(FROM_HERE,
|
| - base::TimeDelta::FromMicroseconds(
|
| - GetCaptureFormat()->interval /
|
| - (base::Time::kNanosecondsPerMicrosecond)),
|
| - this,
|
| + base::TimeDelta::FromSeconds(1) / kFramesPerSec, this,
|
| &WebrtcVideoCapturerAdapter::CaptureNextFrame);
|
|
|
| return cricket::CS_RUNNING;
|
| @@ -170,7 +126,6 @@ void WebrtcVideoCapturerAdapter::Stop() {
|
|
|
| bool WebrtcVideoCapturerAdapter::IsRunning() {
|
| DCHECK(thread_checker_.CalledOnValidThread());
|
| -
|
| return capture_timer_->IsRunning();
|
| }
|
|
|
| @@ -180,19 +135,86 @@ bool WebrtcVideoCapturerAdapter::IsScreencast() const {
|
|
|
| bool WebrtcVideoCapturerAdapter::GetPreferredFourccs(
|
| std::vector<uint32_t>* fourccs) {
|
| + return false;
|
| +}
|
| +
|
| +webrtc::SharedMemory* WebrtcVideoCapturerAdapter::CreateSharedMemory(
|
| + size_t size) {
|
| + return nullptr;
|
| +}
|
| +
|
| +void WebrtcVideoCapturerAdapter::OnCaptureCompleted(
|
| + webrtc::DesktopFrame* frame) {
|
| DCHECK(thread_checker_.CalledOnValidThread());
|
| - if (!fourccs)
|
| - return false;
|
| - fourccs->push_back(cricket::FOURCC_ARGB);
|
| - return true;
|
| +
|
| + DCHECK(capture_pending_);
|
| + capture_pending_ = false;
|
| +
|
| + scoped_ptr<webrtc::DesktopFrame> owned_frame(frame);
|
| +
|
| + // Drop the frame if there were no changes.
|
| + if (!owned_frame || owned_frame->updated_region().is_empty())
|
| + return;
|
| +
|
| + size_t width = frame->size().width();
|
| + size_t height = frame->size().height();
|
| + if (!yuv_frame_ || yuv_frame_->GetWidth() != width ||
|
| + yuv_frame_->GetHeight() != height) {
|
| + scoped_ptr<cricket::WebRtcVideoFrame> webrtc_frame(
|
| + new cricket::WebRtcVideoFrame());
|
| + webrtc_frame->InitToEmptyBuffer(width, height, 1, 1, 0);
|
| + yuv_frame_ = std::move(webrtc_frame);
|
| +
|
| + // Set updated_region so the whole frame is converted to YUV below.
|
| + frame->mutable_updated_region()->SetRect(
|
| + webrtc::DesktopRect::MakeWH(width, height));
|
| + }
|
| +
|
| + // TODO(sergeyu): This will copy the buffer if it's being used. Optimize it by
|
| + // keeping a queue of frames.
|
| + CHECK(yuv_frame_->MakeExclusive());
|
| +
|
| + yuv_frame_->SetTimeStamp(base::TimeTicks::Now().ToInternalValue() *
|
| + base::Time::kNanosecondsPerMicrosecond);
|
| +
|
| + for (webrtc::DesktopRegion::Iterator i(frame->updated_region()); !i.IsAtEnd();
|
| + i.Advance()) {
|
| + int left = i.rect().left();
|
| + int top = i.rect().top();
|
| + int width = i.rect().width();
|
| + int height = i.rect().height();
|
| +
|
| + if (left % 2 == 1) {
|
| + --left;
|
| + ++width;
|
| + }
|
| + if (top % 2 == 1) {
|
| + --top;
|
| + ++height;
|
| + }
|
| + libyuv::ARGBToI420(
|
| + frame->data() + frame->stride() * top +
|
| + left * webrtc::DesktopFrame::kBytesPerPixel,
|
| + frame->stride(),
|
| + yuv_frame_->GetYPlane() + yuv_frame_->GetYPitch() * top + left,
|
| + yuv_frame_->GetYPitch(),
|
| + yuv_frame_->GetUPlane() + yuv_frame_->GetUPitch() * top / 2 + left / 2,
|
| + yuv_frame_->GetUPitch(),
|
| + yuv_frame_->GetVPlane() + yuv_frame_->GetVPitch() * top / 2 + left / 2,
|
| + yuv_frame_->GetVPitch(), width, height);
|
| + }
|
| +
|
| + SignalVideoFrame(this, yuv_frame_.get());
|
| }
|
|
|
| void WebrtcVideoCapturerAdapter::CaptureNextFrame() {
|
| - // If we are paused, then don't capture.
|
| - if (!IsRunning())
|
| - return;
|
| + DCHECK(thread_checker_.CalledOnValidThread());
|
|
|
| + if (capture_pending_)
|
| + return;
|
| + capture_pending_ = true;
|
| desktop_capturer_->Capture(webrtc::DesktopRegion());
|
| }
|
|
|
| +} // namespace protocol
|
| } // namespace remoting
|
|
|