Index: content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc |
diff --git a/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc b/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc |
index 603edb3780aed102025eddcdf2efc2a2511beed5..41146c79f74da6e68b7611f4640591fc82eb1061 100644 |
--- a/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc |
+++ b/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc |
@@ -8,21 +8,214 @@ |
#include "base/debug/trace_event.h" |
#include "base/memory/aligned_memory.h" |
#include "media/base/video_frame.h" |
-#include "third_party/libyuv/include/libyuv/scale.h" |
+#include "third_party/libjingle/source/talk/media/base/videoframe.h" |
+#include "third_party/libjingle/source/talk/media/webrtc/webrtcvideoframe.h" |
+#include "third_party/libyuv/include/libyuv/convert_from.h" |
+ |
+// Empty method used for keeping a reference to the original media::VideoFrame. |
+// The reference to |frame| is kept in the closure that calls this method. |
+void ReleaseOriginalFrame(const scoped_refptr<media::VideoFrame>& frame) { |
+} |
+ |
+// Thin map between an existing media::VideoFrame and cricket::VideoFrame to |
+// avoid premature deep copies. |
+// This implementation is only safe to use in a const context and should never |
+// be written to. |
+class VideoFrameWrapper : public cricket::VideoFrame { |
tommi (sloooow) - chröme
2014/09/25 20:06:48
Assuming this is a single threaded class, can we a
magjed_chromium
2014/09/25 20:52:08
Yes, will fix.
|
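The truncated question above does not say exactly what was requested; as a minimal sketch only, one common Chromium pattern for a class that must stay on a single thread is a base::ThreadChecker member with a DCHECK in the accessors — the same mechanism WebRtcVideoCapturerAdapter itself already uses further down in this file.

    #include "base/logging.h"
    #include "base/threading/thread_checker.h"

    // Hypothetical stand-in class, not part of this CL: shows the guard only.
    class SingleThreadedWrapperSketch {
     public:
      int GetWidth() const {
        // Fires in debug builds if a second thread ever calls in.
        DCHECK(thread_checker_.CalledOnValidThread());
        return width_;
      }

     private:
      base::ThreadChecker thread_checker_;
      int width_;
    };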
+ public: |
+ VideoFrameWrapper(const scoped_refptr<media::VideoFrame>& frame, |
+ int64 elapsed_time) |
+ : frame_(media::VideoFrame::WrapVideoFrame( |
+ frame, |
+ frame->visible_rect(), |
+ frame->natural_size(), |
+ base::Bind(&ReleaseOriginalFrame, frame))), |
+ elapsed_time_(elapsed_time) {} |
+ |
+ virtual VideoFrame* Copy() const OVERRIDE { |
+ return new VideoFrameWrapper(frame_, elapsed_time_); |
+ } |
+ |
+ virtual size_t GetWidth() const OVERRIDE { |
+ return static_cast<size_t>(frame_->visible_rect().width()); |
+ } |
+ |
+ virtual size_t GetHeight() const OVERRIDE { |
+ return static_cast<size_t>(frame_->visible_rect().height()); |
+ } |
+ |
+ virtual const uint8* GetYPlane() const OVERRIDE { |
+ return frame_->visible_data(media::VideoFrame::kYPlane); |
+ } |
+ |
+ virtual const uint8* GetUPlane() const OVERRIDE { |
+ return frame_->visible_data(media::VideoFrame::kUPlane); |
+ } |
+ |
+ virtual const uint8* GetVPlane() const OVERRIDE { |
+ return frame_->visible_data(media::VideoFrame::kVPlane); |
+ } |
+ |
+ virtual uint8* GetYPlane() OVERRIDE { |
+ return frame_->visible_data(media::VideoFrame::kYPlane); |
+ } |
+ |
+ virtual uint8* GetUPlane() OVERRIDE { |
+ return frame_->visible_data(media::VideoFrame::kUPlane); |
+ } |
+ |
+ virtual uint8* GetVPlane() OVERRIDE { |
+ return frame_->visible_data(media::VideoFrame::kVPlane); |
+ } |
+ |
+ virtual int32 GetYPitch() const OVERRIDE { |
+ return frame_->stride(media::VideoFrame::kYPlane); |
+ } |
+ |
+ virtual int32 GetUPitch() const OVERRIDE { |
+ return frame_->stride(media::VideoFrame::kUPlane); |
+ } |
+ |
+ virtual int32 GetVPitch() const OVERRIDE { |
+ return frame_->stride(media::VideoFrame::kVPlane); |
+ } |
+ |
+ virtual void* GetNativeHandle() const OVERRIDE { return NULL; } |
+ virtual size_t GetPixelWidth() const OVERRIDE { return 1; } |
+ virtual size_t GetPixelHeight() const OVERRIDE { return 1; } |
+ |
+ virtual int64 GetElapsedTime() const OVERRIDE { return elapsed_time_; } |
+ |
+ virtual int64 GetTimeStamp() const OVERRIDE { |
+ return frame_->timestamp().InMicroseconds() * |
+ base::Time::kNanosecondsPerMicrosecond; |
+ } |
+ |
+ virtual void SetElapsedTime(int64 elapsed_time) OVERRIDE { |
+ elapsed_time_ = elapsed_time; |
+ } |
+ |
+ virtual void SetTimeStamp(int64 time_stamp) OVERRIDE { |
+ // Round to closest microsecond. |
+ frame_->set_timestamp(base::TimeDelta::FromMicroseconds( |
+ (time_stamp + base::Time::kNanosecondsPerMicrosecond / 2) / |
+ base::Time::kNanosecondsPerMicrosecond)); |
+ } |
+ |
+ virtual int GetRotation() const OVERRIDE { return 0; } |
+ |
+ // TODO(magjed): Refactor into base class |
+ virtual size_t ConvertToRgbBuffer(uint32 to_fourcc, |
+ uint8* buffer, |
+ size_t size, |
+ int stride_rgb) const OVERRIDE { |
+ const size_t needed = std::abs(stride_rgb) * GetHeight(); |
+ if (size < needed) { |
+ DLOG(WARNING) << "RGB buffer is not large enough"; |
+ return needed; |
+ } |
+ |
+ if (libyuv::ConvertFromI420(GetYPlane(), |
+ GetYPitch(), |
+ GetUPlane(), |
+ GetUPitch(), |
+ GetVPlane(), |
+ GetVPitch(), |
+ buffer, |
+ stride_rgb, |
+ static_cast<int>(GetWidth()), |
+ static_cast<int>(GetHeight()), |
+ to_fourcc)) { |
+ DLOG(ERROR) << "RGB type not supported: " << to_fourcc; |
+ return 0; // 0 indicates error |
+ } |
+ return needed; |
+ } |
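As an illustration of the contract above (not code from the CL), a hedged usage sketch: cricket::FOURCC_ARGB is 4 bytes per pixel, so the stride and the buffer size the caller must supply follow directly from the frame dimensions.

    // Hypothetical caller of ConvertToRgbBuffer(); |wrapper| is assumed to be
    // a const VideoFrameWrapper created elsewhere.
    const int stride_argb = static_cast<int>(wrapper.GetWidth()) * 4;
    const size_t argb_size = stride_argb * wrapper.GetHeight();
    std::vector<uint8> argb(argb_size);
    const size_t written = wrapper.ConvertToRgbBuffer(
        cricket::FOURCC_ARGB, &argb[0], argb_size, stride_argb);
    DCHECK_EQ(argb_size, written);  // On success the method returns |needed|.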
+ |
+ // The rest of the public methods are NOTIMPLEMENTED. |
+ virtual bool InitToBlack(int w, |
+ int h, |
+ size_t pixel_width, |
+ size_t pixel_height, |
+ int64 elapsed_time, |
+ int64 time_stamp) OVERRIDE { |
+ NOTIMPLEMENTED(); |
+ return false; |
+ } |
+ |
+ virtual bool Reset(uint32 fourcc, |
+ int w, |
+ int h, |
+ int dw, |
+ int dh, |
+ uint8* sample, |
+ size_t sample_size, |
+ size_t pixel_width, |
+ size_t pixel_height, |
+ int64 elapsed_time, |
+ int64 time_stamp, |
+ int rotation) OVERRIDE { |
+ NOTIMPLEMENTED(); |
+ return false; |
+ } |
+ |
+ virtual bool MakeExclusive() OVERRIDE { |
+ NOTIMPLEMENTED(); |
+ return false; |
+ } |
+ |
+ virtual size_t CopyToBuffer(uint8* buffer, size_t size) const OVERRIDE { |
+ NOTIMPLEMENTED(); |
+ return 0; |
+ } |
+ |
+ protected: |
+ // TODO(magjed): Refactor as a static method in WebRtcVideoFrame |
+ virtual VideoFrame* CreateEmptyFrame(int w, |
+ int h, |
+ size_t pixel_width, |
+ size_t pixel_height, |
+ int64 elapsed_time, |
+ int64 time_stamp) const OVERRIDE { |
+ VideoFrame* frame = new cricket::WebRtcVideoFrame(); |
+ frame->InitToBlack( |
+ w, h, pixel_width, pixel_height, elapsed_time, time_stamp); |
+ return frame; |
+ } |
+ |
+ private: |
+ scoped_refptr<media::VideoFrame> frame_; |
+ int64 elapsed_time_; |
+}; |
+ |
+class DummyFactory : public cricket::VideoFrameFactory { |
tommi (sloooow) - chröme
2014/09/25 20:06:48
can you add documentation for what context this cl
magjed_chromium
2014/09/25 20:52:08
The context for VideoFrameWrapper and DummyFactory
|
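The truncated reply refers to the usage further down in this file; condensed, the per-frame flow in OnFrameCaptured() below is:

    // Condensed from OnFrameCaptured() further down in this diff: swap in a
    // factory that already holds the media::VideoFrame, then signal a
    // header-only CapturedFrame. libjingle asks the factory for the real
    // frame, which ends up being a VideoFrameWrapper around |frame|.
    set_frame_factory(new DummyFactory(frame, elapsed_time));
    SignalFrameCaptured(this, &dummy_captured_frame);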
+ public: |
+ DummyFactory(const scoped_refptr<media::VideoFrame>& frame, |
+ int64_t elapsed_time) |
+ : frame_(frame), elapsed_time_(elapsed_time) {} |
+ |
+ virtual cricket::VideoFrame* CreateAliasedFrame(const cricket::CapturedFrame*, |
+ int, |
+ int) const OVERRIDE { |
+ // Create a shallow cricket::VideoFrame wrapper around the |
+ // media::VideoFrame. The caller has ownership over the returned frame. |
+ return new VideoFrameWrapper(frame_, elapsed_time_); |
+ } |
+ |
+ private: |
+ const scoped_refptr<media::VideoFrame>& frame_; |
tommi (sloooow) - chröme
2014/09/25 20:06:48
is the reference intentional?
I'd think it'd be s
magjed_chromium
2014/09/25 20:52:08
|
+ const int64_t elapsed_time_; |
+}; |
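As agreed in the exchange above, |frame_| is meant to hold the scoped_refptr by value rather than as a const reference (the reference would dangle once the caller's refptr goes out of scope); a sketch of the presumably intended declarations:

     private:
      // Holding the refptr by value keeps the media::VideoFrame alive for as
      // long as the factory (and any wrapper it hands out) needs it.
      const scoped_refptr<media::VideoFrame> frame_;
      const int64_t elapsed_time_;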
namespace content { |
WebRtcVideoCapturerAdapter::WebRtcVideoCapturerAdapter(bool is_screencast) |
- : is_screencast_(is_screencast), |
- running_(false), |
- buffer_(NULL), |
- buffer_size_(0) { |
+ : is_screencast_(is_screencast), running_(false) { |
thread_checker_.DetachFromThread(); |
} |
WebRtcVideoCapturerAdapter::~WebRtcVideoCapturerAdapter() { |
DVLOG(3) << " WebRtcVideoCapturerAdapter::dtor"; |
- base::AlignedFree(buffer_); |
} |
cricket::CaptureState WebRtcVideoCapturerAdapter::Start( |
@@ -53,10 +246,9 @@ bool WebRtcVideoCapturerAdapter::IsRunning() { |
bool WebRtcVideoCapturerAdapter::GetPreferredFourccs( |
std::vector<uint32>* fourccs) { |
DCHECK(thread_checker_.CalledOnValidThread()); |
- if (!fourccs) |
- return false; |
- fourccs->push_back(cricket::FOURCC_I420); |
- return true; |
+ if (fourccs) |
tommi (sloooow) - chröme
2014/09/25 20:06:48
Does it make sense to call this function with a NU
magjed_chromium
2014/09/25 20:52:08
We only have one real call to this function in Chr
|
+ fourccs->push_back(cricket::FOURCC_I420); |
+ return fourccs; |
tommi (sloooow) - chröme
2014/09/25 20:06:48
hm... does this compile? :)
I'm guessing you'd wa
magjed_chromium
2014/09/25 20:52:08
Yes, sorry about that. It compiled locally.
|
} |
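On the `return fourccs;` question above: it does compile, because the pointer converts implicitly to bool, but the intent reads more clearly with an explicit comparison. A sketch of the presumably intended version:

    bool WebRtcVideoCapturerAdapter::GetPreferredFourccs(
        std::vector<uint32>* fourccs) {
      DCHECK(thread_checker_.CalledOnValidThread());
      if (fourccs)
        fourccs->push_back(cricket::FOURCC_I420);
      return fourccs != NULL;  // Explicit bool instead of pointer decay.
    }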
bool WebRtcVideoCapturerAdapter::IsScreencast() const { |
@@ -97,97 +289,33 @@ void WebRtcVideoCapturerAdapter::OnFrameCaptured( |
if (first_frame_timestamp_ == media::kNoTimestamp()) |
first_frame_timestamp_ = frame->timestamp(); |
- cricket::CapturedFrame captured_frame; |
- captured_frame.width = frame->natural_size().width(); |
- captured_frame.height = frame->natural_size().height(); |
- // cricket::CapturedFrame time is in nanoseconds. |
- captured_frame.elapsed_time = |
+ const int64 elapsed_time = |
(frame->timestamp() - first_frame_timestamp_).InMicroseconds() * |
base::Time::kNanosecondsPerMicrosecond; |
- captured_frame.time_stamp = frame->timestamp().InMicroseconds() * |
- base::Time::kNanosecondsPerMicrosecond; |
- captured_frame.pixel_height = 1; |
- captured_frame.pixel_width = 1; |
- |
- // TODO(perkj): |
- // Libjingle expects contiguous layout of image planes as input. |
- // The only format where that is true in Chrome is I420 where the |
- // coded_size == natural_size(). |
- if (frame->format() != media::VideoFrame::I420 || |
- frame->coded_size() != frame->natural_size()) { |
- // Cropping / Scaling and or switching UV planes is needed. |
- UpdateI420Buffer(frame); |
- captured_frame.data = buffer_; |
- captured_frame.data_size = buffer_size_; |
- captured_frame.fourcc = cricket::FOURCC_I420; |
- } else { |
- captured_frame.fourcc = media::VideoFrame::I420 == frame->format() ? |
- cricket::FOURCC_I420 : cricket::FOURCC_YV12; |
- captured_frame.data = frame->data(0); |
- captured_frame.data_size = |
- media::VideoFrame::AllocationSize(frame->format(), frame->coded_size()); |
- } |
+ |
+ // Create a CapturedFrame that only contains header information, not the |
+ // actual pixel data. The purpose of this charade is to satisfy the interface |
+ // of cricket::VideoCapturer. We will inject the real cricket::VideoFrame with |
+ // a custom VideoFrameFactory instead. |
+ cricket::CapturedFrame dummy_captured_frame; |
+ dummy_captured_frame.width = frame->visible_rect().width(); |
+ dummy_captured_frame.height = frame->visible_rect().height(); |
+ dummy_captured_frame.elapsed_time = elapsed_time; |
+ dummy_captured_frame.time_stamp = frame->timestamp().InMicroseconds() * |
+ base::Time::kNanosecondsPerMicrosecond; |
+ dummy_captured_frame.pixel_height = 1; |
+ dummy_captured_frame.pixel_width = 1; |
+ dummy_captured_frame.rotation = 0; |
+ dummy_captured_frame.data = NULL; |
+ dummy_captured_frame.data_size = cricket::CapturedFrame::kUnknownDataSize; |
+ dummy_captured_frame.fourcc = cricket::FOURCC_ANY; |
+ |
+ // Inject the frame via the VideoFrameFactory. |
+ set_frame_factory(new DummyFactory(frame, elapsed_time)); |
tommi (sloooow) - chröme
2014/09/25 20:06:48
does set_frame_factory accept a scoped_ptr? if not
magjed_chromium
2014/09/25 20:52:08
No, it does not accept a scoped_ptr. The definitio
|
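The ownership question above is left open by the truncated reply; purely as a sketch, and only under the assumption that set_frame_factory() does not take ownership of the raw pointer, the adapter could keep the factory alive itself:

    // Hypothetical member in WebRtcVideoCapturerAdapter (an assumption, not
    // part of this CL):
    //   scoped_ptr<cricket::VideoFrameFactory> frame_factory_;
    //
    // Then, per frame:
    frame_factory_.reset(new DummyFactory(frame, elapsed_time));
    set_frame_factory(frame_factory_.get());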
// This signals to libJingle that a new VideoFrame is available. |
// libJingle makes no assumptions about which thread this signal comes from. |
- SignalFrameCaptured(this, &captured_frame); |
-} |
- |
-void WebRtcVideoCapturerAdapter::UpdateI420Buffer( |
- const scoped_refptr<media::VideoFrame>& src) { |
- DCHECK(thread_checker_.CalledOnValidThread()); |
- const int dst_width = src->natural_size().width(); |
- const int dst_height = src->natural_size().height(); |
- DCHECK(src->visible_rect().width() >= dst_width && |
- src->visible_rect().height() >= dst_height); |
- |
- const gfx::Rect& visible_rect = src->visible_rect(); |
- |
- const uint8* src_y = src->data(media::VideoFrame::kYPlane) + |
- visible_rect.y() * src->stride(media::VideoFrame::kYPlane) + |
- visible_rect.x(); |
- const uint8* src_u = src->data(media::VideoFrame::kUPlane) + |
- visible_rect.y() / 2 * src->stride(media::VideoFrame::kUPlane) + |
- visible_rect.x() / 2; |
- const uint8* src_v = src->data(media::VideoFrame::kVPlane) + |
- visible_rect.y() / 2 * src->stride(media::VideoFrame::kVPlane) + |
- visible_rect.x() / 2; |
- |
- const size_t dst_size = |
- media::VideoFrame::AllocationSize(src->format(), src->natural_size()); |
- |
- if (dst_size != buffer_size_) { |
- base::AlignedFree(buffer_); |
- buffer_ = reinterpret_cast<uint8*>( |
- base::AlignedAlloc(dst_size + media::VideoFrame::kFrameSizePadding, |
- media::VideoFrame::kFrameAddressAlignment)); |
- buffer_size_ = dst_size; |
- } |
- |
- uint8* dst_y = buffer_; |
- const int dst_stride_y = dst_width; |
- uint8* dst_u = dst_y + dst_width * dst_height; |
- const int dst_halfwidth = (dst_width + 1) / 2; |
- const int dst_halfheight = (dst_height + 1) / 2; |
- uint8* dst_v = dst_u + dst_halfwidth * dst_halfheight; |
- |
- libyuv::I420Scale(src_y, |
magjed_chromium
2014/09/25 19:21:23
Let the CoordinatedVideoAdapter take care of this
|
- src->stride(media::VideoFrame::kYPlane), |
- src_u, |
- src->stride(media::VideoFrame::kUPlane), |
- src_v, |
- src->stride(media::VideoFrame::kVPlane), |
- visible_rect.width(), |
- visible_rect.height(), |
- dst_y, |
- dst_stride_y, |
- dst_u, |
- dst_halfwidth, |
- dst_v, |
- dst_halfwidth, |
- dst_width, |
- dst_height, |
- libyuv::kFilterBilinear); |
+ SignalFrameCaptured(this, &dummy_captured_frame); |
} |
} // namespace content |