Index: content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc
diff --git a/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc b/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc
index 199cdff7538a42aafd901257dae07a394a9e974a..d7c639c3943f037f39dd21bb88042d6741bfbb3c 100644
--- a/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc
+++ b/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc
@@ -8,7 +8,7 @@
 #include "base/debug/trace_event.h"
 #include "base/memory/aligned_memory.h"
 #include "media/base/video_frame.h"
-#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/scale.h"
 
 namespace content {
@@ -77,14 +77,22 @@ bool WebRtcVideoCapturerAdapter::GetBestCaptureFormat(
 void WebRtcVideoCapturerAdapter::OnFrameCaptured(
     const scoped_refptr<media::VideoFrame>& frame) {
-  DCHECK(media::VideoFrame::I420 == frame->format() ||
-         media::VideoFrame::YV12 == frame->format());
+  TRACE_EVENT0("video", "WebRtcVideoCapturerAdapter::OnFrameCaptured");
+  if (!(media::VideoFrame::I420 == frame->format() ||
+        media::VideoFrame::YV12 == frame->format())) {
+    // Some types of sources support textures as output. Since connecting
+    // sources and sinks does not check the format, we just ignore formats
+    // that we cannot handle.
+    NOTREACHED();
+    return;
+  }
+
   if (first_frame_timestamp_ == media::kNoTimestamp())
     first_frame_timestamp_ = frame->timestamp();
 
   cricket::CapturedFrame captured_frame;
-  captured_frame.width = frame->visible_rect().width();
-  captured_frame.height = frame->visible_rect().height();
+  captured_frame.width = frame->natural_size().width();
+  captured_frame.height = frame->natural_size().height();
   // cricket::CapturedFrame time is in nanoseconds.
   captured_frame.elapsed_time =
       (frame->timestamp() - first_frame_timestamp_).InMicroseconds() *
@@ -97,10 +105,10 @@ void WebRtcVideoCapturerAdapter::OnFrameCaptured(
   // TODO(perkj):
   // Libjingle expects contiguous layout of image planes as input.
   // The only format where that is true in Chrome is I420 where the
-  // coded_size == visible_rect().size().
+  // coded_size == natural_size().
   if (frame->format() != media::VideoFrame::I420 ||
-      frame->coded_size() != frame->visible_rect().size()) {
-    // Cropping and or switching UV planes is needed.
+      frame->coded_size() != frame->natural_size()) {
+    // Cropping, scaling, and/or switching of the UV planes is needed.
     UpdateI420Buffer(frame);
     captured_frame.data = buffer_;
     captured_frame.data_size = buffer_size_;
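
For reference (not part of the patch): cricket::CapturedFrame consumes the
buffer filled in above as one contiguous I420 allocation, a full-size Y plane
followed by quarter-size U and V planes, which is what the buffer handling in
the next hunk is sized for. A minimal sketch of that layout math, with
illustrative names only:

  #include <cstddef>

  struct I420Layout {
    size_t u_offset;    // U plane starts right after the Y plane.
    size_t v_offset;    // V plane starts right after the U plane.
    size_t total_size;  // Roughly what VideoFrame::AllocationSize() computes
                        // for I420 (modulo any rounding it applies).
  };

  I420Layout ComputeI420Layout(int width, int height) {
    const size_t y_size = static_cast<size_t>(width) * height;
    const size_t chroma_size =
        static_cast<size_t>((width + 1) / 2) * ((height + 1) / 2);
    return {y_size, y_size + chroma_size, y_size + 2 * chroma_size};
  }
  // Example: 640x360 -> 230400 + 2 * 57600 = 345600 bytes in total.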
@@ -122,24 +130,24 @@ void WebRtcVideoCapturerAdapter::UpdateI420Buffer(
     const scoped_refptr<media::VideoFrame>& src) {
   const int src_width = src->coded_size().width();
   const int src_height = src->coded_size().height();
-  const int dst_width = src->visible_rect().width();
-  const int dst_height = src->visible_rect().height();
+  const int dst_width = src->natural_size().width();
+  const int dst_height = src->natural_size().height();
   DCHECK(src_width >= dst_width && src_height >= dst_height);
-  const int horiz_crop = src->visible_rect().x();
-  const int vert_crop = src->visible_rect().y();
+  const gfx::Rect& visible_rect = src->visible_rect();
   const uint8* src_y = src->data(media::VideoFrame::kYPlane) +
-      (src_width * vert_crop + horiz_crop);
-  const int center = (src_width + 1) / 2;
+      visible_rect.y() * src->stride(media::VideoFrame::kYPlane) +
+      visible_rect.x();
   const uint8* src_u = src->data(media::VideoFrame::kUPlane) +
-      (center * vert_crop + horiz_crop) / 2;
+      (visible_rect.y() / 2) * src->stride(media::VideoFrame::kUPlane) +
+      visible_rect.x() / 2;
   const uint8* src_v = src->data(media::VideoFrame::kVPlane) +
-      (center * vert_crop + horiz_crop) / 2;
+      (visible_rect.y() / 2) * src->stride(media::VideoFrame::kVPlane) +
+      visible_rect.x() / 2;
   const size_t dst_size =
-      media::VideoFrame::AllocationSize(src->format(),
-                                        src->visible_rect().size());
+      media::VideoFrame::AllocationSize(src->format(), src->natural_size());
 
   if (dst_size != buffer_size_) {
     base::AlignedFree(buffer_);
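
For reference (not part of the patch): the source pointers above implement the
crop purely by offsetting into each plane. The Y plane is full resolution, so
its offset uses visible_rect's x and y directly; the U and V planes are
subsampled 2x2, so both crop coordinates are halved for chroma. A small
standalone sketch of the same arithmetic, with hypothetical names:

  #include <cstdint>

  struct CropOrigin {
    const uint8_t* y;
    const uint8_t* u;
    const uint8_t* v;
  };

  // Returns pointers to the top-left pixel of the cropped region in each
  // plane of an I420 frame. Assumes an even-aligned crop origin.
  CropOrigin CropI420(const uint8_t* y, int y_stride,
                      const uint8_t* u, int u_stride,
                      const uint8_t* v, int v_stride,
                      int crop_x, int crop_y) {
    CropOrigin out;
    out.y = y + crop_y * y_stride + crop_x;            // full resolution
    out.u = u + (crop_y / 2) * u_stride + crop_x / 2;  // 2x2 subsampled
    out.v = v + (crop_y / 2) * v_stride + crop_x / 2;
    return out;
  }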
@@ -156,12 +164,14 @@ void WebRtcVideoCapturerAdapter::UpdateI420Buffer(
   const int dst_halfheight = (dst_height + 1) / 2;
   uint8* dst_v = dst_u + dst_halfwidth * dst_halfheight;
-  libyuv::I420Copy(src_y,
+  libyuv::I420Scale(src_y,
                    src->stride(media::VideoFrame::kYPlane),
                    src_u,
                    src->stride(media::VideoFrame::kUPlane),
                    src_v,
                    src->stride(media::VideoFrame::kVPlane),
+                   visible_rect.width(),
+                   visible_rect.height(),
                    dst_y,
                    dst_stride_y,
                    dst_u,
@@ -169,7 +179,8 @@ void WebRtcVideoCapturerAdapter::UpdateI420Buffer(
                    dst_v,
                    dst_halfwidth,
                    dst_width,
-                   dst_height);
+                   dst_height,
+                   libyuv::kFilterBilinear);
 }
 
 }  // namespace content
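
For reference (not part of the patch): a self-contained sketch of the
crop-then-scale path that UpdateI420Buffer now takes, calling
libyuv::I420Scale with kFilterBilinear as above. The frame size, crop
rectangle, and buffer handling below are made up for illustration.

  #include <cstdint>
  #include <vector>
  #include "libyuv/scale.h"

  int main() {
    // Source: a 640x480 I420 frame with a centered 320x240 visible rect.
    const int src_w = 640, src_h = 480;
    const int crop_x = 160, crop_y = 120, crop_w = 320, crop_h = 240;
    std::vector<uint8_t> src_y(src_w * src_h, 128);
    std::vector<uint8_t> src_u((src_w / 2) * (src_h / 2), 128);
    std::vector<uint8_t> src_v((src_w / 2) * (src_h / 2), 128);
    const int src_stride_y = src_w;
    const int src_stride_uv = src_w / 2;

    // Destination: a contiguous I420 buffer at the "natural" (output) size.
    const int dst_w = 160, dst_h = 120;
    const int dst_halfw = (dst_w + 1) / 2, dst_halfh = (dst_h + 1) / 2;
    std::vector<uint8_t> dst(dst_w * dst_h + 2 * dst_halfw * dst_halfh);
    uint8_t* dst_y = dst.data();
    uint8_t* dst_u = dst_y + dst_w * dst_h;
    uint8_t* dst_v = dst_u + dst_halfw * dst_halfh;

    // Crop by offsetting into the source planes (chroma is 2x2 subsampled),
    // then scale the cropped region down to dst_w x dst_h.
    libyuv::I420Scale(
        src_y.data() + crop_y * src_stride_y + crop_x, src_stride_y,
        src_u.data() + (crop_y / 2) * src_stride_uv + crop_x / 2, src_stride_uv,
        src_v.data() + (crop_y / 2) * src_stride_uv + crop_x / 2, src_stride_uv,
        crop_w, crop_h,
        dst_y, dst_w,
        dst_u, dst_halfw,
        dst_v, dst_halfw,
        dst_w, dst_h,
        libyuv::kFilterBilinear);
    return 0;
  }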