Index: content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc |
diff --git a/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc b/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc |
index eb56669612e19bb62aa1f7fa7885c98298d44234..603edb3780aed102025eddcdf2efc2a2511beed5 100644 |
--- a/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc |
+++ b/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc |
@@ -8,7 +8,7 @@ |
#include "base/debug/trace_event.h" |
#include "base/memory/aligned_memory.h" |
#include "media/base/video_frame.h" |
-#include "third_party/libyuv/include/libyuv/convert.h" |
+#include "third_party/libyuv/include/libyuv/scale.h" |
namespace content { |
@@ -84,14 +84,22 @@ bool WebRtcVideoCapturerAdapter::GetBestCaptureFormat( |
void WebRtcVideoCapturerAdapter::OnFrameCaptured( |
const scoped_refptr<media::VideoFrame>& frame) { |
DCHECK(thread_checker_.CalledOnValidThread()); |
- DCHECK(media::VideoFrame::I420 == frame->format() || |
- media::VideoFrame::YV12 == frame->format()); |
+ TRACE_EVENT0("video", "WebRtcVideoCapturerAdapter::OnFrameCaptured"); |
+ if (!(media::VideoFrame::I420 == frame->format() || |
+ media::VideoFrame::YV12 == frame->format())) { |
+ // Some types of sources support textures as output. Since connecting |
+ // sources and sinks does not check the format, we just ignore |
+ // formats that we cannot handle. |
+ NOTREACHED(); |
+ return; |
+ } |
+ |
if (first_frame_timestamp_ == media::kNoTimestamp()) |
first_frame_timestamp_ = frame->timestamp(); |
cricket::CapturedFrame captured_frame; |
- captured_frame.width = frame->visible_rect().width(); |
- captured_frame.height = frame->visible_rect().height(); |
+ captured_frame.width = frame->natural_size().width(); |
+ captured_frame.height = frame->natural_size().height(); |
// cricket::CapturedFrame time is in nanoseconds. |
captured_frame.elapsed_time = |
(frame->timestamp() - first_frame_timestamp_).InMicroseconds() * |
@@ -104,10 +112,10 @@ void WebRtcVideoCapturerAdapter::OnFrameCaptured( |
// TODO(perkj): |
// Libjingle expects contiguous layout of image planes as input. |
// The only format where that is true in Chrome is I420 where the |
- // coded_size == visible_rect().size(). |
+ // coded_size() == natural_size(). |
if (frame->format() != media::VideoFrame::I420 || |
- frame->coded_size() != frame->visible_rect().size()) { |
- // Cropping and or switching UV planes is needed. |
+ frame->coded_size() != frame->natural_size()) { |
+ // Cropping/scaling and/or switching UV planes is needed. |
UpdateI420Buffer(frame); |
captured_frame.data = buffer_; |
captured_frame.data_size = buffer_size_; |
@@ -128,26 +136,25 @@ void WebRtcVideoCapturerAdapter::OnFrameCaptured( |
void WebRtcVideoCapturerAdapter::UpdateI420Buffer( |
const scoped_refptr<media::VideoFrame>& src) { |
DCHECK(thread_checker_.CalledOnValidThread()); |
- const int src_width = src->coded_size().width(); |
- const int src_height = src->coded_size().height(); |
- const int dst_width = src->visible_rect().width(); |
- const int dst_height = src->visible_rect().height(); |
- DCHECK(src_width >= dst_width && src_height >= dst_height); |
+ const int dst_width = src->natural_size().width(); |
+ const int dst_height = src->natural_size().height(); |
+ DCHECK(src->visible_rect().width() >= dst_width && |
+ src->visible_rect().height() >= dst_height); |
- const int horiz_crop = src->visible_rect().x(); |
- const int vert_crop = src->visible_rect().y(); |
+ const gfx::Rect& visible_rect = src->visible_rect(); |
const uint8* src_y = src->data(media::VideoFrame::kYPlane) + |
- (src_width * vert_crop + horiz_crop); |
- const int center = (src_width + 1) / 2; |
+ visible_rect.y() * src->stride(media::VideoFrame::kYPlane) + |
+ visible_rect.x(); |
const uint8* src_u = src->data(media::VideoFrame::kUPlane) + |
- (center * vert_crop + horiz_crop) / 2; |
+ visible_rect.y() / 2 * src->stride(media::VideoFrame::kUPlane) + |
+ visible_rect.x() / 2; |
const uint8* src_v = src->data(media::VideoFrame::kVPlane) + |
- (center * vert_crop + horiz_crop) / 2; |
+ visible_rect.y() / 2 * src->stride(media::VideoFrame::kVPlane) + |
+ visible_rect.x() / 2; |
const size_t dst_size = |
- media::VideoFrame::AllocationSize(src->format(), |
- src->visible_rect().size()); |
+ media::VideoFrame::AllocationSize(src->format(), src->natural_size()); |
if (dst_size != buffer_size_) { |
base::AlignedFree(buffer_); |
@@ -164,20 +171,23 @@ void WebRtcVideoCapturerAdapter::UpdateI420Buffer( |
const int dst_halfheight = (dst_height + 1) / 2; |
uint8* dst_v = dst_u + dst_halfwidth * dst_halfheight; |
- libyuv::I420Copy(src_y, |
- src->stride(media::VideoFrame::kYPlane), |
- src_u, |
- src->stride(media::VideoFrame::kUPlane), |
- src_v, |
- src->stride(media::VideoFrame::kVPlane), |
- dst_y, |
- dst_stride_y, |
- dst_u, |
- dst_halfwidth, |
- dst_v, |
- dst_halfwidth, |
- dst_width, |
- dst_height); |
+ libyuv::I420Scale(src_y, |
+ src->stride(media::VideoFrame::kYPlane), |
+ src_u, |
+ src->stride(media::VideoFrame::kUPlane), |
+ src_v, |
+ src->stride(media::VideoFrame::kVPlane), |
+ visible_rect.width(), |
+ visible_rect.height(), |
+ dst_y, |
+ dst_stride_y, |
+ dst_u, |
+ dst_halfwidth, |
+ dst_v, |
+ dst_halfwidth, |
+ dst_width, |
+ dst_height, |
+ libyuv::kFilterBilinear); |
} |
} // namespace content |