Index: content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc |
diff --git a/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc b/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc |
index cb1a832fbfe25a29684623266dd12c6fa5c3168a..c2eb3459614127d69e3a132ff015c2ee9bbf8101 100644 |
--- a/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc |
+++ b/content/renderer/media/webrtc/webrtc_video_capturer_adapter.cc |
@@ -71,6 +71,17 @@ class WebRtcVideoCapturerAdapter::MediaVideoFrameFactory |
DCHECK(input_frame == &captured_frame_); |
DCHECK(frame_.get()); |
+ const int64_t timestamp_ns = frame_->timestamp().InMicroseconds() * |
+ base::Time::kNanosecondsPerMicrosecond; |
+ |
+ // Return |frame_| directly if it is texture backed, because there is no |
+ // cropping support for texture yet. See https://crbug.com/503653. |
+ if (frame_->HasTextures()) { |
+ return new cricket::WebRtcVideoFrame( |
+ new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(frame_), |
+ captured_frame_.elapsed_time, timestamp_ns); |
+ } |
+ |
// Create a centered cropped visible rect that preservers aspect ratio for |
// cropped natural size. |
gfx::Rect visible_rect = frame_->visible_rect(); |
@@ -84,9 +95,6 @@ class WebRtcVideoCapturerAdapter::MediaVideoFrameFactory |
video_frame->AddDestructionObserver( |
base::Bind(&ReleaseOriginalFrame, frame_)); |
- const int64_t timestamp_ns = frame_->timestamp().InMicroseconds() * |
- base::Time::kNanosecondsPerMicrosecond; |
- |
// If no scaling is needed, return a wrapped version of |frame_| directly. |
if (video_frame->natural_size() == video_frame->visible_rect().size()) { |
return new cricket::WebRtcVideoFrame( |
@@ -208,11 +216,11 @@ void WebRtcVideoCapturerAdapter::OnFrameCaptured( |
const scoped_refptr<media::VideoFrame>& frame) { |
DCHECK(thread_checker_.CalledOnValidThread()); |
TRACE_EVENT0("video", "WebRtcVideoCapturerAdapter::OnFrameCaptured"); |
- if (!(media::VideoFrame::I420 == frame->format() || |
- media::VideoFrame::YV12 == frame->format())) { |
- // Some types of sources support textures as output. Since connecting |
- // sources and sinks do not check the format, we need to just ignore |
- // formats that we can not handle. |
+ if (!((frame->IsMappable() && (frame->format() == media::VideoFrame::I420 || |
+ frame->format() == media::VideoFrame::YV12)) || |
+ frame->HasTextures())) { |
+ // Since connecting sources and sinks do not check the format, we need to |
+ // just ignore formats that we can not handle. |
NOTREACHED(); |
return; |
} |