OLD | NEW |
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h" | 5 #include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h" |
6 | 6 |
7 #include "base/bind.h" | 7 #include "base/bind.h" |
8 #include "base/memory/aligned_memory.h" | 8 #include "base/memory/aligned_memory.h" |
9 #include "base/trace_event/trace_event.h" | 9 #include "base/trace_event/trace_event.h" |
10 #include "content/renderer/media/webrtc/webrtc_video_frame_adapter.h" | 10 #include "content/renderer/media/webrtc/webrtc_video_frame_adapter.h" |
11 #include "media/base/timestamp_constants.h" | 11 #include "media/base/timestamp_constants.h" |
| 12 #include "media/base/video_frame.h" |
| 13 #include "media/base/video_frame_pool.h" |
12 #include "media/base/video_util.h" | 14 #include "media/base/video_util.h" |
13 #include "third_party/libyuv/include/libyuv/convert_from.h" | 15 #include "third_party/libyuv/include/libyuv/convert_from.h" |
14 #include "third_party/libyuv/include/libyuv/scale.h" | 16 #include "third_party/libyuv/include/libyuv/scale.h" |
15 #include "third_party/webrtc/common_video/include/video_frame_buffer.h" | 17 #include "third_party/webrtc/common_video/include/video_frame_buffer.h" |
16 #include "third_party/webrtc/common_video/rotation.h" | 18 #include "third_party/webrtc/common_video/rotation.h" |
| 19 #include "third_party/webrtc/media/base/videoframefactory.h" |
17 #include "third_party/webrtc/media/engine/webrtcvideoframe.h" | 20 #include "third_party/webrtc/media/engine/webrtcvideoframe.h" |
18 | 21 |
19 namespace content { | 22 namespace content { |
20 namespace { | 23 namespace { |
21 | 24 |
22 // Empty method used for keeping a reference to the original media::VideoFrame. | 25 // Empty method used for keeping a reference to the original media::VideoFrame. |
23 // The reference to |frame| is kept in the closure that calls this method. | 26 // The reference to |frame| is kept in the closure that calls this method. |
24 void ReleaseOriginalFrame(const scoped_refptr<media::VideoFrame>& frame) { | 27 void ReleaseOriginalFrame(const scoped_refptr<media::VideoFrame>& frame) { |
25 } | 28 } |
26 | 29 |
27 } // anonymous namespace | 30 } // anonymous namespace |
28 | 31 |
| 32 // A cricket::VideoFrameFactory for media::VideoFrame. The purpose of this |
| 33 // class is to avoid a premature frame copy. A media::VideoFrame is injected |
| 34 // with SetFrame, and converted into a cricket::VideoFrame with |
| 35 // CreateAliasedFrame. SetFrame should be called before CreateAliasedFrame |
| 36 // for every frame. |
| 37 class WebRtcVideoCapturerAdapter::MediaVideoFrameFactory |
| 38 : public cricket::VideoFrameFactory { |
| 39 public: |
| 40 void SetFrame(const scoped_refptr<media::VideoFrame>& frame) { |
| 41 DCHECK(frame.get()); |
| 42 // Create a CapturedFrame that only contains header information, not the |
| 43 // actual pixel data. |
| 44 captured_frame_.width = frame->natural_size().width(); |
| 45 captured_frame_.height = frame->natural_size().height(); |
| 46 captured_frame_.time_stamp = frame->timestamp().InMicroseconds() * |
| 47 base::Time::kNanosecondsPerMicrosecond; |
| 48 captured_frame_.pixel_height = 1; |
| 49 captured_frame_.pixel_width = 1; |
| 50 captured_frame_.rotation = webrtc::kVideoRotation_0; |
| 51 captured_frame_.data = NULL; |
| 52 captured_frame_.data_size = cricket::CapturedFrame::kUnknownDataSize; |
| 53 captured_frame_.fourcc = static_cast<uint32_t>(cricket::FOURCC_ANY); |
| 54 |
| 55 frame_ = frame; |
| 56 } |
| 57 |
| 58 void ReleaseFrame() { frame_ = NULL; } |
| 59 |
| 60 const cricket::CapturedFrame* GetCapturedFrame() const { |
| 61 return &captured_frame_; |
| 62 } |
| 63 |
| 64 cricket::VideoFrame* CreateAliasedFrame( |
| 65 const cricket::CapturedFrame* input_frame, |
| 66 int cropped_input_width, |
| 67 int cropped_input_height, |
| 68 int output_width, |
| 69 int output_height) const override { |
| 70 // Check that captured_frame is actually our frame. |
| 71 DCHECK(input_frame == &captured_frame_); |
| 72 DCHECK(frame_.get()); |
| 73 |
| 74 const int64_t timestamp_ns = frame_->timestamp().InMicroseconds() * |
| 75 base::Time::kNanosecondsPerMicrosecond; |
| 76 |
| 77 // Return |frame_| directly if it is texture backed, because there is no |
| 78 // cropping support for texture yet. See http://crbug/503653. |
| 79 // Return |frame_| directly if it is GpuMemoryBuffer backed, as we want to |
| 80 // keep the frame on native buffers. |
| 81 if (frame_->HasTextures() || |
| 82 frame_->storage_type() == |
| 83 media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS) { |
| 84 return new cricket::WebRtcVideoFrame( |
| 85 new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(frame_), |
| 86 timestamp_ns, webrtc::kVideoRotation_0); |
| 87 } |
| 88 |
| 89 // Create a centered cropped visible rect that preserves aspect ratio for |
| 90 // cropped natural size. |
| 91 gfx::Rect visible_rect = frame_->visible_rect(); |
| 92 visible_rect.ClampToCenteredSize(gfx::Size( |
| 93 visible_rect.width() * cropped_input_width / input_frame->width, |
| 94 visible_rect.height() * cropped_input_height / input_frame->height)); |
| 95 |
| 96 const gfx::Size output_size(output_width, output_height); |
| 97 scoped_refptr<media::VideoFrame> video_frame = |
| 98 media::VideoFrame::WrapVideoFrame(frame_, frame_->format(), |
| 99 visible_rect, output_size); |
| 100 if (!video_frame) |
| 101 return nullptr; |
| 102 video_frame->AddDestructionObserver( |
| 103 base::Bind(&ReleaseOriginalFrame, frame_)); |
| 104 |
| 105 // If no scaling is needed, return a wrapped version of |frame_| directly. |
| 106 if (video_frame->natural_size() == video_frame->visible_rect().size()) { |
| 107 return new cricket::WebRtcVideoFrame( |
| 108 new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(video_frame), |
| 109 timestamp_ns, webrtc::kVideoRotation_0); |
| 110 } |
| 111 |
| 112 // We need to scale the frame before we hand it over to cricket. |
| 113 scoped_refptr<media::VideoFrame> scaled_frame = |
| 114 scaled_frame_pool_.CreateFrame(media::PIXEL_FORMAT_I420, output_size, |
| 115 gfx::Rect(output_size), output_size, |
| 116 frame_->timestamp()); |
| 117 libyuv::I420Scale(video_frame->visible_data(media::VideoFrame::kYPlane), |
| 118 video_frame->stride(media::VideoFrame::kYPlane), |
| 119 video_frame->visible_data(media::VideoFrame::kUPlane), |
| 120 video_frame->stride(media::VideoFrame::kUPlane), |
| 121 video_frame->visible_data(media::VideoFrame::kVPlane), |
| 122 video_frame->stride(media::VideoFrame::kVPlane), |
| 123 video_frame->visible_rect().width(), |
| 124 video_frame->visible_rect().height(), |
| 125 scaled_frame->data(media::VideoFrame::kYPlane), |
| 126 scaled_frame->stride(media::VideoFrame::kYPlane), |
| 127 scaled_frame->data(media::VideoFrame::kUPlane), |
| 128 scaled_frame->stride(media::VideoFrame::kUPlane), |
| 129 scaled_frame->data(media::VideoFrame::kVPlane), |
| 130 scaled_frame->stride(media::VideoFrame::kVPlane), |
| 131 output_width, output_height, libyuv::kFilterBilinear); |
| 132 return new cricket::WebRtcVideoFrame( |
| 133 new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(scaled_frame), |
| 134 timestamp_ns, webrtc::kVideoRotation_0); |
| 135 } |
| 136 |
| 137 cricket::VideoFrame* CreateAliasedFrame( |
| 138 const cricket::CapturedFrame* input_frame, |
| 139 int output_width, |
| 140 int output_height) const override { |
| 141 return CreateAliasedFrame(input_frame, input_frame->width, |
| 142 input_frame->height, output_width, output_height); |
| 143 } |
| 144 |
| 145 private: |
| 146 scoped_refptr<media::VideoFrame> frame_; |
| 147 cricket::CapturedFrame captured_frame_; |
| 148 // This is used only if scaling is needed. |
| 149 mutable media::VideoFramePool scaled_frame_pool_; |
| 150 }; |
| 151 |
29 WebRtcVideoCapturerAdapter::WebRtcVideoCapturerAdapter(bool is_screencast) | 152 WebRtcVideoCapturerAdapter::WebRtcVideoCapturerAdapter(bool is_screencast) |
30 : is_screencast_(is_screencast), | 153 : is_screencast_(is_screencast), |
31 running_(false) { | 154 running_(false) { |
32 thread_checker_.DetachFromThread(); | 155 thread_checker_.DetachFromThread(); |
| 156 // The base class takes ownership of the frame factory. |
| 157 set_frame_factory(new MediaVideoFrameFactory); |
33 } | 158 } |
34 | 159 |
35 WebRtcVideoCapturerAdapter::~WebRtcVideoCapturerAdapter() { | 160 WebRtcVideoCapturerAdapter::~WebRtcVideoCapturerAdapter() { |
36 DVLOG(3) << " WebRtcVideoCapturerAdapter::dtor"; | 161 DVLOG(3) << " WebRtcVideoCapturerAdapter::dtor"; |
37 } | 162 } |
38 | 163 |
39 cricket::CaptureState WebRtcVideoCapturerAdapter::Start( | 164 cricket::CaptureState WebRtcVideoCapturerAdapter::Start( |
40 const cricket::VideoFormat& capture_format) { | 165 const cricket::VideoFormat& capture_format) { |
41 DCHECK(thread_checker_.CalledOnValidThread()); | 166 DCHECK(thread_checker_.CalledOnValidThread()); |
42 DCHECK(!running_); | 167 DCHECK(!running_); |
(...skipping 43 matching lines...) |
86 // just use what is provided. | 211 // just use what is provided. |
87 // Use the desired format as the best format. | 212 // Use the desired format as the best format. |
88 best_format->width = desired.width; | 213 best_format->width = desired.width; |
89 best_format->height = desired.height; | 214 best_format->height = desired.height; |
90 best_format->fourcc = cricket::FOURCC_I420; | 215 best_format->fourcc = cricket::FOURCC_I420; |
91 best_format->interval = desired.interval; | 216 best_format->interval = desired.interval; |
92 return true; | 217 return true; |
93 } | 218 } |
94 | 219 |
95 void WebRtcVideoCapturerAdapter::OnFrameCaptured( | 220 void WebRtcVideoCapturerAdapter::OnFrameCaptured( |
96 const scoped_refptr<media::VideoFrame>& input_frame) { | 221 const scoped_refptr<media::VideoFrame>& video_frame) { |
97 DCHECK(thread_checker_.CalledOnValidThread()); | 222 DCHECK(thread_checker_.CalledOnValidThread()); |
98 TRACE_EVENT0("video", "WebRtcVideoCapturerAdapter::OnFrameCaptured"); | 223 TRACE_EVENT0("video", "WebRtcVideoCapturerAdapter::OnFrameCaptured"); |
99 if (!(input_frame->IsMappable() && | 224 if (!(video_frame->IsMappable() && |
100 (input_frame->format() == media::PIXEL_FORMAT_I420 || | 225 (video_frame->format() == media::PIXEL_FORMAT_I420 || |
101 input_frame->format() == media::PIXEL_FORMAT_YV12 || | 226 video_frame->format() == media::PIXEL_FORMAT_YV12 || |
102 input_frame->format() == media::PIXEL_FORMAT_YV12A))) { | 227 video_frame->format() == media::PIXEL_FORMAT_YV12A))) { |
103 // Since connecting sources and sinks does not check the format, we need to | 228 // Since connecting sources and sinks does not check the format, we need to |
104 // just ignore formats that we cannot handle. | 229 // just ignore formats that we cannot handle. |
105 NOTREACHED(); | 230 NOTREACHED(); |
106 return; | 231 return; |
107 } | 232 } |
108 scoped_refptr<media::VideoFrame> frame = input_frame; | 233 scoped_refptr<media::VideoFrame> frame = video_frame; |
109 // Drop alpha channel since we do not support it yet. | 234 // Drop alpha channel since we do not support it yet. |
110 if (frame->format() == media::PIXEL_FORMAT_YV12A) | 235 if (frame->format() == media::PIXEL_FORMAT_YV12A) |
111 frame = media::WrapAsI420VideoFrame(input_frame); | 236 frame = media::WrapAsI420VideoFrame(video_frame); |
112 | 237 |
113 const int orig_width = frame->natural_size().width(); | 238 // Inject the frame via the VideoFrameFactory of the base class. |
114 const int orig_height = frame->natural_size().height(); | 239 MediaVideoFrameFactory* media_video_frame_factory = |
115 int adapted_width; | 240 reinterpret_cast<MediaVideoFrameFactory*>(frame_factory()); |
116 int adapted_height; | 241 media_video_frame_factory->SetFrame(frame); |
117 // The VideoAdapter is only used for cpu-adaptation downscaling, no | |
118 // aspect changes. So we ignore these crop-related outputs. | |
119 int crop_width; | |
120 int crop_height; | |
121 int crop_x; | |
122 int crop_y; | |
123 int64_t translated_camera_time_us; | |
124 | 242 |
125 if (!AdaptFrame(orig_width, orig_height, | 243 // This signals to libJingle that a new VideoFrame is available. |
126 frame->timestamp().InMicroseconds(), | 244 SignalFrameCaptured(this, media_video_frame_factory->GetCapturedFrame()); |
127 rtc::TimeMicros(), | |
128 &adapted_width, &adapted_height, | |
129 &crop_width, &crop_height, &crop_x, &crop_y, | |
130 &translated_camera_time_us)) { | |
131 return; | |
132 } | |
133 | 245 |
134 // Return |frame| directly if it is texture backed, because there is no | 246 media_video_frame_factory->ReleaseFrame(); // Release the frame ASAP. |
135 // cropping support for texture yet. See http://crbug/503653. | |
136 // Return |frame| directly if it is GpuMemoryBuffer backed, as we want to | |
137 // keep the frame on native buffers. | |
138 if (frame->HasTextures() || | |
139 frame->storage_type() == | |
140 media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS) { | |
141 OnFrame(cricket::WebRtcVideoFrame( | |
142 new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(frame), | |
143 webrtc::kVideoRotation_0, translated_camera_time_us), | |
144 orig_width, orig_height); | |
145 return; | |
146 } | |
147 | |
148 // Create a centered cropped visible rect that preserves aspect ratio for |
149 // cropped natural size. | |
150 gfx::Rect visible_rect = frame->visible_rect(); | |
151 visible_rect.ClampToCenteredSize(gfx::Size( | |
152 visible_rect.width() * adapted_width / orig_width, | |
153 visible_rect.height() * adapted_height / orig_height)); | |
154 | |
155 const gfx::Size adapted_size(adapted_width, adapted_height); | |
156 scoped_refptr<media::VideoFrame> video_frame = | |
157 media::VideoFrame::WrapVideoFrame(frame, frame->format(), | |
158 visible_rect, adapted_size); | |
159 if (!video_frame) | |
160 return; | |
161 | |
162 video_frame->AddDestructionObserver(base::Bind(&ReleaseOriginalFrame, frame)); | |
163 | |
164 // If no scaling is needed, return a wrapped version of |frame| directly. | |
165 if (video_frame->natural_size() == video_frame->visible_rect().size()) { | |
166 OnFrame(cricket::WebRtcVideoFrame( | |
167 new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(video_frame), | |
168 webrtc::kVideoRotation_0, translated_camera_time_us), | |
169 orig_width, orig_height); | |
170 return; | |
171 } | |
172 | |
173 // We need to scale the frame before we hand it over to webrtc. | |
174 scoped_refptr<media::VideoFrame> scaled_frame = | |
175 scaled_frame_pool_.CreateFrame(media::PIXEL_FORMAT_I420, adapted_size, | |
176 gfx::Rect(adapted_size), adapted_size, | |
177 frame->timestamp()); | |
178 libyuv::I420Scale(video_frame->visible_data(media::VideoFrame::kYPlane), | |
179 video_frame->stride(media::VideoFrame::kYPlane), | |
180 video_frame->visible_data(media::VideoFrame::kUPlane), | |
181 video_frame->stride(media::VideoFrame::kUPlane), | |
182 video_frame->visible_data(media::VideoFrame::kVPlane), | |
183 video_frame->stride(media::VideoFrame::kVPlane), | |
184 video_frame->visible_rect().width(), | |
185 video_frame->visible_rect().height(), | |
186 scaled_frame->data(media::VideoFrame::kYPlane), | |
187 scaled_frame->stride(media::VideoFrame::kYPlane), | |
188 scaled_frame->data(media::VideoFrame::kUPlane), | |
189 scaled_frame->stride(media::VideoFrame::kUPlane), | |
190 scaled_frame->data(media::VideoFrame::kVPlane), | |
191 scaled_frame->stride(media::VideoFrame::kVPlane), | |
192 adapted_width, adapted_height, libyuv::kFilterBilinear); | |
193 | |
194 OnFrame(cricket::WebRtcVideoFrame( | |
195 new rtc::RefCountedObject<WebRtcVideoFrameAdapter>(scaled_frame), | |
196 webrtc::kVideoRotation_0, translated_camera_time_us), | |
197 orig_width, orig_height); | |
198 } | 247 } |
199 | 248 |
200 } // namespace content | 249 } // namespace content |
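
Both sides of the diff compute the centered crop the same way (CreateAliasedFrame on the new side, OnFrameCaptured on the old side). A minimal standalone sketch of that arithmetic is below; the helper name and free-function form are illustrative only and not part of the patch, and it assumes Chromium's gfx geometry types.

#include "ui/gfx/geometry/rect.h"
#include "ui/gfx/geometry/size.h"

// Illustrative sketch (not part of the patch): shrink the visible rect by the
// horizontal and vertical crop ratios used above and re-center it, producing
// the "centered cropped visible rect" the adapter wraps before scaling.
// Integer division matches the code in the diff.
gfx::Rect CenteredCropForAdaptedSize(gfx::Rect visible_rect,
                                     int cropped_width,
                                     int cropped_height,
                                     int input_width,
                                     int input_height) {
  // ClampToCenteredSize() keeps the rect's center and clamps its size, which
  // here yields the scaled, centered sub-rect of the original visible rect.
  visible_rect.ClampToCenteredSize(
      gfx::Size(visible_rect.width() * cropped_width / input_width,
                visible_rect.height() * cropped_height / input_height));
  return visible_rect;
}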