OLD | NEW |
---|---|
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h" | 5 #include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h" |
6 | 6 |
7 #include "base/bind.h" | 7 #include "base/bind.h" |
8 #include "base/debug/trace_event.h" | 8 #include "base/debug/trace_event.h" |
9 #include "base/memory/aligned_memory.h" | 9 #include "base/memory/aligned_memory.h" |
10 #include "media/base/video_frame.h" | 10 #include "media/base/video_frame.h" |
11 #include "media/base/video_frame_pool.h" | |
12 #include "third_party/libjingle/source/talk/media/base/videoframe.h" | |
13 #include "third_party/libjingle/source/talk/media/base/videoframefactory.h" | |
14 #include "third_party/libjingle/source/talk/media/webrtc/webrtcvideoframe.h" | |
15 #include "third_party/libyuv/include/libyuv/convert_from.h" | |
11 #include "third_party/libyuv/include/libyuv/scale.h" | 16 #include "third_party/libyuv/include/libyuv/scale.h" |
12 | 17 |
18 namespace { | |
19 | |
20 // Empty method used for keeping a reference to the original media::VideoFrame. | |
21 // The reference to |frame| is kept in the closure that calls this method. | |
22 void ReleaseOriginalFrame(const scoped_refptr<media::VideoFrame>& frame) { | |
23 } | |
24 | |
25 // Thin map between an existing media::VideoFrame and cricket::VideoFrame to | |
26 // avoid premature deep copies. | |
27 // This implementation is only safe to use in a const context and should never | |
28 // be written to. | |
29 class VideoFrameWrapper : public cricket::VideoFrame { | |
30 public: | |
31 VideoFrameWrapper(const scoped_refptr<media::VideoFrame>& frame, | |
32 int64 elapsed_time) | |
33 : frame_(media::VideoFrame::WrapVideoFrame( | |
34 frame, | |
35 frame->visible_rect(), | |
36 frame->natural_size(), | |
37 base::Bind(&ReleaseOriginalFrame, frame))), | |
38 elapsed_time_(elapsed_time) {} | |
39 | |
40 virtual VideoFrame* Copy() const OVERRIDE { | |
perkj_chrome
2014/10/16 07:55:09
use new c++ override instead of OVERRIDE here and
magjed_chromium
2014/10/16 10:35:47
Done.
| |
41 DCHECK(thread_checker_.CalledOnValidThread()); | |
42 return new VideoFrameWrapper(frame_, elapsed_time_); | |
43 } | |
44 | |
45 virtual size_t GetWidth() const OVERRIDE { | |
46 DCHECK(thread_checker_.CalledOnValidThread()); | |
47 return static_cast<size_t>(frame_->visible_rect().width()); | |
48 } | |
49 | |
50 virtual size_t GetHeight() const OVERRIDE { | |
51 DCHECK(thread_checker_.CalledOnValidThread()); | |
52 return static_cast<size_t>(frame_->visible_rect().height()); | |
53 } | |
54 | |
55 virtual const uint8* GetYPlane() const OVERRIDE { | |
56 DCHECK(thread_checker_.CalledOnValidThread()); | |
57 return frame_->visible_data(media::VideoFrame::kYPlane); | |
58 } | |
59 | |
60 virtual const uint8* GetUPlane() const OVERRIDE { | |
61 DCHECK(thread_checker_.CalledOnValidThread()); | |
62 return frame_->visible_data(media::VideoFrame::kUPlane); | |
63 } | |
64 | |
65 virtual const uint8* GetVPlane() const OVERRIDE { | |
66 DCHECK(thread_checker_.CalledOnValidThread()); | |
67 return frame_->visible_data(media::VideoFrame::kVPlane); | |
68 } | |
69 | |
70 virtual uint8* GetYPlane() OVERRIDE { | |
71 DCHECK(thread_checker_.CalledOnValidThread()); | |
72 return frame_->visible_data(media::VideoFrame::kYPlane); | |
73 } | |
74 | |
75 virtual uint8* GetUPlane() OVERRIDE { | |
76 DCHECK(thread_checker_.CalledOnValidThread()); | |
77 return frame_->visible_data(media::VideoFrame::kUPlane); | |
78 } | |
79 | |
80 virtual uint8* GetVPlane() OVERRIDE { | |
81 DCHECK(thread_checker_.CalledOnValidThread()); | |
82 return frame_->visible_data(media::VideoFrame::kVPlane); | |
83 } | |
84 | |
85 virtual int32 GetYPitch() const OVERRIDE { | |
86 DCHECK(thread_checker_.CalledOnValidThread()); | |
87 return frame_->stride(media::VideoFrame::kYPlane); | |
88 } | |
89 | |
90 virtual int32 GetUPitch() const OVERRIDE { | |
91 DCHECK(thread_checker_.CalledOnValidThread()); | |
92 return frame_->stride(media::VideoFrame::kUPlane); | |
93 } | |
94 | |
95 virtual int32 GetVPitch() const OVERRIDE { | |
96 DCHECK(thread_checker_.CalledOnValidThread()); | |
97 return frame_->stride(media::VideoFrame::kVPlane); | |
98 } | |
99 | |
100 virtual void* GetNativeHandle() const OVERRIDE { | |
101 DCHECK(thread_checker_.CalledOnValidThread()); | |
102 return NULL; | |
103 } | |
104 | |
105 virtual size_t GetPixelWidth() const OVERRIDE { | |
106 DCHECK(thread_checker_.CalledOnValidThread()); | |
107 return 1; | |
108 } | |
109 virtual size_t GetPixelHeight() const OVERRIDE { | |
110 DCHECK(thread_checker_.CalledOnValidThread()); | |
111 return 1; | |
112 } | |
113 | |
114 virtual int64 GetElapsedTime() const OVERRIDE { | |
115 DCHECK(thread_checker_.CalledOnValidThread()); | |
116 return elapsed_time_; | |
117 } | |
118 | |
119 virtual int64 GetTimeStamp() const OVERRIDE { | |
120 DCHECK(thread_checker_.CalledOnValidThread()); | |
121 return frame_->timestamp().InMicroseconds() * | |
122 base::Time::kNanosecondsPerMicrosecond; | |
123 } | |
124 | |
125 virtual void SetElapsedTime(int64 elapsed_time) OVERRIDE { | |
126 DCHECK(thread_checker_.CalledOnValidThread()); | |
127 elapsed_time_ = elapsed_time; | |
128 } | |
129 | |
130 virtual void SetTimeStamp(int64 time_stamp) OVERRIDE { | |
131 DCHECK(thread_checker_.CalledOnValidThread()); | |
132 // Round to closest microsecond. | |
133 frame_->set_timestamp(base::TimeDelta::FromMicroseconds( | |
134 (time_stamp + base::Time::kNanosecondsPerMicrosecond / 2) / | |
135 base::Time::kNanosecondsPerMicrosecond)); | |
136 } | |
137 | |
138 virtual int GetRotation() const OVERRIDE { | |
139 DCHECK(thread_checker_.CalledOnValidThread()); | |
140 return 0; | |
141 } | |
142 | |
143 // TODO(magjed): Refactor into base class. | |
perkj_chrome
2014/10/16 07:55:09
I don't think this is used? Return NOTIMPLEMENTED
magjed_chromium
2014/10/16 10:35:47
It is probably not used, but it is called in a cou
perkj_chrome
2014/10/16 13:47:30
Acknowledged.
| |
144 virtual size_t ConvertToRgbBuffer(uint32 to_fourcc, | |
145 uint8* buffer, | |
146 size_t size, | |
147 int stride_rgb) const OVERRIDE { | |
148 DCHECK(thread_checker_.CalledOnValidThread()); | |
149 const size_t needed = std::abs(stride_rgb) * GetHeight(); | |
150 if (size < needed) { | |
151 DLOG(WARNING) << "RGB buffer is not large enough"; | |
152 return needed; | |
153 } | |
154 | |
155 if (libyuv::ConvertFromI420(GetYPlane(), | |
156 GetYPitch(), | |
157 GetUPlane(), | |
158 GetUPitch(), | |
159 GetVPlane(), | |
160 GetVPitch(), | |
161 buffer, | |
162 stride_rgb, | |
163 static_cast<int>(GetWidth()), | |
164 static_cast<int>(GetHeight()), | |
165 to_fourcc)) { | |
166 DLOG(ERROR) << "RGB type not supported: " << to_fourcc; | |
167 return 0; // 0 indicates error | |
168 } | |
169 return needed; | |
170 } | |
171 | |
172 // The rest of the public methods are NOTIMPLEMENTED. | |
173 virtual bool InitToBlack(int w, | |
174 int h, | |
175 size_t pixel_width, | |
176 size_t pixel_height, | |
177 int64 elapsed_time, | |
178 int64 time_stamp) OVERRIDE { | |
179 NOTIMPLEMENTED(); | |
180 return false; | |
181 } | |
182 | |
183 virtual bool Reset(uint32 fourcc, | |
184 int w, | |
185 int h, | |
186 int dw, | |
187 int dh, | |
188 uint8* sample, | |
189 size_t sample_size, | |
190 size_t pixel_width, | |
191 size_t pixel_height, | |
192 int64 elapsed_time, | |
193 int64 time_stamp, | |
194 int rotation) OVERRIDE { | |
195 NOTIMPLEMENTED(); | |
196 return false; | |
197 } | |
198 | |
199 virtual bool MakeExclusive() OVERRIDE { | |
200 NOTIMPLEMENTED(); | |
201 return false; | |
202 } | |
203 | |
204 virtual size_t CopyToBuffer(uint8* buffer, size_t size) const OVERRIDE { | |
205 NOTIMPLEMENTED(); | |
206 return 0; | |
207 } | |
208 | |
209 protected: | |
210 // TODO(magjed): Refactor as a static method in WebRtcVideoFrame. | |
211 virtual VideoFrame* CreateEmptyFrame(int w, | |
perkj_chrome
2014/10/16 07:55:09
is this really used?
magjed_chromium
2014/10/16 10:35:47
Yes, it is used in the VideoAdapter to allocate fr
perkj_chrome
2014/10/16 13:47:30
Acknowledged.
| |
212 int h, | |
213 size_t pixel_width, | |
214 size_t pixel_height, | |
215 int64 elapsed_time, | |
216 int64 time_stamp) const OVERRIDE { | |
217 DCHECK(thread_checker_.CalledOnValidThread()); | |
218 VideoFrame* frame = new cricket::WebRtcVideoFrame(); | |
219 frame->InitToBlack( | |
220 w, h, pixel_width, pixel_height, elapsed_time, time_stamp); | |
221 return frame; | |
222 } | |
223 | |
224 private: | |
225 scoped_refptr<media::VideoFrame> frame_; | |
226 int64 elapsed_time_; | |
227 base::ThreadChecker thread_checker_; | |
228 }; | |
229 | |
230 // A cricket::VideoFrameFactory for media::VideoFrame. The purpose of this | |
231 // class is to avoid a premature frame copy. A media::VideoFrame is injected | |
232 // with SetFrame, and converted into a cricket::VideoFrame with | |
233 // CreateAliasedFrame. SetFrame should be called before CreateAliasedFrame | |
234 // for every frame. | |
235 class MediaVideoFrameFactory : public cricket::VideoFrameFactory { | |
236 public: | |
237 void SetFrame(const scoped_refptr<media::VideoFrame>& frame, | |
238 int64_t elapsed_time) { | |
239 DCHECK(frame.get()); | |
240 // Create a CapturedFrame that only contains header information, not the | |
241 // actual pixel data. | |
242 captured_frame_.width = frame->natural_size().width(); | |
243 captured_frame_.height = frame->natural_size().height(); | |
244 captured_frame_.elapsed_time = elapsed_time; | |
245 captured_frame_.time_stamp = frame->timestamp().InMicroseconds() * | |
246 base::Time::kNanosecondsPerMicrosecond; | |
247 captured_frame_.pixel_height = 1; | |
248 captured_frame_.pixel_width = 1; | |
249 captured_frame_.rotation = 0; | |
250 captured_frame_.data = NULL; | |
251 captured_frame_.data_size = cricket::CapturedFrame::kUnknownDataSize; | |
252 captured_frame_.fourcc = static_cast<uint32>(cricket::FOURCC_ANY); | |
253 | |
254 frame_ = frame; | |
255 } | |
256 | |
257 const cricket::CapturedFrame* GetCapturedFrame() const { | |
258 return &captured_frame_; | |
259 } | |
260 | |
261 virtual cricket::VideoFrame* CreateAliasedFrame( | |
262 const cricket::CapturedFrame* captured_frame, | |
263 int dst_width, | |
264 int dst_height) const OVERRIDE { | |
265 // Check that captured_frame is actually our frame. | |
266 DCHECK(captured_frame == &captured_frame_); | |
267 | |
268 scoped_refptr<media::VideoFrame> video_frame = frame_; | |
perkj_chrome
2014/10/16 07:55:09
frame_.Release to avoid holding a reference when
magjed_chromium
2014/10/16 10:35:47
Done, I have added it as a separate call as this f
| |
269 // Check if scaling is needed. | |
perkj_chrome
2014/10/16 07:55:09
SHould we add a todo to further investigate what y
magjed_chromium
2014/10/16 10:35:47
I would prefer to keep this code and instead refac
| |
270 if (dst_width != frame_->visible_rect().width() || | |
271 dst_height != frame_->visible_rect().height()) { | |
272 video_frame = | |
273 scaled_frame_pool_.CreateFrame(media::VideoFrame::I420, | |
274 gfx::Size(dst_width, dst_height), | |
275 gfx::Rect(0, 0, dst_width, dst_height), | |
276 gfx::Size(dst_width, dst_height), | |
277 frame_->timestamp()); | |
278 libyuv::I420Scale(frame_->visible_data(media::VideoFrame::kYPlane), | |
279 frame_->stride(media::VideoFrame::kYPlane), | |
280 frame_->visible_data(media::VideoFrame::kUPlane), | |
281 frame_->stride(media::VideoFrame::kUPlane), | |
282 frame_->visible_data(media::VideoFrame::kVPlane), | |
283 frame_->stride(media::VideoFrame::kVPlane), | |
284 frame_->visible_rect().width(), | |
285 frame_->visible_rect().height(), | |
286 video_frame->data(media::VideoFrame::kYPlane), | |
287 video_frame->stride(media::VideoFrame::kYPlane), | |
288 video_frame->data(media::VideoFrame::kUPlane), | |
289 video_frame->stride(media::VideoFrame::kUPlane), | |
290 video_frame->data(media::VideoFrame::kVPlane), | |
291 video_frame->stride(media::VideoFrame::kVPlane), | |
292 dst_width, | |
293 dst_height, | |
294 libyuv::kFilterBilinear); | |
295 } | |
296 | |
297 // Create a shallow cricket::VideoFrame wrapper around the | |
298 // media::VideoFrame. The caller has ownership of the returned frame. | |
299 return new VideoFrameWrapper(video_frame, captured_frame_.elapsed_time); | |
300 } | |
301 | |
302 private: | |
303 scoped_refptr<media::VideoFrame> frame_; | |
304 cricket::CapturedFrame captured_frame_; | |
305 // This is used only if scaling is needed. | |
306 mutable media::VideoFramePool scaled_frame_pool_; | |
307 }; | |
308 | |
309 } // anonymous namespace | |
310 | |
13 namespace content { | 311 namespace content { |
14 | 312 |
15 WebRtcVideoCapturerAdapter::WebRtcVideoCapturerAdapter(bool is_screencast) | 313 WebRtcVideoCapturerAdapter::WebRtcVideoCapturerAdapter(bool is_screencast) |
16 : is_screencast_(is_screencast), | 314 : is_screencast_(is_screencast), |
17 running_(false), | 315 running_(false), |
18 buffer_(NULL), | 316 first_frame_timestamp_(media::kNoTimestamp()), |
19 buffer_size_(0) { | 317 frame_factory_(new MediaVideoFrameFactory) { |
20 thread_checker_.DetachFromThread(); | 318 thread_checker_.DetachFromThread(); |
319 // The base class takes ownership of the frame factory. | |
320 set_frame_factory(frame_factory_); | |
21 } | 321 } |
22 | 322 |
23 WebRtcVideoCapturerAdapter::~WebRtcVideoCapturerAdapter() { | 323 WebRtcVideoCapturerAdapter::~WebRtcVideoCapturerAdapter() { |
24 DVLOG(3) << " WebRtcVideoCapturerAdapter::dtor"; | 324 DVLOG(3) << " WebRtcVideoCapturerAdapter::dtor"; |
25 base::AlignedFree(buffer_); | |
26 } | 325 } |
27 | 326 |
28 cricket::CaptureState WebRtcVideoCapturerAdapter::Start( | 327 cricket::CaptureState WebRtcVideoCapturerAdapter::Start( |
29 const cricket::VideoFormat& capture_format) { | 328 const cricket::VideoFormat& capture_format) { |
30 DCHECK(thread_checker_.CalledOnValidThread()); | 329 DCHECK(thread_checker_.CalledOnValidThread()); |
31 DCHECK(!running_); | 330 DCHECK(!running_); |
32 DVLOG(3) << " WebRtcVideoCapturerAdapter::Start w = " << capture_format.width | 331 DVLOG(3) << " WebRtcVideoCapturerAdapter::Start w = " << capture_format.width |
33 << " h = " << capture_format.height; | 332 << " h = " << capture_format.height; |
34 | 333 |
35 running_ = true; | 334 running_ = true; |
(...skipping 10 matching lines...) Expand all Loading... | |
46 } | 345 } |
47 | 346 |
48 bool WebRtcVideoCapturerAdapter::IsRunning() { | 347 bool WebRtcVideoCapturerAdapter::IsRunning() { |
49 DCHECK(thread_checker_.CalledOnValidThread()); | 348 DCHECK(thread_checker_.CalledOnValidThread()); |
50 return running_; | 349 return running_; |
51 } | 350 } |
52 | 351 |
53 bool WebRtcVideoCapturerAdapter::GetPreferredFourccs( | 352 bool WebRtcVideoCapturerAdapter::GetPreferredFourccs( |
54 std::vector<uint32>* fourccs) { | 353 std::vector<uint32>* fourccs) { |
55 DCHECK(thread_checker_.CalledOnValidThread()); | 354 DCHECK(thread_checker_.CalledOnValidThread()); |
56 if (!fourccs) | 355 DCHECK(!fourccs || fourccs->empty()); |
57 return false; | 356 if (fourccs) |
58 fourccs->push_back(cricket::FOURCC_I420); | 357 fourccs->push_back(cricket::FOURCC_I420); |
59 return true; | 358 return fourccs != NULL; |
60 } | 359 } |
61 | 360 |
62 bool WebRtcVideoCapturerAdapter::IsScreencast() const { | 361 bool WebRtcVideoCapturerAdapter::IsScreencast() const { |
63 return is_screencast_; | 362 return is_screencast_; |
64 } | 363 } |
65 | 364 |
66 bool WebRtcVideoCapturerAdapter::GetBestCaptureFormat( | 365 bool WebRtcVideoCapturerAdapter::GetBestCaptureFormat( |
67 const cricket::VideoFormat& desired, | 366 const cricket::VideoFormat& desired, |
68 cricket::VideoFormat* best_format) { | 367 cricket::VideoFormat* best_format) { |
69 DCHECK(thread_checker_.CalledOnValidThread()); | 368 DCHECK(thread_checker_.CalledOnValidThread()); |
(...skipping 20 matching lines...) Expand all Loading... | |
90 // Some types of sources support textures as output. Since connecting | 389 // Some types of sources support textures as output. Since connecting |
91 // sources and sinks do not check the format, we need to just ignore | 390 // sources and sinks do not check the format, we need to just ignore |
92 // formats that we can not handle. | 391 // formats that we can not handle. |
93 NOTREACHED(); | 392 NOTREACHED(); |
94 return; | 393 return; |
95 } | 394 } |
96 | 395 |
97 if (first_frame_timestamp_ == media::kNoTimestamp()) | 396 if (first_frame_timestamp_ == media::kNoTimestamp()) |
98 first_frame_timestamp_ = frame->timestamp(); | 397 first_frame_timestamp_ = frame->timestamp(); |
99 | 398 |
100 cricket::CapturedFrame captured_frame; | 399 const int64 elapsed_time = |
101 captured_frame.width = frame->natural_size().width(); | |
102 captured_frame.height = frame->natural_size().height(); | |
103 // cricket::CapturedFrame time is in nanoseconds. | |
104 captured_frame.elapsed_time = | |
105 (frame->timestamp() - first_frame_timestamp_).InMicroseconds() * | 400 (frame->timestamp() - first_frame_timestamp_).InMicroseconds() * |
106 base::Time::kNanosecondsPerMicrosecond; | 401 base::Time::kNanosecondsPerMicrosecond; |
107 captured_frame.time_stamp = frame->timestamp().InMicroseconds() * | |
108 base::Time::kNanosecondsPerMicrosecond; | |
109 captured_frame.pixel_height = 1; | |
110 captured_frame.pixel_width = 1; | |
111 | 402 |
 112 // TODO(perkj): | 403 // Inject the frame via the VideoFrameFactory. |
113 // Libjingle expects contiguous layout of image planes as input. | 404 DCHECK(frame_factory_ == frame_factory()); |
114 // The only format where that is true in Chrome is I420 where the | 405 frame_factory_->SetFrame(frame, elapsed_time); |
115 // coded_size == natural_size(). | |
116 if (frame->format() != media::VideoFrame::I420 || | |
117 frame->coded_size() != frame->natural_size()) { | |
118 // Cropping / Scaling and or switching UV planes is needed. | |
119 UpdateI420Buffer(frame); | |
120 captured_frame.data = buffer_; | |
121 captured_frame.data_size = buffer_size_; | |
122 captured_frame.fourcc = cricket::FOURCC_I420; | |
123 } else { | |
124 captured_frame.fourcc = media::VideoFrame::I420 == frame->format() ? | |
125 cricket::FOURCC_I420 : cricket::FOURCC_YV12; | |
126 captured_frame.data = frame->data(0); | |
127 captured_frame.data_size = | |
128 media::VideoFrame::AllocationSize(frame->format(), frame->coded_size()); | |
129 } | |
130 | 406 |
131 // This signals to libJingle that a new VideoFrame is available. | 407 // This signals to libJingle that a new VideoFrame is available. |
 132 // libJingle has no assumptions on what thread this signal comes from. | 408 // libJingle has no assumptions on what thread this signal comes from. |
perkj_chrome
2014/10/16 07:55:09
Can you please remove this line "libJingle have no
magjed_chromium
2014/10/16 10:35:47
Done.
| |
133 SignalFrameCaptured(this, &captured_frame); | 409 SignalFrameCaptured(this, frame_factory_->GetCapturedFrame()); |
134 } | |
135 | |
136 void WebRtcVideoCapturerAdapter::UpdateI420Buffer( | |
137 const scoped_refptr<media::VideoFrame>& src) { | |
138 DCHECK(thread_checker_.CalledOnValidThread()); | |
139 const int dst_width = src->natural_size().width(); | |
140 const int dst_height = src->natural_size().height(); | |
141 DCHECK(src->visible_rect().width() >= dst_width && | |
142 src->visible_rect().height() >= dst_height); | |
143 | |
144 const gfx::Rect& visible_rect = src->visible_rect(); | |
145 | |
146 const uint8* src_y = src->data(media::VideoFrame::kYPlane) + | |
147 visible_rect.y() * src->stride(media::VideoFrame::kYPlane) + | |
148 visible_rect.x(); | |
149 const uint8* src_u = src->data(media::VideoFrame::kUPlane) + | |
150 visible_rect.y() / 2 * src->stride(media::VideoFrame::kUPlane) + | |
151 visible_rect.x() / 2; | |
152 const uint8* src_v = src->data(media::VideoFrame::kVPlane) + | |
153 visible_rect.y() / 2 * src->stride(media::VideoFrame::kVPlane) + | |
154 visible_rect.x() / 2; | |
155 | |
156 const size_t dst_size = | |
157 media::VideoFrame::AllocationSize(src->format(), src->natural_size()); | |
158 | |
159 if (dst_size != buffer_size_) { | |
160 base::AlignedFree(buffer_); | |
161 buffer_ = reinterpret_cast<uint8*>( | |
162 base::AlignedAlloc(dst_size + media::VideoFrame::kFrameSizePadding, | |
163 media::VideoFrame::kFrameAddressAlignment)); | |
164 buffer_size_ = dst_size; | |
165 } | |
166 | |
167 uint8* dst_y = buffer_; | |
168 const int dst_stride_y = dst_width; | |
169 uint8* dst_u = dst_y + dst_width * dst_height; | |
170 const int dst_halfwidth = (dst_width + 1) / 2; | |
171 const int dst_halfheight = (dst_height + 1) / 2; | |
172 uint8* dst_v = dst_u + dst_halfwidth * dst_halfheight; | |
173 | |
174 libyuv::I420Scale(src_y, | |
175 src->stride(media::VideoFrame::kYPlane), | |
176 src_u, | |
177 src->stride(media::VideoFrame::kUPlane), | |
178 src_v, | |
179 src->stride(media::VideoFrame::kVPlane), | |
180 visible_rect.width(), | |
181 visible_rect.height(), | |
182 dst_y, | |
183 dst_stride_y, | |
184 dst_u, | |
185 dst_halfwidth, | |
186 dst_v, | |
187 dst_halfwidth, | |
188 dst_width, | |
189 dst_height, | |
190 libyuv::kFilterBilinear); | |
191 } | 410 } |
192 | 411 |
193 } // namespace content | 412 } // namespace content |
OLD | NEW |