OLD | NEW |
---|---|
(Empty) | |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "cc/resources/media/skcanvas_video_renderer.h" | |
6 | |
7 #include "base/logging.h" | |
8 #include "gpu/GLES2/gl2extchromium.h" | |
9 #include "gpu/command_buffer/client/gles2_interface.h" | |
10 #include "gpu/command_buffer/common/mailbox_holder.h" | |
11 #include "media/base/video_frame.h" | |
12 #include "media/base/yuv_convert.h" | |
13 #include "skia/ext/refptr.h" | |
14 #include "third_party/libyuv/include/libyuv.h" | |
15 #include "third_party/skia/include/core/SkCanvas.h" | |
16 #include "third_party/skia/include/gpu/GrContext.h" | |
17 #include "third_party/skia/include/gpu/SkGrPixelRef.h" | |
18 | |
19 // Skia internal format depends on a platform. On Android it is ABGR, on others | |
20 // it is ARGB. | |
21 #if SK_B32_SHIFT == 0 && SK_G32_SHIFT == 8 && SK_R32_SHIFT == 16 && \ | |
22 SK_A32_SHIFT == 24 | |
23 #define LIBYUV_I420_TO_ARGB libyuv::I420ToARGB | |
24 #define LIBYUV_I422_TO_ARGB libyuv::I422ToARGB | |
25 #elif SK_R32_SHIFT == 0 && SK_G32_SHIFT == 8 && SK_B32_SHIFT == 16 && \ | |
26 SK_A32_SHIFT == 24 | |
27 #define LIBYUV_I420_TO_ARGB libyuv::I420ToABGR | |
28 #define LIBYUV_I422_TO_ARGB libyuv::I422ToABGR | |
Review comment — dshwang, 2014/08/06 14:10:59: Now cc depends on third_party/libyuv directly. However, [remainder of comment truncated in extraction]
29 #else | |
30 #error Unexpected Skia ARGB_8888 layout! | |
31 #endif | |
32 | |
33 namespace cc { | |
34 | |
35 namespace { | |
36 | |
37 bool IsYUV(media::VideoFrame::Format format) { | |
38 return format == media::VideoFrame::YV12 || | |
39 format == media::VideoFrame::YV16 || | |
40 format == media::VideoFrame::I420 || | |
41 format == media::VideoFrame::YV12A || | |
42 format == media::VideoFrame::YV12J || | |
43 format == media::VideoFrame::YV24; | |
44 } | |
45 | |
46 bool IsYUVOrNative(media::VideoFrame::Format format) { | |
47 return IsYUV(format) || format == media::VideoFrame::NATIVE_TEXTURE; | |
48 } | |
49 | |
50 // Converts a VideoFrame containing YUV data to a SkBitmap containing RGB data. | |
51 // | |
52 // |bitmap| will be (re)allocated to match the dimensions of |video_frame|. | |
53 void ConvertVideoFrameToBitmap(media::VideoFrame* video_frame, | |
54 SkBitmap* bitmap) { | |
55 DCHECK(IsYUVOrNative(video_frame->format())) << video_frame->format(); | |
56 if (IsYUV(video_frame->format())) { | |
57 DCHECK_EQ(video_frame->stride(media::VideoFrame::kUPlane), | |
58 video_frame->stride(media::VideoFrame::kVPlane)); | |
59 } | |
60 | |
61 // Check if |bitmap| needs to be (re)allocated. | |
62 if (bitmap->isNull() || | |
63 bitmap->width() != video_frame->visible_rect().width() || | |
64 bitmap->height() != video_frame->visible_rect().height()) { | |
65 bitmap->allocN32Pixels(video_frame->visible_rect().width(), | |
66 video_frame->visible_rect().height()); | |
67 bitmap->setIsVolatile(true); | |
68 } | |
69 | |
70 bitmap->lockPixels(); | |
71 | |
72 size_t y_offset = 0; | |
73 size_t uv_offset = 0; | |
74 if (IsYUV(video_frame->format())) { | |
75 int y_shift = (video_frame->format() == media::VideoFrame::YV16) ? 0 : 1; | |
76 // Use the "left" and "top" of the destination rect to locate the offset | |
77 // in Y, U and V planes. | |
78 y_offset = (video_frame->stride(media::VideoFrame::kYPlane) * | |
79 video_frame->visible_rect().y()) + | |
80 video_frame->visible_rect().x(); | |
81 // For format YV12, there is one U, V value per 2x2 block. | |
82 // For format YV16, there is one U, V value per 2x1 block. | |
83 uv_offset = (video_frame->stride(media::VideoFrame::kUPlane) * | |
84 (video_frame->visible_rect().y() >> y_shift)) + | |
85 (video_frame->visible_rect().x() >> 1); | |
86 } | |
87 | |
88 switch (video_frame->format()) { | |
89 case media::VideoFrame::YV12: | |
90 case media::VideoFrame::I420: | |
91 LIBYUV_I420_TO_ARGB( | |
92 video_frame->data(media::VideoFrame::kYPlane) + y_offset, | |
93 video_frame->stride(media::VideoFrame::kYPlane), | |
94 video_frame->data(media::VideoFrame::kUPlane) + uv_offset, | |
95 video_frame->stride(media::VideoFrame::kUPlane), | |
96 video_frame->data(media::VideoFrame::kVPlane) + uv_offset, | |
97 video_frame->stride(media::VideoFrame::kVPlane), | |
98 static_cast<uint8*>(bitmap->getPixels()), | |
99 bitmap->rowBytes(), | |
100 video_frame->visible_rect().width(), | |
101 video_frame->visible_rect().height()); | |
102 break; | |
103 | |
104 case media::VideoFrame::YV12J: | |
105 media::ConvertYUVToRGB32( | |
106 video_frame->data(media::VideoFrame::kYPlane) + y_offset, | |
107 video_frame->data(media::VideoFrame::kUPlane) + uv_offset, | |
108 video_frame->data(media::VideoFrame::kVPlane) + uv_offset, | |
109 static_cast<uint8*>(bitmap->getPixels()), | |
110 video_frame->visible_rect().width(), | |
111 video_frame->visible_rect().height(), | |
112 video_frame->stride(media::VideoFrame::kYPlane), | |
113 video_frame->stride(media::VideoFrame::kUPlane), | |
114 bitmap->rowBytes(), | |
115 media::YV12J); | |
116 break; | |
117 | |
118 case media::VideoFrame::YV16: | |
119 LIBYUV_I422_TO_ARGB( | |
120 video_frame->data(media::VideoFrame::kYPlane) + y_offset, | |
121 video_frame->stride(media::VideoFrame::kYPlane), | |
122 video_frame->data(media::VideoFrame::kUPlane) + uv_offset, | |
123 video_frame->stride(media::VideoFrame::kUPlane), | |
124 video_frame->data(media::VideoFrame::kVPlane) + uv_offset, | |
125 video_frame->stride(media::VideoFrame::kVPlane), | |
126 static_cast<uint8*>(bitmap->getPixels()), | |
127 bitmap->rowBytes(), | |
128 video_frame->visible_rect().width(), | |
129 video_frame->visible_rect().height()); | |
130 break; | |
131 | |
132 case media::VideoFrame::YV12A: | |
133 // Since libyuv doesn't support YUVA, fallback to media, which is not ARM | |
134 // optimized. | |
135 // TODO(fbarchard, mtomasz): Use libyuv, then copy the alpha channel. | |
136 media::ConvertYUVAToARGB( | |
137 video_frame->data(media::VideoFrame::kYPlane) + y_offset, | |
138 video_frame->data(media::VideoFrame::kUPlane) + uv_offset, | |
139 video_frame->data(media::VideoFrame::kVPlane) + uv_offset, | |
140 video_frame->data(media::VideoFrame::kAPlane), | |
141 static_cast<uint8*>(bitmap->getPixels()), | |
142 video_frame->visible_rect().width(), | |
143 video_frame->visible_rect().height(), | |
144 video_frame->stride(media::VideoFrame::kYPlane), | |
145 video_frame->stride(media::VideoFrame::kUPlane), | |
146 video_frame->stride(media::VideoFrame::kAPlane), | |
147 bitmap->rowBytes(), | |
148 media::YV12); | |
149 break; | |
150 | |
151 case media::VideoFrame::YV24: | |
152 libyuv::I444ToARGB( | |
153 video_frame->data(media::VideoFrame::kYPlane) + y_offset, | |
154 video_frame->stride(media::VideoFrame::kYPlane), | |
155 video_frame->data(media::VideoFrame::kUPlane) + uv_offset, | |
156 video_frame->stride(media::VideoFrame::kUPlane), | |
157 video_frame->data(media::VideoFrame::kVPlane) + uv_offset, | |
158 video_frame->stride(media::VideoFrame::kVPlane), | |
159 static_cast<uint8*>(bitmap->getPixels()), | |
160 bitmap->rowBytes(), | |
161 video_frame->visible_rect().width(), | |
162 video_frame->visible_rect().height()); | |
163 #if SK_R32_SHIFT == 0 && SK_G32_SHIFT == 8 && SK_B32_SHIFT == 16 && \ | |
164 SK_A32_SHIFT == 24 | |
165 libyuv::ARGBToABGR(static_cast<uint8*>(bitmap->getPixels()), | |
166 bitmap->rowBytes(), | |
167 static_cast<uint8*>(bitmap->getPixels()), | |
168 bitmap->rowBytes(), | |
169 video_frame->visible_rect().width(), | |
170 video_frame->visible_rect().height()); | |
171 #endif | |
172 break; | |
173 | |
174 case media::VideoFrame::NATIVE_TEXTURE: | |
175 DCHECK_EQ(video_frame->format(), media::VideoFrame::NATIVE_TEXTURE); | |
176 video_frame->ReadPixelsFromNativeTexture(*bitmap); | |
177 break; | |
178 | |
179 default: | |
180 NOTREACHED(); | |
181 break; | |
182 } | |
183 bitmap->notifyPixelsChanged(); | |
184 bitmap->unlockPixels(); | |
185 } | |
186 | |
187 bool EnsureTextureBackedSkBitmap(GrContext* gr, | |
188 SkBitmap* bitmap, | |
189 const gfx::Size& size, | |
190 GrSurfaceOrigin origin, | |
191 GrPixelConfig config) { | |
192 if (!bitmap->getTexture() || bitmap->width() != size.width() || | |
193 bitmap->height() != size.height()) { | |
194 if (!gr) | |
195 return false; | |
196 GrTextureDesc desc; | |
197 desc.fConfig = config; | |
198 desc.fFlags = kRenderTarget_GrTextureFlagBit | kNoStencil_GrTextureFlagBit; | |
199 desc.fSampleCnt = 0; | |
200 desc.fOrigin = origin; | |
201 desc.fWidth = size.width(); | |
202 desc.fHeight = size.height(); | |
203 skia::RefPtr<GrTexture> texture; | |
204 texture = skia::AdoptRef(gr->createUncachedTexture(desc, 0, 0)); | |
205 if (!texture.get()) | |
206 return false; | |
207 | |
208 SkImageInfo info = SkImageInfo::MakeN32Premul(desc.fWidth, desc.fHeight); | |
209 SkGrPixelRef* pixelRef = SkNEW_ARGS(SkGrPixelRef, (info, texture.get())); | |
210 if (!pixelRef) | |
211 return false; | |
212 bitmap->setInfo(info); | |
213 bitmap->setPixelRef(pixelRef)->unref(); | |
214 } | |
215 | |
216 return true; | |
217 } | |
218 | |
219 bool ConvertVideoFrameToTexture(media::VideoFrame* video_frame, | |
220 SkBitmap* bitmap, | |
221 ContextProvider* context_provider) { | |
222 DCHECK(context_provider && | |
223 video_frame->format() == media::VideoFrame::NATIVE_TEXTURE); | |
224 gpu::gles2::GLES2Interface* gl = context_provider->ContextGL(); | |
225 DCHECK(gl); | |
226 | |
227 // Check if we could reuse existing texture based bitmap. | |
228 // Otherwise, release existing texture based bitmap and allocate | |
229 // a new one based on video size. | |
230 if (!EnsureTextureBackedSkBitmap(context_provider->GrContext(), | |
231 bitmap, | |
232 video_frame->visible_rect().size(), | |
233 kTopLeft_GrSurfaceOrigin, | |
234 kSkia8888_GrPixelConfig)) { | |
235 return false; | |
236 } | |
237 | |
238 unsigned textureId = | |
239 static_cast<unsigned>((bitmap->getTexture())->getTextureHandle()); | |
240 SkCanvasVideoRenderer::CopyVideoFrameToTexture( | |
241 gl, video_frame, textureId, 0, GL_RGBA, GL_UNSIGNED_BYTE, true, false); | |
242 return true; | |
243 } | |
244 | |
245 class SyncPointClientImpl : public media::VideoFrame::SyncPointClient { | |
246 public: | |
247 explicit SyncPointClientImpl(gpu::gles2::GLES2Interface* gl) : gl_(gl) {} | |
248 virtual ~SyncPointClientImpl() {} | |
249 virtual uint32 InsertSyncPoint() OVERRIDE { | |
250 return gl_->InsertSyncPointCHROMIUM(); | |
251 } | |
252 virtual void WaitSyncPoint(uint32 sync_point) OVERRIDE { | |
253 gl_->WaitSyncPointCHROMIUM(sync_point); | |
254 } | |
255 | |
256 private: | |
257 gpu::gles2::GLES2Interface* gl_; | |
258 }; | |
259 | |
260 } // anonymous namespace | |
261 | |
// Both cached-frame timestamps start at kNoTimestamp() so the first Paint()
// call always converts the incoming frame rather than reusing a stale bitmap.
SkCanvasVideoRenderer::SkCanvasVideoRenderer()
    : last_frame_timestamp_(media::kNoTimestamp()),
      accelerated_last_frame_timestamp_(media::kNoTimestamp()) {
}
266 | |
// Cached SkBitmaps (|last_frame_|, |accelerated_last_frame_|) release their
// pixel/texture storage via their own destructors.
SkCanvasVideoRenderer::~SkCanvasVideoRenderer() {
}
269 | |
// Paints |video_frame| into |canvas| over |dest_rect|, modulated by |alpha|.
// Prefers a GPU path (texture-backed bitmap) when the frame is a native
// texture and both |context_provider| and the canvas' GrContext are present;
// otherwise falls back to software YUV->RGB conversion. Either converted
// result is cached, keyed on the frame timestamp, to avoid redundant copies.
void SkCanvasVideoRenderer::Paint(media::VideoFrame* video_frame,
                                  SkCanvas* canvas,
                                  const gfx::RectF& dest_rect,
                                  uint8 alpha,
                                  ContextProvider* context_provider) {
  // Fully transparent: nothing to draw.
  if (alpha == 0) {
    return;
  }

  SkRect dest;
  dest.set(dest_rect.x(), dest_rect.y(), dest_rect.right(), dest_rect.bottom());

  SkPaint paint;
  paint.setAlpha(alpha);

  // Paint black rectangle if there isn't a frame available or the
  // frame has an unexpected format.
  if (!video_frame || video_frame->natural_size().IsEmpty() ||
      !IsYUVOrNative(video_frame->format())) {
    canvas->drawRect(dest, paint);
    return;
  }

  bool accelerated = false;
  if (context_provider &&
      video_frame->format() == media::VideoFrame::NATIVE_TEXTURE &&
      canvas->getGrContext()) {
    // TODO(dshwang): Android video decoder doesn't update the timestamp on a
    // VideoFrame. To reduce redundant copy, Android should update the
    // timestamp.
    // A zero timestamp therefore forces a re-copy every time, since it cannot
    // be distinguished from a cached frame.
    if (video_frame->timestamp() != accelerated_last_frame_timestamp_ ||
        video_frame->timestamp() == base::TimeDelta()) {
      accelerated = ConvertVideoFrameToTexture(
          video_frame, &accelerated_last_frame_, context_provider);
      if (accelerated) {
        accelerated_last_frame_timestamp_ = video_frame->timestamp();
      }
    } else {
      // Timestamp matches the cached accelerated frame; reuse its texture.
      DCHECK(accelerated_last_frame_.getTexture());
      accelerated = true;
    }
  }

  // Check if we should convert and update |last_frame_|.
  // This also serves as the fallback when the accelerated path failed above.
  if (!accelerated && video_frame->timestamp() != last_frame_timestamp_) {
    ConvertVideoFrameToBitmap(video_frame, &last_frame_);
    last_frame_timestamp_ = video_frame->timestamp();
  }

  canvas->drawBitmapRect(
      accelerated ? accelerated_last_frame_ : last_frame_, NULL, dest, &paint);

  // Release whichever cached bitmap has gone unused too long.
  CleanUpTemporaryBuffers();
}
324 | |
325 // If a buffer is not used by 3 sec, remove it. | |
326 void SkCanvasVideoRenderer::CleanUpTemporaryBuffers() { | |
327 static const base::TimeDelta buffer_time = base::TimeDelta::FromSeconds(3); | |
328 base::TimeDelta last_timestamp = | |
329 accelerated_last_frame_timestamp_ > last_frame_timestamp_ | |
330 ? accelerated_last_frame_timestamp_ | |
331 : last_frame_timestamp_; | |
332 if (last_timestamp > last_frame_timestamp_ + buffer_time && | |
333 !last_frame_.isNull()) | |
334 last_frame_.reset(); | |
335 if (last_timestamp > accelerated_last_frame_timestamp_ + buffer_time && | |
336 !accelerated_last_frame_.isNull()) | |
337 accelerated_last_frame_.reset(); | |
338 } | |
339 | |
// static
// Copies the texture backing a NATIVE_TEXTURE |video_frame| into |texture|
// via glCopyTextureCHROMIUM, honoring |premultiply_alpha| and |flip_y|, then
// installs a release sync point on the frame so the source mailbox is not
// reused until this copy completes on the GPU.
void SkCanvasVideoRenderer::CopyVideoFrameToTexture(
    gpu::gles2::GLES2Interface* gl,
    media::VideoFrame* video_frame,
    unsigned int texture,
    unsigned int level,
    unsigned int internal_format,
    unsigned int type,
    bool premultiply_alpha,
    bool flip_y) {
  DCHECK(video_frame &&
         video_frame->format() == media::VideoFrame::NATIVE_TEXTURE);
  const gpu::MailboxHolder* mailbox_holder = video_frame->mailbox_holder();
  DCHECK(mailbox_holder->texture_target == GL_TEXTURE_2D ||
         mailbox_holder->texture_target == GL_TEXTURE_EXTERNAL_OES);

  // Block until the producer's GL work on the mailbox texture is visible,
  // then materialize a local texture name for it.
  gl->WaitSyncPointCHROMIUM(mailbox_holder->sync_point);
  uint32 source_texture = gl->CreateAndConsumeTextureCHROMIUM(
      mailbox_holder->texture_target, mailbox_holder->mailbox.name);

  // The video is stored in a unmultiplied format, so premultiply
  // if necessary.
  gl->PixelStorei(GL_UNPACK_PREMULTIPLY_ALPHA_CHROMIUM, premultiply_alpha);
  // Application itself needs to take care of setting the right flip_y
  // value down to get the expected result.
  // flip_y==true means to reverse the video orientation while
  // flip_y==false means to keep the intrinsic orientation.
  gl->PixelStorei(GL_UNPACK_FLIP_Y_CHROMIUM, flip_y);
  gl->CopyTextureCHROMIUM(
      GL_TEXTURE_2D, source_texture, texture, level, internal_format, type);
  // Restore the pixel-store state so later uploads are unaffected.
  gl->PixelStorei(GL_UNPACK_FLIP_Y_CHROMIUM, false);
  gl->PixelStorei(GL_UNPACK_PREMULTIPLY_ALPHA_CHROMIUM, false);

  gl->DeleteTextures(1, &source_texture);
  gl->Flush();

  // Hand the frame a new release sync point so the mailbox texture is kept
  // alive until the copy above has executed.
  SyncPointClientImpl client(gl);
  video_frame->UpdateReleaseSyncPoint(&client);
}
379 | |
380 } // namespace cc | |
OLD | NEW |