OLD | NEW |
| (Empty) |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "media/filters/skcanvas_video_renderer.h" | |
6 | |
7 #include "gpu/GLES2/gl2extchromium.h" | |
8 #include "gpu/command_buffer/client/gles2_interface.h" | |
9 #include "gpu/command_buffer/common/mailbox_holder.h" | |
10 #include "media/base/video_frame.h" | |
11 #include "media/base/yuv_convert.h" | |
12 #include "skia/ext/refptr.h" | |
13 #include "third_party/libyuv/include/libyuv.h" | |
14 #include "third_party/skia/include/core/SkCanvas.h" | |
15 #include "third_party/skia/include/core/SkImageGenerator.h" | |
16 #include "third_party/skia/include/gpu/GrContext.h" | |
17 #include "third_party/skia/include/gpu/SkGrPixelRef.h" | |
18 #include "ui/gfx/skbitmap_operations.h" | |
19 | |
20 // Skia internal format depends on a platform. On Android it is ABGR, on others | |
21 // it is ARGB. | |
22 #if SK_B32_SHIFT == 0 && SK_G32_SHIFT == 8 && SK_R32_SHIFT == 16 && \ | |
23 SK_A32_SHIFT == 24 | |
24 #define LIBYUV_I420_TO_ARGB libyuv::I420ToARGB | |
25 #define LIBYUV_I422_TO_ARGB libyuv::I422ToARGB | |
26 #elif SK_R32_SHIFT == 0 && SK_G32_SHIFT == 8 && SK_B32_SHIFT == 16 && \ | |
27 SK_A32_SHIFT == 24 | |
28 #define LIBYUV_I420_TO_ARGB libyuv::I420ToABGR | |
29 #define LIBYUV_I422_TO_ARGB libyuv::I422ToABGR | |
30 #else | |
31 #error Unexpected Skia ARGB_8888 layout! | |
32 #endif | |
33 | |
34 namespace media { | |
35 | |
36 namespace { | |
37 | |
// This class keeps two temporary resources; software bitmap, hardware bitmap.
// If both bitmaps are created and then only the software bitmap is updated
// every frame, the hardware bitmap outlives until the media player dies. So we
// delete a temporary resource if it is not used for 3 seconds.
const int kTemporaryResourceDeletionDelay = 3;  // Seconds.
43 | |
44 bool IsYUV(media::VideoFrame::Format format) { | |
45 switch (format) { | |
46 case VideoFrame::YV12: | |
47 case VideoFrame::YV16: | |
48 case VideoFrame::I420: | |
49 case VideoFrame::YV12A: | |
50 case VideoFrame::YV12J: | |
51 case VideoFrame::YV12HD: | |
52 case VideoFrame::YV24: | |
53 case VideoFrame::NV12: | |
54 return true; | |
55 case VideoFrame::UNKNOWN: | |
56 case VideoFrame::NATIVE_TEXTURE: | |
57 #if defined(VIDEO_HOLE) | |
58 case VideoFrame::HOLE: | |
59 #endif // defined(VIDEO_HOLE) | |
60 case VideoFrame::ARGB: | |
61 return false; | |
62 } | |
63 NOTREACHED() << "Invalid videoframe format provided: " << format; | |
64 return false; | |
65 } | |
66 | |
67 bool IsJPEGColorSpace(media::VideoFrame::Format format) { | |
68 switch (format) { | |
69 case VideoFrame::YV12J: | |
70 return true; | |
71 case VideoFrame::YV12: | |
72 case VideoFrame::YV12HD: | |
73 case VideoFrame::YV16: | |
74 case VideoFrame::I420: | |
75 case VideoFrame::YV12A: | |
76 case VideoFrame::YV24: | |
77 case VideoFrame::NV12: | |
78 case VideoFrame::UNKNOWN: | |
79 case VideoFrame::NATIVE_TEXTURE: | |
80 #if defined(VIDEO_HOLE) | |
81 case VideoFrame::HOLE: | |
82 #endif // defined(VIDEO_HOLE) | |
83 case VideoFrame::ARGB: | |
84 return false; | |
85 } | |
86 NOTREACHED() << "Invalid videoframe format provided: " << format; | |
87 return false; | |
88 } | |
89 | |
90 bool IsYUVOrNative(media::VideoFrame::Format format) { | |
91 return IsYUV(format) || format == media::VideoFrame::NATIVE_TEXTURE; | |
92 } | |
93 | |
94 bool IsSkBitmapProperlySizedTexture(const SkBitmap* bitmap, | |
95 const gfx::Size& size) { | |
96 return bitmap->getTexture() && bitmap->width() == size.width() && | |
97 bitmap->height() == size.height(); | |
98 } | |
99 | |
// Backs |bitmap| with a |size|-dimensioned GPU render-target texture obtained
// from |gr|'s scratch-texture pool. Returns false if no texture could be
// obtained.
bool AllocateSkBitmapTexture(GrContext* gr,
                             SkBitmap* bitmap,
                             const gfx::Size& size) {
  DCHECK(gr);
  GrTextureDesc desc;
  // Use kRGBA_8888_GrPixelConfig, not kSkia8888_GrPixelConfig, to avoid
  // RGBA to BGRA conversion.
  desc.fConfig = kRGBA_8888_GrPixelConfig;
  desc.fFlags = kRenderTarget_GrTextureFlagBit | kNoStencil_GrTextureFlagBit;
  desc.fSampleCnt = 0;  // No multisampling.
  desc.fOrigin = kTopLeft_GrSurfaceOrigin;
  desc.fWidth = size.width();
  desc.fHeight = size.height();
  // kExact so the bitmap dimensions equal the requested video dimensions.
  skia::RefPtr<GrTexture> texture = skia::AdoptRef(
      gr->refScratchTexture(desc, GrContext::kExact_ScratchTexMatch));
  if (!texture.get())
    return false;

  SkImageInfo info = SkImageInfo::MakeN32Premul(desc.fWidth, desc.fHeight);
  SkGrPixelRef* pixel_ref = SkNEW_ARGS(SkGrPixelRef, (info, texture.get()));
  if (!pixel_ref)
    return false;
  bitmap->setInfo(info);
  // setPixelRef() takes its own reference; drop ours so |bitmap| becomes the
  // sole owner.
  bitmap->setPixelRef(pixel_ref)->unref();
  return true;
}
126 | |
// Copies the NATIVE_TEXTURE |video_frame| into |bitmap|'s backing GPU texture,
// (re)allocating that texture if its size does not match the frame's visible
// size. Returns false if texture allocation fails.
bool CopyVideoFrameTextureToSkBitmapTexture(VideoFrame* video_frame,
                                            SkBitmap* bitmap,
                                            const Context3D& context_3d) {
  // Check if we could reuse existing texture based bitmap.
  // Otherwise, release existing texture based bitmap and allocate
  // a new one based on video size.
  if (!IsSkBitmapProperlySizedTexture(bitmap,
                                      video_frame->visible_rect().size())) {
    if (!AllocateSkBitmapTexture(context_3d.gr_context, bitmap,
                                 video_frame->visible_rect().size())) {
      return false;
    }
  }

  unsigned texture_id =
      static_cast<unsigned>((bitmap->getTexture())->getTextureHandle());
  // If CopyVideoFrameTextureToGLTexture() changes the state of the
  // |texture_id|, it's needed to invalidate the state cached in skia,
  // but currently the state isn't changed.
  SkCanvasVideoRenderer::CopyVideoFrameTextureToGLTexture(
      context_3d.gl, video_frame, texture_id, 0, GL_RGBA, GL_UNSIGNED_BYTE,
      true, false);
  // Tell skia the texture contents changed behind its back.
  bitmap->notifyPixelsChanged();
  return true;
}
152 | |
// Adapter that lets VideoFrame::UpdateReleaseSyncPoint() insert/wait GL sync
// points through a GLES2Interface.
class SyncPointClientImpl : public VideoFrame::SyncPointClient {
 public:
  explicit SyncPointClientImpl(gpu::gles2::GLES2Interface* gl) : gl_(gl) {}
  ~SyncPointClientImpl() override {}
  uint32 InsertSyncPoint() override { return gl_->InsertSyncPointCHROMIUM(); }
  void WaitSyncPoint(uint32 sync_point) override {
    gl_->WaitSyncPointCHROMIUM(sync_point);
  }

 private:
  gpu::gles2::GLES2Interface* gl_;  // Raw pointer; never deleted here.

  DISALLOW_IMPLICIT_CONSTRUCTORS(SyncPointClientImpl);
};
167 | |
168 } // anonymous namespace | |
169 | |
// Generates an RGB image from a VideoFrame. Convert YUV to RGB plain on GPU.
class VideoImageGenerator : public SkImageGenerator {
 public:
  VideoImageGenerator(const scoped_refptr<VideoFrame>& frame) : frame_(frame) {
    DCHECK(frame_.get());
  }
  ~VideoImageGenerator() override {}

  // Swaps in a new frame, or nullptr to drop the reference so the frame is
  // not kept alive longer than needed.
  void set_frame(const scoped_refptr<VideoFrame>& frame) { frame_ = frame; }

 protected:
  // Reports the frame's visible dimensions as an N32 premultiplied image.
  bool onGetInfo(SkImageInfo* info) override {
    info->fWidth = frame_->visible_rect().width();
    info->fHeight = frame_->visible_rect().height();
    info->fColorType = kN32_SkColorType;
    info->fAlphaType = kPremul_SkAlphaType;
    return true;
  }

  // Software fallback: converts the frame straight to RGB into |pixels|.
  Result onGetPixels(const SkImageInfo& info,
                     void* pixels,
                     size_t row_bytes,
                     SkPMColor ctable[],
                     int* ctable_count) override {
    if (!frame_.get())
      return kInvalidInput;
    // If skia couldn't do the YUV conversion on GPU, we will on CPU.
    SkCanvasVideoRenderer::ConvertVideoFrameToRGBPixels(
        frame_, pixels, row_bytes);
    return kSuccess;
  }

  // Hands the Y/U/V planes to skia so the YUV->RGB conversion can happen on
  // the GPU. Returns false for formats skia cannot convert, which pushes the
  // caller onto the onGetPixels() software path.
  bool onGetYUV8Planes(SkISize sizes[3],
                       void* planes[3],
                       size_t row_bytes[3],
                       SkYUVColorSpace* color_space) override {
    if (!frame_.get() || !IsYUV(frame_->format()) ||
        // TODO(rileya): Skia currently doesn't support Rec709 YUV conversion,
        // or YUVA conversion. Remove this case once it does. As-is we will
        // fall back on the pure-software path in this case.
        frame_->format() == VideoFrame::YV12HD ||
        frame_->format() == VideoFrame::YV12A) {
      return false;
    }

    if (color_space) {
      if (IsJPEGColorSpace(frame_->format()))
        *color_space = kJPEG_SkYUVColorSpace;
      else
        *color_space = kRec601_SkYUVColorSpace;
    }

    for (int plane = VideoFrame::kYPlane; plane <= VideoFrame::kVPlane;
         ++plane) {
      if (sizes) {
        gfx::Size size;
        size =
            VideoFrame::PlaneSize(frame_->format(),
                                  plane,
                                  gfx::Size(frame_->visible_rect().width(),
                                            frame_->visible_rect().height()));
        sizes[plane].set(size.width(), size.height());
      }
      if (row_bytes && planes) {
        size_t offset;
        // Chroma rows are vertically subsampled for everything except YV16.
        int y_shift = (frame_->format() == media::VideoFrame::YV16) ? 0 : 1;
        if (plane == media::VideoFrame::kYPlane) {
          offset = (frame_->stride(media::VideoFrame::kYPlane) *
                    frame_->visible_rect().y()) +
                   frame_->visible_rect().x();
        } else {
          offset = (frame_->stride(media::VideoFrame::kUPlane) *
                    (frame_->visible_rect().y() >> y_shift)) +
                   (frame_->visible_rect().x() >> 1);
        }

        // Copy the frame to the supplied memory.
        // TODO: Find a way (API change?) to avoid this copy.
        char* out_line = static_cast<char*>(planes[plane]);
        int out_line_stride = row_bytes[plane];
        uint8* in_line = frame_->data(plane) + offset;
        int in_line_stride = frame_->stride(plane);
        // NOTE(review): |sizes| is dereferenced here without a null check, so
        // this assumes skia always passes |sizes| together with |planes| —
        // confirm against the SkImageGenerator contract.
        int plane_height = sizes[plane].height();
        if (in_line_stride == out_line_stride) {
          memcpy(out_line, in_line, plane_height * in_line_stride);
        } else {
          // Different line padding so need to copy one line at a time.
          int bytes_to_copy_per_line = out_line_stride < in_line_stride
                                           ? out_line_stride
                                           : in_line_stride;
          for (int line_no = 0; line_no < plane_height; line_no++) {
            memcpy(out_line, in_line, bytes_to_copy_per_line);
            in_line += in_line_stride;
            out_line += out_line_stride;
          }
        }
      }
    }
    return true;
  }

 private:
  scoped_refptr<VideoFrame> frame_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(VideoImageGenerator);
};
276 | |
SkCanvasVideoRenderer::SkCanvasVideoRenderer()
    : last_frame_timestamp_(media::kNoTimestamp()),
      // Both timers fire after kTemporaryResourceDeletionDelay seconds of
      // disuse to release the cached software/hardware bitmaps.
      frame_deleting_timer_(
          FROM_HERE,
          base::TimeDelta::FromSeconds(kTemporaryResourceDeletionDelay),
          this,
          &SkCanvasVideoRenderer::ResetLastFrame),
      accelerated_generator_(nullptr),
      accelerated_last_frame_timestamp_(media::kNoTimestamp()),
      accelerated_frame_deleting_timer_(
          FROM_HERE,
          base::TimeDelta::FromSeconds(kTemporaryResourceDeletionDelay),
          this,
          &SkCanvasVideoRenderer::ResetAcceleratedLastFrame) {
  // Volatile bitmaps hint skia not to cache their contents between draws.
  last_frame_.setIsVolatile(true);
}
293 | |
// Members (bitmaps, timers) release their resources via their own destructors.
SkCanvasVideoRenderer::~SkCanvasVideoRenderer() {}
295 | |
// Draws |video_frame| into |dest_rect| on |canvas| with the given |alpha|,
// transfer |mode| and |video_rotation|. Chooses one of three paths: HW video
// (NATIVE_TEXTURE) via a texture-backed bitmap, SW video on a HW canvas via
// VideoImageGenerator, or SW video on a SW canvas via CPU YUV->RGB
// conversion. Paints a black rect when no usable frame is available.
void SkCanvasVideoRenderer::Paint(const scoped_refptr<VideoFrame>& video_frame,
                                  SkCanvas* canvas,
                                  const gfx::RectF& dest_rect,
                                  uint8 alpha,
                                  SkXfermode::Mode mode,
                                  VideoRotation video_rotation,
                                  const Context3D& context_3d) {
  // Fully transparent: nothing would be visible, so skip all work.
  if (alpha == 0) {
    return;
  }

  SkRect dest;
  dest.set(dest_rect.x(), dest_rect.y(), dest_rect.right(), dest_rect.bottom());

  SkPaint paint;
  paint.setAlpha(alpha);

  // Paint black rectangle if there isn't a frame available or the
  // frame has an unexpected format.
  if (!video_frame.get() || video_frame->natural_size().IsEmpty() ||
      !IsYUVOrNative(video_frame->format())) {
    canvas->drawRect(dest, paint);
    canvas->flush();
    return;
  }

  SkBitmap* target_frame = nullptr;

  if (video_frame->format() == VideoFrame::NATIVE_TEXTURE) {
    // Draw HW Video on both SW and HW Canvas.
    // In SW Canvas case, rely on skia drawing Ganesh SkBitmap on SW SkCanvas.
    if (accelerated_last_frame_.isNull() ||
        video_frame->timestamp() != accelerated_last_frame_timestamp_) {
      DCHECK(context_3d.gl);
      DCHECK(context_3d.gr_context);
      if (accelerated_generator_) {
        // Reset SkBitmap used in SWVideo-to-HWCanvas path.
        accelerated_last_frame_.reset();
        accelerated_generator_ = nullptr;
      }
      if (!CopyVideoFrameTextureToSkBitmapTexture(
              video_frame.get(), &accelerated_last_frame_, context_3d)) {
        NOTREACHED();
        return;
      }
      DCHECK(video_frame->visible_rect().width() ==
                 accelerated_last_frame_.width() &&
             video_frame->visible_rect().height() ==
                 accelerated_last_frame_.height());

      accelerated_last_frame_timestamp_ = video_frame->timestamp();
    }
    target_frame = &accelerated_last_frame_;
    accelerated_frame_deleting_timer_.Reset();
  } else if (canvas->getGrContext()) {
    DCHECK(video_frame->format() != VideoFrame::NATIVE_TEXTURE);
    if (accelerated_last_frame_.isNull() ||
        video_frame->timestamp() != accelerated_last_frame_timestamp_) {
      // Draw SW Video on HW Canvas.
      if (!accelerated_generator_ && !accelerated_last_frame_.isNull()) {
        // Reset SkBitmap used in HWVideo-to-HWCanvas path.
        accelerated_last_frame_.reset();
      }
      accelerated_generator_ = new VideoImageGenerator(video_frame);

      // Note: This takes ownership of |accelerated_generator_|.
      if (!SkInstallDiscardablePixelRef(accelerated_generator_,
                                        &accelerated_last_frame_)) {
        NOTREACHED();
        return;
      }
      DCHECK(video_frame->visible_rect().width() ==
                 accelerated_last_frame_.width() &&
             video_frame->visible_rect().height() ==
                 accelerated_last_frame_.height());

      accelerated_last_frame_timestamp_ = video_frame->timestamp();
    } else if (accelerated_generator_) {
      // Same timestamp; just make sure the generator references this frame.
      accelerated_generator_->set_frame(video_frame);
    }
    target_frame = &accelerated_last_frame_;
    accelerated_frame_deleting_timer_.Reset();
  } else {
    // Draw SW Video on SW Canvas.
    DCHECK(video_frame->format() != VideoFrame::NATIVE_TEXTURE);
    if (last_frame_.isNull() ||
        video_frame->timestamp() != last_frame_timestamp_) {
      // Check if |bitmap| needs to be (re)allocated.
      if (last_frame_.isNull() ||
          last_frame_.width() != video_frame->visible_rect().width() ||
          last_frame_.height() != video_frame->visible_rect().height()) {
        last_frame_.allocN32Pixels(video_frame->visible_rect().width(),
                                   video_frame->visible_rect().height());
        last_frame_.setIsVolatile(true);
      }
      last_frame_.lockPixels();
      ConvertVideoFrameToRGBPixels(
          video_frame, last_frame_.getPixels(), last_frame_.rowBytes());
      last_frame_.notifyPixelsChanged();
      last_frame_.unlockPixels();
      last_frame_timestamp_ = video_frame->timestamp();
    }
    target_frame = &last_frame_;
    frame_deleting_timer_.Reset();
  }

  paint.setXfermodeMode(mode);
  paint.setFilterLevel(SkPaint::kLow_FilterLevel);

  // Only set up a canvas transform when rotation, scaling, or translation is
  // actually required.
  bool need_transform =
      video_rotation != VIDEO_ROTATION_0 ||
      dest_rect.size() != video_frame->visible_rect().size() ||
      !dest_rect.origin().IsOrigin();
  if (need_transform) {
    canvas->save();
    // Rotate about the center of |dest_rect|, then scale/translate so the
    // bitmap fills the (possibly axis-swapped) destination.
    canvas->translate(
        SkFloatToScalar(dest_rect.x() + (dest_rect.width() * 0.5f)),
        SkFloatToScalar(dest_rect.y() + (dest_rect.height() * 0.5f)));
    SkScalar angle = SkFloatToScalar(0.0f);
    switch (video_rotation) {
      case VIDEO_ROTATION_0:
        break;
      case VIDEO_ROTATION_90:
        angle = SkFloatToScalar(90.0f);
        break;
      case VIDEO_ROTATION_180:
        angle = SkFloatToScalar(180.0f);
        break;
      case VIDEO_ROTATION_270:
        angle = SkFloatToScalar(270.0f);
        break;
    }
    canvas->rotate(angle);

    gfx::SizeF rotated_dest_size = dest_rect.size();
    if (video_rotation == VIDEO_ROTATION_90 ||
        video_rotation == VIDEO_ROTATION_270) {
      // A quarter-turn swaps the destination's width and height.
      rotated_dest_size =
          gfx::SizeF(rotated_dest_size.height(), rotated_dest_size.width());
    }
    canvas->scale(
        SkFloatToScalar(rotated_dest_size.width() / target_frame->width()),
        SkFloatToScalar(rotated_dest_size.height() / target_frame->height()));
    canvas->translate(-SkFloatToScalar(target_frame->width() * 0.5f),
                      -SkFloatToScalar(target_frame->height() * 0.5f));
  }
  canvas->drawBitmap(*target_frame, 0, 0, &paint);
  if (need_transform)
    canvas->restore();
  canvas->flush();
  // SkCanvas::flush() causes the generator to generate SkImage, so delete
  // |video_frame| not to be outlived.
  if (canvas->getGrContext() && accelerated_generator_)
    accelerated_generator_->set_frame(nullptr);
}
451 | |
452 void SkCanvasVideoRenderer::Copy(const scoped_refptr<VideoFrame>& video_frame, | |
453 SkCanvas* canvas, | |
454 const Context3D& context_3d) { | |
455 Paint(video_frame, canvas, video_frame->visible_rect(), 0xff, | |
456 SkXfermode::kSrc_Mode, media::VIDEO_ROTATION_0, context_3d); | |
457 } | |
458 | |
// static
// Converts the visible region of a YUV |video_frame| to Skia's native 32-bit
// RGB layout, writing into |rgb_pixels| with a destination stride of
// |row_bytes|. The conversion routine is chosen per pixel format; libyuv is
// used where possible, with media/'s converters for the remaining formats.
void SkCanvasVideoRenderer::ConvertVideoFrameToRGBPixels(
    const scoped_refptr<media::VideoFrame>& video_frame,
    void* rgb_pixels,
    size_t row_bytes) {
  DCHECK(IsYUVOrNative(video_frame->format()))
      << video_frame->format();
  if (IsYUV(video_frame->format())) {
    // The U and V plane strides must match: both chroma offsets below are
    // computed from the U-plane stride only.
    DCHECK_EQ(video_frame->stride(media::VideoFrame::kUPlane),
              video_frame->stride(media::VideoFrame::kVPlane));
  }

  size_t y_offset = 0;
  size_t uv_offset = 0;
  if (IsYUV(video_frame->format())) {
    int y_shift = (video_frame->format() == media::VideoFrame::YV16) ? 0 : 1;
    // Use the "left" and "top" of the destination rect to locate the offset
    // in Y, U and V planes.
    y_offset = (video_frame->stride(media::VideoFrame::kYPlane) *
                video_frame->visible_rect().y()) +
               video_frame->visible_rect().x();
    // For format YV12, there is one U, V value per 2x2 block.
    // For format YV16, there is one U, V value per 2x1 block.
    uv_offset = (video_frame->stride(media::VideoFrame::kUPlane) *
                 (video_frame->visible_rect().y() >> y_shift)) +
                (video_frame->visible_rect().x() >> 1);
  }

  switch (video_frame->format()) {
    case VideoFrame::YV12:
    case VideoFrame::I420:
      // LIBYUV_I420_TO_ARGB resolves to I420ToARGB or I420ToABGR depending on
      // the platform's Skia channel order (see macros at the top of the file).
      LIBYUV_I420_TO_ARGB(
          video_frame->data(VideoFrame::kYPlane) + y_offset,
          video_frame->stride(VideoFrame::kYPlane),
          video_frame->data(VideoFrame::kUPlane) + uv_offset,
          video_frame->stride(VideoFrame::kUPlane),
          video_frame->data(VideoFrame::kVPlane) + uv_offset,
          video_frame->stride(VideoFrame::kVPlane),
          static_cast<uint8*>(rgb_pixels),
          row_bytes,
          video_frame->visible_rect().width(),
          video_frame->visible_rect().height());
      break;

    case VideoFrame::YV12J:
      // Full-range (JPEG) YUV: handled by media/'s converter.
      ConvertYUVToRGB32(
          video_frame->data(VideoFrame::kYPlane) + y_offset,
          video_frame->data(VideoFrame::kUPlane) + uv_offset,
          video_frame->data(VideoFrame::kVPlane) + uv_offset,
          static_cast<uint8*>(rgb_pixels),
          video_frame->visible_rect().width(),
          video_frame->visible_rect().height(),
          video_frame->stride(VideoFrame::kYPlane),
          video_frame->stride(VideoFrame::kUPlane),
          row_bytes,
          YV12J);
      break;

    case VideoFrame::YV12HD:
      // Rec709 colorspace: handled by media/'s converter.
      ConvertYUVToRGB32(
          video_frame->data(VideoFrame::kYPlane) + y_offset,
          video_frame->data(VideoFrame::kUPlane) + uv_offset,
          video_frame->data(VideoFrame::kVPlane) + uv_offset,
          static_cast<uint8*>(rgb_pixels),
          video_frame->visible_rect().width(),
          video_frame->visible_rect().height(),
          video_frame->stride(VideoFrame::kYPlane),
          video_frame->stride(VideoFrame::kUPlane),
          row_bytes,
          YV12HD);
      break;

    case VideoFrame::YV16:
      LIBYUV_I422_TO_ARGB(
          video_frame->data(VideoFrame::kYPlane) + y_offset,
          video_frame->stride(VideoFrame::kYPlane),
          video_frame->data(VideoFrame::kUPlane) + uv_offset,
          video_frame->stride(VideoFrame::kUPlane),
          video_frame->data(VideoFrame::kVPlane) + uv_offset,
          video_frame->stride(VideoFrame::kVPlane),
          static_cast<uint8*>(rgb_pixels),
          row_bytes,
          video_frame->visible_rect().width(),
          video_frame->visible_rect().height());
      break;

    case VideoFrame::YV12A:
      // Since libyuv doesn't support YUVA, fallback to media, which is not ARM
      // optimized.
      // TODO(fbarchard, mtomasz): Use libyuv, then copy the alpha channel.
      ConvertYUVAToARGB(
          video_frame->data(VideoFrame::kYPlane) + y_offset,
          video_frame->data(VideoFrame::kUPlane) + uv_offset,
          video_frame->data(VideoFrame::kVPlane) + uv_offset,
          video_frame->data(VideoFrame::kAPlane),
          static_cast<uint8*>(rgb_pixels),
          video_frame->visible_rect().width(),
          video_frame->visible_rect().height(),
          video_frame->stride(VideoFrame::kYPlane),
          video_frame->stride(VideoFrame::kUPlane),
          video_frame->stride(VideoFrame::kAPlane),
          row_bytes,
          YV12);
      break;

    case VideoFrame::YV24:
      libyuv::I444ToARGB(
          video_frame->data(VideoFrame::kYPlane) + y_offset,
          video_frame->stride(VideoFrame::kYPlane),
          video_frame->data(VideoFrame::kUPlane) + uv_offset,
          video_frame->stride(VideoFrame::kUPlane),
          video_frame->data(VideoFrame::kVPlane) + uv_offset,
          video_frame->stride(VideoFrame::kVPlane),
          static_cast<uint8*>(rgb_pixels),
          row_bytes,
          video_frame->visible_rect().width(),
          video_frame->visible_rect().height());
#if SK_R32_SHIFT == 0 && SK_G32_SHIFT == 8 && SK_B32_SHIFT == 16 && \
    SK_A32_SHIFT == 24
      // On ABGR platforms, swap channels in place after the I444 conversion
      // (libyuv has no direct I444ToABGR).
      libyuv::ARGBToABGR(static_cast<uint8*>(rgb_pixels),
                         row_bytes,
                         static_cast<uint8*>(rgb_pixels),
                         row_bytes,
                         video_frame->visible_rect().width(),
                         video_frame->visible_rect().height());
#endif
      break;

    case VideoFrame::NATIVE_TEXTURE:
      // Texture frames have no CPU-accessible planes to convert.
      NOTREACHED();
      break;
#if defined(VIDEO_HOLE)
    case VideoFrame::HOLE:
#endif  // defined(VIDEO_HOLE)
    case VideoFrame::ARGB:
    case VideoFrame::UNKNOWN:
    case VideoFrame::NV12:
      NOTREACHED();
  }
}
599 | |
// static
// Copies the mailbox-backed texture of a NATIVE_TEXTURE |video_frame| into the
// caller-provided |texture| using CopyTextureCHROMIUM, honoring
// |premultiply_alpha| and |flip_y|, then inserts a release sync point so the
// frame's texture can be safely reused by its producer.
void SkCanvasVideoRenderer::CopyVideoFrameTextureToGLTexture(
    gpu::gles2::GLES2Interface* gl,
    VideoFrame* video_frame,
    unsigned int texture,
    unsigned int level,
    unsigned int internal_format,
    unsigned int type,
    bool premultiply_alpha,
    bool flip_y) {
  DCHECK(video_frame && video_frame->format() == VideoFrame::NATIVE_TEXTURE);
  const gpu::MailboxHolder* mailbox_holder = video_frame->mailbox_holder();
  DCHECK(mailbox_holder->texture_target == GL_TEXTURE_2D ||
         mailbox_holder->texture_target == GL_TEXTURE_RECTANGLE_ARB ||
         mailbox_holder->texture_target == GL_TEXTURE_EXTERNAL_OES);

  // Wait for the producer's sync point before consuming the mailbox.
  gl->WaitSyncPointCHROMIUM(mailbox_holder->sync_point);
  uint32 source_texture = gl->CreateAndConsumeTextureCHROMIUM(
      mailbox_holder->texture_target, mailbox_holder->mailbox.name);

  // The video is stored in a unmultiplied format, so premultiply
  // if necessary.
  gl->PixelStorei(GL_UNPACK_PREMULTIPLY_ALPHA_CHROMIUM, premultiply_alpha);
  // Application itself needs to take care of setting the right |flip_y|
  // value down to get the expected result.
  // "flip_y == true" means to reverse the video orientation while
  // "flip_y == false" means to keep the intrinsic orientation.
  gl->PixelStorei(GL_UNPACK_FLIP_Y_CHROMIUM, flip_y);
  gl->CopyTextureCHROMIUM(GL_TEXTURE_2D, source_texture, texture, level,
                          internal_format, type);
  // Restore the pixel-store state modified above.
  gl->PixelStorei(GL_UNPACK_FLIP_Y_CHROMIUM, false);
  gl->PixelStorei(GL_UNPACK_PREMULTIPLY_ALPHA_CHROMIUM, false);

  gl->DeleteTextures(1, &source_texture);
  gl->Flush();

  // Record a release sync point so the frame's producer knows when the copy
  // has been issued.
  SyncPointClientImpl client(gl);
  video_frame->UpdateReleaseSyncPoint(&client);
}
639 | |
640 void SkCanvasVideoRenderer::ResetLastFrame() { | |
641 last_frame_.reset(); | |
642 last_frame_timestamp_ = media::kNoTimestamp(); | |
643 } | |
644 | |
645 void SkCanvasVideoRenderer::ResetAcceleratedLastFrame() { | |
646 accelerated_last_frame_.reset(); | |
647 accelerated_generator_ = nullptr; | |
648 accelerated_last_frame_timestamp_ = media::kNoTimestamp(); | |
649 } | |
650 | |
651 } // namespace media | |
OLD | NEW |