
Unified diff: media/filters/vpx_video_decoder.cc (removed lines are prefixed with '-', added lines with '+'; unchanged context lines are unprefixed)

Issue 1561703002: media/vpx: Add support for VP9 alpha channel (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: rebase Created 4 years, 11 months ago
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/filters/vpx_video_decoder.h"

#include <stddef.h>
#include <stdint.h>

#include <algorithm>
(...skipping 118 matching lines...)
  // Generates a "no_longer_needed" closure that holds a reference to this pool.
  base::Closure CreateFrameCallback(void* fb_priv_data);

  // base::MemoryDumpProvider.
  bool OnMemoryDump(const base::trace_event::MemoryDumpArgs& args,
                    base::trace_event::ProcessMemoryDump* pmd) override;

  int NumberOfFrameBuffersInUseByDecoder() const;
  int NumberOfFrameBuffersInUseByDecoderAndVideoFrame() const;

- private:
-  friend class base::RefCountedThreadSafe<VpxVideoDecoder::MemoryPool>;
-  ~MemoryPool() override;
-
  // Reference counted frame buffers used for VP9 decoding. Reference counting
  // is done manually because both chromium and libvpx has to release this
  // before a buffer can be re-used.
  struct VP9FrameBuffer {
    VP9FrameBuffer() : ref_cnt(0) {}
    std::vector<uint8_t> data;
+    std::vector<uint8_t> alpha_data;
    uint32_t ref_cnt;
  };

+ private:
+  friend class base::RefCountedThreadSafe<VpxVideoDecoder::MemoryPool>;
+  ~MemoryPool() override;
+
  // Gets the next available frame buffer for use by libvpx.
  VP9FrameBuffer* GetFreeFrameBuffer(size_t min_size);

  // Method that gets called when a VideoFrame that references this pool gets
  // destroyed.
  void OnVideoFrameDestroyed(VP9FrameBuffer* frame_buffer);

  // Frame buffers to be used by libvpx for VP9 Decoding.
  std::vector<VP9FrameBuffer*> frame_buffers_;

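The VP9FrameBuffer comment above is the crux of this hunk: a buffer can be held by libvpx (which may keep it as a reference frame) and, independently, by the VideoFrame handed to the rest of the pipeline, so it only becomes reusable once both sides have released it; the new alpha_data vector lives in the same pool entry so the A plane shares that lifetime. A minimal, single-threaded sketch of the idea, using hypothetical SimplePool/SimpleFrameBuffer names rather than the real classes:

// Illustrative only; mirrors the ref_cnt idea of VP9FrameBuffer, not the
// actual Chromium implementation.
#include <cstddef>
#include <cstdint>
#include <vector>

struct SimpleFrameBuffer {
  std::vector<uint8_t> data;        // Y/U/V storage handed to libvpx.
  std::vector<uint8_t> alpha_data;  // A-plane storage for the VP9 alpha path.
  uint32_t ref_cnt = 0;             // Decoder reference + VideoFrame reference.
};

class SimplePool {
 public:
  // Returns a buffer nobody holds, growing the pool if none is free.
  SimpleFrameBuffer* GetFreeFrameBuffer(size_t min_size) {
    for (SimpleFrameBuffer* fb : buffers_) {
      if (fb->ref_cnt == 0) {
        if (fb->data.size() < min_size)
          fb->data.resize(min_size);
        return fb;
      }
    }
    buffers_.push_back(new SimpleFrameBuffer());
    buffers_.back()->data.resize(min_size);
    return buffers_.back();
  }

  // Called when libvpx takes the buffer and again when a VideoFrame wraps it.
  void AddRef(SimpleFrameBuffer* fb) { ++fb->ref_cnt; }
  // Called from the libvpx release callback and the VideoFrame destruction
  // observer; the buffer becomes reusable only once both have run.
  void Release(SimpleFrameBuffer* fb) { --fb->ref_cnt; }

 private:
  std::vector<SimpleFrameBuffer*> buffers_;
};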
(...skipping 211 matching lines...)
  state_ = kNormal;
  // PostTask() to avoid calling |closure| inmediately.
  base::ThreadTaskRunnerHandle::Get()->PostTask(FROM_HERE, closure);
}

bool VpxVideoDecoder::ConfigureDecoder(const VideoDecoderConfig& config) {
  if (config.codec() != kCodecVP8 && config.codec() != kCodecVP9)
    return false;

  // These are the combinations of codec-pixel format supported in principle.
-  // Note that VP9 does not support Alpha in the current implementation.
  DCHECK(
      (config.codec() == kCodecVP8 && config.format() == PIXEL_FORMAT_YV12) ||
      (config.codec() == kCodecVP8 && config.format() == PIXEL_FORMAT_YV12A) ||
      (config.codec() == kCodecVP9 && config.format() == PIXEL_FORMAT_YV12) ||
+      (config.codec() == kCodecVP9 && config.format() == PIXEL_FORMAT_YV12A) ||
      (config.codec() == kCodecVP9 && config.format() == PIXEL_FORMAT_YV24));

#if !defined(DISABLE_FFMPEG_VIDEO_DECODERS)
  // When FFmpegVideoDecoder is available it handles VP8 that doesn't have
  // alpha, and VpxVideoDecoder will handle VP8 with alpha.
  if (config.codec() == kCodecVP8 && config.format() != PIXEL_FORMAT_YV12A)
    return false;
#endif

  CloseDecoder();

  vpx_codec_ = InitializeVpxContext(vpx_codec_, config);
  if (!vpx_codec_)
    return false;

-  // Configure VP9 to decode on our buffers to skip a data copy on decoding.
+  // Configure VP9 to decode on our buffers to skip a data copy on
+  // decoding. For YV12A-VP9, we use our buffers for the Y, U and V planes and
+  // copy the A plane.
  if (config.codec() == kCodecVP9) {
-    DCHECK_NE(PIXEL_FORMAT_YV12A, config.format());
    DCHECK(vpx_codec_get_caps(vpx_codec_->iface) &
           VPX_CODEC_CAP_EXTERNAL_FRAME_BUFFER);

    memory_pool_ = new MemoryPool();
    if (vpx_codec_set_frame_buffer_functions(vpx_codec_,
                                             &MemoryPool::GetVP9FrameBuffer,
                                             &MemoryPool::ReleaseVP9FrameBuffer,
                                             memory_pool_.get())) {
      DLOG(ERROR) << "Failed to configure external buffers. "
                  << vpx_codec_error(vpx_codec_);
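For context on the vpx_codec_set_frame_buffer_functions() call above: libvpx invokes a "get" callback whenever it needs storage for a decoded frame and a "release" callback when it drops its last reference to that storage, which is what lets VP9 decode directly into the pool's buffers without an extra copy. A rough sketch of the callback shape, reusing the hypothetical SimplePool from the earlier sketch (signatures as I read them in vpx/vpx_frame_buffer.h, not taken from this CL):

// Sketch only: how a pool plugs into vpx_codec_set_frame_buffer_functions().
#include "vpx/vpx_decoder.h"
#include "vpx/vpx_frame_buffer.h"

// libvpx asks for at least |min_size| bytes; return 0 on success, <0 on error.
static int GetVP9FrameBuffer(void* user_priv, size_t min_size,
                             vpx_codec_frame_buffer_t* fb) {
  SimplePool* pool = static_cast<SimplePool*>(user_priv);
  SimpleFrameBuffer* buffer = pool->GetFreeFrameBuffer(min_size);
  if (!buffer)
    return -1;
  pool->AddRef(buffer);
  fb->data = buffer->data.data();  // Decoder writes the planes here.
  fb->size = buffer->data.size();
  fb->priv = buffer;               // Comes back later in vpx_image_t::fb_priv.
  return 0;
}

// Called when the decoder no longer references the buffer.
static int ReleaseVP9FrameBuffer(void* user_priv,
                                 vpx_codec_frame_buffer_t* fb) {
  SimplePool* pool = static_cast<SimplePool*>(user_priv);
  pool->Release(static_cast<SimpleFrameBuffer*>(fb->priv));
  return 0;
}

// Registration mirrors the call in ConfigureDecoder() above:
//   vpx_codec_set_frame_buffer_functions(ctx, &GetVP9FrameBuffer,
//                                        &ReleaseVP9FrameBuffer, &pool);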
(...skipping 47 matching lines...)
  if (!vpx_image) {
    *video_frame = nullptr;
    return true;
  }

  if (vpx_image->user_priv != user_priv) {
    DLOG(ERROR) << "Invalid output timestamp.";
    return false;
  }

-  if (!CopyVpxImageToVideoFrame(vpx_image, video_frame))
-    return false;
+  const vpx_image_t* vpx_image_alpha = nullptr;
+  AlphaDecodeStatus alpha_decode_status =
+      DecodeAlphaPlane(vpx_image, &vpx_image_alpha, buffer);
+  if (alpha_decode_status == kAlphaPlaneError) {
+    return false;
+  } else if (alpha_decode_status == kNoAlphaPlaneData) {
+    *video_frame = nullptr;
+    return true;
+  }
+  if (!CopyVpxImageToVideoFrame(vpx_image, vpx_image_alpha, video_frame)) {
+    return false;
+  }
+  if (vpx_image_alpha && config_.codec() == kCodecVP8) {
+    libyuv::CopyPlane(vpx_image_alpha->planes[VPX_PLANE_Y],
+                      vpx_image_alpha->stride[VPX_PLANE_Y],
+                      (*video_frame)->visible_data(VideoFrame::kAPlane),
+                      (*video_frame)->stride(VideoFrame::kAPlane),
+                      (*video_frame)->visible_rect().width(),
+                      (*video_frame)->visible_rect().height());
+  }

  (*video_frame)->set_timestamp(base::TimeDelta::FromMicroseconds(timestamp));

  // Default to the color space from the config, but if the bistream specifies
  // one, prefer that instead.
  ColorSpace color_space = config_.color_space();
  if (vpx_image->cs == VPX_CS_BT_709)
    color_space = COLOR_SPACE_HD_REC709;
  else if (vpx_image->cs == VPX_CS_BT_601)
    color_space = COLOR_SPACE_SD_REC601;
  (*video_frame)
      ->metadata()
      ->SetInteger(VideoFrameMetadata::COLOR_SPACE, color_space);
+  return true;
+}

-  if (!vpx_codec_alpha_)
-    return true;
-
-  if (buffer->side_data_size() < 8) {
-    // TODO(mcasas): Is this a warning or an error?
-    DLOG(WARNING) << "Making Alpha channel opaque due to missing input";
-    const uint32_t kAlphaOpaqueValue = 255;
-    libyuv::SetPlane((*video_frame)->visible_data(VideoFrame::kAPlane),
-                     (*video_frame)->stride(VideoFrame::kAPlane),
-                     (*video_frame)->visible_rect().width(),
-                     (*video_frame)->visible_rect().height(),
-                     kAlphaOpaqueValue);
-    return true;
-  }
+VpxVideoDecoder::AlphaDecodeStatus VpxVideoDecoder::DecodeAlphaPlane(
+    const struct vpx_image* vpx_image,
+    const struct vpx_image** vpx_image_alpha,
+    const scoped_refptr<DecoderBuffer>& buffer) {
+  if (!vpx_codec_alpha_ || buffer->side_data_size() < 8) {
+    return kAlphaPlaneProcessed;
+  }

  // First 8 bytes of side data is |side_data_id| in big endian.
  const uint64_t side_data_id = base::NetToHost64(
      *(reinterpret_cast<const uint64_t*>(buffer->side_data())));
-  if (side_data_id != 1)
-    return true;
+  if (side_data_id != 1) {
+    return kAlphaPlaneProcessed;
+  }

-  // Try and decode buffer->side_data() minus the first 8 bytes as a full frame.
+  // Try and decode buffer->side_data() minus the first 8 bytes as a full
+  // frame.
  int64_t timestamp_alpha = buffer->timestamp().InMicroseconds();
  void* user_priv_alpha = reinterpret_cast<void*>(&timestamp_alpha);
  {
    TRACE_EVENT1("video", "vpx_codec_decode_alpha", "timestamp_alpha",
                 timestamp_alpha);
    vpx_codec_err_t status = vpx_codec_decode(
        vpx_codec_alpha_, buffer->side_data() + 8, buffer->side_data_size() - 8,
        user_priv_alpha, 0 /* deadline */);
    if (status != VPX_CODEC_OK) {
      DLOG(ERROR) << "vpx_codec_decode() failed for the alpha: "
                  << vpx_codec_error(vpx_codec_);
-      return false;
+      return kAlphaPlaneError;
    }
  }

  vpx_codec_iter_t iter_alpha = NULL;
-  const vpx_image_t* vpx_image_alpha =
-      vpx_codec_get_frame(vpx_codec_alpha_, &iter_alpha);
-  if (!vpx_image_alpha) {
-    *video_frame = nullptr;
-    return true;
+  *vpx_image_alpha = vpx_codec_get_frame(vpx_codec_alpha_, &iter_alpha);
+  if (!(*vpx_image_alpha)) {
+    return kNoAlphaPlaneData;
  }

-  if (vpx_image_alpha->user_priv != user_priv_alpha) {
+  if ((*vpx_image_alpha)->user_priv != user_priv_alpha) {
    DLOG(ERROR) << "Invalid output timestamp on alpha.";
-    return false;
+    return kAlphaPlaneError;
  }

-  if (vpx_image_alpha->d_h != vpx_image->d_h ||
-      vpx_image_alpha->d_w != vpx_image->d_w) {
+  if ((*vpx_image_alpha)->d_h != vpx_image->d_h ||
+      (*vpx_image_alpha)->d_w != vpx_image->d_w) {
    DLOG(ERROR) << "The alpha plane dimensions are not the same as the "
                   "image dimensions.";
-    return false;
+    return kAlphaPlaneError;
  }

-  libyuv::CopyPlane(vpx_image_alpha->planes[VPX_PLANE_Y],
-                    vpx_image_alpha->stride[VPX_PLANE_Y],
-                    (*video_frame)->visible_data(VideoFrame::kAPlane),
-                    (*video_frame)->stride(VideoFrame::kAPlane),
-                    (*video_frame)->visible_rect().width(),
-                    (*video_frame)->visible_rect().height());
-  return true;
+  if (config_.codec() == kCodecVP9) {
+    VpxVideoDecoder::MemoryPool::VP9FrameBuffer* frame_buffer =
+        static_cast<VpxVideoDecoder::MemoryPool::VP9FrameBuffer*>(
+            vpx_image->fb_priv);
+    uint64_t alpha_plane_size =
+        (*vpx_image_alpha)->stride[VPX_PLANE_Y] * (*vpx_image_alpha)->d_h;
+    if (frame_buffer->alpha_data.size() < alpha_plane_size) {
+      frame_buffer->alpha_data.resize(alpha_plane_size);
+    }
+    libyuv::CopyPlane((*vpx_image_alpha)->planes[VPX_PLANE_Y],
+                      (*vpx_image_alpha)->stride[VPX_PLANE_Y],
+                      &frame_buffer->alpha_data[0],
+                      (*vpx_image_alpha)->stride[VPX_PLANE_Y],
+                      (*vpx_image_alpha)->d_w, (*vpx_image_alpha)->d_h);
+  }
+  return kAlphaPlaneProcessed;
}

bool VpxVideoDecoder::CopyVpxImageToVideoFrame(
    const struct vpx_image* vpx_image,
+    const struct vpx_image* vpx_image_alpha,
    scoped_refptr<VideoFrame>* video_frame) {
  DCHECK(vpx_image);

  VideoPixelFormat codec_format;
  switch (vpx_image->fmt) {
    case VPX_IMG_FMT_I420:
-      codec_format = vpx_codec_alpha_ ? PIXEL_FORMAT_YV12A : PIXEL_FORMAT_YV12;
+      codec_format = vpx_image_alpha ? PIXEL_FORMAT_YV12A : PIXEL_FORMAT_YV12;
      break;

    case VPX_IMG_FMT_I444:
      codec_format = PIXEL_FORMAT_YV24;
      break;

    default:
      DLOG(ERROR) << "Unsupported pixel format: " << vpx_image->fmt;
      return false;
  }

  // The mixed |w|/|d_h| in |coded_size| is intentional. Setting the correct
  // coded width is necessary to allow coalesced memory access, which may avoid
  // frame copies. Setting the correct coded height however does not have any
  // benefit, and only risk copying too much data.
  const gfx::Size coded_size(vpx_image->w, vpx_image->d_h);
  const gfx::Size visible_size(vpx_image->d_w, vpx_image->d_h);

  if (memory_pool_.get()) {
    DCHECK_EQ(kCodecVP9, config_.codec());
-    DCHECK(!vpx_codec_alpha_) << "Uh-oh, VP9 and Alpha shouldn't coexist.";
-    *video_frame = VideoFrame::WrapExternalYuvData(
-        codec_format,
-        coded_size, gfx::Rect(visible_size), config_.natural_size(),
-        vpx_image->stride[VPX_PLANE_Y],
-        vpx_image->stride[VPX_PLANE_U],
-        vpx_image->stride[VPX_PLANE_V],
-        vpx_image->planes[VPX_PLANE_Y],
-        vpx_image->planes[VPX_PLANE_U],
-        vpx_image->planes[VPX_PLANE_V],
-        kNoTimestamp());
+    if (vpx_image_alpha) {
+      VpxVideoDecoder::MemoryPool::VP9FrameBuffer* frame_buffer =
+          static_cast<VpxVideoDecoder::MemoryPool::VP9FrameBuffer*>(
+              vpx_image->fb_priv);
+      *video_frame = VideoFrame::WrapExternalYuvaData(
+          codec_format, coded_size, gfx::Rect(visible_size),
+          config_.natural_size(), vpx_image->stride[VPX_PLANE_Y],
+          vpx_image->stride[VPX_PLANE_U], vpx_image->stride[VPX_PLANE_V],
+          vpx_image_alpha->stride[VPX_PLANE_Y], vpx_image->planes[VPX_PLANE_Y],
+          vpx_image->planes[VPX_PLANE_U], vpx_image->planes[VPX_PLANE_V],
+          &frame_buffer->alpha_data[0], kNoTimestamp());
+    } else {
+      *video_frame = VideoFrame::WrapExternalYuvData(
+          codec_format, coded_size, gfx::Rect(visible_size),
+          config_.natural_size(), vpx_image->stride[VPX_PLANE_Y],
+          vpx_image->stride[VPX_PLANE_U], vpx_image->stride[VPX_PLANE_V],
+          vpx_image->planes[VPX_PLANE_Y], vpx_image->planes[VPX_PLANE_U],
+          vpx_image->planes[VPX_PLANE_V], kNoTimestamp());
+    }
    if (!(*video_frame))
      return false;

    video_frame->get()->AddDestructionObserver(
        memory_pool_->CreateFrameCallback(vpx_image->fb_priv));

    UMA_HISTOGRAM_COUNTS("Media.Vpx.VideoDecoderBuffersInUseByDecoder",
                         memory_pool_->NumberOfFrameBuffersInUseByDecoder());
    UMA_HISTOGRAM_COUNTS(
        "Media.Vpx.VideoDecoderBuffersInUseByDecoderAndVideoFrame",
(...skipping 20 matching lines...)
                   (*video_frame)->visible_data(VideoFrame::kUPlane),
                   (*video_frame)->stride(VideoFrame::kUPlane),
                   (*video_frame)->visible_data(VideoFrame::kVPlane),
                   (*video_frame)->stride(VideoFrame::kVPlane),
                   coded_size.width(), coded_size.height());

  return true;
}

}  // namespace media
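One convention worth spelling out for the new DecodeAlphaPlane(): the alpha channel arrives as DecoderBuffer side data whose first 8 bytes are |side_data_id| in big-endian byte order (only ID 1 is treated as alpha data here), and whose remaining bytes are a complete VP8/VP9 frame whose decoded Y plane is the alpha plane. A standalone sketch of that parsing, with a hypothetical ParseAlphaSideData helper and no Chromium dependencies:

// Illustrative only; the real code uses base::NetToHost64 on the first
// 8 bytes, as shown in the diff above.
#include <cstddef>
#include <cstdint>

struct AlphaSideData {
  const uint8_t* alpha_bitstream = nullptr;  // Fed to the alpha decoder.
  size_t alpha_size = 0;
};

// Returns true if |side_data| carries an alpha frame to decode.
static bool ParseAlphaSideData(const uint8_t* side_data, size_t side_data_size,
                               AlphaSideData* out) {
  if (side_data_size < 8)
    return false;  // No (or malformed) alpha side data.

  // First 8 bytes: |side_data_id| in big-endian byte order.
  uint64_t side_data_id = 0;
  for (int i = 0; i < 8; ++i)
    side_data_id = (side_data_id << 8) | side_data[i];
  if (side_data_id != 1)
    return false;  // Some other kind of side data.

  // The rest is a full VP8/VP9 frame; its decoded Y plane is the alpha plane.
  out->alpha_bitstream = side_data + 8;
  out->alpha_size = side_data_size - 8;
  return true;
}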