Chromium Code Reviews

Side by Side Diff: media/filters/vpx_video_decoder.cc

Issue 1561703002: media/vpx: Add support for VP9 alpha channel (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: avoid copying y,u and v. Created 4 years, 11 months ago
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "media/filters/vpx_video_decoder.h" 5 #include "media/filters/vpx_video_decoder.h"
6 6
7 #include <stddef.h> 7 #include <stddef.h>
8 #include <stdint.h> 8 #include <stdint.h>
9 9
10 #include <algorithm> 10 #include <algorithm>
(...skipping 118 matching lines...)
129 // Generates a "no_longer_needed" closure that holds a reference to this pool. 129 // Generates a "no_longer_needed" closure that holds a reference to this pool.
130 base::Closure CreateFrameCallback(void* fb_priv_data); 130 base::Closure CreateFrameCallback(void* fb_priv_data);
131 131
132 // base::MemoryDumpProvider. 132 // base::MemoryDumpProvider.
133 bool OnMemoryDump(const base::trace_event::MemoryDumpArgs& args, 133 bool OnMemoryDump(const base::trace_event::MemoryDumpArgs& args,
134 base::trace_event::ProcessMemoryDump* pmd) override; 134 base::trace_event::ProcessMemoryDump* pmd) override;
135 135
136 int NumberOfFrameBuffersInUseByDecoder() const; 136 int NumberOfFrameBuffersInUseByDecoder() const;
137 int NumberOfFrameBuffersInUseByDecoderAndVideoFrame() const; 137 int NumberOfFrameBuffersInUseByDecoderAndVideoFrame() const;
138 138
139 private:
140 friend class base::RefCountedThreadSafe<VpxVideoDecoder::MemoryPool>;
141 ~MemoryPool() override;
142
143 // Reference counted frame buffers used for VP9 decoding. Reference counting 139 // Reference counted frame buffers used for VP9 decoding. Reference counting
144 // is done manually because both chromium and libvpx have to release this 140 // is done manually because both chromium and libvpx have to release this
145 // before a buffer can be re-used. 141 // before a buffer can be re-used.
146 struct VP9FrameBuffer { 142 struct VP9FrameBuffer {
147 VP9FrameBuffer() : ref_cnt(0) {} 143 VP9FrameBuffer() : ref_cnt(0) {}
148 std::vector<uint8_t> data; 144 std::vector<uint8_t> data;
145 std::vector<uint8_t> alpha_data;
149 uint32_t ref_cnt; 146 uint32_t ref_cnt;
150 }; 147 };
151 148
149 private:
150 friend class base::RefCountedThreadSafe<VpxVideoDecoder::MemoryPool>;
151 ~MemoryPool() override;
152
152 // Gets the next available frame buffer for use by libvpx. 153 // Gets the next available frame buffer for use by libvpx.
153 VP9FrameBuffer* GetFreeFrameBuffer(size_t min_size); 154 VP9FrameBuffer* GetFreeFrameBuffer(size_t min_size);
154 155
155 // Method that gets called when a VideoFrame that references this pool gets 156 // Method that gets called when a VideoFrame that references this pool gets
156 // destroyed. 157 // destroyed.
157 void OnVideoFrameDestroyed(VP9FrameBuffer* frame_buffer); 158 void OnVideoFrameDestroyed(VP9FrameBuffer* frame_buffer);
158 159
159 // Frame buffers to be used by libvpx for VP9 Decoding. 160 // Frame buffers to be used by libvpx for VP9 Decoding.
160 std::vector<VP9FrameBuffer*> frame_buffers_; 161 std::vector<VP9FrameBuffer*> frame_buffers_;
161 162
(...skipping 211 matching lines...)
373 state_ = kNormal; 374 state_ = kNormal;
374 // PostTask() to avoid calling |closure| immediately. 375 // PostTask() to avoid calling |closure| immediately.
375 base::ThreadTaskRunnerHandle::Get()->PostTask(FROM_HERE, closure); 376 base::ThreadTaskRunnerHandle::Get()->PostTask(FROM_HERE, closure);
376 } 377 }
377 378
378 bool VpxVideoDecoder::ConfigureDecoder(const VideoDecoderConfig& config) { 379 bool VpxVideoDecoder::ConfigureDecoder(const VideoDecoderConfig& config) {
379 if (config.codec() != kCodecVP8 && config.codec() != kCodecVP9) 380 if (config.codec() != kCodecVP8 && config.codec() != kCodecVP9)
380 return false; 381 return false;
381 382
382 // These are the codec and pixel format combinations supported in principle. 383 // These are the codec and pixel format combinations supported in principle.
383 // Note that VP9 does not support Alpha in the current implementation.
384 DCHECK( 384 DCHECK(
385 (config.codec() == kCodecVP8 && config.format() == PIXEL_FORMAT_YV12) || 385 (config.codec() == kCodecVP8 && config.format() == PIXEL_FORMAT_YV12) ||
386 (config.codec() == kCodecVP8 && config.format() == PIXEL_FORMAT_YV12A) || 386 (config.codec() == kCodecVP8 && config.format() == PIXEL_FORMAT_YV12A) ||
387 (config.codec() == kCodecVP9 && config.format() == PIXEL_FORMAT_YV12) || 387 (config.codec() == kCodecVP9 && config.format() == PIXEL_FORMAT_YV12) ||
388 (config.codec() == kCodecVP9 && config.format() == PIXEL_FORMAT_YV12A) ||
388 (config.codec() == kCodecVP9 && config.format() == PIXEL_FORMAT_YV24)); 389 (config.codec() == kCodecVP9 && config.format() == PIXEL_FORMAT_YV24));
389 390
390 #if !defined(DISABLE_FFMPEG_VIDEO_DECODERS) 391 #if !defined(DISABLE_FFMPEG_VIDEO_DECODERS)
391 // When FFmpegVideoDecoder is available it handles VP8 that doesn't have 392 // When FFmpegVideoDecoder is available it handles VP8 that doesn't have
392 // alpha, and VpxVideoDecoder will handle VP8 with alpha. 393 // alpha, and VpxVideoDecoder will handle VP8 with alpha.
393 if (config.codec() == kCodecVP8 && config.format() != PIXEL_FORMAT_YV12A) 394 if (config.codec() == kCodecVP8 && config.format() != PIXEL_FORMAT_YV12A)
394 return false; 395 return false;
395 #endif 396 #endif
396 397
397 CloseDecoder(); 398 CloseDecoder();
398 399
399 vpx_codec_ = InitializeVpxContext(vpx_codec_, config); 400 vpx_codec_ = InitializeVpxContext(vpx_codec_, config);
400 if (!vpx_codec_) 401 if (!vpx_codec_)
401 return false; 402 return false;
402 403
403 // Configure VP9 to decode into our buffers to skip a data copy on decoding. 404 // Configure VP9 to decode into our buffers to skip a data copy on
405 // decoding. For YV12A-VP9, we use our buffers for the Y, U and V planes and
406 // copy the A plane.
404 if (config.codec() == kCodecVP9) { 407 if (config.codec() == kCodecVP9) {
405 DCHECK_NE(PIXEL_FORMAT_YV12A, config.format());
406 DCHECK(vpx_codec_get_caps(vpx_codec_->iface) & 408 DCHECK(vpx_codec_get_caps(vpx_codec_->iface) &
407 VPX_CODEC_CAP_EXTERNAL_FRAME_BUFFER); 409 VPX_CODEC_CAP_EXTERNAL_FRAME_BUFFER);
408 410
409 memory_pool_ = new MemoryPool(); 411 memory_pool_ = new MemoryPool();
410 if (vpx_codec_set_frame_buffer_functions(vpx_codec_, 412 if (vpx_codec_set_frame_buffer_functions(vpx_codec_,
411 &MemoryPool::GetVP9FrameBuffer, 413 &MemoryPool::GetVP9FrameBuffer,
412 &MemoryPool::ReleaseVP9FrameBuffer, 414 &MemoryPool::ReleaseVP9FrameBuffer,
413 memory_pool_.get())) { 415 memory_pool_.get())) {
414 DLOG(ERROR) << "Failed to configure external buffers. " 416 DLOG(ERROR) << "Failed to configure external buffers. "
415 << vpx_codec_error(vpx_codec_); 417 << vpx_codec_error(vpx_codec_);
(...skipping 47 matching lines...)
463 if (!vpx_image) { 465 if (!vpx_image) {
464 *video_frame = nullptr; 466 *video_frame = nullptr;
465 return true; 467 return true;
466 } 468 }
467 469
468 if (vpx_image->user_priv != user_priv) { 470 if (vpx_image->user_priv != user_priv) {
469 DLOG(ERROR) << "Invalid output timestamp."; 471 DLOG(ERROR) << "Invalid output timestamp.";
470 return false; 472 return false;
471 } 473 }
472 474
473 if (!CopyVpxImageToVideoFrame(vpx_image, video_frame)) 475 const vpx_image_t* vpx_image_alpha = nullptr;
DaleCurtis 2016/01/12 01:18:20 Seems like this should go in its own function? Dec
vignesh 2016/01/13 01:26:04 Done.
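For context on the refactoring discussed above, here is a minimal standalone sketch of the kind of helper the alpha-frame decode could move into, written against the public libvpx decoder API only. The function name DecodeAlphaImage and its bare-pointer signature are illustrative assumptions, not the code that eventually landed in the later patch set.

// Sketch only: decodes the alpha frame carried in |side_data| (after the
// 8-byte |side_data_id| prefix) with a dedicated libvpx context and returns
// the decoded image, or nullptr if there is nothing to decode or it failed.
#include <cstddef>
#include <cstdint>
#include "vpx/vpx_decoder.h"

const vpx_image_t* DecodeAlphaImage(vpx_codec_ctx_t* alpha_ctx,
                                    const uint8_t* side_data,
                                    size_t side_data_size,
                                    void* user_priv) {
  if (!alpha_ctx || side_data_size <= 8)
    return nullptr;  // No alpha payload present.
  if (vpx_codec_decode(alpha_ctx, side_data + 8,
                       static_cast<unsigned int>(side_data_size - 8),
                       user_priv, 0 /* deadline */) != VPX_CODEC_OK) {
    return nullptr;  // Caller can log vpx_codec_error(alpha_ctx).
  }
  vpx_codec_iter_t iter = nullptr;
  return vpx_codec_get_frame(alpha_ctx, &iter);  // nullptr until a frame is ready.
}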
476 if (vpx_codec_alpha_ && buffer->side_data_size() >= 8) {
477 // First 8 bytes of side data is |side_data_id| in big endian.
478 const uint64_t side_data_id = base::NetToHost64(
479 *(reinterpret_cast<const uint64_t*>(buffer->side_data())));
480 if (side_data_id == 1) {
481 // Try and decode buffer->side_data() minus the first 8 bytes as a full
482 // frame.
483 int64_t timestamp_alpha = buffer->timestamp().InMicroseconds();
484 void* user_priv_alpha = reinterpret_cast<void*>(&timestamp_alpha);
485 {
486 TRACE_EVENT1("video", "vpx_codec_decode_alpha", "timestamp_alpha",
487 timestamp_alpha);
488 vpx_codec_err_t status = vpx_codec_decode(
489 vpx_codec_alpha_, buffer->side_data() + 8,
490 buffer->side_data_size() - 8, user_priv_alpha, 0 /* deadline */);
491 if (status != VPX_CODEC_OK) {
492 DLOG(ERROR) << "vpx_codec_decode() failed for the alpha: "
493 << vpx_codec_error(vpx_codec_);
494 return false;
495 }
496 }
497
498 vpx_codec_iter_t iter_alpha = NULL;
499 vpx_image_alpha = vpx_codec_get_frame(vpx_codec_alpha_, &iter_alpha);
500 if (!vpx_image_alpha) {
501 *video_frame = nullptr;
502 return true;
503 }
504
505 if (vpx_image_alpha->user_priv != user_priv_alpha) {
506 DLOG(ERROR) << "Invalid output timestamp on alpha.";
507 return false;
508 }
509
510 if (vpx_image_alpha->d_h != vpx_image->d_h ||
511 vpx_image_alpha->d_w != vpx_image->d_w) {
512 DLOG(ERROR) << "The alpha plane dimensions are not the same as the "
513 "image dimensions.";
514 return false;
515 }
516
517 if (config_.codec() == kCodecVP9) {
518 VpxVideoDecoder::MemoryPool::VP9FrameBuffer* frame_buffer =
519 static_cast<VpxVideoDecoder::MemoryPool::VP9FrameBuffer*>(
520 vpx_image->fb_priv);
521 uint64_t alpha_plane_size =
522 vpx_image_alpha->stride[VPX_PLANE_Y] * vpx_image_alpha->d_h;
523 if (frame_buffer->alpha_data.size() < alpha_plane_size) {
524 frame_buffer->alpha_data.resize(alpha_plane_size);
525 }
526 libyuv::CopyPlane(vpx_image_alpha->planes[VPX_PLANE_Y],
527 vpx_image_alpha->stride[VPX_PLANE_Y],
528 &frame_buffer->alpha_data[0],
529 vpx_image_alpha->stride[VPX_PLANE_Y],
530 vpx_image_alpha->d_w, vpx_image_alpha->d_h);
531 }
532 }
533 }
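As an aside on the side-data layout used above (the first 8 bytes carry |side_data_id| in big endian, followed by the encoded alpha frame), here is a self-contained sketch of that prefix read without base::NetToHost64. The helper name ReadSideDataId is an assumption for illustration only.

#include <cstddef>
#include <cstdint>

// Sketch only: assembles the 8-byte big-endian |side_data_id| prefix into a
// host-order value; the alpha frame payload itself starts at side_data + 8.
uint64_t ReadSideDataId(const uint8_t* side_data, size_t side_data_size) {
  if (side_data_size < 8)
    return 0;  // Too small to carry an id; treat as "no alpha side data".
  uint64_t id = 0;
  for (int i = 0; i < 8; ++i)
    id = (id << 8) | side_data[i];
  return id;
}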
534 if (!CopyVpxImageToVideoFrame(vpx_image,
535 vpx_codec_alpha_ ? vpx_image_alpha : nullptr,
536 video_frame)) {
474 return false; 537 return false;
538 }
539
540 if (vpx_codec_alpha_ && config_.codec() == kCodecVP8) {
541 if (vpx_image_alpha) {
542 libyuv::CopyPlane(vpx_image_alpha->planes[VPX_PLANE_Y],
543 vpx_image_alpha->stride[VPX_PLANE_Y],
544 (*video_frame)->visible_data(VideoFrame::kAPlane),
545 (*video_frame)->stride(VideoFrame::kAPlane),
546 (*video_frame)->visible_rect().width(),
547 (*video_frame)->visible_rect().height());
548 } else {
549 // TODO(mcasas): Is this a warning or an error?
550 DLOG(WARNING) << "Making Alpha channel opaque due to missing input";
551 const uint32_t kAlphaOpaqueValue = 255;
552 libyuv::SetPlane((*video_frame)->visible_data(VideoFrame::kAPlane),
553 (*video_frame)->stride(VideoFrame::kAPlane),
554 (*video_frame)->visible_rect().width(),
555 (*video_frame)->visible_rect().height(),
556 kAlphaOpaqueValue);
557 }
558 }
475 559
476 (*video_frame)->set_timestamp(base::TimeDelta::FromMicroseconds(timestamp)); 560 (*video_frame)->set_timestamp(base::TimeDelta::FromMicroseconds(timestamp));
477 561
478 // Default to the color space from the config, but if the bitstream specifies 562 // Default to the color space from the config, but if the bitstream specifies
479 // one, prefer that instead. 563 // one, prefer that instead.
480 ColorSpace color_space = config_.color_space(); 564 ColorSpace color_space = config_.color_space();
481 if (vpx_image->cs == VPX_CS_BT_709) 565 if (vpx_image->cs == VPX_CS_BT_709)
482 color_space = COLOR_SPACE_HD_REC709; 566 color_space = COLOR_SPACE_HD_REC709;
483 else if (vpx_image->cs == VPX_CS_BT_601) 567 else if (vpx_image->cs == VPX_CS_BT_601)
484 color_space = COLOR_SPACE_SD_REC601; 568 color_space = COLOR_SPACE_SD_REC601;
485 (*video_frame) 569 (*video_frame)
486 ->metadata() 570 ->metadata()
487 ->SetInteger(VideoFrameMetadata::COLOR_SPACE, color_space); 571 ->SetInteger(VideoFrameMetadata::COLOR_SPACE, color_space);
488
489 if (!vpx_codec_alpha_)
490 return true;
491
492 if (buffer->side_data_size() < 8) {
493 // TODO(mcasas): Is this a warning or an error?
494 DLOG(WARNING) << "Making Alpha channel opaque due to missing input";
495 const uint32_t kAlphaOpaqueValue = 255;
496 libyuv::SetPlane((*video_frame)->visible_data(VideoFrame::kAPlane),
497 (*video_frame)->stride(VideoFrame::kAPlane),
498 (*video_frame)->visible_rect().width(),
499 (*video_frame)->visible_rect().height(),
500 kAlphaOpaqueValue);
501 return true;
502 }
503
504 // First 8 bytes of side data is |side_data_id| in big endian.
505 const uint64_t side_data_id = base::NetToHost64(
506 *(reinterpret_cast<const uint64_t*>(buffer->side_data())));
507 if (side_data_id != 1)
508 return true;
509
510 // Try and decode buffer->side_data() minus the first 8 bytes as a full frame.
511 int64_t timestamp_alpha = buffer->timestamp().InMicroseconds();
512 void* user_priv_alpha = reinterpret_cast<void*>(&timestamp_alpha);
513 {
514 TRACE_EVENT1("video", "vpx_codec_decode_alpha", "timestamp_alpha",
515 timestamp_alpha);
516 vpx_codec_err_t status = vpx_codec_decode(
517 vpx_codec_alpha_, buffer->side_data() + 8, buffer->side_data_size() - 8,
518 user_priv_alpha, 0 /* deadline */);
519 if (status != VPX_CODEC_OK) {
520 DLOG(ERROR) << "vpx_codec_decode() failed for the alpha: "
521 << vpx_codec_error(vpx_codec_);
522 return false;
523 }
524 }
525
526 vpx_codec_iter_t iter_alpha = NULL;
527 const vpx_image_t* vpx_image_alpha =
528 vpx_codec_get_frame(vpx_codec_alpha_, &iter_alpha);
529 if (!vpx_image_alpha) {
530 *video_frame = nullptr;
531 return true;
532 }
533
534 if (vpx_image_alpha->user_priv != user_priv_alpha) {
535 DLOG(ERROR) << "Invalid output timestamp on alpha.";
536 return false;
537 }
538
539 if (vpx_image_alpha->d_h != vpx_image->d_h ||
540 vpx_image_alpha->d_w != vpx_image->d_w) {
541 DLOG(ERROR) << "The alpha plane dimensions are not the same as the "
542 "image dimensions.";
543 return false;
544 }
545
546 libyuv::CopyPlane(vpx_image_alpha->planes[VPX_PLANE_Y],
547 vpx_image_alpha->stride[VPX_PLANE_Y],
548 (*video_frame)->visible_data(VideoFrame::kAPlane),
549 (*video_frame)->stride(VideoFrame::kAPlane),
550 (*video_frame)->visible_rect().width(),
551 (*video_frame)->visible_rect().height());
552 return true; 572 return true;
553 } 573 }
554 574
555 bool VpxVideoDecoder::CopyVpxImageToVideoFrame( 575 bool VpxVideoDecoder::CopyVpxImageToVideoFrame(
556 const struct vpx_image* vpx_image, 576 const struct vpx_image* vpx_image,
577 const struct vpx_image* vpx_image_alpha,
557 scoped_refptr<VideoFrame>* video_frame) { 578 scoped_refptr<VideoFrame>* video_frame) {
558 DCHECK(vpx_image); 579 DCHECK(vpx_image);
559 580
560 VideoPixelFormat codec_format; 581 VideoPixelFormat codec_format;
561 switch (vpx_image->fmt) { 582 switch (vpx_image->fmt) {
562 case VPX_IMG_FMT_I420: 583 case VPX_IMG_FMT_I420:
563 codec_format = vpx_codec_alpha_ ? PIXEL_FORMAT_YV12A : PIXEL_FORMAT_YV12; 584 codec_format = vpx_codec_alpha_ ? PIXEL_FORMAT_YV12A : PIXEL_FORMAT_YV12;
564 break; 585 break;
565 586
566 case VPX_IMG_FMT_I444: 587 case VPX_IMG_FMT_I444:
567 codec_format = PIXEL_FORMAT_YV24; 588 codec_format = PIXEL_FORMAT_YV24;
568 break; 589 break;
569 590
570 default: 591 default:
571 DLOG(ERROR) << "Unsupported pixel format: " << vpx_image->fmt; 592 DLOG(ERROR) << "Unsupported pixel format: " << vpx_image->fmt;
572 return false; 593 return false;
573 } 594 }
574 595
575 // The mixed |w|/|d_h| in |coded_size| is intentional. Setting the correct 596 // The mixed |w|/|d_h| in |coded_size| is intentional. Setting the correct
576 // coded width is necessary to allow coalesced memory access, which may avoid 597 // coded width is necessary to allow coalesced memory access, which may avoid
577 // frame copies. Setting the correct coded height, however, does not have any 598 // frame copies. Setting the correct coded height, however, does not have any
578 // benefit, and only risks copying too much data. 599 // benefit, and only risks copying too much data.
579 const gfx::Size coded_size(vpx_image->w, vpx_image->d_h); 600 const gfx::Size coded_size(vpx_image->w, vpx_image->d_h);
580 const gfx::Size visible_size(vpx_image->d_w, vpx_image->d_h); 601 const gfx::Size visible_size(vpx_image->d_w, vpx_image->d_h);
581 602
582 if (memory_pool_.get()) { 603 if (memory_pool_.get()) {
583 DCHECK_EQ(kCodecVP9, config_.codec()); 604 DCHECK_EQ(kCodecVP9, config_.codec());
584 DCHECK(!vpx_codec_alpha_) << "Uh-oh, VP9 and Alpha shouldn't coexist."; 605 if (vpx_image_alpha) {
585 *video_frame = VideoFrame::WrapExternalYuvData( 606 VpxVideoDecoder::MemoryPool::VP9FrameBuffer* frame_buffer =
586 codec_format, 607 static_cast<VpxVideoDecoder::MemoryPool::VP9FrameBuffer*>(
587 coded_size, gfx::Rect(visible_size), config_.natural_size(), 608 vpx_image->fb_priv);
588 vpx_image->stride[VPX_PLANE_Y], 609 *video_frame = VideoFrame::WrapExternalYuvaData(
589 vpx_image->stride[VPX_PLANE_U], 610 codec_format, coded_size, gfx::Rect(visible_size),
590 vpx_image->stride[VPX_PLANE_V], 611 config_.natural_size(), vpx_image->stride[VPX_PLANE_Y],
591 vpx_image->planes[VPX_PLANE_Y], 612 vpx_image->stride[VPX_PLANE_U], vpx_image->stride[VPX_PLANE_V],
592 vpx_image->planes[VPX_PLANE_U], 613 vpx_image_alpha->stride[VPX_PLANE_Y], vpx_image->planes[VPX_PLANE_Y],
593 vpx_image->planes[VPX_PLANE_V], 614 vpx_image->planes[VPX_PLANE_U], vpx_image->planes[VPX_PLANE_V],
594 kNoTimestamp()); 615 &frame_buffer->alpha_data[0], kNoTimestamp());
616 } else {
617 *video_frame = VideoFrame::WrapExternalYuvData(
618 codec_format, coded_size, gfx::Rect(visible_size),
619 config_.natural_size(), vpx_image->stride[VPX_PLANE_Y],
620 vpx_image->stride[VPX_PLANE_U], vpx_image->stride[VPX_PLANE_V],
621 vpx_image->planes[VPX_PLANE_Y], vpx_image->planes[VPX_PLANE_U],
622 vpx_image->planes[VPX_PLANE_V], kNoTimestamp());
623 }
595 video_frame->get()->AddDestructionObserver( 624 video_frame->get()->AddDestructionObserver(
596 memory_pool_->CreateFrameCallback(vpx_image->fb_priv)); 625 memory_pool_->CreateFrameCallback(vpx_image->fb_priv));
597 626
598 UMA_HISTOGRAM_COUNTS("Media.Vpx.VideoDecoderBuffersInUseByDecoder", 627 UMA_HISTOGRAM_COUNTS("Media.Vpx.VideoDecoderBuffersInUseByDecoder",
599 memory_pool_->NumberOfFrameBuffersInUseByDecoder()); 628 memory_pool_->NumberOfFrameBuffersInUseByDecoder());
600 UMA_HISTOGRAM_COUNTS( 629 UMA_HISTOGRAM_COUNTS(
601 "Media.Vpx.VideoDecoderBuffersInUseByDecoderAndVideoFrame", 630 "Media.Vpx.VideoDecoderBuffersInUseByDecoderAndVideoFrame",
602 memory_pool_->NumberOfFrameBuffersInUseByDecoderAndVideoFrame()); 631 memory_pool_->NumberOfFrameBuffersInUseByDecoderAndVideoFrame());
603 632
604 return true; 633 return true;
(...skipping 15 matching lines...)
620 (*video_frame)->visible_data(VideoFrame::kUPlane), 649 (*video_frame)->visible_data(VideoFrame::kUPlane),
621 (*video_frame)->stride(VideoFrame::kUPlane), 650 (*video_frame)->stride(VideoFrame::kUPlane),
622 (*video_frame)->visible_data(VideoFrame::kVPlane), 651 (*video_frame)->visible_data(VideoFrame::kVPlane),
623 (*video_frame)->stride(VideoFrame::kVPlane), coded_size.width(), 652 (*video_frame)->stride(VideoFrame::kVPlane), coded_size.width(),
624 coded_size.height()); 653 coded_size.height());
625 654
626 return true; 655 return true;
627 } 656 }
628 657
629 } // namespace media 658 } // namespace media