Chromium Code Reviews

Side by Side Diff: media/filters/vpx_video_decoder.cc

Issue 1608113002: Revert of media/vpx: Add support for VP9 alpha channel (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 4 years, 11 months ago
OLD | NEW
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "media/filters/vpx_video_decoder.h" 5 #include "media/filters/vpx_video_decoder.h"
6 6
7 #include <stddef.h> 7 #include <stddef.h>
8 #include <stdint.h> 8 #include <stdint.h>
9 9
10 #include <algorithm> 10 #include <algorithm>
(...skipping 118 matching lines...)
129 // Generates a "no_longer_needed" closure that holds a reference to this pool. 129 // Generates a "no_longer_needed" closure that holds a reference to this pool.
130 base::Closure CreateFrameCallback(void* fb_priv_data); 130 base::Closure CreateFrameCallback(void* fb_priv_data);
131 131
132 // base::MemoryDumpProvider. 132 // base::MemoryDumpProvider.
133 bool OnMemoryDump(const base::trace_event::MemoryDumpArgs& args, 133 bool OnMemoryDump(const base::trace_event::MemoryDumpArgs& args,
134 base::trace_event::ProcessMemoryDump* pmd) override; 134 base::trace_event::ProcessMemoryDump* pmd) override;
135 135
136 int NumberOfFrameBuffersInUseByDecoder() const; 136 int NumberOfFrameBuffersInUseByDecoder() const;
137 int NumberOfFrameBuffersInUseByDecoderAndVideoFrame() const; 137 int NumberOfFrameBuffersInUseByDecoderAndVideoFrame() const;
138 138
139 private:
140 friend class base::RefCountedThreadSafe<VpxVideoDecoder::MemoryPool>;
141 ~MemoryPool() override;
142
139 // Reference counted frame buffers used for VP9 decoding. Reference counting 143 // Reference counted frame buffers used for VP9 decoding. Reference counting
140 // is done manually because both Chromium and libvpx have to release this 144 // is done manually because both Chromium and libvpx have to release this
141 // before a buffer can be re-used. 145 // before a buffer can be re-used.
142 struct VP9FrameBuffer { 146 struct VP9FrameBuffer {
143 VP9FrameBuffer() : ref_cnt(0) {} 147 VP9FrameBuffer() : ref_cnt(0) {}
144 std::vector<uint8_t> data; 148 std::vector<uint8_t> data;
145 std::vector<uint8_t> alpha_data;
146 uint32_t ref_cnt; 149 uint32_t ref_cnt;
147 }; 150 };
148 151
149 private:
150 friend class base::RefCountedThreadSafe<VpxVideoDecoder::MemoryPool>;
151 ~MemoryPool() override;
152
153 // Gets the next available frame buffer for use by libvpx. 152 // Gets the next available frame buffer for use by libvpx.
154 VP9FrameBuffer* GetFreeFrameBuffer(size_t min_size); 153 VP9FrameBuffer* GetFreeFrameBuffer(size_t min_size);
155 154
156 // Method that gets called when a VideoFrame that references this pool gets 155 // Method that gets called when a VideoFrame that references this pool gets
157 // destroyed. 156 // destroyed.
158 void OnVideoFrameDestroyed(VP9FrameBuffer* frame_buffer); 157 void OnVideoFrameDestroyed(VP9FrameBuffer* frame_buffer);
159 158
160 // Frame buffers to be used by libvpx for VP9 Decoding. 159 // Frame buffers to be used by libvpx for VP9 Decoding.
161 std::vector<VP9FrameBuffer*> frame_buffers_; 160 std::vector<VP9FrameBuffer*> frame_buffers_;
162 161
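A VP9FrameBuffer stays in use while either libvpx or an outstanding VideoFrame still references it, so the pool above keeps a manual reference count per buffer. The following is a minimal, self-contained sketch of that dual-ownership counting against the public libvpx frame-buffer callback API; PoolBuffer and SimplePool are hypothetical stand-ins, not the Chromium MemoryPool itself.

#include <cstddef>
#include <cstdint>
#include <vector>

#include "vpx/vpx_frame_buffer.h"  // vpx_codec_frame_buffer_t

// Hypothetical, simplified stand-in for VpxVideoDecoder::MemoryPool.
struct PoolBuffer {
  std::vector<uint8_t> data;
  uint32_t ref_cnt = 0;  // Counts libvpx plus any live VideoFrame holding it.
};

struct SimplePool {
  std::vector<PoolBuffer*> buffers;

  // Passed to vpx_codec_set_frame_buffer_functions() as the "get" callback.
  static int GetFrameBuffer(void* cb_priv, size_t min_size,
                            vpx_codec_frame_buffer_t* fb) {
    SimplePool* pool = static_cast<SimplePool*>(cb_priv);
    PoolBuffer* buf = nullptr;
    for (PoolBuffer* candidate : pool->buffers) {
      if (candidate->ref_cnt == 0) {  // Re-use a buffer nobody references.
        buf = candidate;
        break;
      }
    }
    if (!buf) {
      buf = new PoolBuffer();
      pool->buffers.push_back(buf);
    }
    if (buf->data.size() < min_size)
      buf->data.resize(min_size);
    fb->data = buf->data.data();
    fb->size = buf->data.size();
    fb->priv = buf;  // Round-trips back via vpx_image_t::fb_priv.
    ++buf->ref_cnt;  // libvpx now holds a reference.
    return 0;        // 0 == success for libvpx frame-buffer callbacks.
  }

  // Passed to vpx_codec_set_frame_buffer_functions() as the "release" callback.
  static int ReleaseFrameBuffer(void* cb_priv, vpx_codec_frame_buffer_t* fb) {
    if (fb->priv)
      --static_cast<PoolBuffer*>(fb->priv)->ref_cnt;
    return 0;
  }

  // Equivalent of CreateFrameCallback()/OnVideoFrameDestroyed(): the wrapping
  // VideoFrame takes one extra reference and drops it when it is destroyed.
  void AddVideoFrameRef(PoolBuffer* buf) { ++buf->ref_cnt; }
  void OnVideoFrameDestroyed(PoolBuffer* buf) { --buf->ref_cnt; }
};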
(...skipping 211 matching lines...)
374 state_ = kNormal; 373 state_ = kNormal;
375 // PostTask() to avoid calling |closure| immediately. 374 // PostTask() to avoid calling |closure| immediately.
376 base::ThreadTaskRunnerHandle::Get()->PostTask(FROM_HERE, closure); 375 base::ThreadTaskRunnerHandle::Get()->PostTask(FROM_HERE, closure);
377 } 376 }
378 377
379 bool VpxVideoDecoder::ConfigureDecoder(const VideoDecoderConfig& config) { 378 bool VpxVideoDecoder::ConfigureDecoder(const VideoDecoderConfig& config) {
380 if (config.codec() != kCodecVP8 && config.codec() != kCodecVP9) 379 if (config.codec() != kCodecVP8 && config.codec() != kCodecVP9)
381 return false; 380 return false;
382 381
383 // These are the codec/pixel-format combinations supported in principle. 382 // These are the codec/pixel-format combinations supported in principle.
383 // Note that VP9 does not support Alpha in the current implementation.
384 DCHECK( 384 DCHECK(
385 (config.codec() == kCodecVP8 && config.format() == PIXEL_FORMAT_YV12) || 385 (config.codec() == kCodecVP8 && config.format() == PIXEL_FORMAT_YV12) ||
386 (config.codec() == kCodecVP8 && config.format() == PIXEL_FORMAT_YV12A) || 386 (config.codec() == kCodecVP8 && config.format() == PIXEL_FORMAT_YV12A) ||
387 (config.codec() == kCodecVP9 && config.format() == PIXEL_FORMAT_YV12) || 387 (config.codec() == kCodecVP9 && config.format() == PIXEL_FORMAT_YV12) ||
388 (config.codec() == kCodecVP9 && config.format() == PIXEL_FORMAT_YV12A) ||
389 (config.codec() == kCodecVP9 && config.format() == PIXEL_FORMAT_YV24)); 388 (config.codec() == kCodecVP9 && config.format() == PIXEL_FORMAT_YV24));
390 389
391 #if !defined(DISABLE_FFMPEG_VIDEO_DECODERS) 390 #if !defined(DISABLE_FFMPEG_VIDEO_DECODERS)
392 // When FFmpegVideoDecoder is available it handles VP8 that doesn't have 391 // When FFmpegVideoDecoder is available it handles VP8 that doesn't have
393 // alpha, and VpxVideoDecoder will handle VP8 with alpha. 392 // alpha, and VpxVideoDecoder will handle VP8 with alpha.
394 if (config.codec() == kCodecVP8 && config.format() != PIXEL_FORMAT_YV12A) 393 if (config.codec() == kCodecVP8 && config.format() != PIXEL_FORMAT_YV12A)
395 return false; 394 return false;
396 #endif 395 #endif
397 396
398 CloseDecoder(); 397 CloseDecoder();
399 398
400 vpx_codec_ = InitializeVpxContext(vpx_codec_, config); 399 vpx_codec_ = InitializeVpxContext(vpx_codec_, config);
401 if (!vpx_codec_) 400 if (!vpx_codec_)
402 return false; 401 return false;
403 402
404 // Configure VP9 to decode on our buffers to skip a data copy on 403 // Configure VP9 to decode on our buffers to skip a data copy on decoding.
405 // decoding. For YV12A-VP9, we use our buffers for the Y, U and V planes and
406 // copy the A plane.
407 if (config.codec() == kCodecVP9) { 404 if (config.codec() == kCodecVP9) {
405 DCHECK_NE(PIXEL_FORMAT_YV12A, config.format());
408 DCHECK(vpx_codec_get_caps(vpx_codec_->iface) & 406 DCHECK(vpx_codec_get_caps(vpx_codec_->iface) &
409 VPX_CODEC_CAP_EXTERNAL_FRAME_BUFFER); 407 VPX_CODEC_CAP_EXTERNAL_FRAME_BUFFER);
410 408
411 memory_pool_ = new MemoryPool(); 409 memory_pool_ = new MemoryPool();
412 if (vpx_codec_set_frame_buffer_functions(vpx_codec_, 410 if (vpx_codec_set_frame_buffer_functions(vpx_codec_,
413 &MemoryPool::GetVP9FrameBuffer, 411 &MemoryPool::GetVP9FrameBuffer,
414 &MemoryPool::ReleaseVP9FrameBuffer, 412 &MemoryPool::ReleaseVP9FrameBuffer,
415 memory_pool_.get())) { 413 memory_pool_.get())) {
416 DLOG(ERROR) << "Failed to configure external buffers. " 414 DLOG(ERROR) << "Failed to configure external buffers. "
417 << vpx_codec_error(vpx_codec_); 415 << vpx_codec_error(vpx_codec_);
(...skipping 47 matching lines...)
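ConfigureDecoder() calls InitializeVpxContext(), whose body falls outside the chunks shown here. For orientation only, a generic libvpx decoder bring-up looks roughly like the sketch below; it is not the Chromium helper, and the thread-count heuristic is an assumption made for illustration.

#include "vpx/vp8dx.h"        // vpx_codec_vp8_dx(), vpx_codec_vp9_dx()
#include "vpx/vpx_decoder.h"  // vpx_codec_ctx_t, vpx_codec_dec_init()

// Generic sketch of creating a libvpx decode context for VP8 or VP9.
static vpx_codec_ctx_t* InitVpxDecoder(bool is_vp9, int coded_width) {
  vpx_codec_ctx_t* context = new vpx_codec_ctx_t();
  vpx_codec_dec_cfg_t vpx_config = {};
  vpx_config.w = 0;  // Width/height are discovered from the bitstream.
  vpx_config.h = 0;
  // Assumed heuristic, purely illustrative: more threads for wider video.
  vpx_config.threads = coded_width > 1280 ? 4 : 2;

  vpx_codec_iface_t* iface = is_vp9 ? vpx_codec_vp9_dx() : vpx_codec_vp8_dx();
  if (vpx_codec_dec_init(context, iface, &vpx_config, 0 /* flags */) !=
      VPX_CODEC_OK) {
    delete context;
    return nullptr;
  }
  return context;
}

The registration of MemoryPool::GetVP9FrameBuffer / ReleaseVP9FrameBuffer shown above then only succeeds because the DCHECK confirms the interface advertises VPX_CODEC_CAP_EXTERNAL_FRAME_BUFFER.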
465 if (!vpx_image) { 463 if (!vpx_image) {
466 *video_frame = nullptr; 464 *video_frame = nullptr;
467 return true; 465 return true;
468 } 466 }
469 467
470 if (vpx_image->user_priv != user_priv) { 468 if (vpx_image->user_priv != user_priv) {
471 DLOG(ERROR) << "Invalid output timestamp."; 469 DLOG(ERROR) << "Invalid output timestamp.";
472 return false; 470 return false;
473 } 471 }
474 472
475 const vpx_image_t* vpx_image_alpha = nullptr; 473 if (!CopyVpxImageToVideoFrame(vpx_image, video_frame))
476 AlphaDecodeStatus alpha_decode_status =
477 DecodeAlphaPlane(vpx_image, &vpx_image_alpha, buffer);
478 if (alpha_decode_status == kAlphaPlaneError) {
479 return false; 474 return false;
480 } else if (alpha_decode_status == kNoAlphaPlaneData) {
481 *video_frame = nullptr;
482 return true;
483 }
484 if (!CopyVpxImageToVideoFrame(vpx_image, vpx_image_alpha, video_frame)) {
485 return false;
486 }
487 if (vpx_image_alpha && config_.codec() == kCodecVP8) {
488 libyuv::CopyPlane(vpx_image_alpha->planes[VPX_PLANE_Y],
489 vpx_image_alpha->stride[VPX_PLANE_Y],
490 (*video_frame)->visible_data(VideoFrame::kAPlane),
491 (*video_frame)->stride(VideoFrame::kAPlane),
492 (*video_frame)->visible_rect().width(),
493 (*video_frame)->visible_rect().height());
494 }
495 475
496 (*video_frame)->set_timestamp(base::TimeDelta::FromMicroseconds(timestamp)); 476 (*video_frame)->set_timestamp(base::TimeDelta::FromMicroseconds(timestamp));
497 477
498 // Default to the color space from the config, but if the bitstream specifies 478 // Default to the color space from the config, but if the bitstream specifies
499 // one, prefer that instead. 479 // one, prefer that instead.
500 ColorSpace color_space = config_.color_space(); 480 ColorSpace color_space = config_.color_space();
501 if (vpx_image->cs == VPX_CS_BT_709) 481 if (vpx_image->cs == VPX_CS_BT_709)
502 color_space = COLOR_SPACE_HD_REC709; 482 color_space = COLOR_SPACE_HD_REC709;
503 else if (vpx_image->cs == VPX_CS_BT_601) 483 else if (vpx_image->cs == VPX_CS_BT_601)
504 color_space = COLOR_SPACE_SD_REC601; 484 color_space = COLOR_SPACE_SD_REC601;
505 (*video_frame) 485 (*video_frame)
506 ->metadata() 486 ->metadata()
507 ->SetInteger(VideoFrameMetadata::COLOR_SPACE, color_space); 487 ->SetInteger(VideoFrameMetadata::COLOR_SPACE, color_space);
508 return true;
509 }
510 488
511 VpxVideoDecoder::AlphaDecodeStatus VpxVideoDecoder::DecodeAlphaPlane( 489 if (!vpx_codec_alpha_)
512 const struct vpx_image* vpx_image, 490 return true;
513 const struct vpx_image** vpx_image_alpha, 491
514 const scoped_refptr<DecoderBuffer>& buffer) { 492 if (buffer->side_data_size() < 8) {
515 if (!vpx_codec_alpha_ || buffer->side_data_size() < 8) { 493 // TODO(mcasas): Is this a warning or an error?
516 return kAlphaPlaneProcessed; 494 DLOG(WARNING) << "Making Alpha channel opaque due to missing input";
495 const uint32_t kAlphaOpaqueValue = 255;
496 libyuv::SetPlane((*video_frame)->visible_data(VideoFrame::kAPlane),
497 (*video_frame)->stride(VideoFrame::kAPlane),
498 (*video_frame)->visible_rect().width(),
499 (*video_frame)->visible_rect().height(),
500 kAlphaOpaqueValue);
501 return true;
517 } 502 }
518 503
519 // The first 8 bytes of side data are |side_data_id| in big endian. 504 // The first 8 bytes of side data are |side_data_id| in big endian.
520 const uint64_t side_data_id = base::NetToHost64( 505 const uint64_t side_data_id = base::NetToHost64(
521 *(reinterpret_cast<const uint64_t*>(buffer->side_data()))); 506 *(reinterpret_cast<const uint64_t*>(buffer->side_data())));
522 if (side_data_id != 1) { 507 if (side_data_id != 1)
523 return kAlphaPlaneProcessed; 508 return true;
524 }
525 509
526 // Try to decode buffer->side_data() minus the first 8 bytes as a full 510 // Try to decode buffer->side_data() minus the first 8 bytes as a full frame.
527 // frame.
528 int64_t timestamp_alpha = buffer->timestamp().InMicroseconds(); 511 int64_t timestamp_alpha = buffer->timestamp().InMicroseconds();
529 void* user_priv_alpha = reinterpret_cast<void*>(&timestamp_alpha); 512 void* user_priv_alpha = reinterpret_cast<void*>(&timestamp_alpha);
530 { 513 {
531 TRACE_EVENT1("video", "vpx_codec_decode_alpha", "timestamp_alpha", 514 TRACE_EVENT1("video", "vpx_codec_decode_alpha", "timestamp_alpha",
532 timestamp_alpha); 515 timestamp_alpha);
533 vpx_codec_err_t status = vpx_codec_decode( 516 vpx_codec_err_t status = vpx_codec_decode(
534 vpx_codec_alpha_, buffer->side_data() + 8, buffer->side_data_size() - 8, 517 vpx_codec_alpha_, buffer->side_data() + 8, buffer->side_data_size() - 8,
535 user_priv_alpha, 0 /* deadline */); 518 user_priv_alpha, 0 /* deadline */);
536 if (status != VPX_CODEC_OK) { 519 if (status != VPX_CODEC_OK) {
537 DLOG(ERROR) << "vpx_codec_decode() failed for the alpha: " 520 DLOG(ERROR) << "vpx_codec_decode() failed for the alpha: "
538 << vpx_codec_error(vpx_codec_); 521 << vpx_codec_error(vpx_codec_);
539 return kAlphaPlaneError; 522 return false;
540 } 523 }
541 } 524 }
542 525
543 vpx_codec_iter_t iter_alpha = NULL; 526 vpx_codec_iter_t iter_alpha = NULL;
544 *vpx_image_alpha = vpx_codec_get_frame(vpx_codec_alpha_, &iter_alpha); 527 const vpx_image_t* vpx_image_alpha =
545 if (!(*vpx_image_alpha)) { 528 vpx_codec_get_frame(vpx_codec_alpha_, &iter_alpha);
546 return kNoAlphaPlaneData; 529 if (!vpx_image_alpha) {
530 *video_frame = nullptr;
531 return true;
547 } 532 }
548 533
549 if ((*vpx_image_alpha)->user_priv != user_priv_alpha) { 534 if (vpx_image_alpha->user_priv != user_priv_alpha) {
550 DLOG(ERROR) << "Invalid output timestamp on alpha."; 535 DLOG(ERROR) << "Invalid output timestamp on alpha.";
551 return kAlphaPlaneError; 536 return false;
552 } 537 }
553 538
554 if ((*vpx_image_alpha)->d_h != vpx_image->d_h || 539 if (vpx_image_alpha->d_h != vpx_image->d_h ||
555 (*vpx_image_alpha)->d_w != vpx_image->d_w) { 540 vpx_image_alpha->d_w != vpx_image->d_w) {
556 DLOG(ERROR) << "The alpha plane dimensions are not the same as the " 541 DLOG(ERROR) << "The alpha plane dimensions are not the same as the "
557 "image dimensions."; 542 "image dimensions.";
558 return kAlphaPlaneError; 543 return false;
559 } 544 }
560 545
561 if (config_.codec() == kCodecVP9) { 546 libyuv::CopyPlane(vpx_image_alpha->planes[VPX_PLANE_Y],
562 VpxVideoDecoder::MemoryPool::VP9FrameBuffer* frame_buffer = 547 vpx_image_alpha->stride[VPX_PLANE_Y],
563 static_cast<VpxVideoDecoder::MemoryPool::VP9FrameBuffer*>( 548 (*video_frame)->visible_data(VideoFrame::kAPlane),
564 vpx_image->fb_priv); 549 (*video_frame)->stride(VideoFrame::kAPlane),
565 uint64_t alpha_plane_size = 550 (*video_frame)->visible_rect().width(),
566 (*vpx_image_alpha)->stride[VPX_PLANE_Y] * (*vpx_image_alpha)->d_h; 551 (*video_frame)->visible_rect().height());
567 if (frame_buffer->alpha_data.size() < alpha_plane_size) { 552 return true;
568 frame_buffer->alpha_data.resize(alpha_plane_size);
569 }
570 libyuv::CopyPlane((*vpx_image_alpha)->planes[VPX_PLANE_Y],
571 (*vpx_image_alpha)->stride[VPX_PLANE_Y],
572 &frame_buffer->alpha_data[0],
573 (*vpx_image_alpha)->stride[VPX_PLANE_Y],
574 (*vpx_image_alpha)->d_w, (*vpx_image_alpha)->d_h);
575 }
576 return kAlphaPlaneProcessed;
577 } 553 }
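The alpha path depends on a small framing convention for DecoderBuffer side data: the first 8 bytes carry |side_data_id| in big-endian order (1 means "alpha channel bitstream") and the remaining bytes are a complete VP8/VP9 frame handed to |vpx_codec_alpha_|. Below is a standalone sketch of that parse; it uses explicit byte assembly instead of base::NetToHost64() so it compiles outside Chromium, and the AlphaSideData struct is made up for illustration.

#include <cstddef>
#include <cstdint>

// Hypothetical helper mirroring the framing used above: an 8-byte big-endian
// id followed by the alpha-plane bitstream.
struct AlphaSideData {
  bool present = false;  // True only when id == 1 and a payload follows.
  const uint8_t* payload = nullptr;
  size_t payload_size = 0;
};

static AlphaSideData ParseAlphaSideData(const uint8_t* side_data,
                                        size_t side_data_size) {
  AlphaSideData result;
  if (!side_data || side_data_size < 8)
    return result;  // No usable side data: caller fills the A plane opaque.

  // First 8 bytes: |side_data_id| in big-endian (network) byte order.
  uint64_t side_data_id = 0;
  for (int i = 0; i < 8; ++i)
    side_data_id = (side_data_id << 8) | side_data[i];

  if (side_data_id != 1)
    return result;  // Unknown id: ignore and treat the frame as opaque.

  result.present = side_data_size > 8;
  result.payload = side_data + 8;
  result.payload_size = side_data_size - 8;
  return result;
}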
578 554
579 bool VpxVideoDecoder::CopyVpxImageToVideoFrame( 555 bool VpxVideoDecoder::CopyVpxImageToVideoFrame(
580 const struct vpx_image* vpx_image, 556 const struct vpx_image* vpx_image,
581 const struct vpx_image* vpx_image_alpha,
582 scoped_refptr<VideoFrame>* video_frame) { 557 scoped_refptr<VideoFrame>* video_frame) {
583 DCHECK(vpx_image); 558 DCHECK(vpx_image);
584 559
585 VideoPixelFormat codec_format; 560 VideoPixelFormat codec_format;
586 switch (vpx_image->fmt) { 561 switch (vpx_image->fmt) {
587 case VPX_IMG_FMT_I420: 562 case VPX_IMG_FMT_I420:
588 codec_format = vpx_image_alpha ? PIXEL_FORMAT_YV12A : PIXEL_FORMAT_YV12; 563 codec_format = vpx_codec_alpha_ ? PIXEL_FORMAT_YV12A : PIXEL_FORMAT_YV12;
589 break; 564 break;
590 565
591 case VPX_IMG_FMT_I444: 566 case VPX_IMG_FMT_I444:
592 codec_format = PIXEL_FORMAT_YV24; 567 codec_format = PIXEL_FORMAT_YV24;
593 break; 568 break;
594 569
595 default: 570 default:
596 DLOG(ERROR) << "Unsupported pixel format: " << vpx_image->fmt; 571 DLOG(ERROR) << "Unsupported pixel format: " << vpx_image->fmt;
597 return false; 572 return false;
598 } 573 }
599 574
600 // The mixed |w|/|d_h| in |coded_size| is intentional. Setting the correct 575 // The mixed |w|/|d_h| in |coded_size| is intentional. Setting the correct
601 // coded width is necessary to allow coalesced memory access, which may avoid 576 // coded width is necessary to allow coalesced memory access, which may avoid
602 // frame copies. Setting the correct coded height however does not have any 577 // frame copies. Setting the correct coded height however does not have any
603 // benefit, and only risks copying too much data. 578 // benefit, and only risks copying too much data.
604 const gfx::Size coded_size(vpx_image->w, vpx_image->d_h); 579 const gfx::Size coded_size(vpx_image->w, vpx_image->d_h);
605 const gfx::Size visible_size(vpx_image->d_w, vpx_image->d_h); 580 const gfx::Size visible_size(vpx_image->d_w, vpx_image->d_h);
606 581
607 if (memory_pool_.get()) { 582 if (memory_pool_.get()) {
608 DCHECK_EQ(kCodecVP9, config_.codec()); 583 DCHECK_EQ(kCodecVP9, config_.codec());
609 if (vpx_image_alpha) { 584 DCHECK(!vpx_codec_alpha_) << "Uh-oh, VP9 and Alpha shouldn't coexist.";
610 VpxVideoDecoder::MemoryPool::VP9FrameBuffer* frame_buffer = 585 *video_frame = VideoFrame::WrapExternalYuvData(
611 static_cast<VpxVideoDecoder::MemoryPool::VP9FrameBuffer*>( 586 codec_format,
612 vpx_image->fb_priv); 587 coded_size, gfx::Rect(visible_size), config_.natural_size(),
613 *video_frame = VideoFrame::WrapExternalYuvaData( 588 vpx_image->stride[VPX_PLANE_Y],
614 codec_format, coded_size, gfx::Rect(visible_size), 589 vpx_image->stride[VPX_PLANE_U],
615 config_.natural_size(), vpx_image->stride[VPX_PLANE_Y], 590 vpx_image->stride[VPX_PLANE_V],
616 vpx_image->stride[VPX_PLANE_U], vpx_image->stride[VPX_PLANE_V], 591 vpx_image->planes[VPX_PLANE_Y],
617 vpx_image_alpha->stride[VPX_PLANE_Y], vpx_image->planes[VPX_PLANE_Y], 592 vpx_image->planes[VPX_PLANE_U],
618 vpx_image->planes[VPX_PLANE_U], vpx_image->planes[VPX_PLANE_V], 593 vpx_image->planes[VPX_PLANE_V],
619 &frame_buffer->alpha_data[0], kNoTimestamp()); 594 kNoTimestamp());
620 } else {
621 *video_frame = VideoFrame::WrapExternalYuvData(
622 codec_format, coded_size, gfx::Rect(visible_size),
623 config_.natural_size(), vpx_image->stride[VPX_PLANE_Y],
624 vpx_image->stride[VPX_PLANE_U], vpx_image->stride[VPX_PLANE_V],
625 vpx_image->planes[VPX_PLANE_Y], vpx_image->planes[VPX_PLANE_U],
626 vpx_image->planes[VPX_PLANE_V], kNoTimestamp());
627 }
628 if (!(*video_frame)) 595 if (!(*video_frame))
629 return false; 596 return false;
630 597
631 video_frame->get()->AddDestructionObserver( 598 video_frame->get()->AddDestructionObserver(
632 memory_pool_->CreateFrameCallback(vpx_image->fb_priv)); 599 memory_pool_->CreateFrameCallback(vpx_image->fb_priv));
633 600
634 UMA_HISTOGRAM_COUNTS("Media.Vpx.VideoDecoderBuffersInUseByDecoder", 601 UMA_HISTOGRAM_COUNTS("Media.Vpx.VideoDecoderBuffersInUseByDecoder",
635 memory_pool_->NumberOfFrameBuffersInUseByDecoder()); 602 memory_pool_->NumberOfFrameBuffersInUseByDecoder());
636 UMA_HISTOGRAM_COUNTS( 603 UMA_HISTOGRAM_COUNTS(
637 "Media.Vpx.VideoDecoderBuffersInUseByDecoderAndVideoFrame", 604 "Media.Vpx.VideoDecoderBuffersInUseByDecoderAndVideoFrame",
(...skipping 20 matching lines...)
658 (*video_frame)->visible_data(VideoFrame::kUPlane), 625 (*video_frame)->visible_data(VideoFrame::kUPlane),
659 (*video_frame)->stride(VideoFrame::kUPlane), 626 (*video_frame)->stride(VideoFrame::kUPlane),
660 (*video_frame)->visible_data(VideoFrame::kVPlane), 627 (*video_frame)->visible_data(VideoFrame::kVPlane),
661 (*video_frame)->stride(VideoFrame::kVPlane), coded_size.width(), 628 (*video_frame)->stride(VideoFrame::kVPlane), coded_size.width(),
662 coded_size.height()); 629 coded_size.height());
663 630
664 return true; 631 return true;
665 } 632 }
666 633
667 } // namespace media 634 } // namespace media
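Both the color and alpha decodes above follow the same libvpx pattern: the frame's timestamp address is passed as |user_priv| to vpx_codec_decode(), and the decoder is expected to echo it back on the vpx_image_t obtained from vpx_codec_get_frame(), which is what the "Invalid output timestamp" checks verify. A compressed sketch of that round-trip, with error logging trimmed and no claim to being the full VpxDecode() implementation:

#include <cstddef>
#include <cstdint>

#include "vpx/vpx_decoder.h"

// Sketch: decode one compressed frame and fetch the decoded image, verifying
// that |user_priv| round-tripped so the caller can trust the timestamp.
static const vpx_image_t* DecodeOneFrame(vpx_codec_ctx_t* context,
                                         const uint8_t* data, size_t size,
                                         int64_t* timestamp) {
  void* user_priv = reinterpret_cast<void*>(timestamp);
  if (vpx_codec_decode(context, data, static_cast<unsigned int>(size),
                       user_priv, 0 /* deadline */) != VPX_CODEC_OK) {
    return nullptr;  // Bitstream error.
  }

  vpx_codec_iter_t iter = nullptr;
  const vpx_image_t* image = vpx_codec_get_frame(context, &iter);
  if (!image)
    return nullptr;  // Nothing to show for this input (e.g. invisible frame).

  // libvpx echoes back the |user_priv| supplied with the matching input.
  if (image->user_priv != user_priv)
    return nullptr;  // A mismatch would mean the timestamp cannot be trusted.
  return image;
}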
