Index: media/filters/ffmpeg_video_decoder.cc
diff --git a/media/filters/ffmpeg_video_decoder.cc b/media/filters/ffmpeg_video_decoder.cc
index 92028dce636ab97364e71c1d09651689e3b5dd99..c154360e82bb5642ba2bbe3e65b108a9c5632c42 100644
--- a/media/filters/ffmpeg_video_decoder.cc
+++ b/media/filters/ffmpeg_video_decoder.cc
@@ -25,6 +25,7 @@
 #include "media/base/video_util.h"
 #include "media/ffmpeg/ffmpeg_common.h"
 #include "media/filters/ffmpeg_glue.h"
+#include "media/video/hybrid_video_frame_pool.h"
 
 namespace media {
 
@@ -64,8 +65,8 @@ static int GetVideoBufferImpl(struct AVCodecContext* s,
 }
 
 static void ReleaseVideoBufferImpl(void* opaque, uint8_t* data) {
-  scoped_refptr<VideoFrame> video_frame;
-  video_frame.swap(reinterpret_cast<VideoFrame**>(&opaque));
+  std::unique_ptr<VideoFrameFuture> video_frame_future(
+      static_cast<VideoFrameFuture*>(opaque));
 }
 
 // static
@@ -75,8 +76,19 @@ bool FFmpegVideoDecoder::IsCodecSupported(VideoCodec codec) {
 }
 
 FFmpegVideoDecoder::FFmpegVideoDecoder()
+    : FFmpegVideoDecoder(std::unique_ptr<GpuMemoryBufferVideoFramePool>()) {}
+
+FFmpegVideoDecoder::FFmpegVideoDecoder(
+    std::unique_ptr<GpuMemoryBufferVideoFramePool> gpu_video_frame_pool)
     : state_(kUninitialized), decode_nalus_(false) {
   thread_checker_.DetachFromThread();
+  // |gpu_video_frame_pool| may be null when the default constructor is used.
+  if (gpu_video_frame_pool) {
+    gpu_video_frame_pool->SetUsage(
+        GpuMemoryBufferVideoFramePool::Usage::FFMPEG);
+  }
+  hybrid_frame_pool_.reset(
+      new HybridVideoFramePool(std::move(gpu_video_frame_pool)));
 }
 
 int FFmpegVideoDecoder::GetVideoBuffer(struct AVCodecContext* codec_context,
@@ -129,21 +137,14 @@ int FFmpegVideoDecoder::GetVideoBuffer(struct AVCodecContext* codec_context,
 
   // FFmpeg expects the initial allocation to be zero-initialized.  Failure
   // to do so can lead to uninitialized value usage.  See http://crbug.com/390941
-  scoped_refptr<VideoFrame> video_frame = frame_pool_.CreateFrame(
-      format, coded_size, gfx::Rect(size), natural_size, kNoTimestamp());
-
-  // Prefer the color space from the codec context. If it's not specified (or is
-  // set to an unsupported value), fall back on the value from the config.
-  ColorSpace color_space = AVColorSpaceToColorSpace(codec_context->colorspace,
-                                                    codec_context->color_range);
-  if (color_space == COLOR_SPACE_UNSPECIFIED)
-    color_space = config_.color_space();
-  video_frame->metadata()->SetInteger(VideoFrameMetadata::COLOR_SPACE,
-                                      color_space);
-
-  for (size_t i = 0; i < VideoFrame::NumPlanes(video_frame->format()); i++) {
-    frame->data[i] = video_frame->data(i);
-    frame->linesize[i] = video_frame->stride(i);
+  std::unique_ptr<VideoFrameFuture> video_frame_future =
+      hybrid_frame_pool_->CreateFrame(format, coded_size, gfx::Rect(size),
+                                      natural_size, kNoTimestamp());
+  DCHECK(video_frame_future);
+
+  for (size_t i = 0; i < VideoFrame::NumPlanes(format); i++) {
+    frame->data[i] = video_frame_future->data(i);
+    frame->linesize[i] = video_frame_future->stride(i);
   }
 
   frame->width = coded_size.width();
@@ -153,14 +154,9 @@ int FFmpegVideoDecoder::GetVideoBuffer(struct AVCodecContext* codec_context,
 
-  // Now create an AVBufferRef for the data just allocated. It will own the
-  // reference to the VideoFrame object.
+  // Now create an AVBufferRef which will own the reference to the
+  // VideoFrameFuture object.
-  void* opaque = NULL;
-  video_frame.swap(reinterpret_cast<VideoFrame**>(&opaque));
+  void* opaque = video_frame_future.release();
   frame->buf[0] =
-      av_buffer_create(frame->data[0],
-                       VideoFrame::AllocationSize(format, coded_size),
-                       ReleaseVideoBufferImpl,
-                       opaque,
-                       0);
+      av_buffer_create(nullptr, 0, ReleaseVideoBufferImpl, opaque, 0);
   return 0;
 }
 
@@ -333,8 +329,18 @@ bool FFmpegVideoDecoder::FFmpegDecode(
     return false;
   }
 
-  scoped_refptr<VideoFrame> frame =
-      reinterpret_cast<VideoFrame*>(av_buffer_get_opaque(av_frame_->buf[0]));
+  VideoFrameFuture* frame_future =
+      static_cast<VideoFrameFuture*>(av_buffer_get_opaque(av_frame_->buf[0]));
+  scoped_refptr<VideoFrame> frame = frame_future->Release();
+  DCHECK(frame);
+
+  // Prefer the color space from the codec context. If it's not specified (or is
+  // set to an unsupported value), fall back on the value from the config.
+  ColorSpace color_space = AVColorSpaceToColorSpace(
+      codec_context_->colorspace, codec_context_->color_range);
+  if (color_space == COLOR_SPACE_UNSPECIFIED)
+    color_space = config_.color_space();
+  frame->metadata()->SetInteger(VideoFrameMetadata::COLOR_SPACE, color_space);
   frame->set_timestamp(
       base::TimeDelta::FromMicroseconds(av_frame_->reordered_opaque));
   *has_produced_frame = true;
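Note on the types used above: VideoFrameFuture and HybridVideoFramePool come from the new media/video/hybrid_video_frame_pool.h header, which is not part of this excerpt. The sketch below is only a guess at the shape of that interface, reconstructed from the calls the decoder makes (data(), stride(), Release(), CreateFrame()); the include paths, parameter types, and anything else not literally visible in the hunks above are assumptions, not the actual header.

// Hypothetical sketch only -- not the real contents of
// media/video/hybrid_video_frame_pool.h.
#include <cstddef>
#include <cstdint>
#include <memory>

#include "base/memory/ref_counted.h"
#include "base/time/time.h"
#include "media/base/video_frame.h"
#include "ui/gfx/geometry/rect.h"
#include "ui/gfx/geometry/size.h"

namespace media {

class GpuMemoryBufferVideoFramePool;

// A frame whose backing memory is writable immediately (so GetVideoBuffer()
// can hand data()/stride() to FFmpeg), but whose final VideoFrame is only
// materialized later, when FFmpegDecode() calls Release().
class VideoFrameFuture {
 public:
  virtual ~VideoFrameFuture() {}

  // Plane accessors copied into AVFrame::data / AVFrame::linesize.
  virtual uint8_t* data(size_t plane) const = 0;
  virtual int32_t stride(size_t plane) const = 0;

  // Produces the concrete frame once the decoder has written into the planes.
  virtual scoped_refptr<VideoFrame> Release() = 0;
};

// Hands out VideoFrameFutures, presumably backed by the supplied GPU memory
// buffer pool when one is given and by a plain in-memory pool otherwise.
class HybridVideoFramePool {
 public:
  explicit HybridVideoFramePool(
      std::unique_ptr<GpuMemoryBufferVideoFramePool> gpu_frame_pool);
  ~HybridVideoFramePool();

  // Mirrors VideoFramePool::CreateFrame(); the exact parameter types are
  // assumed from the call site in GetVideoBuffer().
  std::unique_ptr<VideoFrameFuture> CreateFrame(VideoPixelFormat format,
                                                const gfx::Size& coded_size,
                                                const gfx::Rect& visible_rect,
                                                const gfx::Size& natural_size,
                                                base::TimeDelta timestamp);
};

}  // namespace media

Under this reading, FFmpegDecode() calls Release() while FFmpeg still holds the AVBufferRef created in GetVideoBuffer(), and ReleaseVideoBufferImpl() deletes the future itself only once FFmpeg unreferences that buffer.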