| Index: media/video/video_decode_accelerator.h
|
| diff --git a/media/video/video_decode_accelerator.h b/media/video/video_decode_accelerator.h
|
| index 45529730ece3652a80f54c3688d5bdd9fd8190a9..d07ea8ebcd553fbd379d862f46b5bfc05932c3cf 100644
|
| --- a/media/video/video_decode_accelerator.h
|
| +++ b/media/video/video_decode_accelerator.h
|
| @@ -17,6 +17,7 @@
|
| #include "media/base/video_decoder_config.h"
|
| #include "media/video/picture.h"
|
| #include "ui/gfx/geometry/size.h"
|
| +#include "ui/gfx/gpu_memory_buffer.h"
|
|
|
| typedef unsigned int GLenum;
|
|
|
| @@ -97,6 +98,17 @@ class MEDIA_EXPORT VideoDecodeAccelerator {
|
| struct MEDIA_EXPORT Config {
|
| enum { kNoSurfaceID = SurfaceManager::kNoSurfaceID };
|
|
|
| + // Specifies the allocation and handling mode for output PictureBuffers.
|
| + // When set to ALLOCATE, the VDA is expected to allocate backing memory
|
| + // for PictureBuffers at the time of the AssignPictureBuffers() call.
|
| + // When set to IMPORT, the VDA will not allocate, but after receiving
|
| + // the AssignPictureBuffers() call, it will expect a call to
|
| + // ImportBufferForPicture() for each PictureBuffer before use.
|
| + enum class OutputMode {
|
| + ALLOCATE,
|
| + IMPORT,
|
| + };
|
| +
|
| Config() = default;
|
| Config(VideoCodecProfile profile);
|
| Config(const VideoDecoderConfig& video_decoder_config);
|
| @@ -113,6 +125,8 @@ class MEDIA_EXPORT VideoDecodeAccelerator {
|
| // an output SurfaceView on Android. It's only valid when not equal to
|
| // |kNoSurfaceID|.
|
| int surface_id = kNoSurfaceID;
|
| +
|
| + OutputMode output_mode = OutputMode::ALLOCATE;
|
| };
|
|
|
| // Interface for collaborating with picture interface to provide memory for
|
| @@ -204,6 +218,19 @@ class MEDIA_EXPORT VideoDecodeAccelerator {
|
| virtual void AssignPictureBuffers(
|
| const std::vector<PictureBuffer>& buffers) = 0;
|
|
|
| + // Imports |gpu_memory_buffer_handles| as backing memory for picture buffer
|
| + // associated with |picture_buffer_id|. The n-th element in
|
| + // |gpu_memory_buffer_handles| should be a handle to a GpuMemoryBuffer backing
|
| + the n-th plane of the PictureBuffer. This can only be used if the VDA
|
| + // has been Initialize()d with config.output_mode = IMPORT, and should be
|
| + // preceded by a call to AssignPictureBuffers() to set up the number of
|
| + // PictureBuffers and their details.
|
| + // After this call, the VDA becomes the owner of the GpuMemoryBufferHandles,
|
| + and is responsible for closing them after use, including on import failure.
|
| + virtual void ImportBufferForPicture(
|
| + int32_t picture_buffer_id,
|
| + const std::vector<gfx::GpuMemoryBufferHandle>& gpu_memory_buffer_handles);
|
| +
|
| // Sends picture buffers to be reused by the decoder. This needs to be called
|
| // for each buffer that has been processed so that decoder may know onto which
|
| // picture buffers it can write the output to.
|
| @@ -268,6 +295,11 @@ class MEDIA_EXPORT VideoDecodeAccelerator {
|
| // TODO(dshwang): after moving to D3D11, remove this. crbug.com/438691
|
| virtual GLenum GetSurfaceInternalFormat() const;
|
|
|
| + // In IMPORT OutputMode, return the format that the VDA requires for imported
|
| + // picture buffers. In ALLOCATE mode, return the format that VDA is currently
|
| + // using or will be using for output picture buffers allocated by it.
|
| + virtual VideoPixelFormat GetOutputFormat() const;
|
| +
|
| protected:
|
| // Do not delete directly; use Destroy() or own it with a scoped_ptr, which
|
| // will Destroy() it properly by default.
|
|
|