Index: media/video/video_decode_accelerator.h
diff --git a/media/video/video_decode_accelerator.h b/media/video/video_decode_accelerator.h
index 27eb5905f0c9898bcaf1ae7bbc806f2b37c10228..84f14fdce42bfc12c5e9d4cc0caf866507b9ac99 100644
--- a/media/video/video_decode_accelerator.h
+++ b/media/video/video_decode_accelerator.h
@@ -167,11 +167,7 @@
   // Callback to tell client how many and what size of buffers to provide.
   // Note that the actual count provided through AssignPictureBuffers() can be
   // larger than the value requested.
-  // |format| indicates what format the decoded frames will be produced in
-  // by the VDA, or PIXEL_FORMAT_UNKNOWN if the underlying platform handles
-  // this transparently.
   virtual void ProvidePictureBuffers(uint32_t requested_num_of_buffers,
-                                     VideoPixelFormat format,
                                      uint32_t textures_per_buffer,
                                      const gfx::Size& dimensions,
                                      uint32_t texture_target) = 0;
@@ -321,6 +317,10 @@
   // TODO(dshwang): after moving to D3D11, remove this. crbug.com/438691
   virtual GLenum GetSurfaceInternalFormat() const;
 
+  // In IMPORT OutputMode, if supported by the VDA, return the format that it
+  // requires for imported picture buffers.
+  virtual VideoPixelFormat GetOutputFormat() const;
+
  protected:
   // Do not delete directly; use Destroy() or own it with a scoped_ptr, which
   // will Destroy() it properly by default.
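
For context, a minimal sketch of how the two signatures touched by this patch fit together: with |format| removed from ProvidePictureBuffers(), an IMPORT-mode client would ask the VDA for the buffer format via GetOutputFormat() instead of receiving it in the callback. FooVDA and FooClient are hypothetical names, the remaining pure-virtual members of both interfaces are elided (so this does not build as-is), and PIXEL_FORMAT_NV12 is only a placeholder return value; only the two signatures shown in the hunks above are taken from the header.

// Sketch only: FooVDA and FooClient are hypothetical, and the remaining
// pure-virtual members of VideoDecodeAccelerator and its Client interface
// are omitted for brevity.
#include "media/video/video_decode_accelerator.h"

// A hypothetical VDA that supports IMPORT OutputMode.
class FooVDA : public media::VideoDecodeAccelerator {
 public:
  media::VideoPixelFormat GetOutputFormat() const override {
    // Placeholder: report the format required for imported picture buffers.
    return media::PIXEL_FORMAT_NV12;
  }
  // ... other VideoDecodeAccelerator methods elided ...
};

// A hypothetical client adapting to the trimmed callback: |format| is no
// longer a parameter, so an import-mode client queries the VDA instead.
class FooClient : public media::VideoDecodeAccelerator::Client {
 public:
  explicit FooClient(media::VideoDecodeAccelerator* vda) : vda_(vda) {}

  void ProvidePictureBuffers(uint32_t requested_num_of_buffers,
                             uint32_t textures_per_buffer,
                             const gfx::Size& dimensions,
                             uint32_t texture_target) override {
    media::VideoPixelFormat format = vda_->GetOutputFormat();
    // ... allocate |requested_num_of_buffers| buffers of |dimensions| in
    // |format| and return them through AssignPictureBuffers() ...
  }
  // ... other Client methods elided ...

 private:
  media::VideoDecodeAccelerator* vda_;  // Not owned.
};

The apparent effect is that the format is now pulled from the VDA on demand rather than pushed through the buffer-request callback, leaving ProvidePictureBuffers() the same for all output modes.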