Index: media/video/video_decode_accelerator.h
diff --git a/media/video/video_decode_accelerator.h b/media/video/video_decode_accelerator.h
index a00c9514d59f42de11dfc4a7b510ebf3c0739574..88a52fc3ead1e2ffbf974bedd27a06a5a4640eeb 100644
--- a/media/video/video_decode_accelerator.h
+++ b/media/video/video_decode_accelerator.h
@@ -161,7 +161,10 @@ class MEDIA_EXPORT VideoDecodeAccelerator {
     // Callback to tell client how many and what size of buffers to provide.
     // Note that the actual count provided through AssignPictureBuffers() can be
     // larger than the value requested.
+    // If format is PIXEL_FORMAT_UNKNOWN, picture buffers can be treated as
+    // either XRGB or ARGB.
     virtual void ProvidePictureBuffers(uint32_t requested_num_of_buffers,
+                                       VideoPixelFormat format,
                                        uint32_t textures_per_buffer,
                                        const gfx::Size& dimensions,
                                        uint32_t texture_target) = 0;
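
For reference, below is a minimal sketch of how a client might implement the updated callback. MyVdaClient, its AllocatePictureBuffers() helper, and the choice of ARGB as the fallback are illustrative assumptions, not part of this change; only the new format parameter and the PIXEL_FORMAT_UNKNOWN note come from the patch.

    // Hypothetical client; the other pure-virtual Client methods are omitted
    // for brevity, so this sketch would still need them to compile.
    class MyVdaClient : public media::VideoDecodeAccelerator::Client {
     public:
      void ProvidePictureBuffers(uint32_t requested_num_of_buffers,
                                 media::VideoPixelFormat format,
                                 uint32_t textures_per_buffer,
                                 const gfx::Size& dimensions,
                                 uint32_t texture_target) override {
        // Per the new comment, PIXEL_FORMAT_UNKNOWN may be treated as either
        // XRGB or ARGB; this sketch arbitrarily picks ARGB.
        if (format == media::PIXEL_FORMAT_UNKNOWN)
          format = media::PIXEL_FORMAT_ARGB;
        // Hypothetical helper: create the requested number of buffers (or
        // more) of the given size and hand them back through
        // VideoDecodeAccelerator::AssignPictureBuffers().
        AllocatePictureBuffers(requested_num_of_buffers, format,
                               textures_per_buffer, dimensions,
                               texture_target);
      }

     private:
      void AllocatePictureBuffers(uint32_t count,
                                  media::VideoPixelFormat format,
                                  uint32_t textures_per_buffer,
                                  const gfx::Size& dimensions,
                                  uint32_t texture_target);
    };
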
@@ -321,10 +324,6 @@ class MEDIA_EXPORT VideoDecodeAccelerator {
   // TODO(dshwang): after moving to D3D11, remove this. crbug.com/438691
   virtual GLenum GetSurfaceInternalFormat() const;
 
-  // In IMPORT OutputMode, if supported by the VDA, return the format that it
-  // requires for imported picture buffers.
-  virtual VideoPixelFormat GetOutputFormat() const;
-
  protected:
   // Do not delete directly; use Destroy() or own it with a scoped_ptr, which
   // will Destroy() it properly by default.
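
Since GetOutputFormat() is removed in the same change that adds the format argument to ProvidePictureBuffers(), a user in IMPORT mode presumably picks the format up from that callback instead of querying the VDA. The lines below are a hedged sketch of that migration; vda and output_format_ are hypothetical names, not defined by this patch.

    // Before this patch (query removed above; "vda" would be a
    // media::VideoDecodeAccelerator*):
    //   media::VideoPixelFormat format = vda->GetOutputFormat();
    //
    // After this patch the client remembers the format it was handed, e.g.
    // inside MyVdaClient::ProvidePictureBuffers() from the sketch above:
    //   output_format_ = format;  // output_format_: hypothetical member.
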