OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/common/gpu/media/dxva_video_decode_accelerator_win.h" | 5 #include "media/gpu/dxva_video_decode_accelerator_win.h" |
6 | 6 |
7 #include <memory> | 7 #include <memory> |
8 | 8 |
9 #if !defined(OS_WIN) | 9 #if !defined(OS_WIN) |
10 #error This file should only be built on Windows. | 10 #error This file should only be built on Windows. |
11 #endif // !defined(OS_WIN) | 11 #endif // !defined(OS_WIN) |
12 | 12 |
13 #include <codecapi.h> | 13 #include <codecapi.h> |
14 #include <dxgi1_2.h> | 14 #include <dxgi1_2.h> |
15 #include <ks.h> | 15 #include <ks.h> |
16 #include <mfapi.h> | 16 #include <mfapi.h> |
17 #include <mferror.h> | 17 #include <mferror.h> |
18 #include <ntverp.h> | 18 #include <ntverp.h> |
19 #include <stddef.h> | 19 #include <stddef.h> |
20 #include <string.h> | 20 #include <string.h> |
21 #include <wmcodecdsp.h> | 21 #include <wmcodecdsp.h> |
(...skipping 21 matching lines...) |
43 #include "ui/gl/gl_fence.h" | 43 #include "ui/gl/gl_fence.h" |
44 #include "ui/gl/gl_surface_egl.h" | 44 #include "ui/gl/gl_surface_egl.h" |
45 | 45 |
46 namespace { | 46 namespace { |
47 | 47 |
48 // Path is appended onto the PROGRAM_FILES base path. | 48 // Path is appended onto the PROGRAM_FILES base path. |
49 const wchar_t kVPXDecoderDLLPath[] = L"Intel\\Media SDK\\"; | 49 const wchar_t kVPXDecoderDLLPath[] = L"Intel\\Media SDK\\"; |
50 | 50 |
51 const wchar_t kVP8DecoderDLLName[] = | 51 const wchar_t kVP8DecoderDLLName[] = |
52 #if defined(ARCH_CPU_X86) | 52 #if defined(ARCH_CPU_X86) |
53 L"mfx_mft_vp8vd_32.dll"; | 53 L"mfx_mft_vp8vd_32.dll"; |
54 #elif defined(ARCH_CPU_X86_64) | 54 #elif defined(ARCH_CPU_X86_64) |
55 L"mfx_mft_vp8vd_64.dll"; | 55 L"mfx_mft_vp8vd_64.dll"; |
56 #else | 56 #else |
57 #error Unsupported Windows CPU Architecture | 57 #error Unsupported Windows CPU Architecture |
58 #endif | 58 #endif |
59 | 59 |
60 const wchar_t kVP9DecoderDLLName[] = | 60 const wchar_t kVP9DecoderDLLName[] = |
61 #if defined(ARCH_CPU_X86) | 61 #if defined(ARCH_CPU_X86) |
62 L"mfx_mft_vp9vd_32.dll"; | 62 L"mfx_mft_vp9vd_32.dll"; |
63 #elif defined(ARCH_CPU_X86_64) | 63 #elif defined(ARCH_CPU_X86_64) |
64 L"mfx_mft_vp9vd_64.dll"; | 64 L"mfx_mft_vp9vd_64.dll"; |
65 #else | 65 #else |
66 #error Unsupported Windows CPU Architecture | 66 #error Unsupported Windows CPU Architecture |
67 #endif | 67 #endif |
68 | 68 |
69 const CLSID CLSID_WebmMfVp8Dec = { | 69 const CLSID CLSID_WebmMfVp8Dec = { |
70 0x451e3cb7, | 70 0x451e3cb7, |
71 0x2622, | 71 0x2622, |
72 0x4ba5, | 72 0x4ba5, |
73 { 0x8e, 0x1d, 0x44, 0xb3, 0xc4, 0x1d, 0x09, 0x24 } | 73 {0x8e, 0x1d, 0x44, 0xb3, 0xc4, 0x1d, 0x09, 0x24}}; |
74 }; | |
75 | 74 |
76 const CLSID CLSID_WebmMfVp9Dec = { | 75 const CLSID CLSID_WebmMfVp9Dec = { |
77 0x07ab4bd2, | 76 0x07ab4bd2, |
78 0x1979, | 77 0x1979, |
79 0x4fcd, | 78 0x4fcd, |
80 { 0xa6, 0x97, 0xdf, 0x9a, 0xd1, 0x5b, 0x34, 0xfe } | 79 {0xa6, 0x97, 0xdf, 0x9a, 0xd1, 0x5b, 0x34, 0xfe}}; |
81 }; | |
82 | 80 |
83 const CLSID MEDIASUBTYPE_VP80 = { | 81 const CLSID MEDIASUBTYPE_VP80 = { |
84 0x30385056, | 82 0x30385056, |
85 0x0000, | 83 0x0000, |
86 0x0010, | 84 0x0010, |
87 { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 } | 85 {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}}; |
88 }; | |
89 | 86 |
90 const CLSID MEDIASUBTYPE_VP90 = { | 87 const CLSID MEDIASUBTYPE_VP90 = { |
91 0x30395056, | 88 0x30395056, |
92 0x0000, | 89 0x0000, |
93 0x0010, | 90 0x0010, |
94 { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 } | 91 {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}}; |
95 }; | |
96 | 92 |
97 // The CLSID of the video processor media foundation transform which we use for | 93 // The CLSID of the video processor media foundation transform which we use for |
98 // texture color conversion in DX11. | 94 // texture color conversion in DX11. |
99 // Defined in mfidl.h in the Windows 10 SDK. ntverp.h provides VER_PRODUCTBUILD | 95 // Defined in mfidl.h in the Windows 10 SDK. ntverp.h provides VER_PRODUCTBUILD |
100 // to detect which SDK we are compiling with. | 96 // to detect which SDK we are compiling with. |
101 #if VER_PRODUCTBUILD < 10011 // VER_PRODUCTBUILD for 10.0.10158.0 SDK. | 97 #if VER_PRODUCTBUILD < 10011 // VER_PRODUCTBUILD for 10.0.10158.0 SDK. |
102 DEFINE_GUID(CLSID_VideoProcessorMFT, | 98 DEFINE_GUID(CLSID_VideoProcessorMFT, |
103 0x88753b26, 0x5b24, 0x49bd, 0xb2, 0xe7, 0xc, 0x44, 0x5c, 0x78, | 99 0x88753b26, |
104 0xc9, 0x82); | 100 0x5b24, |
| 101 0x49bd, |
| 102 0xb2, |
| 103 0xe7, |
| 104 0xc, |
| 105 0x44, |
| 106 0x5c, |
| 107 0x78, |
| 108 0xc9, |
| 109 0x82); |
105 #endif | 110 #endif |
106 | 111 |
107 // MF_XVP_PLAYBACK_MODE | 112 // MF_XVP_PLAYBACK_MODE |
108 // Data type: UINT32 (treat as BOOL) | 113 // Data type: UINT32 (treat as BOOL) |
109 // If this attribute is TRUE, the video processor will run in playback mode | 114 // If this attribute is TRUE, the video processor will run in playback mode |
110 // where it allows callers to allocate output samples and allows last frame | 115 // where it allows callers to allocate output samples and allows last frame |
111 // regeneration (repaint). | 116 // regeneration (repaint). |
112 DEFINE_GUID(MF_XVP_PLAYBACK_MODE, 0x3c5d293f, 0xad67, 0x4e29, 0xaf, 0x12, | 117 DEFINE_GUID(MF_XVP_PLAYBACK_MODE, |
113 0xcf, 0x3e, 0x23, 0x8a, 0xcc, 0xe9); | 118 0x3c5d293f, |
| 119 0xad67, |
| 120 0x4e29, |
| 121 0xaf, |
| 122 0x12, |
| 123 0xcf, |
| 124 0x3e, |
| 125 0x23, |
| 126 0x8a, |
| 127 0xcc, |
| 128 0xe9); |
114 | 129 |
115 // Defines the GUID for the Intel H264 DXVA device. | 130 // Defines the GUID for the Intel H264 DXVA device. |
116 static const GUID DXVA2_Intel_ModeH264_E = { | 131 static const GUID DXVA2_Intel_ModeH264_E = { |
117 0x604F8E68, 0x4951, 0x4c54,{ 0x88, 0xFE, 0xAB, 0xD2, 0x5C, 0x15, 0xB3, 0xD6} | 132 0x604F8E68, |
118 }; | 133 0x4951, |
| 134 0x4c54, |
| 135 {0x88, 0xFE, 0xAB, 0xD2, 0x5C, 0x15, 0xB3, 0xD6}}; |
119 | 136 |
120 // R600, R700, Evergreen and Cayman AMD cards. These support DXVA via UVD3 | 137 // R600, R700, Evergreen and Cayman AMD cards. These support DXVA via UVD3 |
121 // or earlier, and don't handle resolutions higher than 1920 x 1088 well. | 138 // or earlier, and don't handle resolutions higher than 1920 x 1088 well. |
122 static const DWORD g_AMDUVD3GPUList[] = { | 139 static const DWORD g_AMDUVD3GPUList[] = { |
123 0x9400, 0x9401, 0x9402, 0x9403, 0x9405, 0x940a, 0x940b, 0x940f, 0x94c0, | 140 0x9400, 0x9401, 0x9402, 0x9403, 0x9405, 0x940a, 0x940b, 0x940f, 0x94c0, |
124 0x94c1, 0x94c3, 0x94c4, 0x94c5, 0x94c6, 0x94c7, 0x94c8, 0x94c9, 0x94cb, | 141 0x94c1, 0x94c3, 0x94c4, 0x94c5, 0x94c6, 0x94c7, 0x94c8, 0x94c9, 0x94cb, |
125 0x94cc, 0x94cd, 0x9580, 0x9581, 0x9583, 0x9586, 0x9587, 0x9588, 0x9589, | 142 0x94cc, 0x94cd, 0x9580, 0x9581, 0x9583, 0x9586, 0x9587, 0x9588, 0x9589, |
126 0x958a, 0x958b, 0x958c, 0x958d, 0x958e, 0x958f, 0x9500, 0x9501, 0x9504, | 143 0x958a, 0x958b, 0x958c, 0x958d, 0x958e, 0x958f, 0x9500, 0x9501, 0x9504, |
127 0x9505, 0x9506, 0x9507, 0x9508, 0x9509, 0x950f, 0x9511, 0x9515, 0x9517, | 144 0x9505, 0x9506, 0x9507, 0x9508, 0x9509, 0x950f, 0x9511, 0x9515, 0x9517, |
128 0x9519, 0x95c0, 0x95c2, 0x95c4, 0x95c5, 0x95c6, 0x95c7, 0x95c9, 0x95cc, | 145 0x9519, 0x95c0, 0x95c2, 0x95c4, 0x95c5, 0x95c6, 0x95c7, 0x95c9, 0x95cc, |
129 0x95cd, 0x95ce, 0x95cf, 0x9590, 0x9591, 0x9593, 0x9595, 0x9596, 0x9597, | 146 0x95cd, 0x95ce, 0x95cf, 0x9590, 0x9591, 0x9593, 0x9595, 0x9596, 0x9597, |
130 0x9598, 0x9599, 0x959b, 0x9610, 0x9611, 0x9612, 0x9613, 0x9614, 0x9615, | 147 0x9598, 0x9599, 0x959b, 0x9610, 0x9611, 0x9612, 0x9613, 0x9614, 0x9615, |
131 0x9616, 0x9710, 0x9711, 0x9712, 0x9713, 0x9714, 0x9715, 0x9440, 0x9441, | 148 0x9616, 0x9710, 0x9711, 0x9712, 0x9713, 0x9714, 0x9715, 0x9440, 0x9441, |
132 0x9442, 0x9443, 0x9444, 0x9446, 0x944a, 0x944b, 0x944c, 0x944e, 0x9450, | 149 0x9442, 0x9443, 0x9444, 0x9446, 0x944a, 0x944b, 0x944c, 0x944e, 0x9450, |
133 0x9452, 0x9456, 0x945a, 0x945b, 0x945e, 0x9460, 0x9462, 0x946a, 0x946b, | 150 0x9452, 0x9456, 0x945a, 0x945b, 0x945e, 0x9460, 0x9462, 0x946a, 0x946b, |
134 0x947a, 0x947b, 0x9480, 0x9487, 0x9488, 0x9489, 0x948a, 0x948f, 0x9490, | 151 0x947a, 0x947b, 0x9480, 0x9487, 0x9488, 0x9489, 0x948a, 0x948f, 0x9490, |
135 0x9491, 0x9495, 0x9498, 0x949c, 0x949e, 0x949f, 0x9540, 0x9541, 0x9542, | 152 0x9491, 0x9495, 0x9498, 0x949c, 0x949e, 0x949f, 0x9540, 0x9541, 0x9542, |
136 0x954e, 0x954f, 0x9552, 0x9553, 0x9555, 0x9557, 0x955f, 0x94a0, 0x94a1, | 153 0x954e, 0x954f, 0x9552, 0x9553, 0x9555, 0x9557, 0x955f, 0x94a0, 0x94a1, |
137 0x94a3, 0x94b1, 0x94b3, 0x94b4, 0x94b5, 0x94b9, 0x68e0, 0x68e1, 0x68e4, | 154 0x94a3, 0x94b1, 0x94b3, 0x94b4, 0x94b5, 0x94b9, 0x68e0, 0x68e1, 0x68e4, |
138 0x68e5, 0x68e8, 0x68e9, 0x68f1, 0x68f2, 0x68f8, 0x68f9, 0x68fa, 0x68fe, | 155 0x68e5, 0x68e8, 0x68e9, 0x68f1, 0x68f2, 0x68f8, 0x68f9, 0x68fa, 0x68fe, |
139 0x68c0, 0x68c1, 0x68c7, 0x68c8, 0x68c9, 0x68d8, 0x68d9, 0x68da, 0x68de, | 156 0x68c0, 0x68c1, 0x68c7, 0x68c8, 0x68c9, 0x68d8, 0x68d9, 0x68da, 0x68de, |
140 0x68a0, 0x68a1, 0x68a8, 0x68a9, 0x68b0, 0x68b8, 0x68b9, 0x68ba, 0x68be, | 157 0x68a0, 0x68a1, 0x68a8, 0x68a9, 0x68b0, 0x68b8, 0x68b9, 0x68ba, 0x68be, |
141 0x68bf, 0x6880, 0x6888, 0x6889, 0x688a, 0x688c, 0x688d, 0x6898, 0x6899, | 158 0x68bf, 0x6880, 0x6888, 0x6889, 0x688a, 0x688c, 0x688d, 0x6898, 0x6899, |
142 0x689b, 0x689e, 0x689c, 0x689d, 0x9802, 0x9803, 0x9804, 0x9805, 0x9806, | 159 0x689b, 0x689e, 0x689c, 0x689d, 0x9802, 0x9803, 0x9804, 0x9805, 0x9806, |
143 0x9807, 0x9808, 0x9809, 0x980a, 0x9640, 0x9641, 0x9647, 0x9648, 0x964a, | 160 0x9807, 0x9808, 0x9809, 0x980a, 0x9640, 0x9641, 0x9647, 0x9648, 0x964a, |
144 0x964b, 0x964c, 0x964e, 0x964f, 0x9642, 0x9643, 0x9644, 0x9645, 0x9649, | 161 0x964b, 0x964c, 0x964e, 0x964f, 0x9642, 0x9643, 0x9644, 0x9645, 0x9649, |
145 0x6720, 0x6721, 0x6722, 0x6723, 0x6724, 0x6725, 0x6726, 0x6727, 0x6728, | 162 0x6720, 0x6721, 0x6722, 0x6723, 0x6724, 0x6725, 0x6726, 0x6727, 0x6728, |
146 0x6729, 0x6738, 0x6739, 0x673e, 0x6740, 0x6741, 0x6742, 0x6743, 0x6744, | 163 0x6729, 0x6738, 0x6739, 0x673e, 0x6740, 0x6741, 0x6742, 0x6743, 0x6744, |
147 0x6745, 0x6746, 0x6747, 0x6748, 0x6749, 0x674a, 0x6750, 0x6751, 0x6758, | 164 0x6745, 0x6746, 0x6747, 0x6748, 0x6749, 0x674a, 0x6750, 0x6751, 0x6758, |
148 0x6759, 0x675b, 0x675d, 0x675f, 0x6840, 0x6841, 0x6842, 0x6843, 0x6849, | 165 0x6759, 0x675b, 0x675d, 0x675f, 0x6840, 0x6841, 0x6842, 0x6843, 0x6849, |
149 0x6850, 0x6858, 0x6859, 0x6760, 0x6761, 0x6762, 0x6763, 0x6764, 0x6765, | 166 0x6850, 0x6858, 0x6859, 0x6760, 0x6761, 0x6762, 0x6763, 0x6764, 0x6765, |
150 0x6766, 0x6767, 0x6768, 0x6770, 0x6771, 0x6772, 0x6778, 0x6779, 0x677b, | 167 0x6766, 0x6767, 0x6768, 0x6770, 0x6771, 0x6772, 0x6778, 0x6779, 0x677b, |
151 0x6700, 0x6701, 0x6702, 0x6703, 0x6704, 0x6705, 0x6706, 0x6707, 0x6708, | 168 0x6700, 0x6701, 0x6702, 0x6703, 0x6704, 0x6705, 0x6706, 0x6707, 0x6708, |
152 0x6709, 0x6718, 0x6719, 0x671c, 0x671d, 0x671f, 0x683D, 0x9900, 0x9901, | 169 0x6709, 0x6718, 0x6719, 0x671c, 0x671d, 0x671f, 0x683D, 0x9900, 0x9901, |
153 0x9903, 0x9904, 0x9905, 0x9906, 0x9907, 0x9908, 0x9909, 0x990a, 0x990b, | 170 0x9903, 0x9904, 0x9905, 0x9906, 0x9907, 0x9908, 0x9909, 0x990a, 0x990b, |
154 0x990c, 0x990d, 0x990e, 0x990f, 0x9910, 0x9913, 0x9917, 0x9918, 0x9919, | 171 0x990c, 0x990d, 0x990e, 0x990f, 0x9910, 0x9913, 0x9917, 0x9918, 0x9919, |
155 0x9990, 0x9991, 0x9992, 0x9993, 0x9994, 0x9995, 0x9996, 0x9997, 0x9998, | 172 0x9990, 0x9991, 0x9992, 0x9993, 0x9994, 0x9995, 0x9996, 0x9997, 0x9998, |
156 0x9999, 0x999a, 0x999b, 0x999c, 0x999d, 0x99a0, 0x99a2, 0x99a4, | 173 0x9999, 0x999a, 0x999b, 0x999c, 0x999d, 0x99a0, 0x99a2, 0x99a4, |
157 }; | 174 }; |
158 | 175 |
159 // Legacy Intel GPUs (Second generation) which have trouble with resolutions | 176 // Legacy Intel GPUs (Second generation) which have trouble with resolutions |
160 // higher than 1920 x 1088. | 177 // higher than 1920 x 1088. |
161 static const DWORD g_IntelLegacyGPUList[] = { | 178 static const DWORD g_IntelLegacyGPUList[] = { |
162 0x102, 0x106, 0x116, 0x126, | 179 0x102, 0x106, 0x116, 0x126, |
163 }; | 180 }; |
164 | 181 |
165 // Provides scoped access to the underlying buffer in an IMFMediaBuffer | 182 // Provides scoped access to the underlying buffer in an IMFMediaBuffer |
166 // instance. | 183 // instance. |
167 class MediaBufferScopedPointer { | 184 class MediaBufferScopedPointer { |
168 public: | 185 public: |
169 MediaBufferScopedPointer(IMFMediaBuffer* media_buffer) | 186 MediaBufferScopedPointer(IMFMediaBuffer* media_buffer) |
170 : media_buffer_(media_buffer), | 187 : media_buffer_(media_buffer), |
171 buffer_(nullptr), | 188 buffer_(nullptr), |
172 max_length_(0), | 189 max_length_(0), |
173 current_length_(0) { | 190 current_length_(0) { |
174 HRESULT hr = media_buffer_->Lock(&buffer_, &max_length_, ¤t_length_); | 191 HRESULT hr = media_buffer_->Lock(&buffer_, &max_length_, ¤t_length_); |
175 CHECK(SUCCEEDED(hr)); | 192 CHECK(SUCCEEDED(hr)); |
176 } | 193 } |
177 | 194 |
178 ~MediaBufferScopedPointer() { | 195 ~MediaBufferScopedPointer() { |
179 HRESULT hr = media_buffer_->Unlock(); | 196 HRESULT hr = media_buffer_->Unlock(); |
180 CHECK(SUCCEEDED(hr)); | 197 CHECK(SUCCEEDED(hr)); |
181 } | 198 } |
182 | 199 |
183 uint8_t* get() { | 200 uint8_t* get() { return buffer_; } |
184 return buffer_; | |
185 } | |
186 | 201 |
187 DWORD current_length() const { | 202 DWORD current_length() const { return current_length_; } |
188 return current_length_; | |
189 } | |
190 | 203 |
191 private: | 204 private: |
192 base::win::ScopedComPtr<IMFMediaBuffer> media_buffer_; | 205 base::win::ScopedComPtr<IMFMediaBuffer> media_buffer_; |
193 uint8_t* buffer_; | 206 uint8_t* buffer_; |
194 DWORD max_length_; | 207 DWORD max_length_; |
195 DWORD current_length_; | 208 DWORD current_length_; |
196 | 209 |
197 DISALLOW_COPY_AND_ASSIGN(MediaBufferScopedPointer); | 210 DISALLOW_COPY_AND_ASSIGN(MediaBufferScopedPointer); |
198 }; | 211 }; |
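Reviewer note (not part of the CL): the class above is the usual RAII pairing of IMFMediaBuffer::Lock()/Unlock(). A minimal usage sketch, assuming |sample| names an IMFSample the decoder has already filled in (the variable is illustrative, not taken from this file):

base::win::ScopedComPtr<IMFMediaBuffer> media_buffer;
HRESULT hr = sample->GetBufferByIndex(0, media_buffer.Receive());
if (SUCCEEDED(hr)) {
  // Lock() happens in the constructor; the pointer stays valid until the
  // destructor calls Unlock().
  MediaBufferScopedPointer scoped_buffer(media_buffer.get());
  const uint8_t* data = scoped_buffer.get();
  const DWORD size = scoped_buffer.current_length();
  // Read |size| bytes starting at |data| while |scoped_buffer| is in scope.
}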
199 | 212 |
200 } // namespace | 213 } // namespace |
201 | 214 |
202 namespace content { | 215 namespace media { |
203 | 216 |
204 static const media::VideoCodecProfile kSupportedProfiles[] = { | 217 static const media::VideoCodecProfile kSupportedProfiles[] = { |
205 media::H264PROFILE_BASELINE, | 218 media::H264PROFILE_BASELINE, media::H264PROFILE_MAIN, |
206 media::H264PROFILE_MAIN, | 219 media::H264PROFILE_HIGH, media::VP8PROFILE_ANY, |
207 media::H264PROFILE_HIGH, | 220 media::VP9PROFILE_PROFILE0, media::VP9PROFILE_PROFILE1, |
208 media::VP8PROFILE_ANY, | 221 media::VP9PROFILE_PROFILE2, media::VP9PROFILE_PROFILE3}; |
209 media::VP9PROFILE_PROFILE0, | |
210 media::VP9PROFILE_PROFILE1, | |
211 media::VP9PROFILE_PROFILE2, | |
212 media::VP9PROFILE_PROFILE3 | |
213 }; | |
214 | 222 |
215 CreateDXGIDeviceManager DXVAVideoDecodeAccelerator::create_dxgi_device_manager_ | 223 CreateDXGIDeviceManager |
216 = NULL; | 224 DXVAVideoDecodeAccelerator::create_dxgi_device_manager_ = NULL; |
217 | 225 |
218 #define RETURN_ON_FAILURE(result, log, ret) \ | 226 #define RETURN_ON_FAILURE(result, log, ret) \ |
219 do { \ | 227 do { \ |
220 if (!(result)) { \ | 228 if (!(result)) { \ |
221 DLOG(ERROR) << log; \ | 229 DLOG(ERROR) << log; \ |
222 return ret; \ | 230 return ret; \ |
223 } \ | 231 } \ |
224 } while (0) | 232 } while (0) |
225 | 233 |
226 #define RETURN_ON_HR_FAILURE(result, log, ret) \ | 234 #define RETURN_ON_HR_FAILURE(result, log, ret) \ |
227 RETURN_ON_FAILURE(SUCCEEDED(result), \ | 235 RETURN_ON_FAILURE(SUCCEEDED(result), \ |
228 log << ", HRESULT: 0x" << std::hex << result, \ | 236 log << ", HRESULT: 0x" << std::hex << result, ret); |
229 ret); | |
230 | 237 |
231 #define RETURN_AND_NOTIFY_ON_FAILURE(result, log, error_code, ret) \ | 238 #define RETURN_AND_NOTIFY_ON_FAILURE(result, log, error_code, ret) \ |
232 do { \ | 239 do { \ |
233 if (!(result)) { \ | 240 if (!(result)) { \ |
234 DVLOG(1) << log; \ | 241 DVLOG(1) << log; \ |
235 StopOnError(error_code); \ | 242 StopOnError(error_code); \ |
236 return ret; \ | 243 return ret; \ |
237 } \ | 244 } \ |
238 } while (0) | 245 } while (0) |
239 | 246 |
240 #define RETURN_AND_NOTIFY_ON_HR_FAILURE(result, log, error_code, ret) \ | 247 #define RETURN_AND_NOTIFY_ON_HR_FAILURE(result, log, error_code, ret) \ |
241 RETURN_AND_NOTIFY_ON_FAILURE(SUCCEEDED(result), \ | 248 RETURN_AND_NOTIFY_ON_FAILURE(SUCCEEDED(result), \ |
242 log << ", HRESULT: 0x" << std::hex << result, \ | 249 log << ", HRESULT: 0x" << std::hex << result, \ |
243 error_code, ret); | 250 error_code, ret); |
244 | 251 |
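Reviewer note (not part of the CL): at several call sites later in this file the last macro argument is left empty, e.g. "PLATFORM_FAILURE, );" in Decode(). That empty |ret| makes "return ret;" expand to a bare "return;", which is what lets these macros be used from void methods. A hypothetical member, sketched only to show the expansion:

void DXVAVideoDecodeAccelerator::IllustrateEmptyRet() {  // hypothetical
  HRESULT hr = E_FAIL;
  // StopOnError(PLATFORM_FAILURE) runs first, then the empty |ret| yields a
  // plain "return;".
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Illustrative failure", PLATFORM_FAILURE, );
}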
245 enum { | 252 enum { |
246 // Maximum number of iterations we allow before aborting the attempt to flush | 253 // Maximum number of iterations we allow before aborting the attempt to flush |
247 // the batched queries to the driver and allowing torn/corrupt frames to be | 254 // the batched queries to the driver and allowing torn/corrupt frames to be |
248 // rendered. | 255 // rendered. |
249 kFlushDecoderSurfaceTimeoutMs = 1, | 256 kFlushDecoderSurfaceTimeoutMs = 1, |
250 // Maximum iterations where we try to flush the d3d device. | 257 // Maximum iterations where we try to flush the d3d device. |
251 kMaxIterationsForD3DFlush = 4, | 258 kMaxIterationsForD3DFlush = 4, |
(...skipping 25 matching lines...) |
277 base::win::ScopedComPtr<IMFSample> sample; | 284 base::win::ScopedComPtr<IMFSample> sample; |
278 sample.Attach(CreateEmptySample()); | 285 sample.Attach(CreateEmptySample()); |
279 | 286 |
280 base::win::ScopedComPtr<IMFMediaBuffer> buffer; | 287 base::win::ScopedComPtr<IMFMediaBuffer> buffer; |
281 HRESULT hr = E_FAIL; | 288 HRESULT hr = E_FAIL; |
282 if (align == 0) { | 289 if (align == 0) { |
283 // Note that MFCreateMemoryBuffer is the same as MFCreateAlignedMemoryBuffer | 290 // Note that MFCreateMemoryBuffer is the same as MFCreateAlignedMemoryBuffer |
284 // with the align argument being 0. | 291 // with the align argument being 0. |
285 hr = MFCreateMemoryBuffer(buffer_length, buffer.Receive()); | 292 hr = MFCreateMemoryBuffer(buffer_length, buffer.Receive()); |
286 } else { | 293 } else { |
287 hr = MFCreateAlignedMemoryBuffer(buffer_length, | 294 hr = |
288 align - 1, | 295 MFCreateAlignedMemoryBuffer(buffer_length, align - 1, buffer.Receive()); |
289 buffer.Receive()); | |
290 } | 296 } |
291 RETURN_ON_HR_FAILURE(hr, "Failed to create memory buffer for sample", NULL); | 297 RETURN_ON_HR_FAILURE(hr, "Failed to create memory buffer for sample", NULL); |
292 | 298 |
293 hr = sample->AddBuffer(buffer.get()); | 299 hr = sample->AddBuffer(buffer.get()); |
294 RETURN_ON_HR_FAILURE(hr, "Failed to add buffer to sample", NULL); | 300 RETURN_ON_HR_FAILURE(hr, "Failed to add buffer to sample", NULL); |
295 | 301 |
296 buffer->SetCurrentLength(0); | 302 buffer->SetCurrentLength(0); |
297 return sample.Detach(); | 303 return sample.Detach(); |
298 } | 304 } |
299 | 305 |
300 // Creates a Media Foundation sample with one buffer containing a copy of the | 306 // Creates a Media Foundation sample with one buffer containing a copy of the |
301 // given Annex B stream data. | 307 // given Annex B stream data. |
302 // If duration and sample time are not known, provide 0. | 308 // If duration and sample time are not known, provide 0. |
303 // |min_size| specifies the minimum size of the buffer (might be required by | 309 // |min_size| specifies the minimum size of the buffer (might be required by |
304 // the decoder for input). If no alignment is required, provide 0. | 310 // the decoder for input). If no alignment is required, provide 0. |
305 static IMFSample* CreateInputSample(const uint8_t* stream, | 311 static IMFSample* CreateInputSample(const uint8_t* stream, |
306 uint32_t size, | 312 uint32_t size, |
307 uint32_t min_size, | 313 uint32_t min_size, |
308 int alignment) { | 314 int alignment) { |
309 CHECK(stream); | 315 CHECK(stream); |
310 CHECK_GT(size, 0U); | 316 CHECK_GT(size, 0U); |
311 base::win::ScopedComPtr<IMFSample> sample; | 317 base::win::ScopedComPtr<IMFSample> sample; |
312 sample.Attach(CreateEmptySampleWithBuffer(std::max(min_size, size), | 318 sample.Attach( |
313 alignment)); | 319 CreateEmptySampleWithBuffer(std::max(min_size, size), alignment)); |
314 RETURN_ON_FAILURE(sample.get(), "Failed to create empty sample", NULL); | 320 RETURN_ON_FAILURE(sample.get(), "Failed to create empty sample", NULL); |
315 | 321 |
316 base::win::ScopedComPtr<IMFMediaBuffer> buffer; | 322 base::win::ScopedComPtr<IMFMediaBuffer> buffer; |
317 HRESULT hr = sample->GetBufferByIndex(0, buffer.Receive()); | 323 HRESULT hr = sample->GetBufferByIndex(0, buffer.Receive()); |
318 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from sample", NULL); | 324 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from sample", NULL); |
319 | 325 |
320 DWORD max_length = 0; | 326 DWORD max_length = 0; |
321 DWORD current_length = 0; | 327 DWORD current_length = 0; |
322 uint8_t* destination = NULL; | 328 uint8_t* destination = NULL; |
323 hr = buffer->Lock(&destination, &max_length, ¤t_length); | 329 hr = buffer->Lock(&destination, &max_length, ¤t_length); |
324 RETURN_ON_HR_FAILURE(hr, "Failed to lock buffer", NULL); | 330 RETURN_ON_HR_FAILURE(hr, "Failed to lock buffer", NULL); |
325 | 331 |
326 CHECK_EQ(current_length, 0u); | 332 CHECK_EQ(current_length, 0u); |
327 CHECK_GE(max_length, size); | 333 CHECK_GE(max_length, size); |
328 memcpy(destination, stream, size); | 334 memcpy(destination, stream, size); |
329 | 335 |
330 hr = buffer->SetCurrentLength(size); | 336 hr = buffer->SetCurrentLength(size); |
331 RETURN_ON_HR_FAILURE(hr, "Failed to set buffer length", NULL); | 337 RETURN_ON_HR_FAILURE(hr, "Failed to set buffer length", NULL); |
332 | 338 |
333 hr = buffer->Unlock(); | 339 hr = buffer->Unlock(); |
334 RETURN_ON_HR_FAILURE(hr, "Failed to unlock buffer", NULL); | 340 RETURN_ON_HR_FAILURE(hr, "Failed to unlock buffer", NULL); |
335 | 341 |
336 return sample.Detach(); | 342 return sample.Detach(); |
337 } | 343 } |
338 | 344 |
339 // Helper function to create a COM object instance from a DLL. The alternative | 345 // Helper function to create a COM object instance from a DLL. The alternative |
340 // is to use the CoCreateInstance API which requires the COM apartment to be | 346 // is to use the CoCreateInstance API which requires the COM apartment to be |
341 // initialized, which is not the case on the GPU main thread. We want to avoid | 347 // initialized, which is not the case on the GPU main thread. We want to avoid |
342 // initializing COM as it may have side effects. | 348 // initializing COM as it may have side effects. |
343 HRESULT CreateCOMObjectFromDll(HMODULE dll, const CLSID& clsid, const IID& iid, | 349 HRESULT CreateCOMObjectFromDll(HMODULE dll, |
| 350 const CLSID& clsid, |
| 351 const IID& iid, |
344 void** object) { | 352 void** object) { |
345 if (!dll || !object) | 353 if (!dll || !object) |
346 return E_INVALIDARG; | 354 return E_INVALIDARG; |
347 | 355 |
348 using GetClassObject = HRESULT (WINAPI*)( | 356 using GetClassObject = |
349 const CLSID& clsid, const IID& iid, void** object); | 357 HRESULT(WINAPI*)(const CLSID& clsid, const IID& iid, void** object); |
350 | 358 |
351 GetClassObject get_class_object = reinterpret_cast<GetClassObject>( | 359 GetClassObject get_class_object = reinterpret_cast<GetClassObject>( |
352 GetProcAddress(dll, "DllGetClassObject")); | 360 GetProcAddress(dll, "DllGetClassObject")); |
353 RETURN_ON_FAILURE( | 361 RETURN_ON_FAILURE(get_class_object, "Failed to get DllGetClassObject pointer", |
354 get_class_object, "Failed to get DllGetClassObject pointer", E_FAIL); | 362 E_FAIL); |
355 | 363 |
356 base::win::ScopedComPtr<IClassFactory> factory; | 364 base::win::ScopedComPtr<IClassFactory> factory; |
357 HRESULT hr = get_class_object( | 365 HRESULT hr = |
358 clsid, | 366 get_class_object(clsid, __uuidof(IClassFactory), factory.ReceiveVoid()); |
359 __uuidof(IClassFactory), | |
360 factory.ReceiveVoid()); | |
361 RETURN_ON_HR_FAILURE(hr, "DllGetClassObject failed", hr); | 367 RETURN_ON_HR_FAILURE(hr, "DllGetClassObject failed", hr); |
362 | 368 |
363 hr = factory->CreateInstance(NULL, iid, object); | 369 hr = factory->CreateInstance(NULL, iid, object); |
364 return hr; | 370 return hr; |
365 } | 371 } |
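Reviewer note (not part of the CL): a sketch of how this helper is typically invoked for the Intel VP9 MFT listed above. |vpx_dll_path| is a hypothetical LPCWSTR holding the already-resolved Media SDK DLL path, not a name from this file:

HMODULE dll =
    ::LoadLibraryEx(vpx_dll_path, NULL, LOAD_WITH_ALTERED_SEARCH_PATH);
base::win::ScopedComPtr<IMFTransform> vp9_decoder;
if (dll) {
  HRESULT hr = CreateCOMObjectFromDll(dll, CLSID_WebmMfVp9Dec,
                                      __uuidof(IMFTransform),
                                      vp9_decoder.ReceiveVoid());
  if (FAILED(hr))
    DLOG(ERROR) << "Failed to instantiate the VP9 decoder MFT";
}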
366 | 372 |
367 // Helper function to query the ANGLE device object. The template argument T | 373 // Helper function to query the ANGLE device object. The template argument T |
368 // identifies the device interface being queried. IDirect3DDevice9Ex for d3d9 | 374 // identifies the device interface being queried. IDirect3DDevice9Ex for d3d9 |
369 // and ID3D11Device for dx11. | 375 // and ID3D11Device for dx11. |
370 template<class T> | 376 template <class T> |
371 base::win::ScopedComPtr<T> QueryDeviceObjectFromANGLE(int object_type) { | 377 base::win::ScopedComPtr<T> QueryDeviceObjectFromANGLE(int object_type) { |
372 base::win::ScopedComPtr<T> device_object; | 378 base::win::ScopedComPtr<T> device_object; |
373 | 379 |
374 EGLDisplay egl_display = nullptr; | 380 EGLDisplay egl_display = nullptr; |
375 intptr_t egl_device = 0; | 381 intptr_t egl_device = 0; |
376 intptr_t device = 0; | 382 intptr_t device = 0; |
377 | 383 |
378 { | 384 { |
379 TRACE_EVENT0("gpu", "QueryDeviceObjectFromANGLE. GetHardwareDisplay"); | 385 TRACE_EVENT0("gpu", "QueryDeviceObjectFromANGLE. GetHardwareDisplay"); |
380 egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); | 386 egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); |
381 } | 387 } |
382 | 388 |
383 RETURN_ON_FAILURE( | 389 RETURN_ON_FAILURE(gfx::GLSurfaceEGL::HasEGLExtension("EGL_EXT_device_query"), |
384 gfx::GLSurfaceEGL::HasEGLExtension("EGL_EXT_device_query"), | 390 "EGL_EXT_device_query missing", device_object); |
385 "EGL_EXT_device_query missing", | |
386 device_object); | |
387 | 391 |
388 PFNEGLQUERYDISPLAYATTRIBEXTPROC QueryDisplayAttribEXT = nullptr; | 392 PFNEGLQUERYDISPLAYATTRIBEXTPROC QueryDisplayAttribEXT = nullptr; |
389 | 393 |
390 { | 394 { |
391 TRACE_EVENT0("gpu", "QueryDeviceObjectFromANGLE. eglGetProcAddress"); | 395 TRACE_EVENT0("gpu", "QueryDeviceObjectFromANGLE. eglGetProcAddress"); |
392 | 396 |
393 QueryDisplayAttribEXT = | 397 QueryDisplayAttribEXT = reinterpret_cast<PFNEGLQUERYDISPLAYATTRIBEXTPROC>( |
394 reinterpret_cast<PFNEGLQUERYDISPLAYATTRIBEXTPROC>(eglGetProcAddress( | 398 eglGetProcAddress("eglQueryDisplayAttribEXT")); |
395 "eglQueryDisplayAttribEXT")); | |
396 | 399 |
397 RETURN_ON_FAILURE( | 400 RETURN_ON_FAILURE( |
398 QueryDisplayAttribEXT, | 401 QueryDisplayAttribEXT, |
399 "Failed to get the eglQueryDisplayAttribEXT function from ANGLE", | 402 "Failed to get the eglQueryDisplayAttribEXT function from ANGLE", |
400 device_object); | 403 device_object); |
401 } | 404 } |
402 | 405 |
403 PFNEGLQUERYDEVICEATTRIBEXTPROC QueryDeviceAttribEXT = nullptr; | 406 PFNEGLQUERYDEVICEATTRIBEXTPROC QueryDeviceAttribEXT = nullptr; |
404 | 407 |
405 { | 408 { |
406 TRACE_EVENT0("gpu", "QueryDeviceObjectFromANGLE. eglGetProcAddress"); | 409 TRACE_EVENT0("gpu", "QueryDeviceObjectFromANGLE. eglGetProcAddress"); |
407 | 410 |
408 QueryDeviceAttribEXT = | 411 QueryDeviceAttribEXT = reinterpret_cast<PFNEGLQUERYDEVICEATTRIBEXTPROC>( |
409 reinterpret_cast<PFNEGLQUERYDEVICEATTRIBEXTPROC>(eglGetProcAddress( | 412 eglGetProcAddress("eglQueryDeviceAttribEXT")); |
410 "eglQueryDeviceAttribEXT")); | |
411 | 413 |
412 RETURN_ON_FAILURE( | 414 RETURN_ON_FAILURE( |
413 QueryDeviceAttribEXT, | 415 QueryDeviceAttribEXT, |
414 "Failed to get the eglQueryDeviceAttribEXT function from ANGLE", | 416 "Failed to get the eglQueryDeviceAttribEXT function from ANGLE", |
415 device_object); | 417 device_object); |
416 } | 418 } |
417 | 419 |
418 { | 420 { |
419 TRACE_EVENT0("gpu", "QueryDeviceObjectFromANGLE. QueryDisplayAttribEXT"); | 421 TRACE_EVENT0("gpu", "QueryDeviceObjectFromANGLE. QueryDisplayAttribEXT"); |
420 | 422 |
421 RETURN_ON_FAILURE( | 423 RETURN_ON_FAILURE( |
422 QueryDisplayAttribEXT(egl_display, EGL_DEVICE_EXT, &egl_device), | 424 QueryDisplayAttribEXT(egl_display, EGL_DEVICE_EXT, &egl_device), |
423 "The eglQueryDisplayAttribEXT function failed to get the EGL device", | 425 "The eglQueryDisplayAttribEXT function failed to get the EGL device", |
424 device_object); | 426 device_object); |
425 } | 427 } |
426 | 428 |
427 RETURN_ON_FAILURE( | 429 RETURN_ON_FAILURE(egl_device, "Failed to get the EGL device", device_object); |
428 egl_device, | |
429 "Failed to get the EGL device", | |
430 device_object); | |
431 | 430 |
432 { | 431 { |
433 TRACE_EVENT0("gpu", "QueryDeviceObjectFromANGLE. QueryDisplayAttribEXT"); | 432 TRACE_EVENT0("gpu", "QueryDeviceObjectFromANGLE. QueryDisplayAttribEXT"); |
434 | 433 |
435 RETURN_ON_FAILURE( | 434 RETURN_ON_FAILURE( |
436 QueryDeviceAttribEXT( | 435 QueryDeviceAttribEXT(reinterpret_cast<EGLDeviceEXT>(egl_device), |
437 reinterpret_cast<EGLDeviceEXT>(egl_device), object_type, &device), | 436 object_type, &device), |
438 "The eglQueryDeviceAttribEXT function failed to get the device", | 437 "The eglQueryDeviceAttribEXT function failed to get the device", |
439 device_object); | 438 device_object); |
440 | 439 |
441 RETURN_ON_FAILURE(device, "Failed to get the ANGLE device", device_object); | 440 RETURN_ON_FAILURE(device, "Failed to get the ANGLE device", device_object); |
442 } | 441 } |
443 | 442 |
444 device_object = reinterpret_cast<T*>(device); | 443 device_object = reinterpret_cast<T*>(device); |
445 return device_object; | 444 return device_object; |
446 } | 445 } |
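Reviewer note (not part of the CL): the two instantiations the decoder relies on, sketched together. EGL_D3D9_DEVICE_ANGLE is the attribute used by the D3D9 path further down; EGL_D3D11_DEVICE_ANGLE (from the same EGL_ANGLE_device_d3d extension) is the analogous attribute for the DX11 path:

base::win::ScopedComPtr<IDirect3DDevice9> d3d9_device =
    QueryDeviceObjectFromANGLE<IDirect3DDevice9>(EGL_D3D9_DEVICE_ANGLE);
base::win::ScopedComPtr<ID3D11Device> d3d11_device =
    QueryDeviceObjectFromANGLE<ID3D11Device>(EGL_D3D11_DEVICE_ANGLE);
// A null ScopedComPtr simply means ANGLE is not running on that backend.
if (!d3d9_device.get() && !d3d11_device.get())
  DLOG(ERROR) << "ANGLE did not expose a D3D device";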
447 | 446 |
448 H264ConfigChangeDetector::H264ConfigChangeDetector() | 447 H264ConfigChangeDetector::H264ConfigChangeDetector() |
449 : last_sps_id_(0), | 448 : last_sps_id_(0), |
450 last_pps_id_(0), | 449 last_pps_id_(0), |
451 config_changed_(false), | 450 config_changed_(false), |
452 pending_config_changed_(false) { | 451 pending_config_changed_(false) {} |
453 } | |
454 | 452 |
455 H264ConfigChangeDetector::~H264ConfigChangeDetector() { | 453 H264ConfigChangeDetector::~H264ConfigChangeDetector() {} |
456 } | |
457 | 454 |
458 bool H264ConfigChangeDetector::DetectConfig(const uint8_t* stream, | 455 bool H264ConfigChangeDetector::DetectConfig(const uint8_t* stream, |
459 unsigned int size) { | 456 unsigned int size) { |
460 std::vector<uint8_t> sps; | 457 std::vector<uint8_t> sps; |
461 std::vector<uint8_t> pps; | 458 std::vector<uint8_t> pps; |
462 media::H264NALU nalu; | 459 media::H264NALU nalu; |
463 bool idr_seen = false; | 460 bool idr_seen = false; |
464 | 461 |
465 if (!parser_.get()) | 462 if (!parser_.get()) |
466 parser_.reset(new media::H264Parser); | 463 parser_.reset(new media::H264Parser); |
(...skipping 98 matching lines...) |
565 ~DXVAPictureBuffer(); | 562 ~DXVAPictureBuffer(); |
566 | 563 |
567 bool InitializeTexture(const DXVAVideoDecodeAccelerator& decoder, | 564 bool InitializeTexture(const DXVAVideoDecodeAccelerator& decoder, |
568 bool use_rgb); | 565 bool use_rgb); |
569 | 566 |
570 bool ReusePictureBuffer(); | 567 bool ReusePictureBuffer(); |
571 void ResetReuseFence(); | 568 void ResetReuseFence(); |
572 // Copies the output sample data to the picture buffer provided by the | 569 // Copies the output sample data to the picture buffer provided by the |
573 // client. | 570 // client. |
574 // The dest_surface parameter contains the decoded bits. | 571 // The dest_surface parameter contains the decoded bits. |
575 bool CopyOutputSampleDataToPictureBuffer( | 572 bool CopyOutputSampleDataToPictureBuffer(DXVAVideoDecodeAccelerator* decoder, |
576 DXVAVideoDecodeAccelerator* decoder, | 573 IDirect3DSurface9* dest_surface, |
577 IDirect3DSurface9* dest_surface, | 574 ID3D11Texture2D* dx11_texture, |
578 ID3D11Texture2D* dx11_texture, | 575 int input_buffer_id); |
579 int input_buffer_id); | |
580 | 576 |
581 bool available() const { | 577 bool available() const { return available_; } |
582 return available_; | |
583 } | |
584 | 578 |
585 void set_available(bool available) { | 579 void set_available(bool available) { available_ = available; } |
586 available_ = available; | |
587 } | |
588 | 580 |
589 int id() const { | 581 int id() const { return picture_buffer_.id(); } |
590 return picture_buffer_.id(); | |
591 } | |
592 | 582 |
593 gfx::Size size() const { | 583 gfx::Size size() const { return picture_buffer_.size(); } |
594 return picture_buffer_.size(); | |
595 } | |
596 | 584 |
597 bool waiting_to_reuse() const { return waiting_to_reuse_; } | 585 bool waiting_to_reuse() const { return waiting_to_reuse_; } |
598 | 586 |
599 gfx::GLFence* reuse_fence() { return reuse_fence_.get(); } | 587 gfx::GLFence* reuse_fence() { return reuse_fence_.get(); } |
600 | 588 |
601 // Called when the source surface |src_surface| is copied to the destination | 589 // Called when the source surface |src_surface| is copied to the destination |
602 // |dest_surface| | 590 // |dest_surface| |
603 bool CopySurfaceComplete(IDirect3DSurface9* src_surface, | 591 bool CopySurfaceComplete(IDirect3DSurface9* src_surface, |
604 IDirect3DSurface9* dest_surface); | 592 IDirect3DSurface9* dest_surface); |
605 | 593 |
(...skipping 48 matching lines...) |
654 | 642 |
655 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); | 643 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); |
656 | 644 |
657 EGLint use_rgb = 1; | 645 EGLint use_rgb = 1; |
658 eglGetConfigAttrib(egl_display, egl_config, EGL_BIND_TO_TEXTURE_RGB, | 646 eglGetConfigAttrib(egl_display, egl_config, EGL_BIND_TO_TEXTURE_RGB, |
659 &use_rgb); | 647 &use_rgb); |
660 | 648 |
661 if (!picture_buffer->InitializeTexture(decoder, !!use_rgb)) | 649 if (!picture_buffer->InitializeTexture(decoder, !!use_rgb)) |
662 return linked_ptr<DXVAPictureBuffer>(nullptr); | 650 return linked_ptr<DXVAPictureBuffer>(nullptr); |
663 | 651 |
664 EGLint attrib_list[] = { | 652 EGLint attrib_list[] = {EGL_WIDTH, |
665 EGL_WIDTH, buffer.size().width(), | 653 buffer.size().width(), |
666 EGL_HEIGHT, buffer.size().height(), | 654 EGL_HEIGHT, |
667 EGL_TEXTURE_FORMAT, use_rgb ? EGL_TEXTURE_RGB : EGL_TEXTURE_RGBA, | 655 buffer.size().height(), |
668 EGL_TEXTURE_TARGET, EGL_TEXTURE_2D, | 656 EGL_TEXTURE_FORMAT, |
669 EGL_NONE | 657 use_rgb ? EGL_TEXTURE_RGB : EGL_TEXTURE_RGBA, |
670 }; | 658 EGL_TEXTURE_TARGET, |
| 659 EGL_TEXTURE_2D, |
| 660 EGL_NONE}; |
671 | 661 |
672 picture_buffer->decoding_surface_ = eglCreatePbufferFromClientBuffer( | 662 picture_buffer->decoding_surface_ = eglCreatePbufferFromClientBuffer( |
673 egl_display, EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE, | 663 egl_display, EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE, |
674 picture_buffer->texture_share_handle_, egl_config, attrib_list); | 664 picture_buffer->texture_share_handle_, egl_config, attrib_list); |
675 RETURN_ON_FAILURE(picture_buffer->decoding_surface_, | 665 RETURN_ON_FAILURE(picture_buffer->decoding_surface_, |
676 "Failed to create surface", | 666 "Failed to create surface", |
677 linked_ptr<DXVAPictureBuffer>(NULL)); | 667 linked_ptr<DXVAPictureBuffer>(NULL)); |
678 if (decoder.d3d11_device_ && decoder.use_keyed_mutex_) { | 668 if (decoder.d3d11_device_ && decoder.use_keyed_mutex_) { |
679 void* keyed_mutex = nullptr; | 669 void* keyed_mutex = nullptr; |
680 EGLBoolean ret = eglQuerySurfacePointerANGLE( | 670 EGLBoolean ret = eglQuerySurfacePointerANGLE( |
(...skipping 64 matching lines...) |
745 picture_buffer_(buffer), | 735 picture_buffer_(buffer), |
746 decoding_surface_(NULL), | 736 decoding_surface_(NULL), |
747 texture_share_handle_(nullptr), | 737 texture_share_handle_(nullptr), |
748 keyed_mutex_value_(0), | 738 keyed_mutex_value_(0), |
749 use_rgb_(true) {} | 739 use_rgb_(true) {} |
750 | 740 |
751 DXVAVideoDecodeAccelerator::DXVAPictureBuffer::~DXVAPictureBuffer() { | 741 DXVAVideoDecodeAccelerator::DXVAPictureBuffer::~DXVAPictureBuffer() { |
752 if (decoding_surface_) { | 742 if (decoding_surface_) { |
753 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); | 743 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); |
754 | 744 |
755 eglReleaseTexImage( | 745 eglReleaseTexImage(egl_display, decoding_surface_, EGL_BACK_BUFFER); |
756 egl_display, | |
757 decoding_surface_, | |
758 EGL_BACK_BUFFER); | |
759 | 746 |
760 eglDestroySurface( | 747 eglDestroySurface(egl_display, decoding_surface_); |
761 egl_display, | |
762 decoding_surface_); | |
763 decoding_surface_ = NULL; | 748 decoding_surface_ = NULL; |
764 } | 749 } |
765 } | 750 } |
766 | 751 |
767 bool DXVAVideoDecodeAccelerator::DXVAPictureBuffer::ReusePictureBuffer() { | 752 bool DXVAVideoDecodeAccelerator::DXVAPictureBuffer::ReusePictureBuffer() { |
768 DCHECK(decoding_surface_); | 753 DCHECK(decoding_surface_); |
769 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); | 754 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); |
770 eglReleaseTexImage( | 755 eglReleaseTexImage(egl_display, decoding_surface_, EGL_BACK_BUFFER); |
771 egl_display, | |
772 decoding_surface_, | |
773 EGL_BACK_BUFFER); | |
774 decoder_surface_.Release(); | 756 decoder_surface_.Release(); |
775 target_surface_.Release(); | 757 target_surface_.Release(); |
776 decoder_dx11_texture_.Release(); | 758 decoder_dx11_texture_.Release(); |
777 waiting_to_reuse_ = false; | 759 waiting_to_reuse_ = false; |
778 set_available(true); | 760 set_available(true); |
779 if (egl_keyed_mutex_) { | 761 if (egl_keyed_mutex_) { |
780 HRESULT hr = egl_keyed_mutex_->ReleaseSync(++keyed_mutex_value_); | 762 HRESULT hr = egl_keyed_mutex_->ReleaseSync(++keyed_mutex_value_); |
781 RETURN_ON_FAILURE(hr == S_OK, "Could not release sync mutex", false); | 763 RETURN_ON_FAILURE(hr == S_OK, "Could not release sync mutex", false); |
782 } | 764 } |
783 return true; | 765 return true; |
784 } | 766 } |
785 | 767 |
786 void DXVAVideoDecodeAccelerator::DXVAPictureBuffer::ResetReuseFence() { | 768 void DXVAVideoDecodeAccelerator::DXVAPictureBuffer::ResetReuseFence() { |
787 if (!reuse_fence_ || !reuse_fence_->ResetSupported()) | 769 if (!reuse_fence_ || !reuse_fence_->ResetSupported()) |
788 reuse_fence_.reset(gfx::GLFence::Create()); | 770 reuse_fence_.reset(gfx::GLFence::Create()); |
789 else | 771 else |
790 reuse_fence_->ResetState(); | 772 reuse_fence_->ResetState(); |
791 waiting_to_reuse_ = true; | 773 waiting_to_reuse_ = true; |
792 } | 774 } |
793 | 775 |
794 bool DXVAVideoDecodeAccelerator::DXVAPictureBuffer:: | 776 bool DXVAVideoDecodeAccelerator::DXVAPictureBuffer:: |
795 CopyOutputSampleDataToPictureBuffer( | 777 CopyOutputSampleDataToPictureBuffer(DXVAVideoDecodeAccelerator* decoder, |
796 DXVAVideoDecodeAccelerator* decoder, | 778 IDirect3DSurface9* dest_surface, |
797 IDirect3DSurface9* dest_surface, | 779 ID3D11Texture2D* dx11_texture, |
798 ID3D11Texture2D* dx11_texture, | 780 int input_buffer_id) { |
799 int input_buffer_id) { | |
800 DCHECK(dest_surface || dx11_texture); | 781 DCHECK(dest_surface || dx11_texture); |
801 if (dx11_texture) { | 782 if (dx11_texture) { |
802 // Grab a reference on the decoder texture. This reference will be released | 783 // Grab a reference on the decoder texture. This reference will be released |
803 // when we receive a notification that the copy was completed or when the | 784 // when we receive a notification that the copy was completed or when the |
804 // DXVAPictureBuffer instance is destroyed. | 785 // DXVAPictureBuffer instance is destroyed. |
805 decoder_dx11_texture_ = dx11_texture; | 786 decoder_dx11_texture_ = dx11_texture; |
806 decoder->CopyTexture(dx11_texture, dx11_decoding_texture_.get(), | 787 decoder->CopyTexture(dx11_texture, dx11_decoding_texture_.get(), |
807 dx11_keyed_mutex_, keyed_mutex_value_, NULL, id(), | 788 dx11_keyed_mutex_, keyed_mutex_value_, NULL, id(), |
808 input_buffer_id); | 789 input_buffer_id); |
809 return true; | 790 return true; |
(...skipping 57 matching lines...) |
867 decoder_dx11_texture_.Release(); | 848 decoder_dx11_texture_.Release(); |
868 } | 849 } |
869 if (egl_keyed_mutex_) { | 850 if (egl_keyed_mutex_) { |
870 keyed_mutex_value_++; | 851 keyed_mutex_value_++; |
871 HRESULT result = | 852 HRESULT result = |
872 egl_keyed_mutex_->AcquireSync(keyed_mutex_value_, kAcquireSyncWaitMs); | 853 egl_keyed_mutex_->AcquireSync(keyed_mutex_value_, kAcquireSyncWaitMs); |
873 RETURN_ON_FAILURE(result == S_OK, "Could not acquire sync mutex", false); | 854 RETURN_ON_FAILURE(result == S_OK, "Could not acquire sync mutex", false); |
874 } | 855 } |
875 | 856 |
876 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); | 857 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); |
877 eglBindTexImage( | 858 eglBindTexImage(egl_display, decoding_surface_, EGL_BACK_BUFFER); |
878 egl_display, | |
879 decoding_surface_, | |
880 EGL_BACK_BUFFER); | |
881 | 859 |
882 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); | 860 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); |
883 glBindTexture(GL_TEXTURE_2D, current_texture); | 861 glBindTexture(GL_TEXTURE_2D, current_texture); |
884 return true; | 862 return true; |
885 } | 863 } |
886 | 864 |
887 DXVAVideoDecodeAccelerator::PendingSampleInfo::PendingSampleInfo( | 865 DXVAVideoDecodeAccelerator::PendingSampleInfo::PendingSampleInfo( |
888 int32_t buffer_id, | 866 int32_t buffer_id, |
889 IMFSample* sample) | 867 IMFSample* sample) |
890 : input_buffer_id(buffer_id), picture_buffer_id(-1) { | 868 : input_buffer_id(buffer_id), picture_buffer_id(-1) { |
(...skipping 55 matching lines...) |
946 | 924 |
947 bool profile_supported = false; | 925 bool profile_supported = false; |
948 for (const auto& supported_profile : kSupportedProfiles) { | 926 for (const auto& supported_profile : kSupportedProfiles) { |
949 if (config.profile == supported_profile) { | 927 if (config.profile == supported_profile) { |
950 profile_supported = true; | 928 profile_supported = true; |
951 break; | 929 break; |
952 } | 930 } |
953 } | 931 } |
954 if (!profile_supported) { | 932 if (!profile_supported) { |
955 RETURN_AND_NOTIFY_ON_FAILURE(false, | 933 RETURN_AND_NOTIFY_ON_FAILURE(false, |
956 "Unsupported h.264, vp8, or vp9 profile", PLATFORM_FAILURE, false); | 934 "Unsupported h.264, vp8, or vp9 profile", |
| 935 PLATFORM_FAILURE, false); |
957 } | 936 } |
958 | 937 |
959 // Not all versions of Windows 7 and later include Media Foundation DLLs. | 938 // Not all versions of Windows 7 and later include Media Foundation DLLs. |
960 // Instead of crashing while delay loading the DLL when calling MFStartup() | 939 // Instead of crashing while delay loading the DLL when calling MFStartup() |
961 // below, probe whether we can successfully load the DLL now. | 940 // below, probe whether we can successfully load the DLL now. |
962 // See http://crbug.com/339678 for details. | 941 // See http://crbug.com/339678 for details. |
963 HMODULE dxgi_manager_dll = ::GetModuleHandle(L"MFPlat.dll"); | 942 HMODULE dxgi_manager_dll = ::GetModuleHandle(L"MFPlat.dll"); |
964 RETURN_ON_FAILURE(dxgi_manager_dll, "MFPlat.dll is required for decoding", | 943 RETURN_ON_FAILURE(dxgi_manager_dll, "MFPlat.dll is required for decoding", |
965 false); | 944 false); |
966 | 945 |
967 // On Windows 8+ mfplat.dll provides the MFCreateDXGIDeviceManager API. | 946 // On Windows 8+ mfplat.dll provides the MFCreateDXGIDeviceManager API. |
968 // On Windows 7 mshtmlmedia.dll provides it. | 947 // On Windows 7 mshtmlmedia.dll provides it. |
969 | 948 |
970 // TODO(ananta) | 949 // TODO(ananta) |
971 // The code below works, as in we can create the DX11 device manager for | 950 // The code below works, as in we can create the DX11 device manager for |
972 // Windows 7. However the IMFTransform we use for texture conversion and | 951 // Windows 7. However the IMFTransform we use for texture conversion and |
973 // copy does not exist on Windows 7. Look into an alternate approach | 952 // copy does not exist on Windows 7. Look into an alternate approach |
974 // and enable the code below. | 953 // and enable the code below. |
975 #if defined(ENABLE_DX11_FOR_WIN7) | 954 #if defined(ENABLE_DX11_FOR_WIN7) |
976 if (base::win::GetVersion() == base::win::VERSION_WIN7) { | 955 if (base::win::GetVersion() == base::win::VERSION_WIN7) { |
977 dxgi_manager_dll = ::GetModuleHandle(L"mshtmlmedia.dll"); | 956 dxgi_manager_dll = ::GetModuleHandle(L"mshtmlmedia.dll"); |
978 RETURN_ON_FAILURE(dxgi_manager_dll, | 957 RETURN_ON_FAILURE(dxgi_manager_dll, |
979 "mshtmlmedia.dll is required for decoding", false); | 958 "mshtmlmedia.dll is required for decoding", false); |
980 } | 959 } |
981 #endif | 960 #endif |
982 // If we don't find the MFCreateDXGIDeviceManager API, we fall back to D3D9 | 961 // If we don't find the MFCreateDXGIDeviceManager API, we fall back to D3D9 |
983 // decoding. | 962 // decoding. |
984 if (dxgi_manager_dll && !create_dxgi_device_manager_) { | 963 if (dxgi_manager_dll && !create_dxgi_device_manager_) { |
985 create_dxgi_device_manager_ = reinterpret_cast<CreateDXGIDeviceManager>( | 964 create_dxgi_device_manager_ = reinterpret_cast<CreateDXGIDeviceManager>( |
986 ::GetProcAddress(dxgi_manager_dll, "MFCreateDXGIDeviceManager")); | 965 ::GetProcAddress(dxgi_manager_dll, "MFCreateDXGIDeviceManager")); |
987 } | 966 } |
988 | 967 |
989 RETURN_AND_NOTIFY_ON_FAILURE( | 968 RETURN_AND_NOTIFY_ON_FAILURE( |
990 gfx::g_driver_egl.ext.b_EGL_ANGLE_surface_d3d_texture_2d_share_handle, | 969 gfx::g_driver_egl.ext.b_EGL_ANGLE_surface_d3d_texture_2d_share_handle, |
991 "EGL_ANGLE_surface_d3d_texture_2d_share_handle unavailable", | 970 "EGL_ANGLE_surface_d3d_texture_2d_share_handle unavailable", |
992 PLATFORM_FAILURE, | 971 PLATFORM_FAILURE, false); |
993 false); | |
994 | 972 |
995 RETURN_AND_NOTIFY_ON_FAILURE(gfx::GLFence::IsSupported(), | 973 RETURN_AND_NOTIFY_ON_FAILURE(gfx::GLFence::IsSupported(), |
996 "GL fences are unsupported", PLATFORM_FAILURE, | 974 "GL fences are unsupported", PLATFORM_FAILURE, |
997 false); | 975 false); |
998 | 976 |
999 State state = GetState(); | 977 State state = GetState(); |
1000 RETURN_AND_NOTIFY_ON_FAILURE((state == kUninitialized), | 978 RETURN_AND_NOTIFY_ON_FAILURE((state == kUninitialized), |
1001 "Initialize: invalid state: " << state, ILLEGAL_STATE, false); | 979 "Initialize: invalid state: " << state, |
| 980 ILLEGAL_STATE, false); |
1002 | 981 |
1003 media::InitializeMediaFoundation(); | 982 media::InitializeMediaFoundation(); |
1004 | 983 |
1005 RETURN_AND_NOTIFY_ON_FAILURE(InitDecoder(config.profile), | 984 RETURN_AND_NOTIFY_ON_FAILURE(InitDecoder(config.profile), |
1006 "Failed to initialize decoder", PLATFORM_FAILURE, false); | 985 "Failed to initialize decoder", PLATFORM_FAILURE, |
| 986 false); |
1007 | 987 |
1008 RETURN_AND_NOTIFY_ON_FAILURE(GetStreamsInfoAndBufferReqs(), | 988 RETURN_AND_NOTIFY_ON_FAILURE(GetStreamsInfoAndBufferReqs(), |
1009 "Failed to get input/output stream info.", PLATFORM_FAILURE, false); | 989 "Failed to get input/output stream info.", |
| 990 PLATFORM_FAILURE, false); |
1010 | 991 |
1011 RETURN_AND_NOTIFY_ON_FAILURE( | 992 RETURN_AND_NOTIFY_ON_FAILURE( |
1012 SendMFTMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0), | 993 SendMFTMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0), |
1013 "Send MFT_MESSAGE_NOTIFY_BEGIN_STREAMING notification failed", | 994 "Send MFT_MESSAGE_NOTIFY_BEGIN_STREAMING notification failed", |
1014 PLATFORM_FAILURE, false); | 995 PLATFORM_FAILURE, false); |
1015 | 996 |
1016 RETURN_AND_NOTIFY_ON_FAILURE( | 997 RETURN_AND_NOTIFY_ON_FAILURE( |
1017 SendMFTMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0), | 998 SendMFTMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0), |
1018 "Send MFT_MESSAGE_NOTIFY_START_OF_STREAM notification failed", | 999 "Send MFT_MESSAGE_NOTIFY_START_OF_STREAM notification failed", |
1019 PLATFORM_FAILURE, false); | 1000 PLATFORM_FAILURE, false); |
(...skipping 23 matching lines...) |
1043 RETURN_ON_HR_FAILURE(hr, | 1024 RETURN_ON_HR_FAILURE(hr, |
1044 "D3D9 driver does not support H/W format conversion", false); | 1025 "D3D9 driver does not support H/W format conversion", false); |
1045 | 1026 |
1046 base::win::ScopedComPtr<IDirect3DDevice9> angle_device = | 1027 base::win::ScopedComPtr<IDirect3DDevice9> angle_device = |
1047 QueryDeviceObjectFromANGLE<IDirect3DDevice9>(EGL_D3D9_DEVICE_ANGLE); | 1028 QueryDeviceObjectFromANGLE<IDirect3DDevice9>(EGL_D3D9_DEVICE_ANGLE); |
1048 if (angle_device.get()) | 1029 if (angle_device.get()) |
1049 using_angle_device_ = true; | 1030 using_angle_device_ = true; |
1050 | 1031 |
1051 if (using_angle_device_) { | 1032 if (using_angle_device_) { |
1052 hr = d3d9_device_ex_.QueryFrom(angle_device.get()); | 1033 hr = d3d9_device_ex_.QueryFrom(angle_device.get()); |
1053 RETURN_ON_HR_FAILURE(hr, | 1034 RETURN_ON_HR_FAILURE( |
1054 "QueryInterface for IDirect3DDevice9Ex from angle device failed", | 1035 hr, "QueryInterface for IDirect3DDevice9Ex from angle device failed", |
1055 false); | 1036 false); |
1056 } else { | 1037 } else { |
1057 D3DPRESENT_PARAMETERS present_params = {0}; | 1038 D3DPRESENT_PARAMETERS present_params = {0}; |
1058 present_params.BackBufferWidth = 1; | 1039 present_params.BackBufferWidth = 1; |
1059 present_params.BackBufferHeight = 1; | 1040 present_params.BackBufferHeight = 1; |
1060 present_params.BackBufferFormat = D3DFMT_UNKNOWN; | 1041 present_params.BackBufferFormat = D3DFMT_UNKNOWN; |
1061 present_params.BackBufferCount = 1; | 1042 present_params.BackBufferCount = 1; |
1062 present_params.SwapEffect = D3DSWAPEFFECT_DISCARD; | 1043 present_params.SwapEffect = D3DSWAPEFFECT_DISCARD; |
1063 present_params.hDeviceWindow = NULL; | 1044 present_params.hDeviceWindow = NULL; |
1064 present_params.Windowed = TRUE; | 1045 present_params.Windowed = TRUE; |
(...skipping 32 matching lines...) |
1097 | 1078 |
1098 bool DXVAVideoDecodeAccelerator::CreateDX11DevManager() { | 1079 bool DXVAVideoDecodeAccelerator::CreateDX11DevManager() { |
1099 HRESULT hr = create_dxgi_device_manager_(&dx11_dev_manager_reset_token_, | 1080 HRESULT hr = create_dxgi_device_manager_(&dx11_dev_manager_reset_token_, |
1100 d3d11_device_manager_.Receive()); | 1081 d3d11_device_manager_.Receive()); |
1101 RETURN_ON_HR_FAILURE(hr, "MFCreateDXGIDeviceManager failed", false); | 1082 RETURN_ON_HR_FAILURE(hr, "MFCreateDXGIDeviceManager failed", false); |
1102 | 1083 |
1103 // This array defines the set of DirectX hardware feature levels we support. | 1084 // This array defines the set of DirectX hardware feature levels we support. |
1104 // The ordering MUST be preserved. All applications are assumed to support | 1085 // The ordering MUST be preserved. All applications are assumed to support |
1105 // 9.1 unless otherwise stated by the application. | 1086 // 9.1 unless otherwise stated by the application. |
1106 D3D_FEATURE_LEVEL feature_levels[] = { | 1087 D3D_FEATURE_LEVEL feature_levels[] = { |
1107 D3D_FEATURE_LEVEL_11_1, | 1088 D3D_FEATURE_LEVEL_11_1, D3D_FEATURE_LEVEL_11_0, D3D_FEATURE_LEVEL_10_1, |
1108 D3D_FEATURE_LEVEL_11_0, | 1089 D3D_FEATURE_LEVEL_10_0, D3D_FEATURE_LEVEL_9_3, D3D_FEATURE_LEVEL_9_2, |
1109 D3D_FEATURE_LEVEL_10_1, | 1090 D3D_FEATURE_LEVEL_9_1}; |
1110 D3D_FEATURE_LEVEL_10_0, | |
1111 D3D_FEATURE_LEVEL_9_3, | |
1112 D3D_FEATURE_LEVEL_9_2, | |
1113 D3D_FEATURE_LEVEL_9_1 | |
1114 }; | |
1115 | 1091 |
1116 UINT flags = D3D11_CREATE_DEVICE_VIDEO_SUPPORT; | 1092 UINT flags = D3D11_CREATE_DEVICE_VIDEO_SUPPORT; |
1117 | 1093 |
1118 #if defined _DEBUG | 1094 #if defined _DEBUG |
1119 flags |= D3D11_CREATE_DEVICE_DEBUG; | 1095 flags |= D3D11_CREATE_DEVICE_DEBUG; |
1120 #endif | 1096 #endif |
1121 | 1097 |
1122 D3D_FEATURE_LEVEL feature_level_out = D3D_FEATURE_LEVEL_11_0; | 1098 D3D_FEATURE_LEVEL feature_level_out = D3D_FEATURE_LEVEL_11_0; |
1123 hr = D3D11CreateDevice(NULL, | 1099 hr = D3D11CreateDevice(NULL, D3D_DRIVER_TYPE_HARDWARE, NULL, flags, |
1124 D3D_DRIVER_TYPE_HARDWARE, | 1100 feature_levels, arraysize(feature_levels), |
1125 NULL, | 1101 D3D11_SDK_VERSION, d3d11_device_.Receive(), |
1126 flags, | 1102 &feature_level_out, d3d11_device_context_.Receive()); |
1127 feature_levels, | |
1128 arraysize(feature_levels), | |
1129 D3D11_SDK_VERSION, | |
1130 d3d11_device_.Receive(), | |
1131 &feature_level_out, | |
1132 d3d11_device_context_.Receive()); | |
1133 RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device", false); | 1103 RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device", false); |
1134 | 1104 |
1135 // Enable multithreaded mode on the device. This ensures that accesses to | 1105 // Enable multithreaded mode on the device. This ensures that accesses to |
1136 // the context are synchronized across threads. We have multiple threads | 1106 // the context are synchronized across threads. We have multiple threads |
1137 // accessing the context: the media foundation decoder threads and the | 1107 // accessing the context: the media foundation decoder threads and the |
1138 // decoder thread via the video format conversion transform. | 1108 // decoder thread via the video format conversion transform. |
1139 hr = multi_threaded_.QueryFrom(d3d11_device_.get()); | 1109 hr = multi_threaded_.QueryFrom(d3d11_device_.get()); |
1140 RETURN_ON_HR_FAILURE(hr, "Failed to query ID3D10Multithread", false); | 1110 RETURN_ON_HR_FAILURE(hr, "Failed to query ID3D10Multithread", false); |
1141 multi_threaded_->SetMultithreadProtected(TRUE); | 1111 multi_threaded_->SetMultithreadProtected(TRUE); |
1142 | 1112 |
1143 hr = d3d11_device_manager_->ResetDevice(d3d11_device_.get(), | 1113 hr = d3d11_device_manager_->ResetDevice(d3d11_device_.get(), |
1144 dx11_dev_manager_reset_token_); | 1114 dx11_dev_manager_reset_token_); |
1145 RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false); | 1115 RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false); |
1146 | 1116 |
1147 D3D11_QUERY_DESC query_desc; | 1117 D3D11_QUERY_DESC query_desc; |
1148 query_desc.Query = D3D11_QUERY_EVENT; | 1118 query_desc.Query = D3D11_QUERY_EVENT; |
1149 query_desc.MiscFlags = 0; | 1119 query_desc.MiscFlags = 0; |
1150 hr = d3d11_device_->CreateQuery( | 1120 hr = d3d11_device_->CreateQuery(&query_desc, d3d11_query_.Receive()); |
1151 &query_desc, | |
1152 d3d11_query_.Receive()); | |
1153 RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device query", false); | 1121 RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device query", false); |
1154 | 1122 |
1155 HMODULE video_processor_dll = ::GetModuleHandle(L"msvproc.dll"); | 1123 HMODULE video_processor_dll = ::GetModuleHandle(L"msvproc.dll"); |
1156 RETURN_ON_FAILURE(video_processor_dll, "Failed to load video processor", | 1124 RETURN_ON_FAILURE(video_processor_dll, "Failed to load video processor", |
1157 false); | 1125 false); |
1158 | 1126 |
1159 hr = CreateCOMObjectFromDll( | 1127 hr = CreateCOMObjectFromDll(video_processor_dll, CLSID_VideoProcessorMFT, |
1160 video_processor_dll, | 1128 __uuidof(IMFTransform), |
1161 CLSID_VideoProcessorMFT, | 1129 video_format_converter_mft_.ReceiveVoid()); |
1162 __uuidof(IMFTransform), | |
1163 video_format_converter_mft_.ReceiveVoid()); | |
1164 if (FAILED(hr)) { | 1130 if (FAILED(hr)) { |
1165 base::debug::Alias(&hr); | 1131 base::debug::Alias(&hr); |
1166 // TODO(ananta) | 1132 // TODO(ananta) |
1167 // Remove this CHECK when the change to use DX11 for H/W decoding | 1133 // Remove this CHECK when the change to use DX11 for H/W decoding |
1168 // stabilizes. | 1134 // stabilizes. |
1169 CHECK(false); | 1135 CHECK(false); |
1170 } | 1136 } |
1171 | 1137 |
1172 RETURN_ON_HR_FAILURE(hr, "Failed to create video format converter", false); | 1138 RETURN_ON_HR_FAILURE(hr, "Failed to create video format converter", false); |
1173 | 1139 |
1174 base::win::ScopedComPtr<IMFAttributes> converter_attributes; | 1140 base::win::ScopedComPtr<IMFAttributes> converter_attributes; |
1175 hr = video_format_converter_mft_->GetAttributes( | 1141 hr = video_format_converter_mft_->GetAttributes( |
1176 converter_attributes.Receive()); | 1142 converter_attributes.Receive()); |
1177 RETURN_ON_HR_FAILURE(hr, "Failed to get converter attributes", false); | 1143 RETURN_ON_HR_FAILURE(hr, "Failed to get converter attributes", false); |
1178 | 1144 |
1179 hr = converter_attributes->SetUINT32(MF_XVP_PLAYBACK_MODE, TRUE); | 1145 hr = converter_attributes->SetUINT32(MF_XVP_PLAYBACK_MODE, TRUE); |
1180 RETURN_ON_HR_FAILURE( | 1146 RETURN_ON_HR_FAILURE( |
1181 hr, | 1147 hr, "Failed to set MF_XVP_PLAYBACK_MODE attribute on converter", false); |
1182 "Failed to set MF_XVP_PLAYBACK_MODE attribute on converter", | |
1183 false); | |
1184 | 1148 |
1185 hr = converter_attributes->SetUINT32(MF_LOW_LATENCY, FALSE); | 1149 hr = converter_attributes->SetUINT32(MF_LOW_LATENCY, FALSE); |
1186 RETURN_ON_HR_FAILURE( | 1150 RETURN_ON_HR_FAILURE( |
1187 hr, | 1151 hr, "Failed to set MF_LOW_LATENCY attribute on converter", false); |
1188 "Failed to set MF_LOW_LATENCY attribute on converter", | |
1189 false); | |
1190 return true; | 1152 return true; |
1191 } | 1153 } |
1192 | 1154 |
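The event query created just above is the standard D3D11 mechanism for finding out when previously submitted GPU work has completed; the decoder polls it when flushing decoded surfaces. A minimal sketch of that pattern follows, assuming |context| is the device's immediate context; this is illustrative only and not the code used elsewhere in this file.

// Sketch only: block until the GPU has consumed all work submitted so far.
// |context| and the busy-wait loop are illustrative assumptions.
void WaitForGpuCompletion(ID3D11DeviceContext* context, ID3D11Query* query) {
  context->End(query);   // Insert the event marker into the command stream.
  context->Flush();      // Make sure the commands are actually submitted.
  BOOL done = FALSE;
  HRESULT hr = S_FALSE;
  do {
    hr = context->GetData(query, &done, sizeof(done), 0);
  } while (hr == S_FALSE);  // S_FALSE: the GPU has not reached the marker yet.
}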
1193 void DXVAVideoDecodeAccelerator::Decode( | 1155 void DXVAVideoDecodeAccelerator::Decode( |
1194 const media::BitstreamBuffer& bitstream_buffer) { | 1156 const media::BitstreamBuffer& bitstream_buffer) { |
1195 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 1157 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
1196 | 1158 |
1197 // SharedMemory will take over ownership of the handle. | 1159 // SharedMemory will take over ownership of the handle. |
1198 base::SharedMemory shm(bitstream_buffer.handle(), true); | 1160 base::SharedMemory shm(bitstream_buffer.handle(), true); |
1199 | 1161 |
1200 State state = GetState(); | 1162 State state = GetState(); |
1201 RETURN_AND_NOTIFY_ON_FAILURE((state == kNormal || state == kStopped || | 1163 RETURN_AND_NOTIFY_ON_FAILURE( |
1202 state == kFlushing), | 1164 (state == kNormal || state == kStopped || state == kFlushing), |
1203 "Invalid state: " << state, ILLEGAL_STATE,); | 1165 "Invalid state: " << state, ILLEGAL_STATE, ); |
1204 if (bitstream_buffer.id() < 0) { | 1166 if (bitstream_buffer.id() < 0) { |
1205 RETURN_AND_NOTIFY_ON_FAILURE( | 1167 RETURN_AND_NOTIFY_ON_FAILURE( |
1206 false, "Invalid bitstream_buffer, id: " << bitstream_buffer.id(), | 1168 false, "Invalid bitstream_buffer, id: " << bitstream_buffer.id(), |
1207 INVALID_ARGUMENT, ); | 1169 INVALID_ARGUMENT, ); |
1208 } | 1170 } |
1209 | 1171 |
1210 base::win::ScopedComPtr<IMFSample> sample; | 1172 base::win::ScopedComPtr<IMFSample> sample; |
1211 RETURN_AND_NOTIFY_ON_FAILURE(shm.Map(bitstream_buffer.size()), | 1173 RETURN_AND_NOTIFY_ON_FAILURE(shm.Map(bitstream_buffer.size()), |
1212 "Failed in base::SharedMemory::Map", | 1174 "Failed in base::SharedMemory::Map", |
1213 PLATFORM_FAILURE, ); | 1175 PLATFORM_FAILURE, ); |
1214 | 1176 |
1215 sample.Attach(CreateInputSample( | 1177 sample.Attach(CreateInputSample( |
1216 reinterpret_cast<const uint8_t*>(shm.memory()), bitstream_buffer.size(), | 1178 reinterpret_cast<const uint8_t*>(shm.memory()), bitstream_buffer.size(), |
1217 std::min<uint32_t>(bitstream_buffer.size(), input_stream_info_.cbSize), | 1179 std::min<uint32_t>(bitstream_buffer.size(), input_stream_info_.cbSize), |
1218 input_stream_info_.cbAlignment)); | 1180 input_stream_info_.cbAlignment)); |
1219 RETURN_AND_NOTIFY_ON_FAILURE(sample.get(), "Failed to create input sample", | 1181 RETURN_AND_NOTIFY_ON_FAILURE(sample.get(), "Failed to create input sample", |
1220 PLATFORM_FAILURE, ); | 1182 PLATFORM_FAILURE, ); |
1221 | 1183 |
1222 RETURN_AND_NOTIFY_ON_HR_FAILURE(sample->SetSampleTime(bitstream_buffer.id()), | 1184 RETURN_AND_NOTIFY_ON_HR_FAILURE( |
1223 "Failed to associate input buffer id with sample", PLATFORM_FAILURE,); | 1185 sample->SetSampleTime(bitstream_buffer.id()), |
| 1186 "Failed to associate input buffer id with sample", PLATFORM_FAILURE, ); |
1224 | 1187 |
1225 decoder_thread_task_runner_->PostTask( | 1188 decoder_thread_task_runner_->PostTask( |
1226 FROM_HERE, | 1189 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::DecodeInternal, |
1227 base::Bind(&DXVAVideoDecodeAccelerator::DecodeInternal, | 1190 base::Unretained(this), sample)); |
1228 base::Unretained(this), sample)); | |
1229 } | 1191 } |
1230 | 1192 |
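CreateInputSample, called above, packages the mapped shared-memory bytes into a Media Foundation sample. A minimal sketch of that packaging, assuming only the stock mfapi.h helpers and ignoring the minimum-size handling done by the real helper:

// Sketch only: wrap |data| in an IMFSample backed by an (optionally aligned)
// memory buffer. The helper name and simplified error handling are assumptions.
base::win::ScopedComPtr<IMFSample> WrapBitstreamSketch(const uint8_t* data,
                                                       DWORD size,
                                                       DWORD alignment) {
  base::win::ScopedComPtr<IMFSample> sample;
  if (FAILED(MFCreateSample(sample.Receive())))
    return base::win::ScopedComPtr<IMFSample>();

  base::win::ScopedComPtr<IMFMediaBuffer> buffer;
  // MFCreateAlignedMemoryBuffer takes (alignment - 1) as its alignment flag.
  HRESULT hr = alignment ? MFCreateAlignedMemoryBuffer(size, alignment - 1,
                                                       buffer.Receive())
                         : MFCreateMemoryBuffer(size, buffer.Receive());
  if (FAILED(hr))
    return base::win::ScopedComPtr<IMFSample>();

  BYTE* destination = NULL;
  if (SUCCEEDED(buffer->Lock(&destination, NULL, NULL))) {
    memcpy(destination, data, size);
    buffer->Unlock();
    buffer->SetCurrentLength(size);
    sample->AddBuffer(buffer.get());
  }
  return sample;
}

The real code additionally calls SetSampleTime on the returned sample (as shown above) so the bitstream buffer id can be recovered from the decoded output.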
1231 void DXVAVideoDecodeAccelerator::AssignPictureBuffers( | 1193 void DXVAVideoDecodeAccelerator::AssignPictureBuffers( |
1232 const std::vector<media::PictureBuffer>& buffers) { | 1194 const std::vector<media::PictureBuffer>& buffers) { |
1233 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 1195 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
1234 | 1196 |
1235 State state = GetState(); | 1197 State state = GetState(); |
1236 RETURN_AND_NOTIFY_ON_FAILURE((state != kUninitialized), | 1198 RETURN_AND_NOTIFY_ON_FAILURE((state != kUninitialized), |
1237 "Invalid state: " << state, ILLEGAL_STATE,); | 1199 "Invalid state: " << state, ILLEGAL_STATE, ); |
1238 RETURN_AND_NOTIFY_ON_FAILURE((kNumPictureBuffers >= buffers.size()), | 1200 RETURN_AND_NOTIFY_ON_FAILURE( |
1239 "Failed to provide requested picture buffers. (Got " << buffers.size() << | 1201 (kNumPictureBuffers >= buffers.size()), |
1240 ", requested " << kNumPictureBuffers << ")", INVALID_ARGUMENT,); | 1202 "Failed to provide requested picture buffers. (Got " |
| 1203 << buffers.size() << ", requested " << kNumPictureBuffers << ")", |
| 1204 INVALID_ARGUMENT, ); |
1241 | 1205 |
1242 // Copy the picture buffers provided by the client to the available list, | 1206 // Copy the picture buffers provided by the client to the available list, |
1243 // and mark these buffers as available for use. | 1207 // and mark these buffers as available for use. |
1244 for (size_t buffer_index = 0; buffer_index < buffers.size(); | 1208 for (size_t buffer_index = 0; buffer_index < buffers.size(); ++buffer_index) { |
1245 ++buffer_index) { | |
1246 DCHECK_LE(1u, buffers[buffer_index].texture_ids().size()); | 1209 DCHECK_LE(1u, buffers[buffer_index].texture_ids().size()); |
1247 linked_ptr<DXVAPictureBuffer> picture_buffer = | 1210 linked_ptr<DXVAPictureBuffer> picture_buffer = |
1248 DXVAPictureBuffer::Create(*this, buffers[buffer_index], egl_config_); | 1211 DXVAPictureBuffer::Create(*this, buffers[buffer_index], egl_config_); |
1249 RETURN_AND_NOTIFY_ON_FAILURE(picture_buffer.get(), | 1212 RETURN_AND_NOTIFY_ON_FAILURE(picture_buffer.get(), |
1250 "Failed to allocate picture buffer", PLATFORM_FAILURE,); | 1213 "Failed to allocate picture buffer", |
| 1214 PLATFORM_FAILURE, ); |
1251 | 1215 |
1252 bool inserted = output_picture_buffers_.insert(std::make_pair( | 1216 bool inserted = |
1253 buffers[buffer_index].id(), picture_buffer)).second; | 1217 output_picture_buffers_ |
| 1218 .insert(std::make_pair(buffers[buffer_index].id(), picture_buffer)) |
| 1219 .second; |
1254 DCHECK(inserted); | 1220 DCHECK(inserted); |
1255 } | 1221 } |
1256 | 1222 |
1257 ProcessPendingSamples(); | 1223 ProcessPendingSamples(); |
1258 if (pending_flush_) { | 1224 if (pending_flush_) { |
1259 decoder_thread_task_runner_->PostTask( | 1225 decoder_thread_task_runner_->PostTask( |
1260 FROM_HERE, | 1226 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal, |
1261 base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal, | 1227 base::Unretained(this))); |
1262 base::Unretained(this))); | |
1263 } | 1228 } |
1264 } | 1229 } |
1265 | 1230 |
1266 void DXVAVideoDecodeAccelerator::ReusePictureBuffer(int32_t picture_buffer_id) { | 1231 void DXVAVideoDecodeAccelerator::ReusePictureBuffer(int32_t picture_buffer_id) { |
1267 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 1232 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
1268 | 1233 |
1269 State state = GetState(); | 1234 State state = GetState(); |
1270 RETURN_AND_NOTIFY_ON_FAILURE((state != kUninitialized), | 1235 RETURN_AND_NOTIFY_ON_FAILURE((state != kUninitialized), |
1271 "Invalid state: " << state, ILLEGAL_STATE,); | 1236 "Invalid state: " << state, ILLEGAL_STATE, ); |
1272 | 1237 |
1273 if (output_picture_buffers_.empty() && stale_output_picture_buffers_.empty()) | 1238 if (output_picture_buffers_.empty() && stale_output_picture_buffers_.empty()) |
1274 return; | 1239 return; |
1275 | 1240 |
1276 OutputBuffers::iterator it = output_picture_buffers_.find(picture_buffer_id); | 1241 OutputBuffers::iterator it = output_picture_buffers_.find(picture_buffer_id); |
1277 // If we didn't find the picture id in the |output_picture_buffers_| map we | 1242 // If we didn't find the picture id in the |output_picture_buffers_| map we |
1278 // try the |stale_output_picture_buffers_| map, as this may have been an | 1243 // try the |stale_output_picture_buffers_| map, as this may have been an |
1279 // output picture buffer from before a resolution change that, at resolution | 1244 // output picture buffer from before a resolution change that, at resolution |
1280 // change time, had yet to be displayed. The client is calling us back to tell | 1245 // change time, had yet to be displayed. The client is calling us back to tell |
1281 // us that we can now recycle this picture buffer, so if we were waiting to | 1246 // us that we can now recycle this picture buffer, so if we were waiting to |
1282 // dispose of it we now can. | 1247 // dispose of it we now can. |
1283 if (it == output_picture_buffers_.end()) { | 1248 if (it == output_picture_buffers_.end()) { |
1284 if (!stale_output_picture_buffers_.empty()) { | 1249 if (!stale_output_picture_buffers_.empty()) { |
1285 it = stale_output_picture_buffers_.find(picture_buffer_id); | 1250 it = stale_output_picture_buffers_.find(picture_buffer_id); |
1286 RETURN_AND_NOTIFY_ON_FAILURE(it != stale_output_picture_buffers_.end(), | 1251 RETURN_AND_NOTIFY_ON_FAILURE(it != stale_output_picture_buffers_.end(), |
1287 "Invalid picture id: " << picture_buffer_id, INVALID_ARGUMENT,); | 1252 "Invalid picture id: " << picture_buffer_id, |
| 1253 INVALID_ARGUMENT, ); |
1288 main_thread_task_runner_->PostTask( | 1254 main_thread_task_runner_->PostTask( |
1289 FROM_HERE, | 1255 FROM_HERE, |
1290 base::Bind(&DXVAVideoDecodeAccelerator::DeferredDismissStaleBuffer, | 1256 base::Bind(&DXVAVideoDecodeAccelerator::DeferredDismissStaleBuffer, |
1291 weak_this_factory_.GetWeakPtr(), picture_buffer_id)); | 1257 weak_this_factory_.GetWeakPtr(), picture_buffer_id)); |
1292 } | 1258 } |
1293 return; | 1259 return; |
1294 } | 1260 } |
1295 | 1261 |
1296 if (it->second->available() || it->second->waiting_to_reuse()) | 1262 if (it->second->available() || it->second->waiting_to_reuse()) |
1297 return; | 1263 return; |
(...skipping 43 matching lines...) |
1341 base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs)); | 1307 base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs)); |
1342 return; | 1308 return; |
1343 } | 1309 } |
1344 RETURN_AND_NOTIFY_ON_FAILURE(picture_buffer->ReusePictureBuffer(), | 1310 RETURN_AND_NOTIFY_ON_FAILURE(picture_buffer->ReusePictureBuffer(), |
1345 "Failed to reuse picture buffer", | 1311 "Failed to reuse picture buffer", |
1346 PLATFORM_FAILURE, ); | 1312 PLATFORM_FAILURE, ); |
1347 | 1313 |
1348 ProcessPendingSamples(); | 1314 ProcessPendingSamples(); |
1349 if (pending_flush_) { | 1315 if (pending_flush_) { |
1350 decoder_thread_task_runner_->PostTask( | 1316 decoder_thread_task_runner_->PostTask( |
1351 FROM_HERE, | 1317 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal, |
1352 base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal, | 1318 base::Unretained(this))); |
1353 base::Unretained(this))); | |
1354 } | 1319 } |
1355 } | 1320 } |
1356 | 1321 |
1357 void DXVAVideoDecodeAccelerator::Flush() { | 1322 void DXVAVideoDecodeAccelerator::Flush() { |
1358 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 1323 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
1359 | 1324 |
1360 DVLOG(1) << "DXVAVideoDecodeAccelerator::Flush"; | 1325 DVLOG(1) << "DXVAVideoDecodeAccelerator::Flush"; |
1361 | 1326 |
1362 State state = GetState(); | 1327 State state = GetState(); |
1363 RETURN_AND_NOTIFY_ON_FAILURE((state == kNormal || state == kStopped), | 1328 RETURN_AND_NOTIFY_ON_FAILURE((state == kNormal || state == kStopped), |
1364 "Unexpected decoder state: " << state, ILLEGAL_STATE,); | 1329 "Unexpected decoder state: " << state, |
| 1330 ILLEGAL_STATE, ); |
1365 | 1331 |
1366 SetState(kFlushing); | 1332 SetState(kFlushing); |
1367 | 1333 |
1368 pending_flush_ = true; | 1334 pending_flush_ = true; |
1369 | 1335 |
1370 decoder_thread_task_runner_->PostTask( | 1336 decoder_thread_task_runner_->PostTask( |
1371 FROM_HERE, | 1337 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal, |
1372 base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal, | 1338 base::Unretained(this))); |
1373 base::Unretained(this))); | |
1374 } | 1339 } |
1375 | 1340 |
1376 void DXVAVideoDecodeAccelerator::Reset() { | 1341 void DXVAVideoDecodeAccelerator::Reset() { |
1377 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 1342 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
1378 | 1343 |
1379 DVLOG(1) << "DXVAVideoDecodeAccelerator::Reset"; | 1344 DVLOG(1) << "DXVAVideoDecodeAccelerator::Reset"; |
1380 | 1345 |
1381 State state = GetState(); | 1346 State state = GetState(); |
1382 RETURN_AND_NOTIFY_ON_FAILURE((state == kNormal || state == kStopped), | 1347 RETURN_AND_NOTIFY_ON_FAILURE((state == kNormal || state == kStopped), |
1383 "Reset: invalid state: " << state, ILLEGAL_STATE,); | 1348 "Reset: invalid state: " << state, |
| 1349 ILLEGAL_STATE, ); |
1384 | 1350 |
1385 decoder_thread_.Stop(); | 1351 decoder_thread_.Stop(); |
1386 | 1352 |
1387 SetState(kResetting); | 1353 SetState(kResetting); |
1388 | 1354 |
1389 // If we have pending output frames waiting for display then we drop those | 1355 // If we have pending output frames waiting for display then we drop those |
1390 // frames and set the corresponding picture buffer as available. | 1356 // frames and set the corresponding picture buffer as available. |
1391 PendingOutputSamples::iterator index; | 1357 PendingOutputSamples::iterator index; |
1392 for (index = pending_output_samples_.begin(); | 1358 for (index = pending_output_samples_.begin(); |
1393 index != pending_output_samples_.end(); | 1359 index != pending_output_samples_.end(); ++index) { |
1394 ++index) { | |
1395 if (index->picture_buffer_id != -1) { | 1360 if (index->picture_buffer_id != -1) { |
1396 OutputBuffers::iterator it = output_picture_buffers_.find( | 1361 OutputBuffers::iterator it = |
1397 index->picture_buffer_id); | 1362 output_picture_buffers_.find(index->picture_buffer_id); |
1398 if (it != output_picture_buffers_.end()) { | 1363 if (it != output_picture_buffers_.end()) { |
1399 DXVAPictureBuffer* picture_buffer = it->second.get(); | 1364 DXVAPictureBuffer* picture_buffer = it->second.get(); |
1400 picture_buffer->ReusePictureBuffer(); | 1365 picture_buffer->ReusePictureBuffer(); |
1401 } | 1366 } |
1402 } | 1367 } |
1403 } | 1368 } |
1404 | 1369 |
1405 pending_output_samples_.clear(); | 1370 pending_output_samples_.clear(); |
1406 | 1371 |
1407 NotifyInputBuffersDropped(); | 1372 NotifyInputBuffersDropped(); |
1408 | 1373 |
1409 RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_FLUSH, 0), | 1374 RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_FLUSH, 0), |
1410 "Reset: Failed to send message.", PLATFORM_FAILURE,); | 1375 "Reset: Failed to send message.", |
| 1376 PLATFORM_FAILURE, ); |
1411 | 1377 |
1412 main_thread_task_runner_->PostTask( | 1378 main_thread_task_runner_->PostTask( |
1413 FROM_HERE, | 1379 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::NotifyResetDone, |
1414 base::Bind(&DXVAVideoDecodeAccelerator::NotifyResetDone, | 1380 weak_this_factory_.GetWeakPtr())); |
1415 weak_this_factory_.GetWeakPtr())); | |
1416 | 1381 |
1417 StartDecoderThread(); | 1382 StartDecoderThread(); |
1418 SetState(kNormal); | 1383 SetState(kNormal); |
1419 } | 1384 } |
1420 | 1385 |
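Reset() flushes the transform, which discards everything the MFT has queued, whereas FlushInternal() further down drains it, asking the MFT to emit whatever output it is still holding. Assuming SendMFTMessage is a thin wrapper over IMFTransform::ProcessMessage (the wrapper itself is not shown in this section), the two commands boil down to:

// Sketch (assumption): what a wrapper like SendMFTMessage presumably reduces to.
bool SendMftMessageSketch(IMFTransform* decoder, MFT_MESSAGE_TYPE message) {
  // MFT_MESSAGE_COMMAND_FLUSH discards queued data (used by Reset());
  // MFT_MESSAGE_COMMAND_DRAIN asks for all remaining output (FlushInternal()).
  return SUCCEEDED(decoder->ProcessMessage(message, 0));
}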
1421 void DXVAVideoDecodeAccelerator::Destroy() { | 1386 void DXVAVideoDecodeAccelerator::Destroy() { |
1422 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 1387 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
1423 Invalidate(); | 1388 Invalidate(); |
1424 delete this; | 1389 delete this; |
1425 } | 1390 } |
1426 | 1391 |
1427 bool DXVAVideoDecodeAccelerator::TryToSetupDecodeOnSeparateThread( | 1392 bool DXVAVideoDecodeAccelerator::TryToSetupDecodeOnSeparateThread( |
1428 const base::WeakPtr<Client>& decode_client, | 1393 const base::WeakPtr<Client>& decode_client, |
1429 const scoped_refptr<base::SingleThreadTaskRunner>& decode_task_runner) { | 1394 const scoped_refptr<base::SingleThreadTaskRunner>& decode_task_runner) { |
1430 return false; | 1395 return false; |
1431 } | 1396 } |
1432 | 1397 |
1433 GLenum DXVAVideoDecodeAccelerator::GetSurfaceInternalFormat() const { | 1398 GLenum DXVAVideoDecodeAccelerator::GetSurfaceInternalFormat() const { |
1434 return GL_BGRA_EXT; | 1399 return GL_BGRA_EXT; |
1435 } | 1400 } |
1436 | 1401 |
1437 // static | 1402 // static |
1438 media::VideoDecodeAccelerator::SupportedProfiles | 1403 media::VideoDecodeAccelerator::SupportedProfiles |
1439 DXVAVideoDecodeAccelerator::GetSupportedProfiles() { | 1404 DXVAVideoDecodeAccelerator::GetSupportedProfiles() { |
1440 TRACE_EVENT0("gpu,startup", | 1405 TRACE_EVENT0("gpu,startup", |
1441 "DXVAVideoDecodeAccelerator::GetSupportedProfiles"); | 1406 "DXVAVideoDecodeAccelerator::GetSupportedProfiles"); |
1442 | 1407 |
1443 // TODO(henryhsu): Need to ensure the profiles are actually supported. | 1408 // TODO(henryhsu): Need to ensure the profiles are actually supported. |
1444 SupportedProfiles profiles; | 1409 SupportedProfiles profiles; |
1445 for (const auto& supported_profile : kSupportedProfiles) { | 1410 for (const auto& supported_profile : kSupportedProfiles) { |
1446 std::pair<int, int> min_resolution = GetMinResolution(supported_profile); | 1411 std::pair<int, int> min_resolution = GetMinResolution(supported_profile); |
1447 std::pair<int, int> max_resolution = GetMaxResolution(supported_profile); | 1412 std::pair<int, int> max_resolution = GetMaxResolution(supported_profile); |
1448 | 1413 |
1449 SupportedProfile profile; | 1414 SupportedProfile profile; |
1450 profile.profile = supported_profile; | 1415 profile.profile = supported_profile; |
1451 profile.min_resolution.SetSize(min_resolution.first, min_resolution.second); | 1416 profile.min_resolution.SetSize(min_resolution.first, min_resolution.second); |
(...skipping 15 matching lines...) |
1467 } else { | 1432 } else { |
1468 #if defined(ENABLE_DX11_FOR_WIN7) | 1433 #if defined(ENABLE_DX11_FOR_WIN7) |
1469 LoadLibrary(L"mshtmlmedia.dll"); | 1434 LoadLibrary(L"mshtmlmedia.dll"); |
1470 #endif | 1435 #endif |
1471 } | 1436 } |
1472 } | 1437 } |
1473 | 1438 |
1474 // static | 1439 // static |
1475 std::pair<int, int> DXVAVideoDecodeAccelerator::GetMinResolution( | 1440 std::pair<int, int> DXVAVideoDecodeAccelerator::GetMinResolution( |
1476 media::VideoCodecProfile profile) { | 1441 media::VideoCodecProfile profile) { |
1477 TRACE_EVENT0("gpu,startup", | 1442 TRACE_EVENT0("gpu,startup", "DXVAVideoDecodeAccelerator::GetMinResolution"); |
1478 "DXVAVideoDecodeAccelerator::GetMinResolution"); | |
1479 std::pair<int, int> min_resolution; | 1443 std::pair<int, int> min_resolution; |
1480 if (profile >= media::H264PROFILE_BASELINE && | 1444 if (profile >= media::H264PROFILE_BASELINE && |
1481 profile <= media::H264PROFILE_HIGH) { | 1445 profile <= media::H264PROFILE_HIGH) { |
1482 // Windows Media Foundation H.264 decoding does not support decoding videos | 1446 // Windows Media Foundation H.264 decoding does not support decoding videos |
1483 // with any dimension smaller than 48 pixels: | 1447 // with any dimension smaller than 48 pixels: |
1484 // http://msdn.microsoft.com/en-us/library/windows/desktop/dd797815 | 1448 // http://msdn.microsoft.com/en-us/library/windows/desktop/dd797815 |
1485 min_resolution = std::make_pair(48, 48); | 1449 min_resolution = std::make_pair(48, 48); |
1486 } else { | 1450 } else { |
1487 // TODO(ananta) | 1451 // TODO(ananta) |
1488 // Detect this properly for VP8/VP9 profiles. | 1452 // Detect this properly for VP8/VP9 profiles. |
1489 min_resolution = std::make_pair(16, 16); | 1453 min_resolution = std::make_pair(16, 16); |
1490 } | 1454 } |
1491 return min_resolution; | 1455 return min_resolution; |
1492 } | 1456 } |
1493 | 1457 |
1494 // static | 1458 // static |
1495 std::pair<int, int> DXVAVideoDecodeAccelerator::GetMaxResolution( | 1459 std::pair<int, int> DXVAVideoDecodeAccelerator::GetMaxResolution( |
1496 const media::VideoCodecProfile profile) { | 1460 const media::VideoCodecProfile profile) { |
1497 TRACE_EVENT0("gpu,startup", | 1461 TRACE_EVENT0("gpu,startup", "DXVAVideoDecodeAccelerator::GetMaxResolution"); |
1498 "DXVAVideoDecodeAccelerator::GetMaxResolution"); | |
1499 std::pair<int, int> max_resolution; | 1462 std::pair<int, int> max_resolution; |
1500 if (profile >= media::H264PROFILE_BASELINE && | 1463 if (profile >= media::H264PROFILE_BASELINE && |
1501 profile <= media::H264PROFILE_HIGH) { | 1464 profile <= media::H264PROFILE_HIGH) { |
1502 max_resolution = GetMaxH264Resolution(); | 1465 max_resolution = GetMaxH264Resolution(); |
1503 } else { | 1466 } else { |
1504 // TODO(ananta) | 1467 // TODO(ananta) |
1505 // Detect this properly for VP8/VP9 profiles. | 1468 // Detect this properly for VP8/VP9 profiles. |
1506 max_resolution = std::make_pair(4096, 2160); | 1469 max_resolution = std::make_pair(4096, 2160); |
1507 } | 1470 } |
1508 return max_resolution; | 1471 return max_resolution; |
(...skipping 18 matching lines...) |
1527 return max_resolution; | 1490 return max_resolution; |
1528 | 1491 |
1529 // To detect if a driver supports the desired resolutions, we try to create | 1492 // To detect if a driver supports the desired resolutions, we try to create |
1530 // a DXVA decoder instance for that resolution and profile. If that succeeds | 1493 // a DXVA decoder instance for that resolution and profile. If that succeeds |
1531 // we assume that the driver supports H/W H.264 decoding for that resolution. | 1494 // we assume that the driver supports H/W H.264 decoding for that resolution. |
1532 HRESULT hr = E_FAIL; | 1495 HRESULT hr = E_FAIL; |
1533 base::win::ScopedComPtr<ID3D11Device> device; | 1496 base::win::ScopedComPtr<ID3D11Device> device; |
1534 | 1497 |
1535 { | 1498 { |
1536 TRACE_EVENT0("gpu,startup", | 1499 TRACE_EVENT0("gpu,startup", |
1537 "GetMaxH264Resolution. QueryDeviceObjectFromANGLE"); | 1500 "GetMaxH264Resolution. QueryDeviceObjectFromANGLE"); |
1538 | 1501 |
1539 device = QueryDeviceObjectFromANGLE<ID3D11Device>(EGL_D3D11_DEVICE_ANGLE); | 1502 device = QueryDeviceObjectFromANGLE<ID3D11Device>(EGL_D3D11_DEVICE_ANGLE); |
1540 if (!device.get()) | 1503 if (!device.get()) |
1541 return max_resolution; | 1504 return max_resolution; |
1542 } | 1505 } |
1543 | 1506 |
1544 base::win::ScopedComPtr<ID3D11VideoDevice> video_device; | 1507 base::win::ScopedComPtr<ID3D11VideoDevice> video_device; |
1545 hr = device.QueryInterface(IID_ID3D11VideoDevice, | 1508 hr = device.QueryInterface(IID_ID3D11VideoDevice, video_device.ReceiveVoid()); |
1546 video_device.ReceiveVoid()); | |
1547 if (FAILED(hr)) | 1509 if (FAILED(hr)) |
1548 return max_resolution; | 1510 return max_resolution; |
1549 | 1511 |
1550 GUID decoder_guid = {}; | 1512 GUID decoder_guid = {}; |
1551 | 1513 |
1552 { | 1514 { |
1553 TRACE_EVENT0("gpu,startup", | 1515 TRACE_EVENT0("gpu,startup", |
1554 "GetMaxH264Resolution. H.264 guid search begin"); | 1516 "GetMaxH264Resolution. H.264 guid search begin"); |
1555 // Enumerate supported video profiles and look for the H264 profile. | 1517 // Enumerate supported video profiles and look for the H264 profile. |
1556 bool found = false; | 1518 bool found = false; |
1557 UINT profile_count = video_device->GetVideoDecoderProfileCount(); | 1519 UINT profile_count = video_device->GetVideoDecoderProfileCount(); |
1558 for (UINT profile_idx = 0; profile_idx < profile_count; profile_idx++) { | 1520 for (UINT profile_idx = 0; profile_idx < profile_count; profile_idx++) { |
1559 GUID profile_id = {}; | 1521 GUID profile_id = {}; |
1560 hr = video_device->GetVideoDecoderProfile(profile_idx, &profile_id); | 1522 hr = video_device->GetVideoDecoderProfile(profile_idx, &profile_id); |
1561 if (SUCCEEDED(hr) && | 1523 if (SUCCEEDED(hr) && (profile_id == DXVA2_ModeH264_E || |
1562 (profile_id == DXVA2_ModeH264_E || | 1524 profile_id == DXVA2_Intel_ModeH264_E)) { |
1563 profile_id == DXVA2_Intel_ModeH264_E)) { | |
1564 decoder_guid = profile_id; | 1525 decoder_guid = profile_id; |
1565 found = true; | 1526 found = true; |
1566 break; | 1527 break; |
1567 } | 1528 } |
1568 } | 1529 } |
1569 if (!found) | 1530 if (!found) |
1570 return max_resolution; | 1531 return max_resolution; |
1571 } | 1532 } |
1572 | 1533 |
1573 // Legacy AMD drivers with UVD3 or earlier and some Intel GPUs crash while | 1534 // Legacy AMD drivers with UVD3 or earlier and some Intel GPUs crash while |
1574 // creating surfaces larger than 1920 x 1088. | 1535 // creating surfaces larger than 1920 x 1088. |
1575 if (IsLegacyGPU(device.get())) | 1536 if (IsLegacyGPU(device.get())) |
1576 return max_resolution; | 1537 return max_resolution; |
1577 | 1538 |
1578 // We look for the following resolutions in the driver. | 1539 // We look for the following resolutions in the driver. |
1579 // TODO(ananta) | 1540 // TODO(ananta) |
1580 // Look into whether this list needs to be expanded. | 1541 // Look into whether this list needs to be expanded. |
1581 static std::pair<int, int> resolution_array[] = { | 1542 static std::pair<int, int> resolution_array[] = { |
1582 // Use 1088 to account for 16x16 macroblocks. | 1543 // Use 1088 to account for 16x16 macroblocks. |
1583 std::make_pair(1920, 1088), | 1544 std::make_pair(1920, 1088), std::make_pair(2560, 1440), |
1584 std::make_pair(2560, 1440), | 1545 std::make_pair(3840, 2160), std::make_pair(4096, 2160), |
1585 std::make_pair(3840, 2160), | 1546 std::make_pair(4096, 2304), |
1586 std::make_pair(4096, 2160), | |
1587 std::make_pair(4096, 2304), | |
1588 }; | 1547 }; |
1589 | 1548 |
1590 { | 1549 { |
1591 TRACE_EVENT0("gpu,startup", | 1550 TRACE_EVENT0("gpu,startup", |
1592 "GetMaxH264Resolution. Resolution search begin"); | 1551 "GetMaxH264Resolution. Resolution search begin"); |
1593 | 1552 |
1594 for (size_t res_idx = 0; res_idx < arraysize(resolution_array); | 1553 for (size_t res_idx = 0; res_idx < arraysize(resolution_array); res_idx++) { |
1595 res_idx++) { | |
1596 D3D11_VIDEO_DECODER_DESC desc = {}; | 1554 D3D11_VIDEO_DECODER_DESC desc = {}; |
1597 desc.Guid = decoder_guid; | 1555 desc.Guid = decoder_guid; |
1598 desc.SampleWidth = resolution_array[res_idx].first; | 1556 desc.SampleWidth = resolution_array[res_idx].first; |
1599 desc.SampleHeight = resolution_array[res_idx].second; | 1557 desc.SampleHeight = resolution_array[res_idx].second; |
1600 desc.OutputFormat = DXGI_FORMAT_NV12; | 1558 desc.OutputFormat = DXGI_FORMAT_NV12; |
1601 UINT config_count = 0; | 1559 UINT config_count = 0; |
1602 hr = video_device->GetVideoDecoderConfigCount(&desc, &config_count); | 1560 hr = video_device->GetVideoDecoderConfigCount(&desc, &config_count); |
1603 if (FAILED(hr) || config_count == 0) | 1561 if (FAILED(hr) || config_count == 0) |
1604 return max_resolution; | 1562 return max_resolution; |
1605 | 1563 |
1606 D3D11_VIDEO_DECODER_CONFIG config = {}; | 1564 D3D11_VIDEO_DECODER_CONFIG config = {}; |
1607 hr = video_device->GetVideoDecoderConfig(&desc, 0, &config); | 1565 hr = video_device->GetVideoDecoderConfig(&desc, 0, &config); |
1608 if (FAILED(hr)) | 1566 if (FAILED(hr)) |
1609 return max_resolution; | 1567 return max_resolution; |
1610 | 1568 |
1611 base::win::ScopedComPtr<ID3D11VideoDecoder> video_decoder; | 1569 base::win::ScopedComPtr<ID3D11VideoDecoder> video_decoder; |
1612 hr = video_device->CreateVideoDecoder(&desc, &config, | 1570 hr = video_device->CreateVideoDecoder(&desc, &config, |
1613 video_decoder.Receive()); | 1571 video_decoder.Receive()); |
1614 if (!video_decoder.get()) | 1572 if (!video_decoder.get()) |
1615 return max_resolution; | 1573 return max_resolution; |
1616 | 1574 |
1617 max_resolution = resolution_array[res_idx]; | 1575 max_resolution = resolution_array[res_idx]; |
1618 } | 1576 } |
1619 } | 1577 } |
1620 return max_resolution; | 1578 return max_resolution; |
1621 } | 1579 } |
1622 | 1580 |
1623 // static | 1581 // static |
(...skipping 23 matching lines...) |
1647 | 1605 |
1648 DXGI_ADAPTER_DESC adapter_desc = {}; | 1606 DXGI_ADAPTER_DESC adapter_desc = {}; |
1649 hr = adapter->GetDesc(&adapter_desc); | 1607 hr = adapter->GetDesc(&adapter_desc); |
1650 if (FAILED(hr)) | 1608 if (FAILED(hr)) |
1651 return legacy_gpu; | 1609 return legacy_gpu; |
1652 | 1610 |
1653 // We check if the device is an Intel or an AMD device and whether it is in | 1611 // We check if the device is an Intel or an AMD device and whether it is in |
1654 // the global list defined by the g_AMDUVD3GPUList and g_IntelLegacyGPUList | 1612 // the global list defined by the g_AMDUVD3GPUList and g_IntelLegacyGPUList |
1655 // arrays above. If so, the device is treated as a legacy device. | 1613 // arrays above. If so, the device is treated as a legacy device. |
1656 if ((adapter_desc.VendorId == kAMDGPUId1) || | 1614 if ((adapter_desc.VendorId == kAMDGPUId1) || |
1657 adapter_desc.VendorId == kAMDGPUId2) { | 1615 adapter_desc.VendorId == kAMDGPUId2) { |
1658 { | 1616 { |
1659 TRACE_EVENT0("gpu,startup", | 1617 TRACE_EVENT0("gpu,startup", |
1660 "DXVAVideoDecodeAccelerator::IsLegacyGPU. AMD check"); | 1618 "DXVAVideoDecodeAccelerator::IsLegacyGPU. AMD check"); |
1661 for (size_t i = 0; i < arraysize(g_AMDUVD3GPUList); i++) { | 1619 for (size_t i = 0; i < arraysize(g_AMDUVD3GPUList); i++) { |
1662 if (adapter_desc.DeviceId == g_AMDUVD3GPUList[i]) | 1620 if (adapter_desc.DeviceId == g_AMDUVD3GPUList[i]) |
1663 return legacy_gpu; | 1621 return legacy_gpu; |
1664 } | 1622 } |
1665 } | 1623 } |
1666 } else if (adapter_desc.VendorId == kIntelGPU) { | 1624 } else if (adapter_desc.VendorId == kIntelGPU) { |
1667 { | 1625 { |
1668 TRACE_EVENT0("gpu,startup", | 1626 TRACE_EVENT0("gpu,startup", |
1669 "DXVAVideoDecodeAccelerator::IsLegacyGPU. Intel check"); | 1627 "DXVAVideoDecodeAccelerator::IsLegacyGPU. Intel check"); |
1670 for (size_t i = 0; i < arraysize(g_IntelLegacyGPUList); i++) { | 1628 for (size_t i = 0; i < arraysize(g_IntelLegacyGPUList); i++) { |
1671 if (adapter_desc.DeviceId == g_IntelLegacyGPUList[i]) | 1629 if (adapter_desc.DeviceId == g_IntelLegacyGPUList[i]) |
1672 return legacy_gpu; | 1630 return legacy_gpu; |
1673 } | 1631 } |
1674 } | 1632 } |
1675 } | 1633 } |
1676 legacy_gpu = false; | 1634 legacy_gpu = false; |
1677 return legacy_gpu; | 1635 return legacy_gpu; |
1678 } | 1636 } |
1679 | 1637 |
(...skipping 11 matching lines...) |
1691 decoder_dll = ::GetModuleHandle(L"msmpeg2vdec.dll"); | 1649 decoder_dll = ::GetModuleHandle(L"msmpeg2vdec.dll"); |
1692 RETURN_ON_FAILURE(decoder_dll, | 1650 RETURN_ON_FAILURE(decoder_dll, |
1693 "msmpeg2vdec.dll required for decoding is not loaded", | 1651 "msmpeg2vdec.dll required for decoding is not loaded", |
1694 false); | 1652 false); |
1695 | 1653 |
1696 // Check the DLL version; version 6.1.7140 is blacklisted due to high crash | 1654 // Check the DLL version; version 6.1.7140 is blacklisted due to high crash |
1697 // rates in browsers loading that DLL. If that is the version installed, we | 1655 // rates in browsers loading that DLL. If that is the version installed, we |
1698 // fall back to software decoding. See crbug/403440. | 1656 // fall back to software decoding. See crbug/403440. |
1699 std::unique_ptr<FileVersionInfo> version_info( | 1657 std::unique_ptr<FileVersionInfo> version_info( |
1700 FileVersionInfo::CreateFileVersionInfoForModule(decoder_dll)); | 1658 FileVersionInfo::CreateFileVersionInfoForModule(decoder_dll)); |
1701 RETURN_ON_FAILURE(version_info, | 1659 RETURN_ON_FAILURE(version_info, "unable to get version of msmpeg2vdec.dll", |
1702 "unable to get version of msmpeg2vdec.dll", | |
1703 false); | 1660 false); |
1704 base::string16 file_version = version_info->file_version(); | 1661 base::string16 file_version = version_info->file_version(); |
1705 RETURN_ON_FAILURE(file_version.find(L"6.1.7140") == base::string16::npos, | 1662 RETURN_ON_FAILURE(file_version.find(L"6.1.7140") == base::string16::npos, |
1706 "blacklisted version of msmpeg2vdec.dll 6.1.7140", | 1663 "blacklisted version of msmpeg2vdec.dll 6.1.7140", false); |
1707 false); | |
1708 codec_ = media::kCodecH264; | 1664 codec_ = media::kCodecH264; |
1709 clsid = __uuidof(CMSH264DecoderMFT); | 1665 clsid = __uuidof(CMSH264DecoderMFT); |
1710 } else if (enable_accelerated_vpx_decode_ && | 1666 } else if (enable_accelerated_vpx_decode_ && |
1711 (profile == media::VP8PROFILE_ANY || | 1667 (profile == media::VP8PROFILE_ANY || |
1712 profile == media::VP9PROFILE_PROFILE0 || | 1668 profile == media::VP9PROFILE_PROFILE0 || |
1713 profile == media::VP9PROFILE_PROFILE1 || | 1669 profile == media::VP9PROFILE_PROFILE1 || |
1714 profile == media::VP9PROFILE_PROFILE2 || | 1670 profile == media::VP9PROFILE_PROFILE2 || |
1715 profile == media::VP9PROFILE_PROFILE3)) { | 1671 profile == media::VP9PROFILE_PROFILE3)) { |
1716 int program_files_key = base::DIR_PROGRAM_FILES; | 1672 int program_files_key = base::DIR_PROGRAM_FILES; |
1717 if (base::win::OSInfo::GetInstance()->wow64_status() == | 1673 if (base::win::OSInfo::GetInstance()->wow64_status() == |
1718 base::win::OSInfo::WOW64_ENABLED) { | 1674 base::win::OSInfo::WOW64_ENABLED) { |
1719 program_files_key = base::DIR_PROGRAM_FILES6432; | 1675 program_files_key = base::DIR_PROGRAM_FILES6432; |
1720 } | 1676 } |
1721 | 1677 |
1722 base::FilePath dll_path; | 1678 base::FilePath dll_path; |
1723 RETURN_ON_FAILURE(PathService::Get(program_files_key, &dll_path), | 1679 RETURN_ON_FAILURE(PathService::Get(program_files_key, &dll_path), |
1724 "failed to get path for Program Files", false); | 1680 "failed to get path for Program Files", false); |
1725 | 1681 |
1726 dll_path = dll_path.Append(kVPXDecoderDLLPath); | 1682 dll_path = dll_path.Append(kVPXDecoderDLLPath); |
1727 if (profile == media::VP8PROFILE_ANY) { | 1683 if (profile == media::VP8PROFILE_ANY) { |
1728 codec_ = media::kCodecVP8; | 1684 codec_ = media::kCodecVP8; |
1729 dll_path = dll_path.Append(kVP8DecoderDLLName); | 1685 dll_path = dll_path.Append(kVP8DecoderDLLName); |
1730 clsid = CLSID_WebmMfVp8Dec; | 1686 clsid = CLSID_WebmMfVp8Dec; |
1731 } else { | 1687 } else { |
1732 codec_ = media::kCodecVP9; | 1688 codec_ = media::kCodecVP9; |
1733 dll_path = dll_path.Append(kVP9DecoderDLLName); | 1689 dll_path = dll_path.Append(kVP9DecoderDLLName); |
1734 clsid = CLSID_WebmMfVp9Dec; | 1690 clsid = CLSID_WebmMfVp9Dec; |
1735 } | 1691 } |
1736 decoder_dll = ::LoadLibraryEx(dll_path.value().data(), NULL, | 1692 decoder_dll = ::LoadLibraryEx(dll_path.value().data(), NULL, |
1737 LOAD_WITH_ALTERED_SEARCH_PATH); | 1693 LOAD_WITH_ALTERED_SEARCH_PATH); |
1738 RETURN_ON_FAILURE(decoder_dll, "vpx decoder dll is not loaded", false); | 1694 RETURN_ON_FAILURE(decoder_dll, "vpx decoder dll is not loaded", false); |
1739 } else { | 1695 } else { |
1740 RETURN_ON_FAILURE(false, "Unsupported codec.", false); | 1696 RETURN_ON_FAILURE(false, "Unsupported codec.", false); |
1741 } | 1697 } |
1742 | 1698 |
1743 HRESULT hr = CreateCOMObjectFromDll(decoder_dll, | 1699 HRESULT hr = CreateCOMObjectFromDll( |
1744 clsid, | 1700 decoder_dll, clsid, __uuidof(IMFTransform), decoder_.ReceiveVoid()); |
1745 __uuidof(IMFTransform), | |
1746 decoder_.ReceiveVoid()); | |
1747 RETURN_ON_HR_FAILURE(hr, "Failed to create decoder instance", false); | 1701 RETURN_ON_HR_FAILURE(hr, "Failed to create decoder instance", false); |
1748 | 1702 |
1749 RETURN_ON_FAILURE(CheckDecoderDxvaSupport(), | 1703 RETURN_ON_FAILURE(CheckDecoderDxvaSupport(), |
1750 "Failed to check decoder DXVA support", false); | 1704 "Failed to check decoder DXVA support", false); |
1751 | 1705 |
1752 ULONG_PTR device_manager_to_use = NULL; | 1706 ULONG_PTR device_manager_to_use = NULL; |
1753 if (use_dx11_) { | 1707 if (use_dx11_) { |
1754 CHECK(create_dxgi_device_manager_); | 1708 CHECK(create_dxgi_device_manager_); |
1755 RETURN_AND_NOTIFY_ON_FAILURE(CreateDX11DevManager(), | 1709 RETURN_AND_NOTIFY_ON_FAILURE(CreateDX11DevManager(), |
1756 "Failed to initialize DX11 device and manager", | 1710 "Failed to initialize DX11 device and manager", |
1757 PLATFORM_FAILURE, | 1711 PLATFORM_FAILURE, false); |
1758 false); | 1712 device_manager_to_use = |
1759 device_manager_to_use = reinterpret_cast<ULONG_PTR>( | 1713 reinterpret_cast<ULONG_PTR>(d3d11_device_manager_.get()); |
1760 d3d11_device_manager_.get()); | |
1761 } else { | 1714 } else { |
1762 RETURN_AND_NOTIFY_ON_FAILURE(CreateD3DDevManager(), | 1715 RETURN_AND_NOTIFY_ON_FAILURE(CreateD3DDevManager(), |
1763 "Failed to initialize D3D device and manager", | 1716 "Failed to initialize D3D device and manager", |
1764 PLATFORM_FAILURE, | 1717 PLATFORM_FAILURE, false); |
1765 false); | |
1766 device_manager_to_use = reinterpret_cast<ULONG_PTR>(device_manager_.get()); | 1718 device_manager_to_use = reinterpret_cast<ULONG_PTR>(device_manager_.get()); |
1767 } | 1719 } |
1768 | 1720 |
1769 hr = decoder_->ProcessMessage( | 1721 hr = decoder_->ProcessMessage(MFT_MESSAGE_SET_D3D_MANAGER, |
1770 MFT_MESSAGE_SET_D3D_MANAGER, | 1722 device_manager_to_use); |
1771 device_manager_to_use); | |
1772 if (use_dx11_) { | 1723 if (use_dx11_) { |
1773 RETURN_ON_HR_FAILURE(hr, "Failed to pass DX11 manager to decoder", false); | 1724 RETURN_ON_HR_FAILURE(hr, "Failed to pass DX11 manager to decoder", false); |
1774 } else { | 1725 } else { |
1775 RETURN_ON_HR_FAILURE(hr, "Failed to pass D3D manager to decoder", false); | 1726 RETURN_ON_HR_FAILURE(hr, "Failed to pass D3D manager to decoder", false); |
1776 } | 1727 } |
1777 | 1728 |
1778 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); | 1729 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); |
1779 | 1730 |
1780 EGLint config_attribs[] = { | 1731 EGLint config_attribs[] = {EGL_BUFFER_SIZE, 32, |
1781 EGL_BUFFER_SIZE, 32, | 1732 EGL_RED_SIZE, 8, |
1782 EGL_RED_SIZE, 8, | 1733 EGL_GREEN_SIZE, 8, |
1783 EGL_GREEN_SIZE, 8, | 1734 EGL_BLUE_SIZE, 8, |
1784 EGL_BLUE_SIZE, 8, | 1735 EGL_SURFACE_TYPE, EGL_PBUFFER_BIT, |
1785 EGL_SURFACE_TYPE, EGL_PBUFFER_BIT, | 1736 EGL_ALPHA_SIZE, 0, |
1786 EGL_ALPHA_SIZE, 0, | 1737 EGL_NONE}; |
1787 EGL_NONE | |
1788 }; | |
1789 | 1738 |
1790 EGLint num_configs; | 1739 EGLint num_configs; |
1791 | 1740 |
1792 if (!eglChooseConfig( | 1741 if (!eglChooseConfig(egl_display, config_attribs, &egl_config_, 1, |
1793 egl_display, | 1742 &num_configs)) |
1794 config_attribs, | |
1795 &egl_config_, | |
1796 1, | |
1797 &num_configs)) | |
1798 return false; | 1743 return false; |
1799 | 1744 |
1800 return SetDecoderMediaTypes(); | 1745 return SetDecoderMediaTypes(); |
1801 } | 1746 } |
1802 | 1747 |
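CreateCOMObjectFromDll, used above for both the decoder MFT and the video format converter, avoids CoCreateInstance because the DLLs are loaded (or looked up) explicitly. A minimal sketch of how such a helper is typically written, assuming the standard DllGetClassObject export; this is not the file's actual implementation:

// Sketch only: instantiate a COM object directly from an already-loaded DLL.
typedef HRESULT(WINAPI* GetClassObjectFn)(REFCLSID clsid, REFIID iid,
                                          void** object);

HRESULT CreateComObjectFromDllSketch(HMODULE dll, REFCLSID clsid, REFIID iid,
                                     void** object) {
  if (!dll || !object)
    return E_INVALIDARG;
  GetClassObjectFn get_class_object = reinterpret_cast<GetClassObjectFn>(
      GetProcAddress(dll, "DllGetClassObject"));
  if (!get_class_object)
    return E_FAIL;
  base::win::ScopedComPtr<IClassFactory> factory;
  HRESULT hr = get_class_object(clsid, __uuidof(IClassFactory),
                                factory.ReceiveVoid());
  if (FAILED(hr))
    return hr;
  return factory->CreateInstance(NULL, iid, object);
}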
1803 bool DXVAVideoDecodeAccelerator::CheckDecoderDxvaSupport() { | 1748 bool DXVAVideoDecodeAccelerator::CheckDecoderDxvaSupport() { |
1804 base::win::ScopedComPtr<IMFAttributes> attributes; | 1749 base::win::ScopedComPtr<IMFAttributes> attributes; |
1805 HRESULT hr = decoder_->GetAttributes(attributes.Receive()); | 1750 HRESULT hr = decoder_->GetAttributes(attributes.Receive()); |
1806 RETURN_ON_HR_FAILURE(hr, "Failed to get decoder attributes", false); | 1751 RETURN_ON_HR_FAILURE(hr, "Failed to get decoder attributes", false); |
1807 | 1752 |
(...skipping 102 matching lines...) |
1910 } | 1855 } |
1911 | 1856 |
1912 DVLOG(1) << "Min buffer size: " << input_stream_info_.cbSize; | 1857 DVLOG(1) << "Min buffer size: " << input_stream_info_.cbSize; |
1913 DVLOG(1) << "Max lookahead: " << input_stream_info_.cbMaxLookahead; | 1858 DVLOG(1) << "Max lookahead: " << input_stream_info_.cbMaxLookahead; |
1914 DVLOG(1) << "Alignment: " << input_stream_info_.cbAlignment; | 1859 DVLOG(1) << "Alignment: " << input_stream_info_.cbAlignment; |
1915 | 1860 |
1916 DVLOG(1) << "Output stream info: "; | 1861 DVLOG(1) << "Output stream info: "; |
1917 // The flags here should be the same and mean the same thing, except when | 1862 // The flags here should be the same and mean the same thing, except when |
1918 // DXVA is enabled, there is an extra 0x100 flag, meaning the decoder will | 1863 // DXVA is enabled, there is an extra 0x100 flag, meaning the decoder will |
1919 // allocate its own samples. | 1864 // allocate its own samples. |
1920 DVLOG(1) << "Flags: " | 1865 DVLOG(1) << "Flags: " << std::hex << std::showbase |
1921 << std::hex << std::showbase << output_stream_info_.dwFlags; | 1866 << output_stream_info_.dwFlags; |
1922 if (codec_ == media::kCodecH264) { | 1867 if (codec_ == media::kCodecH264) { |
1923 CHECK_EQ(output_stream_info_.dwFlags, 0x107u); | 1868 CHECK_EQ(output_stream_info_.dwFlags, 0x107u); |
1924 } | 1869 } |
1925 DVLOG(1) << "Min buffer size: " << output_stream_info_.cbSize; | 1870 DVLOG(1) << "Min buffer size: " << output_stream_info_.cbSize; |
1926 DVLOG(1) << "Alignment: " << output_stream_info_.cbAlignment; | 1871 DVLOG(1) << "Alignment: " << output_stream_info_.cbAlignment; |
1927 return true; | 1872 return true; |
1928 } | 1873 } |
1929 | 1874 |
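For reference, the "extra 0x100 flag" mentioned in the comment above is MFT_OUTPUT_STREAM_PROVIDES_SAMPLES, and 0x107 is that flag plus WHOLE_SAMPLES, SINGLE_SAMPLE_PER_BUFFER and FIXED_SAMPLE_SIZE; an equivalent, illustrative spelling of the CHECK using the named constants from mftransform.h would be:

// Illustrative only; same value as the magic 0x107u checked above.
const DWORD kExpectedH264OutputFlags =
    MFT_OUTPUT_STREAM_WHOLE_SAMPLES |
    MFT_OUTPUT_STREAM_SINGLE_SAMPLE_PER_BUFFER |
    MFT_OUTPUT_STREAM_FIXED_SAMPLE_SIZE |
    MFT_OUTPUT_STREAM_PROVIDES_SAMPLES;  // == 0x107
CHECK_EQ(output_stream_info_.dwFlags, kExpectedH264OutputFlags);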
1930 void DXVAVideoDecodeAccelerator::DoDecode() { | 1875 void DXVAVideoDecodeAccelerator::DoDecode() { |
1931 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 1876 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
1932 // This function is also called from FlushInternal in a loop which could | 1877 // This function is also called from FlushInternal in a loop which could |
1933 // result in the state transitioning to kStopped due to no decoded output. | 1878 // result in the state transitioning to kStopped due to no decoded output. |
1934 State state = GetState(); | 1879 State state = GetState(); |
1935 RETURN_AND_NOTIFY_ON_FAILURE( | 1880 RETURN_AND_NOTIFY_ON_FAILURE( |
1936 (state == kNormal || state == kFlushing || state == kStopped), | 1881 (state == kNormal || state == kFlushing || state == kStopped), |
1937 "DoDecode: not in normal/flushing/stopped state", ILLEGAL_STATE,); | 1882 "DoDecode: not in normal/flushing/stopped state", ILLEGAL_STATE, ); |
1938 | 1883 |
1939 MFT_OUTPUT_DATA_BUFFER output_data_buffer = {0}; | 1884 MFT_OUTPUT_DATA_BUFFER output_data_buffer = {0}; |
1940 DWORD status = 0; | 1885 DWORD status = 0; |
1941 | 1886 |
1942 HRESULT hr = decoder_->ProcessOutput(0, // No flags | 1887 HRESULT hr = decoder_->ProcessOutput(0, // No flags |
1943 1, // # of out streams to pull from | 1888 1, // # of out streams to pull from |
1944 &output_data_buffer, | 1889 &output_data_buffer, &status); |
1945 &status); | |
1946 IMFCollection* events = output_data_buffer.pEvents; | 1890 IMFCollection* events = output_data_buffer.pEvents; |
1947 if (events != NULL) { | 1891 if (events != NULL) { |
1948 DVLOG(1) << "Got events from ProcessOutput, but discarding"; | 1892 DVLOG(1) << "Got events from ProcessOutput, but discarding"; |
1949 events->Release(); | 1893 events->Release(); |
1950 } | 1894 } |
1951 if (FAILED(hr)) { | 1895 if (FAILED(hr)) { |
1952 // A stream change needs further ProcessInput calls to get back decoder | 1896 // A stream change needs further ProcessInput calls to get back decoder |
1953 // output which is why we need to set the state to stopped. | 1897 // output which is why we need to set the state to stopped. |
1954 if (hr == MF_E_TRANSFORM_STREAM_CHANGE) { | 1898 if (hr == MF_E_TRANSFORM_STREAM_CHANGE) { |
1955 if (!SetDecoderOutputMediaType(MFVideoFormat_NV12)) { | 1899 if (!SetDecoderOutputMediaType(MFVideoFormat_NV12)) { |
(...skipping 17 matching lines...) |
1973 } | 1917 } |
1974 } | 1918 } |
1975 TRACE_EVENT_ASYNC_END0("gpu", "DXVAVideoDecodeAccelerator.Decoding", this); | 1919 TRACE_EVENT_ASYNC_END0("gpu", "DXVAVideoDecodeAccelerator.Decoding", this); |
1976 | 1920 |
1977 TRACE_COUNTER1("DXVA Decoding", "TotalPacketsBeforeDecode", | 1921 TRACE_COUNTER1("DXVA Decoding", "TotalPacketsBeforeDecode", |
1978 inputs_before_decode_); | 1922 inputs_before_decode_); |
1979 | 1923 |
1980 inputs_before_decode_ = 0; | 1924 inputs_before_decode_ = 0; |
1981 | 1925 |
1982 RETURN_AND_NOTIFY_ON_FAILURE(ProcessOutputSample(output_data_buffer.pSample), | 1926 RETURN_AND_NOTIFY_ON_FAILURE(ProcessOutputSample(output_data_buffer.pSample), |
1983 "Failed to process output sample.", PLATFORM_FAILURE,); | 1927 "Failed to process output sample.", |
| 1928 PLATFORM_FAILURE, ); |
1984 } | 1929 } |
1985 | 1930 |
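The error paths in DoDecode follow the canonical Media Foundation pattern: MF_E_TRANSFORM_STREAM_CHANGE means the output type must be renegotiated before output can resume, and (in the portion elided above) MF_E_TRANSFORM_NEED_MORE_INPUT means the MFT simply wants more input. A bare-bones sketch of that pattern, independent of this class's state machine and not its actual code:

// Sketch (assumption: |transform| is any IMFTransform*); illustrative only.
void PullOneOutputSketch(IMFTransform* transform) {
  MFT_OUTPUT_DATA_BUFFER output = {0};
  DWORD status = 0;
  HRESULT hr = transform->ProcessOutput(0, 1, &output, &status);
  if (output.pEvents)
    output.pEvents->Release();  // Events are not needed here.
  if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
    // No decoded frame yet: feed more compressed data via ProcessInput.
  } else if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
    // Output format changed (e.g. new resolution): renegotiate the output
    // media type with SetOutputType, then call ProcessOutput again.
  } else if (SUCCEEDED(hr) && output.pSample) {
    output.pSample->Release();  // A decoded frame; hand it off in real code.
  }
}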
1986 bool DXVAVideoDecodeAccelerator::ProcessOutputSample(IMFSample* sample) { | 1931 bool DXVAVideoDecodeAccelerator::ProcessOutputSample(IMFSample* sample) { |
1987 RETURN_ON_FAILURE(sample, "Decode succeeded with NULL output sample", false); | 1932 RETURN_ON_FAILURE(sample, "Decode succeeded with NULL output sample", false); |
1988 | 1933 |
1989 LONGLONG input_buffer_id = 0; | 1934 LONGLONG input_buffer_id = 0; |
1990 RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id), | 1935 RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id), |
1991 "Failed to get input buffer id associated with sample", | 1936 "Failed to get input buffer id associated with sample", |
1992 false); | 1937 false); |
1993 | 1938 |
(...skipping 15 matching lines...) |
2009 | 1954 |
2010 int width = 0; | 1955 int width = 0; |
2011 int height = 0; | 1956 int height = 0; |
2012 if (!GetVideoFrameDimensions(sample, &width, &height)) { | 1957 if (!GetVideoFrameDimensions(sample, &width, &height)) { |
2013 RETURN_ON_FAILURE(false, "Failed to get D3D surface from output sample", | 1958 RETURN_ON_FAILURE(false, "Failed to get D3D surface from output sample", |
2014 false); | 1959 false); |
2015 } | 1960 } |
2016 | 1961 |
2017 // Go ahead and request picture buffers. | 1962 // Go ahead and request picture buffers. |
2018 main_thread_task_runner_->PostTask( | 1963 main_thread_task_runner_->PostTask( |
2019 FROM_HERE, | 1964 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers, |
2020 base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers, | 1965 weak_this_factory_.GetWeakPtr(), width, height)); |
2021 weak_this_factory_.GetWeakPtr(), | |
2022 width, | |
2023 height)); | |
2024 | 1966 |
2025 pictures_requested_ = true; | 1967 pictures_requested_ = true; |
2026 return true; | 1968 return true; |
2027 } | 1969 } |
2028 | 1970 |
2029 void DXVAVideoDecodeAccelerator::ProcessPendingSamples() { | 1971 void DXVAVideoDecodeAccelerator::ProcessPendingSamples() { |
2030 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 1972 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
2031 | 1973 |
2032 if (!output_picture_buffers_.size()) | 1974 if (!output_picture_buffers_.size()) |
2033 return; | 1975 return; |
2034 | 1976 |
2035 RETURN_AND_NOTIFY_ON_FAILURE(make_context_current_cb_.Run(), | 1977 RETURN_AND_NOTIFY_ON_FAILURE(make_context_current_cb_.Run(), |
2036 "Failed to make context current", | 1978 "Failed to make context current", |
2037 PLATFORM_FAILURE, ); | 1979 PLATFORM_FAILURE, ); |
2038 | 1980 |
2039 OutputBuffers::iterator index; | 1981 OutputBuffers::iterator index; |
2040 | 1982 |
2041 for (index = output_picture_buffers_.begin(); | 1983 for (index = output_picture_buffers_.begin(); |
2042 index != output_picture_buffers_.end() && | 1984 index != output_picture_buffers_.end() && OutputSamplesPresent(); |
2043 OutputSamplesPresent(); | |
2044 ++index) { | 1985 ++index) { |
2045 if (index->second->available()) { | 1986 if (index->second->available()) { |
2046 PendingSampleInfo* pending_sample = NULL; | 1987 PendingSampleInfo* pending_sample = NULL; |
2047 { | 1988 { |
2048 base::AutoLock lock(decoder_lock_); | 1989 base::AutoLock lock(decoder_lock_); |
2049 PendingSampleInfo& sample_info = pending_output_samples_.front(); | 1990 PendingSampleInfo& sample_info = pending_output_samples_.front(); |
2050 if (sample_info.picture_buffer_id != -1) | 1991 if (sample_info.picture_buffer_id != -1) |
2051 continue; | 1992 continue; |
2052 pending_sample = &sample_info; | 1993 pending_sample = &sample_info; |
2053 } | 1994 } |
2054 | 1995 |
2055 int width = 0; | 1996 int width = 0; |
2056 int height = 0; | 1997 int height = 0; |
2057 if (!GetVideoFrameDimensions(pending_sample->output_sample.get(), | 1998 if (!GetVideoFrameDimensions(pending_sample->output_sample.get(), &width, |
2058 &width, &height)) { | 1999 &height)) { |
2059 RETURN_AND_NOTIFY_ON_FAILURE(false, | 2000 RETURN_AND_NOTIFY_ON_FAILURE( |
2060 "Failed to get D3D surface from output sample", PLATFORM_FAILURE,); | 2001 false, "Failed to get D3D surface from output sample", |
| 2002 PLATFORM_FAILURE, ); |
2061 } | 2003 } |
2062 | 2004 |
2063 if (width != index->second->size().width() || | 2005 if (width != index->second->size().width() || |
2064 height != index->second->size().height()) { | 2006 height != index->second->size().height()) { |
2065 HandleResolutionChanged(width, height); | 2007 HandleResolutionChanged(width, height); |
2066 return; | 2008 return; |
2067 } | 2009 } |
2068 | 2010 |
2069 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; | 2011 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; |
2070 HRESULT hr = pending_sample->output_sample->GetBufferByIndex( | 2012 HRESULT hr = pending_sample->output_sample->GetBufferByIndex( |
2071 0, output_buffer.Receive()); | 2013 0, output_buffer.Receive()); |
2072 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | 2014 RETURN_AND_NOTIFY_ON_HR_FAILURE( |
2073 "Failed to get buffer from output sample", PLATFORM_FAILURE,); | 2015 hr, "Failed to get buffer from output sample", PLATFORM_FAILURE, ); |
2074 | 2016 |
2075 base::win::ScopedComPtr<IDirect3DSurface9> surface; | 2017 base::win::ScopedComPtr<IDirect3DSurface9> surface; |
2076 base::win::ScopedComPtr<ID3D11Texture2D> d3d11_texture; | 2018 base::win::ScopedComPtr<ID3D11Texture2D> d3d11_texture; |
2077 | 2019 |
2078 if (use_dx11_) { | 2020 if (use_dx11_) { |
2079 base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer; | 2021 base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer; |
2080 hr = dxgi_buffer.QueryFrom(output_buffer.get()); | 2022 hr = dxgi_buffer.QueryFrom(output_buffer.get()); |
2081 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | 2023 RETURN_AND_NOTIFY_ON_HR_FAILURE( |
2082 "Failed to get DXGIBuffer from output sample", PLATFORM_FAILURE,); | 2024 hr, "Failed to get DXGIBuffer from output sample", |
| 2025 PLATFORM_FAILURE, ); |
2083 hr = dxgi_buffer->GetResource( | 2026 hr = dxgi_buffer->GetResource( |
2084 __uuidof(ID3D11Texture2D), | 2027 __uuidof(ID3D11Texture2D), |
2085 reinterpret_cast<void**>(d3d11_texture.Receive())); | 2028 reinterpret_cast<void**>(d3d11_texture.Receive())); |
2086 } else { | 2029 } else { |
2087 hr = MFGetService(output_buffer.get(), MR_BUFFER_SERVICE, | 2030 hr = MFGetService(output_buffer.get(), MR_BUFFER_SERVICE, |
2088 IID_PPV_ARGS(surface.Receive())); | 2031 IID_PPV_ARGS(surface.Receive())); |
2089 } | 2032 } |
2090 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | 2033 RETURN_AND_NOTIFY_ON_HR_FAILURE( |
2091 "Failed to get surface from output sample", PLATFORM_FAILURE,); | 2034 hr, "Failed to get surface from output sample", PLATFORM_FAILURE, ); |
2092 | 2035 |
2093 pending_sample->picture_buffer_id = index->second->id(); | 2036 pending_sample->picture_buffer_id = index->second->id(); |
2094 | 2037 |
2095 RETURN_AND_NOTIFY_ON_FAILURE( | 2038 RETURN_AND_NOTIFY_ON_FAILURE( |
2096 index->second->CopyOutputSampleDataToPictureBuffer( | 2039 index->second->CopyOutputSampleDataToPictureBuffer( |
2097 this, | 2040 this, surface.get(), d3d11_texture.get(), |
2098 surface.get(), | |
2099 d3d11_texture.get(), | |
2100 pending_sample->input_buffer_id), | 2041 pending_sample->input_buffer_id), |
2101 "Failed to copy output sample", PLATFORM_FAILURE,); | 2042 "Failed to copy output sample", PLATFORM_FAILURE, ); |
2102 | 2043 |
2103 index->second->set_available(false); | 2044 index->second->set_available(false); |
2104 } | 2045 } |
2105 } | 2046 } |
2106 } | 2047 } |
2107 | 2048 |
2108 void DXVAVideoDecodeAccelerator::StopOnError( | 2049 void DXVAVideoDecodeAccelerator::StopOnError( |
2109 media::VideoDecodeAccelerator::Error error) { | 2050 media::VideoDecodeAccelerator::Error error) { |
2110 if (!main_thread_task_runner_->BelongsToCurrentThread()) { | 2051 if (!main_thread_task_runner_->BelongsToCurrentThread()) { |
2111 main_thread_task_runner_->PostTask( | 2052 main_thread_task_runner_->PostTask( |
2112 FROM_HERE, | 2053 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::StopOnError, |
2113 base::Bind(&DXVAVideoDecodeAccelerator::StopOnError, | 2054 weak_this_factory_.GetWeakPtr(), error)); |
2114 weak_this_factory_.GetWeakPtr(), | |
2115 error)); | |
2116 return; | 2055 return; |
2117 } | 2056 } |
2118 | 2057 |
2119 if (client_) | 2058 if (client_) |
2120 client_->NotifyError(error); | 2059 client_->NotifyError(error); |
2121 client_ = NULL; | 2060 client_ = NULL; |
2122 | 2061 |
2123 if (GetState() != kUninitialized) { | 2062 if (GetState() != kUninitialized) { |
2124 Invalidate(); | 2063 Invalidate(); |
2125 } | 2064 } |
(...skipping 69 matching lines...) |
2195 | 2134 |
2196 void DXVAVideoDecodeAccelerator::RequestPictureBuffers(int width, int height) { | 2135 void DXVAVideoDecodeAccelerator::RequestPictureBuffers(int width, int height) { |
2197 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 2136 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
2198 // This task could execute after the decoder has been torn down. | 2137 // This task could execute after the decoder has been torn down. |
2199 if (GetState() != kUninitialized && client_) { | 2138 if (GetState() != kUninitialized && client_) { |
2200 client_->ProvidePictureBuffers(kNumPictureBuffers, 1, | 2139 client_->ProvidePictureBuffers(kNumPictureBuffers, 1, |
2201 gfx::Size(width, height), GL_TEXTURE_2D); | 2140 gfx::Size(width, height), GL_TEXTURE_2D); |
2202 } | 2141 } |
2203 } | 2142 } |
2204 | 2143 |
2205 void DXVAVideoDecodeAccelerator::NotifyPictureReady( | 2144 void DXVAVideoDecodeAccelerator::NotifyPictureReady(int picture_buffer_id, |
2206 int picture_buffer_id, | 2145 int input_buffer_id) { |
2207 int input_buffer_id) { | |
2208 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 2146 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
2209 // This task could execute after the decoder has been torn down. | 2147 // This task could execute after the decoder has been torn down. |
2210 if (GetState() != kUninitialized && client_) { | 2148 if (GetState() != kUninitialized && client_) { |
2211 // TODO(henryhsu): Use correct visible size instead of (0, 0). We can't use | 2149 // TODO(henryhsu): Use correct visible size instead of (0, 0). We can't use |
2212 // coded size here so use (0, 0) intentionally to have the client choose. | 2150 // coded size here so use (0, 0) intentionally to have the client choose. |
2213 media::Picture picture(picture_buffer_id, input_buffer_id, | 2151 media::Picture picture(picture_buffer_id, input_buffer_id, gfx::Rect(0, 0), |
2214 gfx::Rect(0, 0), false); | 2152 false); |
2215 client_->PictureReady(picture); | 2153 client_->PictureReady(picture); |
2216 } | 2154 } |
2217 } | 2155 } |
2218 | 2156 |
2219 void DXVAVideoDecodeAccelerator::NotifyInputBuffersDropped() { | 2157 void DXVAVideoDecodeAccelerator::NotifyInputBuffersDropped() { |
2220 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 2158 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
2221 if (!client_) | 2159 if (!client_) |
2222 return; | 2160 return; |
2223 | 2161 |
2224 for (PendingInputs::iterator it = pending_input_buffers_.begin(); | 2162 for (PendingInputs::iterator it = pending_input_buffers_.begin(); |
2225 it != pending_input_buffers_.end(); ++it) { | 2163 it != pending_input_buffers_.end(); ++it) { |
2226 LONGLONG input_buffer_id = 0; | 2164 LONGLONG input_buffer_id = 0; |
2227 RETURN_ON_HR_FAILURE((*it)->GetSampleTime(&input_buffer_id), | 2165 RETURN_ON_HR_FAILURE((*it)->GetSampleTime(&input_buffer_id), |
2228 "Failed to get buffer id associated with sample",); | 2166 "Failed to get buffer id associated with sample", ); |
2229 client_->NotifyEndOfBitstreamBuffer(input_buffer_id); | 2167 client_->NotifyEndOfBitstreamBuffer(input_buffer_id); |
2230 } | 2168 } |
2231 pending_input_buffers_.clear(); | 2169 pending_input_buffers_.clear(); |
2232 } | 2170 } |
2233 | 2171 |
2234 void DXVAVideoDecodeAccelerator::DecodePendingInputBuffers() { | 2172 void DXVAVideoDecodeAccelerator::DecodePendingInputBuffers() { |
2235 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 2173 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
2236 State state = GetState(); | 2174 State state = GetState(); |
2237 RETURN_AND_NOTIFY_ON_FAILURE((state != kUninitialized), | 2175 RETURN_AND_NOTIFY_ON_FAILURE((state != kUninitialized), |
2238 "Invalid state: " << state, ILLEGAL_STATE,); | 2176 "Invalid state: " << state, ILLEGAL_STATE, ); |
2239 | 2177 |
2240 if (pending_input_buffers_.empty() || OutputSamplesPresent()) | 2178 if (pending_input_buffers_.empty() || OutputSamplesPresent()) |
2241 return; | 2179 return; |
2242 | 2180 |
2243 PendingInputs pending_input_buffers_copy; | 2181 PendingInputs pending_input_buffers_copy; |
2244 std::swap(pending_input_buffers_, pending_input_buffers_copy); | 2182 std::swap(pending_input_buffers_, pending_input_buffers_copy); |
2245 | 2183 |
2246 for (PendingInputs::iterator it = pending_input_buffers_copy.begin(); | 2184 for (PendingInputs::iterator it = pending_input_buffers_copy.begin(); |
2247 it != pending_input_buffers_copy.end(); ++it) { | 2185 it != pending_input_buffers_copy.end(); ++it) { |
2248 DecodeInternal(*it); | 2186 DecodeInternal(*it); |
2249 } | 2187 } |
2250 } | 2188 } |
2251 | 2189 |
2252 void DXVAVideoDecodeAccelerator::FlushInternal() { | 2190 void DXVAVideoDecodeAccelerator::FlushInternal() { |
2253 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 2191 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
2254 | 2192 |
2255 // We allow only one output frame to be present at any given time. If we have | 2193 // We allow only one output frame to be present at any given time. If we have |
2256 // an output frame, then we cannot complete the flush at this time. | 2194 // an output frame, then we cannot complete the flush at this time. |
2257 if (OutputSamplesPresent()) | 2195 if (OutputSamplesPresent()) |
2258 return; | 2196 return; |
2259 | 2197 |
2260 // First drain the pending input because once the drain message is sent below, | 2198 // First drain the pending input because once the drain message is sent below, |
2261 // the decoder will ignore further input until it's drained. | 2199 // the decoder will ignore further input until it's drained. |
2262 if (!pending_input_buffers_.empty()) { | 2200 if (!pending_input_buffers_.empty()) { |
2263 decoder_thread_task_runner_->PostTask( | 2201 decoder_thread_task_runner_->PostTask( |
2264 FROM_HERE, | 2202 FROM_HERE, |
2265 base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers, | 2203 base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers, |
2266 base::Unretained(this))); | 2204 base::Unretained(this))); |
2267 decoder_thread_task_runner_->PostTask( | 2205 decoder_thread_task_runner_->PostTask( |
2268 FROM_HERE, | 2206 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal, |
2269 base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal, | 2207 base::Unretained(this))); |
2270 base::Unretained(this))); | |
2271 return; | 2208 return; |
2272 } | 2209 } |
2273 | 2210 |
2274 { | 2211 { |
2275 base::AutoLock lock(decoder_lock_); | 2212 base::AutoLock lock(decoder_lock_); |
2276 if (!sent_drain_message_) { | 2213 if (!sent_drain_message_) { |
2277 RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_DRAIN, 0), | 2214 RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_DRAIN, 0), |
2278 "Failed to send drain message", | 2215 "Failed to send drain message", |
2279 PLATFORM_FAILURE,); | 2216 PLATFORM_FAILURE, ); |
2280 sent_drain_message_ = true; | 2217 sent_drain_message_ = true; |
2281 } | 2218 } |
2282 } | 2219 } |
2283 | 2220 |
2284 // Attempt to retrieve an output frame from the decoder. If we have one, | 2221 // Attempt to retrieve an output frame from the decoder. If we have one, |
2285 // return and proceed when the output frame is processed. If we don't have a | 2222 // return and proceed when the output frame is processed. If we don't have a |
2286 // frame then we are done. | 2223 // frame then we are done. |
2287 DoDecode(); | 2224 DoDecode(); |
2288 if (OutputSamplesPresent()) | 2225 if (OutputSamplesPresent()) |
2289 return; | 2226 return; |
2290 | 2227 |
2291 SetState(kFlushing); | 2228 SetState(kFlushing); |
2292 | 2229 |
2293 main_thread_task_runner_->PostTask( | 2230 main_thread_task_runner_->PostTask( |
2294 FROM_HERE, | 2231 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::NotifyFlushDone, |
2295 base::Bind(&DXVAVideoDecodeAccelerator::NotifyFlushDone, | 2232 weak_this_factory_.GetWeakPtr())); |
2296 weak_this_factory_.GetWeakPtr())); | |
2297 | 2233 |
2298 SetState(kNormal); | 2234 SetState(kNormal); |
2299 } | 2235 } |
2300 | 2236 |
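For reference, the drain handshake that FlushInternal relies on follows the standard Media Foundation pattern: send MFT_MESSAGE_COMMAND_DRAIN, then pull output with ProcessOutput until the transform reports MF_E_TRANSFORM_NEED_MORE_INPUT. A minimal, hedged sketch of that generic pattern (not the accelerator's code; DrainTransform is an illustrative helper and it assumes the MFT allocates its own output samples, as DXVA decoders typically do):

// Generic MFT drain sketch.
#include <mferror.h>
#include <mftransform.h>

HRESULT DrainTransform(IMFTransform* transform) {
  HRESULT hr = transform->ProcessMessage(MFT_MESSAGE_COMMAND_DRAIN, 0);
  if (FAILED(hr))
    return hr;
  for (;;) {
    MFT_OUTPUT_DATA_BUFFER output = {};
    DWORD status = 0;
    hr = transform->ProcessOutput(0, 1, &output, &status);
    if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT)
      return S_OK;  // Fully drained.
    if (FAILED(hr))
      return hr;
    if (output.pSample)
      output.pSample->Release();  // A real client would deliver the frame here.
    if (output.pEvents)
      output.pEvents->Release();
  }
}

The accelerator spreads the same steps across SendMFTMessage and DoDecode so the decoder thread is never blocked waiting for output.
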
2301 void DXVAVideoDecodeAccelerator::DecodeInternal( | 2237 void DXVAVideoDecodeAccelerator::DecodeInternal( |
2302 const base::win::ScopedComPtr<IMFSample>& sample) { | 2238 const base::win::ScopedComPtr<IMFSample>& sample) { |
2303 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 2239 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
2304 | 2240 |
2305 if (GetState() == kUninitialized) | 2241 if (GetState() == kUninitialized) |
2306 return; | 2242 return; |
2307 | 2243 |
2308 if (OutputSamplesPresent() || !pending_input_buffers_.empty()) { | 2244 if (OutputSamplesPresent() || !pending_input_buffers_.empty()) { |
2309 pending_input_buffers_.push_back(sample); | 2245 pending_input_buffers_.push_back(sample); |
2310 return; | 2246 return; |
2311 } | 2247 } |
2312 | 2248 |
2313 // Check if the resolution, bit rate, etc. changed in the stream. If yes, we | 2248 // Check if the resolution, bit rate, etc. changed in the stream. If yes, we |
2314 // reinitialize the decoder to ensure that the stream decodes correctly. | 2250 // reinitialize the decoder to ensure that the stream decodes correctly. |
2315 bool config_changed = false; | 2251 bool config_changed = false; |
2316 | 2252 |
2317 HRESULT hr = CheckConfigChanged(sample.get(), &config_changed); | 2253 HRESULT hr = CheckConfigChanged(sample.get(), &config_changed); |
2318 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to check video stream config", | 2254 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to check video stream config", |
2319 PLATFORM_FAILURE,); | 2255 PLATFORM_FAILURE, ); |
2320 | 2256 |
2321 if (config_changed) { | 2257 if (config_changed) { |
2322 pending_input_buffers_.push_back(sample); | 2258 pending_input_buffers_.push_back(sample); |
2323 main_thread_task_runner_->PostTask( | 2259 main_thread_task_runner_->PostTask( |
2324 FROM_HERE, | 2260 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::ConfigChanged, |
2325 base::Bind(&DXVAVideoDecodeAccelerator::ConfigChanged, | 2261 weak_this_factory_.GetWeakPtr(), config_)); |
2326 weak_this_factory_.GetWeakPtr(), | |
2327 config_)); | |
2328 return; | 2262 return; |
2329 } | 2263 } |
2330 | 2264 |
2331 if (!inputs_before_decode_) { | 2265 if (!inputs_before_decode_) { |
2332 TRACE_EVENT_ASYNC_BEGIN0("gpu", "DXVAVideoDecodeAccelerator.Decoding", | 2266 TRACE_EVENT_ASYNC_BEGIN0("gpu", "DXVAVideoDecodeAccelerator.Decoding", |
2333 this); | 2267 this); |
2334 } | 2268 } |
2335 inputs_before_decode_++; | 2269 inputs_before_decode_++; |
2336 | 2270 |
2337 hr = decoder_->ProcessInput(0, sample.get(), 0); | 2271 hr = decoder_->ProcessInput(0, sample.get(), 0); |
2338 // As per MSDN, if the decoder returns MF_E_NOTACCEPTING it means that it | 2272 // As per MSDN, if the decoder returns MF_E_NOTACCEPTING it means that it |
2339 // has enough data to produce one or more output samples. In this case the | 2273 // has enough data to produce one or more output samples. In this case the |
2340 // recommended options are to | 2274 // recommended options are to |
2341 // 1. Generate new output by calling IMFTransform::ProcessOutput until it | 2275 // 1. Generate new output by calling IMFTransform::ProcessOutput until it |
2342 // returns MF_E_TRANSFORM_NEED_MORE_INPUT. | 2276 // returns MF_E_TRANSFORM_NEED_MORE_INPUT. |
2343 // 2. Flush the input data | 2277 // 2. Flush the input data |
2344 // We implement the first option, i.e. to retrieve the output sample and then | 2278 // We implement the first option, i.e. to retrieve the output sample and then |
2345 // process the input again. Failure in either of these steps is treated as a | 2279 // process the input again. Failure in either of these steps is treated as a |
2346 // decoder failure. | 2280 // decoder failure. |
2347 if (hr == MF_E_NOTACCEPTING) { | 2281 if (hr == MF_E_NOTACCEPTING) { |
2348 DoDecode(); | 2282 DoDecode(); |
2349 // If the DoDecode call resulted in an output frame then we should not | 2283 // If the DoDecode call resulted in an output frame then we should not |
2350 // process any more input until that frame is copied to the target surface. | 2284 // process any more input until that frame is copied to the target surface. |
2351 if (!OutputSamplesPresent()) { | 2285 if (!OutputSamplesPresent()) { |
2352 State state = GetState(); | 2286 State state = GetState(); |
2353 RETURN_AND_NOTIFY_ON_FAILURE((state == kStopped || state == kNormal || | 2287 RETURN_AND_NOTIFY_ON_FAILURE( |
2354 state == kFlushing), | 2288 (state == kStopped || state == kNormal || state == kFlushing), |
2355 "Failed to process output. Unexpected decoder state: " << state, | 2289 "Failed to process output. Unexpected decoder state: " << state, |
2356 PLATFORM_FAILURE,); | 2290 PLATFORM_FAILURE, ); |
2357 hr = decoder_->ProcessInput(0, sample.get(), 0); | 2291 hr = decoder_->ProcessInput(0, sample.get(), 0); |
2358 } | 2292 } |
2359 // If we continue to get the MF_E_NOTACCEPTING error, we do the following: | 2293 // If we continue to get the MF_E_NOTACCEPTING error, we do the following: |
2360 // 1. Add the input sample to the pending queue. | 2294 // 1. Add the input sample to the pending queue. |
2361 // 2. If we don't have any output samples we post the | 2295 // 2. If we don't have any output samples we post the |
2362 // DecodePendingInputBuffers task to process the pending input samples. | 2296 // DecodePendingInputBuffers task to process the pending input samples. |
2363 // If we have an output sample then the above task is posted when the | 2297 // If we have an output sample then the above task is posted when the |
2364 // output samples are sent to the client. | 2298 // output samples are sent to the client. |
2365 // This is because we only support one pending output sample at any | 2299 // This is because we only support one pending output sample at any |
2366 // given time, due to a limitation of the Microsoft Media Foundation | 2300 // given time, due to a limitation of the Microsoft Media Foundation |
2367 // decoder, which recycles the output decoder surfaces. | 2301 // decoder, which recycles the output decoder surfaces. |
2368 if (hr == MF_E_NOTACCEPTING) { | 2302 if (hr == MF_E_NOTACCEPTING) { |
2369 pending_input_buffers_.push_back(sample); | 2303 pending_input_buffers_.push_back(sample); |
2370 decoder_thread_task_runner_->PostTask( | 2304 decoder_thread_task_runner_->PostTask( |
2371 FROM_HERE, | 2305 FROM_HERE, |
2372 base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers, | 2306 base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers, |
2373 base::Unretained(this))); | 2307 base::Unretained(this))); |
2374 return; | 2308 return; |
2375 } | 2309 } |
2376 } | 2310 } |
2377 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to process input sample", | 2311 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to process input sample", |
2378 PLATFORM_FAILURE,); | 2312 PLATFORM_FAILURE, ); |
2379 | 2313 |
2380 DoDecode(); | 2314 DoDecode(); |
2381 | 2315 |
2382 State state = GetState(); | 2316 State state = GetState(); |
2383 RETURN_AND_NOTIFY_ON_FAILURE((state == kStopped || state == kNormal || | 2317 RETURN_AND_NOTIFY_ON_FAILURE( |
2384 state == kFlushing), | 2318 (state == kStopped || state == kNormal || state == kFlushing), |
2385 "Failed to process output. Unexpected decoder state: " << state, | 2319 "Failed to process output. Unexpected decoder state: " << state, |
2386 ILLEGAL_STATE,); | 2320 ILLEGAL_STATE, ); |
2387 | 2321 |
2388 LONGLONG input_buffer_id = 0; | 2322 LONGLONG input_buffer_id = 0; |
2389 RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id), | 2323 RETURN_ON_HR_FAILURE( |
2390 "Failed to get input buffer id associated with sample",); | 2324 sample->GetSampleTime(&input_buffer_id), |
| 2325 "Failed to get input buffer id associated with sample", ); |
2391 // The Microsoft Media Foundation decoder internally buffers up to 30 frames | 2326 // The Microsoft Media Foundation decoder internally buffers up to 30 frames |
2392 // before returning a decoded frame. We need to inform the client that this | 2327 // before returning a decoded frame. We need to inform the client that this |
2393 // input buffer has been processed, as it may otherwise stop sending us input. | 2328 // input buffer has been processed, as it may otherwise stop sending us input. |
2394 // Note: This may break clients which expect every input buffer to be | 2329 // Note: This may break clients which expect every input buffer to be |
2395 // associated with a decoded output buffer. | 2330 // associated with a decoded output buffer. |
2396 // TODO(ananta) | 2331 // TODO(ananta) |
2397 // Do some more investigation into whether it is possible to get the MFT | 2332 // Do some more investigation into whether it is possible to get the MFT |
2398 // decoder to emit an output packet for every input packet. | 2333 // decoder to emit an output packet for every input packet. |
2399 // http://code.google.com/p/chromium/issues/detail?id=108121 | 2334 // http://code.google.com/p/chromium/issues/detail?id=108121 |
2400 // http://code.google.com/p/chromium/issues/detail?id=150925 | 2335 // http://code.google.com/p/chromium/issues/detail?id=150925 |
2401 main_thread_task_runner_->PostTask( | 2336 main_thread_task_runner_->PostTask( |
2402 FROM_HERE, | 2337 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::NotifyInputBufferRead, |
2403 base::Bind(&DXVAVideoDecodeAccelerator::NotifyInputBufferRead, | 2338 weak_this_factory_.GetWeakPtr(), input_buffer_id)); |
2404 weak_this_factory_.GetWeakPtr(), | |
2405 input_buffer_id)); | |
2406 } | 2339 } |
2407 | 2340 |
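The MF_E_NOTACCEPTING handling described in the comments of DecodeInternal above is the standard Media Foundation back-pressure loop: when ProcessInput reports MF_E_NOTACCEPTING, pull output first, then retry the input. A condensed sketch of that generic pattern, for illustration only (SubmitInput is a hypothetical helper; the accelerator instead defers the retry through pending_input_buffers_ so that at most one output sample is outstanding):

// Push one input sample, pulling pending output on back-pressure.
#include <mferror.h>
#include <mftransform.h>

HRESULT SubmitInput(IMFTransform* transform, IMFSample* sample) {
  HRESULT hr = transform->ProcessInput(0, sample, 0);
  if (hr != MF_E_NOTACCEPTING)
    return hr;  // Accepted, or a genuine failure.
  // The MFT has output ready; drain it until it asks for more input.
  for (;;) {
    MFT_OUTPUT_DATA_BUFFER output = {};
    DWORD status = 0;
    hr = transform->ProcessOutput(0, 1, &output, &status);
    if (output.pSample)
      output.pSample->Release();  // A real client would deliver the frame here.
    if (output.pEvents)
      output.pEvents->Release();
    if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT)
      break;
    if (FAILED(hr))
      return hr;
  }
  // Retry once; callers queue the sample if the MFT still refuses it.
  return transform->ProcessInput(0, sample, 0);
}
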
2408 void DXVAVideoDecodeAccelerator::HandleResolutionChanged(int width, | 2341 void DXVAVideoDecodeAccelerator::HandleResolutionChanged(int width, |
2409 int height) { | 2342 int height) { |
2410 dx11_video_format_converter_media_type_needs_init_ = true; | 2343 dx11_video_format_converter_media_type_needs_init_ = true; |
2411 | 2344 |
2412 main_thread_task_runner_->PostTask( | 2345 main_thread_task_runner_->PostTask( |
2413 FROM_HERE, | 2346 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::DismissStaleBuffers, |
2414 base::Bind(&DXVAVideoDecodeAccelerator::DismissStaleBuffers, | 2347 weak_this_factory_.GetWeakPtr(), false)); |
2415 weak_this_factory_.GetWeakPtr(), false)); | |
2416 | 2348 |
2417 main_thread_task_runner_->PostTask( | 2349 main_thread_task_runner_->PostTask( |
2418 FROM_HERE, | 2350 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers, |
2419 base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers, | 2351 weak_this_factory_.GetWeakPtr(), width, height)); |
2420 weak_this_factory_.GetWeakPtr(), | |
2421 width, | |
2422 height)); | |
2423 } | 2352 } |
2424 | 2353 |
2425 void DXVAVideoDecodeAccelerator::DismissStaleBuffers(bool force) { | 2354 void DXVAVideoDecodeAccelerator::DismissStaleBuffers(bool force) { |
2426 RETURN_AND_NOTIFY_ON_FAILURE(make_context_current_cb_.Run(), | 2355 RETURN_AND_NOTIFY_ON_FAILURE(make_context_current_cb_.Run(), |
2427 "Failed to make context current", | 2356 "Failed to make context current", |
2428 PLATFORM_FAILURE, ); | 2357 PLATFORM_FAILURE, ); |
2429 | 2358 |
2430 OutputBuffers::iterator index; | 2359 OutputBuffers::iterator index; |
2431 | 2360 |
2432 for (index = output_picture_buffers_.begin(); | 2361 for (index = output_picture_buffers_.begin(); |
2433 index != output_picture_buffers_.end(); | 2362 index != output_picture_buffers_.end(); ++index) { |
2434 ++index) { | |
2435 if (force || index->second->available()) { | 2363 if (force || index->second->available()) { |
2436 DVLOG(1) << "Dismissing picture id: " << index->second->id(); | 2364 DVLOG(1) << "Dismissing picture id: " << index->second->id(); |
2437 client_->DismissPictureBuffer(index->second->id()); | 2365 client_->DismissPictureBuffer(index->second->id()); |
2438 } else { | 2366 } else { |
2439 // Move to |stale_output_picture_buffers_| for deferred deletion. | 2367 // Move to |stale_output_picture_buffers_| for deferred deletion. |
2440 stale_output_picture_buffers_.insert( | 2368 stale_output_picture_buffers_.insert( |
2441 std::make_pair(index->first, index->second)); | 2369 std::make_pair(index->first, index->second)); |
2442 } | 2370 } |
2443 } | 2371 } |
2444 | 2372 |
2445 output_picture_buffers_.clear(); | 2373 output_picture_buffers_.clear(); |
2446 } | 2374 } |
2447 | 2375 |
2448 void DXVAVideoDecodeAccelerator::DeferredDismissStaleBuffer( | 2376 void DXVAVideoDecodeAccelerator::DeferredDismissStaleBuffer( |
2449 int32_t picture_buffer_id) { | 2377 int32_t picture_buffer_id) { |
2450 RETURN_AND_NOTIFY_ON_FAILURE(make_context_current_cb_.Run(), | 2378 RETURN_AND_NOTIFY_ON_FAILURE(make_context_current_cb_.Run(), |
2451 "Failed to make context current", | 2379 "Failed to make context current", |
2452 PLATFORM_FAILURE, ); | 2380 PLATFORM_FAILURE, ); |
2453 | 2381 |
2454 OutputBuffers::iterator it = stale_output_picture_buffers_.find( | 2382 OutputBuffers::iterator it = |
2455 picture_buffer_id); | 2383 stale_output_picture_buffers_.find(picture_buffer_id); |
2456 DCHECK(it != stale_output_picture_buffers_.end()); | 2384 DCHECK(it != stale_output_picture_buffers_.end()); |
2457 DVLOG(1) << "Dismissing picture id: " << it->second->id(); | 2385 DVLOG(1) << "Dismissing picture id: " << it->second->id(); |
2458 client_->DismissPictureBuffer(it->second->id()); | 2386 client_->DismissPictureBuffer(it->second->id()); |
2459 stale_output_picture_buffers_.erase(it); | 2387 stale_output_picture_buffers_.erase(it); |
2460 } | 2388 } |
2461 | 2389 |
2462 DXVAVideoDecodeAccelerator::State | 2390 DXVAVideoDecodeAccelerator::State DXVAVideoDecodeAccelerator::GetState() { |
2463 DXVAVideoDecodeAccelerator::GetState() { | |
2464 static_assert(sizeof(State) == sizeof(long), "mismatched type sizes"); | 2391 static_assert(sizeof(State) == sizeof(long), "mismatched type sizes"); |
2465 State state = static_cast<State>( | 2392 State state = static_cast<State>( |
2466 InterlockedAdd(reinterpret_cast<volatile long*>(&state_), 0)); | 2393 InterlockedAdd(reinterpret_cast<volatile long*>(&state_), 0)); |
2467 return state; | 2394 return state; |
2468 } | 2395 } |
2469 | 2396 |
2470 void DXVAVideoDecodeAccelerator::SetState(State new_state) { | 2397 void DXVAVideoDecodeAccelerator::SetState(State new_state) { |
2471 if (!main_thread_task_runner_->BelongsToCurrentThread()) { | 2398 if (!main_thread_task_runner_->BelongsToCurrentThread()) { |
2472 main_thread_task_runner_->PostTask( | 2399 main_thread_task_runner_->PostTask( |
2473 FROM_HERE, | 2400 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::SetState, |
2474 base::Bind(&DXVAVideoDecodeAccelerator::SetState, | 2401 weak_this_factory_.GetWeakPtr(), new_state)); |
2475 weak_this_factory_.GetWeakPtr(), | |
2476 new_state)); | |
2477 return; | 2402 return; |
2478 } | 2403 } |
2479 | 2404 |
2480 static_assert(sizeof(State) == sizeof(long), "mismatched type sizes"); | 2405 static_assert(sizeof(State) == sizeof(long), "mismatched type sizes"); |
2481 ::InterlockedExchange(reinterpret_cast<volatile long*>(&state_), | 2406 ::InterlockedExchange(reinterpret_cast<volatile long*>(&state_), new_state); |
2482 new_state); | |
2483 DCHECK_EQ(state_, new_state); | 2407 DCHECK_EQ(state_, new_state); |
2484 } | 2408 } |
2485 | 2409 |
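GetState() and SetState() above access state_ atomically: InterlockedAdd(&state_, 0) is an atomic read and InterlockedExchange an atomic write, which is why both functions static_assert that State fits in a long. A hedged illustration of the same idiom with std::atomic (the enum values are placeholders taken from the states referenced in this file, not the real declaration):

#include <atomic>

// Illustrative equivalent of the InterlockedAdd/InterlockedExchange pattern.
enum State { kUninitialized, kNormal, kStopped, kFlushing, kConfigChange };

std::atomic<State> state{kUninitialized};

State ReadState() {
  return state.load();  // Same effect as InterlockedAdd(&state_, 0).
}

void WriteState(State new_state) {
  state.store(new_state);  // Same effect as InterlockedExchange.
}

Note that the accelerator additionally bounces SetState onto the main thread before writing; the sketch covers only the atomic access.
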
2486 void DXVAVideoDecodeAccelerator::StartDecoderThread() { | 2410 void DXVAVideoDecodeAccelerator::StartDecoderThread() { |
2487 decoder_thread_.init_com_with_mta(false); | 2411 decoder_thread_.init_com_with_mta(false); |
2488 decoder_thread_.Start(); | 2412 decoder_thread_.Start(); |
2489 decoder_thread_task_runner_ = decoder_thread_.task_runner(); | 2413 decoder_thread_task_runner_ = decoder_thread_.task_runner(); |
2490 } | 2414 } |
2491 | 2415 |
2492 bool DXVAVideoDecodeAccelerator::OutputSamplesPresent() { | 2416 bool DXVAVideoDecodeAccelerator::OutputSamplesPresent() { |
2493 base::AutoLock lock(decoder_lock_); | 2417 base::AutoLock lock(decoder_lock_); |
2494 return !pending_output_samples_.empty(); | 2418 return !pending_output_samples_.empty(); |
2495 } | 2419 } |
2496 | 2420 |
2497 void DXVAVideoDecodeAccelerator::CopySurface(IDirect3DSurface9* src_surface, | 2421 void DXVAVideoDecodeAccelerator::CopySurface(IDirect3DSurface9* src_surface, |
2498 IDirect3DSurface9* dest_surface, | 2422 IDirect3DSurface9* dest_surface, |
2499 int picture_buffer_id, | 2423 int picture_buffer_id, |
2500 int input_buffer_id) { | 2424 int input_buffer_id) { |
2501 if (!decoder_thread_task_runner_->BelongsToCurrentThread()) { | 2425 if (!decoder_thread_task_runner_->BelongsToCurrentThread()) { |
2502 decoder_thread_task_runner_->PostTask( | 2426 decoder_thread_task_runner_->PostTask( |
2503 FROM_HERE, | 2427 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::CopySurface, |
2504 base::Bind(&DXVAVideoDecodeAccelerator::CopySurface, | 2428 base::Unretained(this), src_surface, dest_surface, |
2505 base::Unretained(this), | 2429 picture_buffer_id, input_buffer_id)); |
2506 src_surface, | |
2507 dest_surface, | |
2508 picture_buffer_id, | |
2509 input_buffer_id)); | |
2510 return; | 2430 return; |
2511 } | 2431 } |
2512 | 2432 |
2513 HRESULT hr = d3d9_device_ex_->StretchRect(src_surface, NULL, dest_surface, | 2433 HRESULT hr = d3d9_device_ex_->StretchRect(src_surface, NULL, dest_surface, |
2514 NULL, D3DTEXF_NONE); | 2434 NULL, D3DTEXF_NONE); |
2515 RETURN_ON_HR_FAILURE(hr, "Colorspace conversion via StretchRect failed",); | 2435 RETURN_ON_HR_FAILURE(hr, "Colorspace conversion via StretchRect failed", ); |
2516 | 2436 |
2517 // Ideally, this should be done immediately before the draw call that uses | 2437 // Ideally, this should be done immediately before the draw call that uses |
2518 // the texture. Flush it once here though. | 2438 // the texture. Flush it once here though. |
2519 hr = query_->Issue(D3DISSUE_END); | 2439 hr = query_->Issue(D3DISSUE_END); |
2520 RETURN_ON_HR_FAILURE(hr, "Failed to issue END",); | 2440 RETURN_ON_HR_FAILURE(hr, "Failed to issue END", ); |
2521 | 2441 |
2522 // If we are sharing the ANGLE device we don't need to wait for the Flush to | 2442 // If we are sharing the ANGLE device we don't need to wait for the Flush to |
2523 // complete. | 2443 // complete. |
2524 if (using_angle_device_) { | 2444 if (using_angle_device_) { |
2525 main_thread_task_runner_->PostTask( | 2445 main_thread_task_runner_->PostTask( |
2526 FROM_HERE, | 2446 FROM_HERE, |
2527 base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, | 2447 base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, |
2528 weak_this_factory_.GetWeakPtr(), | 2448 weak_this_factory_.GetWeakPtr(), src_surface, dest_surface, |
2529 src_surface, | 2449 picture_buffer_id, input_buffer_id)); |
2530 dest_surface, | |
2531 picture_buffer_id, | |
2532 input_buffer_id)); | |
2533 return; | 2450 return; |
2534 } | 2451 } |
2535 | 2452 |
2536 // Flush the decoder device to ensure that the decoded frame is copied to the | 2453 // Flush the decoder device to ensure that the decoded frame is copied to the |
2537 // target surface. | 2454 // target surface. |
2538 decoder_thread_task_runner_->PostDelayedTask( | 2455 decoder_thread_task_runner_->PostDelayedTask( |
2539 FROM_HERE, | 2456 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder, |
2540 base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder, | 2457 base::Unretained(this), 0, src_surface, |
2541 base::Unretained(this), 0, src_surface, dest_surface, | 2458 dest_surface, picture_buffer_id, input_buffer_id), |
2542 picture_buffer_id, input_buffer_id), | |
2543 base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs)); | 2459 base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs)); |
2544 } | 2460 } |
2545 | 2461 |
2546 void DXVAVideoDecodeAccelerator::CopySurfaceComplete( | 2462 void DXVAVideoDecodeAccelerator::CopySurfaceComplete( |
2547 IDirect3DSurface9* src_surface, | 2463 IDirect3DSurface9* src_surface, |
2548 IDirect3DSurface9* dest_surface, | 2464 IDirect3DSurface9* dest_surface, |
2549 int picture_buffer_id, | 2465 int picture_buffer_id, |
2550 int input_buffer_id) { | 2466 int input_buffer_id) { |
2551 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 2467 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
2552 | 2468 |
(...skipping 25 matching lines...) |
2578 NotifyPictureReady(picture_buffer->id(), input_buffer_id); | 2494 NotifyPictureReady(picture_buffer->id(), input_buffer_id); |
2579 | 2495 |
2580 { | 2496 { |
2581 base::AutoLock lock(decoder_lock_); | 2497 base::AutoLock lock(decoder_lock_); |
2582 if (!pending_output_samples_.empty()) | 2498 if (!pending_output_samples_.empty()) |
2583 pending_output_samples_.pop_front(); | 2499 pending_output_samples_.pop_front(); |
2584 } | 2500 } |
2585 | 2501 |
2586 if (pending_flush_) { | 2502 if (pending_flush_) { |
2587 decoder_thread_task_runner_->PostTask( | 2503 decoder_thread_task_runner_->PostTask( |
2588 FROM_HERE, | 2504 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal, |
2589 base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal, | 2505 base::Unretained(this))); |
2590 base::Unretained(this))); | |
2591 return; | 2506 return; |
2592 } | 2507 } |
2593 decoder_thread_task_runner_->PostTask( | 2508 decoder_thread_task_runner_->PostTask( |
2594 FROM_HERE, | 2509 FROM_HERE, |
2595 base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers, | 2510 base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers, |
2596 base::Unretained(this))); | 2511 base::Unretained(this))); |
2597 } | 2512 } |
2598 | 2513 |
2599 void DXVAVideoDecodeAccelerator::CopyTexture( | 2514 void DXVAVideoDecodeAccelerator::CopyTexture( |
2600 ID3D11Texture2D* src_texture, | 2515 ID3D11Texture2D* src_texture, |
(...skipping 20 matching lines...) |
2621 // conversion as per MSDN is done on the GPU. | 2536 // conversion as per MSDN is done on the GPU. |
2622 | 2537 |
2623 D3D11_TEXTURE2D_DESC source_desc; | 2538 D3D11_TEXTURE2D_DESC source_desc; |
2624 src_texture->GetDesc(&source_desc); | 2539 src_texture->GetDesc(&source_desc); |
2625 | 2540 |
2626 // Set up the input and output types for the video processor MFT. | 2541 // Set up the input and output types for the video processor MFT. |
2627 if (!InitializeDX11VideoFormatConverterMediaType(source_desc.Width, | 2542 if (!InitializeDX11VideoFormatConverterMediaType(source_desc.Width, |
2628 source_desc.Height)) { | 2543 source_desc.Height)) { |
2629 RETURN_AND_NOTIFY_ON_FAILURE( | 2544 RETURN_AND_NOTIFY_ON_FAILURE( |
2630 false, "Failed to initialize media types for conversion.", | 2545 false, "Failed to initialize media types for conversion.", |
2631 PLATFORM_FAILURE,); | 2546 PLATFORM_FAILURE, ); |
2632 } | 2547 } |
2633 | 2548 |
2634 // The input to the video processor is the output sample. | 2549 // The input to the video processor is the output sample. |
2635 base::win::ScopedComPtr<IMFSample> input_sample_for_conversion; | 2550 base::win::ScopedComPtr<IMFSample> input_sample_for_conversion; |
2636 { | 2551 { |
2637 base::AutoLock lock(decoder_lock_); | 2552 base::AutoLock lock(decoder_lock_); |
2638 PendingSampleInfo& sample_info = pending_output_samples_.front(); | 2553 PendingSampleInfo& sample_info = pending_output_samples_.front(); |
2639 input_sample_for_conversion = sample_info.output_sample; | 2554 input_sample_for_conversion = sample_info.output_sample; |
2640 } | 2555 } |
2641 | 2556 |
(...skipping 21 matching lines...) |
2663 PLATFORM_FAILURE, ); | 2578 PLATFORM_FAILURE, ); |
2664 } | 2579 } |
2665 // The video processor MFT requires output samples to be allocated by the | 2580 // The video processor MFT requires output samples to be allocated by the |
2666 // caller. We create a sample with a buffer backed with the ID3D11Texture2D | 2581 // caller. We create a sample with a buffer backed with the ID3D11Texture2D |
2667 // interface exposed by ANGLE. This works nicely as this ensures that the | 2582 // interface exposed by ANGLE. This works nicely as this ensures that the |
2668 // video processor converts the color space of the output frame and copies | 2583 // video processor converts the color space of the output frame and copies |
2669 // the result into the ANGLE texture. | 2584 // the result into the ANGLE texture. |
2670 base::win::ScopedComPtr<IMFSample> output_sample; | 2585 base::win::ScopedComPtr<IMFSample> output_sample; |
2671 hr = MFCreateSample(output_sample.Receive()); | 2586 hr = MFCreateSample(output_sample.Receive()); |
2672 if (FAILED(hr)) { | 2587 if (FAILED(hr)) { |
2673 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | 2588 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to create output sample.", |
2674 "Failed to create output sample.", PLATFORM_FAILURE,); | 2589 PLATFORM_FAILURE, ); |
2675 } | 2590 } |
2676 | 2591 |
2677 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; | 2592 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; |
2678 hr = MFCreateDXGISurfaceBuffer( | 2593 hr = MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D), dest_texture, 0, |
2679 __uuidof(ID3D11Texture2D), dest_texture, 0, FALSE, | 2594 FALSE, output_buffer.Receive()); |
2680 output_buffer.Receive()); | |
2681 if (FAILED(hr)) { | 2595 if (FAILED(hr)) { |
2682 base::debug::Alias(&hr); | 2596 base::debug::Alias(&hr); |
2683 // TODO(ananta) | 2597 // TODO(ananta) |
2684 // Remove this CHECK when the change to use DX11 for H/W decoding | 2598 // Remove this CHECK when the change to use DX11 for H/W decoding |
2685 // stabilizes. | 2599 // stabilizes. |
2686 CHECK(false); | 2600 CHECK(false); |
2687 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | 2601 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to create output sample.", |
2688 "Failed to create output sample.", PLATFORM_FAILURE,); | 2602 PLATFORM_FAILURE, ); |
2689 } | 2603 } |
2690 | 2604 |
2691 output_sample->AddBuffer(output_buffer.get()); | 2605 output_sample->AddBuffer(output_buffer.get()); |
2692 | 2606 |
2693 hr = video_format_converter_mft_->ProcessInput(0, video_frame, 0); | 2607 hr = video_format_converter_mft_->ProcessInput(0, video_frame, 0); |
2694 if (FAILED(hr)) { | 2608 if (FAILED(hr)) { |
2695 DCHECK(false); | 2609 DCHECK(false); |
2696 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | 2610 RETURN_AND_NOTIFY_ON_HR_FAILURE( |
2697 "Failed to convert output sample format.", PLATFORM_FAILURE,); | 2611 hr, "Failed to convert output sample format.", PLATFORM_FAILURE, ); |
2698 } | 2612 } |
2699 | 2613 |
2700 DWORD status = 0; | 2614 DWORD status = 0; |
2701 MFT_OUTPUT_DATA_BUFFER format_converter_output = {}; | 2615 MFT_OUTPUT_DATA_BUFFER format_converter_output = {}; |
2702 format_converter_output.pSample = output_sample.get(); | 2616 format_converter_output.pSample = output_sample.get(); |
2703 hr = video_format_converter_mft_->ProcessOutput( | 2617 hr = video_format_converter_mft_->ProcessOutput( |
2704 0, // No flags | 2618 0, // No flags |
2705 1, // # of out streams to pull from | 2619 1, // # of out streams to pull from |
2706 &format_converter_output, | 2620 &format_converter_output, &status); |
2707 &status); | |
2708 | 2621 |
2709 if (FAILED(hr)) { | 2622 if (FAILED(hr)) { |
2710 base::debug::Alias(&hr); | 2623 base::debug::Alias(&hr); |
2711 // TODO(ananta) | 2624 // TODO(ananta) |
2712 // Remove this CHECK when the change to use DX11 for H/W decoding | 2625 // Remove this CHECK when the change to use DX11 for H/W decoding |
2713 // stabilizes. | 2626 // stabilizes. |
2714 CHECK(false); | 2627 CHECK(false); |
2715 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | 2628 RETURN_AND_NOTIFY_ON_HR_FAILURE( |
2716 "Failed to convert output sample format.", PLATFORM_FAILURE,); | 2629 hr, "Failed to convert output sample format.", PLATFORM_FAILURE, ); |
2717 } | 2630 } |
2718 | 2631 |
2719 if (dest_keyed_mutex) { | 2632 if (dest_keyed_mutex) { |
2720 HRESULT hr = dest_keyed_mutex->ReleaseSync(keyed_mutex_value + 1); | 2633 HRESULT hr = dest_keyed_mutex->ReleaseSync(keyed_mutex_value + 1); |
2721 RETURN_AND_NOTIFY_ON_FAILURE(hr == S_OK, "Failed to release keyed mutex.", | 2634 RETURN_AND_NOTIFY_ON_FAILURE(hr == S_OK, "Failed to release keyed mutex.", |
2722 PLATFORM_FAILURE, ); | 2635 PLATFORM_FAILURE, ); |
2723 | 2636 |
2724 main_thread_task_runner_->PostTask( | 2637 main_thread_task_runner_->PostTask( |
2725 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, | 2638 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, |
2726 weak_this_factory_.GetWeakPtr(), nullptr, nullptr, | 2639 weak_this_factory_.GetWeakPtr(), nullptr, nullptr, |
2727 picture_buffer_id, input_buffer_id)); | 2640 picture_buffer_id, input_buffer_id)); |
2728 } else { | 2641 } else { |
2729 d3d11_device_context_->Flush(); | 2642 d3d11_device_context_->Flush(); |
2730 d3d11_device_context_->End(d3d11_query_.get()); | 2643 d3d11_device_context_->End(d3d11_query_.get()); |
2731 | 2644 |
2732 decoder_thread_task_runner_->PostDelayedTask( | 2645 decoder_thread_task_runner_->PostDelayedTask( |
2733 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder, | 2646 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder, |
2734 base::Unretained(this), 0, | 2647 base::Unretained(this), 0, |
2735 reinterpret_cast<IDirect3DSurface9*>(NULL), | 2648 reinterpret_cast<IDirect3DSurface9*>(NULL), |
2736 reinterpret_cast<IDirect3DSurface9*>(NULL), | 2649 reinterpret_cast<IDirect3DSurface9*>(NULL), |
2737 picture_buffer_id, input_buffer_id), | 2650 picture_buffer_id, input_buffer_id), |
2738 base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs)); | 2651 base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs)); |
2739 } | 2652 } |
2740 } | 2653 } |
2741 | 2654 |
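When the decoder shares the destination texture with ANGLE through a DXGI keyed mutex, CopyTexture above acts as the producer half of the handshake: it releases the mutex with keyed_mutex_value + 1 so the consumer can acquire the texture with that key. A generic sketch of the producer/consumer handshake (the key constants and helpers are hypothetical; the accelerator's matching acquire is not visible in the excerpt above):

#include <windows.h>
#include <d3d11.h>
#include <dxgi1_2.h>

// Illustrative keyed-mutex handshake between a producer and a consumer device.
const UINT64 kProducerKey = 0;
const UINT64 kConsumerKey = 1;

HRESULT ProduceFrame(IDXGIKeyedMutex* keyed_mutex) {
  HRESULT hr = keyed_mutex->AcquireSync(kProducerKey, INFINITE);
  if (FAILED(hr))
    return hr;
  // ... write into the shared texture (e.g. the video processor blit) ...
  return keyed_mutex->ReleaseSync(kConsumerKey);  // Hand it to the consumer.
}

HRESULT ConsumeFrame(IDXGIKeyedMutex* keyed_mutex) {
  HRESULT hr = keyed_mutex->AcquireSync(kConsumerKey, INFINITE);
  if (FAILED(hr))
    return hr;
  // ... sample the texture on the consumer device (e.g. ANGLE) ...
  return keyed_mutex->ReleaseSync(kProducerKey);  // Give it back.
}
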
2742 void DXVAVideoDecodeAccelerator::FlushDecoder( | 2655 void DXVAVideoDecodeAccelerator::FlushDecoder(int iterations, |
2743 int iterations, | 2656 IDirect3DSurface9* src_surface, |
2744 IDirect3DSurface9* src_surface, | 2657 IDirect3DSurface9* dest_surface, |
2745 IDirect3DSurface9* dest_surface, | 2658 int picture_buffer_id, |
2746 int picture_buffer_id, | 2659 int input_buffer_id) { |
2747 int input_buffer_id) { | |
2748 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 2660 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
2749 | 2661 |
2750 // The DXVA decoder has its own device which it uses for decoding. ANGLE | 2662 // The DXVA decoder has its own device which it uses for decoding. ANGLE |
2751 // has its own device which we don't have access to. | 2663 // has its own device which we don't have access to. |
2752 // The above code attempts to copy the decoded picture into a surface | 2664 // The above code attempts to copy the decoded picture into a surface |
2753 // which is owned by ANGLE. As there are multiple devices involved in | 2665 // which is owned by ANGLE. As there are multiple devices involved in |
2754 // this, the StretchRect call above is not synchronous. | 2666 // this, the StretchRect call above is not synchronous. |
2755 // We attempt to flush the batched operations to ensure that the picture is | 2667 // We attempt to flush the batched operations to ensure that the picture is |
2756 // copied to the surface owned by ANGLE. | 2668 // copied to the surface owned by ANGLE. |
2757 // We need to do this in a loop and call flush multiple times. | 2669 // We need to do this in a loop and call flush multiple times. |
(...skipping 14 matching lines...) |
2772 // Remove this CHECK when the change to use DX11 for H/W decoding | 2684 // Remove this CHECK when the change to use DX11 for H/W decoding |
2773 // stabilizes. | 2685 // stabilizes. |
2774 CHECK(false); | 2686 CHECK(false); |
2775 } | 2687 } |
2776 } else { | 2688 } else { |
2777 hr = query_->GetData(NULL, 0, D3DGETDATA_FLUSH); | 2689 hr = query_->GetData(NULL, 0, D3DGETDATA_FLUSH); |
2778 } | 2690 } |
2779 | 2691 |
2780 if ((hr == S_FALSE) && (++iterations < kMaxIterationsForD3DFlush)) { | 2692 if ((hr == S_FALSE) && (++iterations < kMaxIterationsForD3DFlush)) { |
2781 decoder_thread_task_runner_->PostDelayedTask( | 2693 decoder_thread_task_runner_->PostDelayedTask( |
2782 FROM_HERE, | 2694 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder, |
2783 base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder, | 2695 base::Unretained(this), iterations, src_surface, |
2784 base::Unretained(this), iterations, src_surface, | 2696 dest_surface, picture_buffer_id, input_buffer_id), |
2785 dest_surface, picture_buffer_id, input_buffer_id), | |
2786 base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs)); | 2697 base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs)); |
2787 return; | 2698 return; |
2788 } | 2699 } |
2789 | 2700 |
2790 main_thread_task_runner_->PostTask( | 2701 main_thread_task_runner_->PostTask( |
2791 FROM_HERE, | 2702 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, |
2792 base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, | 2703 weak_this_factory_.GetWeakPtr(), src_surface, |
2793 weak_this_factory_.GetWeakPtr(), | 2704 dest_surface, picture_buffer_id, input_buffer_id)); |
2794 src_surface, | |
2795 dest_surface, | |
2796 picture_buffer_id, | |
2797 input_buffer_id)); | |
2798 } | 2705 } |
2799 | 2706 |
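FlushDecoder above polls a D3D9 event query so the decoder thread never blocks: it re-posts itself (up to kMaxIterationsForD3DFlush times) while GetData keeps returning S_FALSE. The underlying D3D9 pattern, shown here as a simple blocking sketch for reference (the Sleep stands in for the delayed task the accelerator uses):

#include <windows.h>
#include <d3d9.h>

// Issue an event query and wait for the GPU to finish the batched commands.
HRESULT WaitForGpuCopy(IDirect3DDevice9* device) {
  IDirect3DQuery9* query = nullptr;
  HRESULT hr = device->CreateQuery(D3DQUERYTYPE_EVENT, &query);
  if (FAILED(hr))
    return hr;
  hr = query->Issue(D3DISSUE_END);  // Marks the end of the pending work.
  if (SUCCEEDED(hr)) {
    // D3DGETDATA_FLUSH pushes the command buffer; S_FALSE means "not done yet".
    while ((hr = query->GetData(nullptr, 0, D3DGETDATA_FLUSH)) == S_FALSE)
      Sleep(1);
  }
  query->Release();
  return hr;
}
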
2800 bool DXVAVideoDecodeAccelerator::InitializeDX11VideoFormatConverterMediaType( | 2707 bool DXVAVideoDecodeAccelerator::InitializeDX11VideoFormatConverterMediaType( |
2801 int width, int height) { | 2708 int width, |
| 2709 int height) { |
2802 if (!dx11_video_format_converter_media_type_needs_init_) | 2710 if (!dx11_video_format_converter_media_type_needs_init_) |
2803 return true; | 2711 return true; |
2804 | 2712 |
2805 CHECK(video_format_converter_mft_.get()); | 2713 CHECK(video_format_converter_mft_.get()); |
2806 | 2714 |
2807 HRESULT hr = video_format_converter_mft_->ProcessMessage( | 2715 HRESULT hr = video_format_converter_mft_->ProcessMessage( |
2808 MFT_MESSAGE_SET_D3D_MANAGER, | 2716 MFT_MESSAGE_SET_D3D_MANAGER, |
2809 reinterpret_cast<ULONG_PTR>( | 2717 reinterpret_cast<ULONG_PTR>(d3d11_device_manager_.get())); |
2810 d3d11_device_manager_.get())); | |
2811 | 2718 |
2812 if (FAILED(hr)) { | 2719 if (FAILED(hr)) { |
2813 base::debug::Alias(&hr); | 2720 base::debug::Alias(&hr); |
2814 // TODO(ananta) | 2721 // TODO(ananta) |
2815 // Remove this CHECK when the change to use DX11 for H/W decoding | 2722 // Remove this CHECK when the change to use DX11 for H/W decoding |
2816 // stablizes. | 2723 // stablizes. |
2817 CHECK(false); | 2724 CHECK(false); |
2818 } | 2725 } |
2819 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | 2726 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, |
2820 "Failed to initialize video format converter", PLATFORM_FAILURE, false); | 2727 "Failed to initialize video format converter", |
| 2728 PLATFORM_FAILURE, false); |
2821 | 2729 |
2822 video_format_converter_mft_->ProcessMessage( | 2730 video_format_converter_mft_->ProcessMessage(MFT_MESSAGE_NOTIFY_END_STREAMING, |
2823 MFT_MESSAGE_NOTIFY_END_STREAMING, 0); | 2731 0); |
2824 | 2732 |
2825 base::win::ScopedComPtr<IMFMediaType> media_type; | 2733 base::win::ScopedComPtr<IMFMediaType> media_type; |
2826 hr = MFCreateMediaType(media_type.Receive()); | 2734 hr = MFCreateMediaType(media_type.Receive()); |
2827 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFCreateMediaType failed", | 2735 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFCreateMediaType failed", |
2828 PLATFORM_FAILURE, false); | 2736 PLATFORM_FAILURE, false); |
2829 | 2737 |
2830 hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); | 2738 hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); |
2831 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set major input type", | 2739 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set major input type", |
2832 PLATFORM_FAILURE, false); | 2740 PLATFORM_FAILURE, false); |
2833 | 2741 |
2834 hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12); | 2742 hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12); |
2835 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set input sub type", | 2743 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set input sub type", |
2836 PLATFORM_FAILURE, false); | 2744 PLATFORM_FAILURE, false); |
2837 | 2745 |
2838 hr = MFSetAttributeSize(media_type.get(), MF_MT_FRAME_SIZE, width, height); | 2746 hr = MFSetAttributeSize(media_type.get(), MF_MT_FRAME_SIZE, width, height); |
2839 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set media type attributes", | 2747 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set media type attributes", |
2840 PLATFORM_FAILURE, false); | 2748 PLATFORM_FAILURE, false); |
2841 | 2749 |
2842 hr = video_format_converter_mft_->SetInputType(0, media_type.get(), 0); | 2750 hr = video_format_converter_mft_->SetInputType(0, media_type.get(), 0); |
2843 if (FAILED(hr)) { | 2751 if (FAILED(hr)) { |
2844 base::debug::Alias(&hr); | 2752 base::debug::Alias(&hr); |
2845 // TODO(ananta) | 2753 // TODO(ananta) |
2846 // Remove this CHECK when the change to use DX11 for H/W decoding | 2754 // Remove this CHECK when the change to use DX11 for H/W decoding |
2847 // stablizes. | 2755 // stablizes. |
2848 CHECK(false); | 2756 CHECK(false); |
2849 } | 2757 } |
2850 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter input type", | 2758 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter input type", |
2851 PLATFORM_FAILURE, false); | 2759 PLATFORM_FAILURE, false); |
2852 | 2760 |
2853 // It appears that we fail to set MFVideoFormat_ARGB32 as the output media | 2761 // It appears that we fail to set MFVideoFormat_ARGB32 as the output media |
2854 // type in certain configurations. Try to fall back to MFVideoFormat_RGB32 | 2762 // type in certain configurations. Try to fall back to MFVideoFormat_RGB32 |
2855 // in such cases. If both fail, then bail. | 2763 // in such cases. If both fail, then bail. |
2856 bool media_type_set = | 2764 bool media_type_set = SetTransformOutputType( |
2857 SetTransformOutputType(video_format_converter_mft_.get(), | 2765 video_format_converter_mft_.get(), MFVideoFormat_ARGB32, width, height); |
2858 MFVideoFormat_ARGB32, | |
2859 width, | |
2860 height); | |
2861 if (!media_type_set) { | 2766 if (!media_type_set) { |
2862 media_type_set = | 2767 media_type_set = SetTransformOutputType(video_format_converter_mft_.get(), |
2863 SetTransformOutputType(video_format_converter_mft_.get(), | 2768 MFVideoFormat_RGB32, width, height); |
2864 MFVideoFormat_RGB32, | |
2865 width, | |
2866 height); | |
2867 } | 2769 } |
2868 | 2770 |
2869 if (!media_type_set) { | 2771 if (!media_type_set) { |
2870 // Remove this once this stabilizes in the field. | 2772 // Remove this once this stabilizes in the field. |
2871 CHECK(false); | 2773 CHECK(false); |
2872 LOG(ERROR) << "Failed to find a matching RGB output type in the converter"; | 2774 LOG(ERROR) << "Failed to find a matching RGB output type in the converter"; |
2873 return false; | 2775 return false; |
2874 } | 2776 } |
2875 | 2777 |
2876 dx11_video_format_converter_media_type_needs_init_ = false; | 2778 dx11_video_format_converter_media_type_needs_init_ = false; |
2877 return true; | 2779 return true; |
2878 } | 2780 } |
2879 | 2781 |
2880 bool DXVAVideoDecodeAccelerator::GetVideoFrameDimensions( | 2782 bool DXVAVideoDecodeAccelerator::GetVideoFrameDimensions(IMFSample* sample, |
2881 IMFSample* sample, | 2783 int* width, |
2882 int* width, | 2784 int* height) { |
2883 int* height) { | |
2884 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; | 2785 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; |
2885 HRESULT hr = sample->GetBufferByIndex(0, output_buffer.Receive()); | 2786 HRESULT hr = sample->GetBufferByIndex(0, output_buffer.Receive()); |
2886 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", false); | 2787 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", false); |
2887 | 2788 |
2888 if (use_dx11_) { | 2789 if (use_dx11_) { |
2889 base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer; | 2790 base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer; |
2890 base::win::ScopedComPtr<ID3D11Texture2D> d3d11_texture; | 2791 base::win::ScopedComPtr<ID3D11Texture2D> d3d11_texture; |
2891 hr = dxgi_buffer.QueryFrom(output_buffer.get()); | 2792 hr = dxgi_buffer.QueryFrom(output_buffer.get()); |
2892 RETURN_ON_HR_FAILURE(hr, "Failed to get DXGIBuffer from output sample", | 2793 RETURN_ON_HR_FAILURE(hr, "Failed to get DXGIBuffer from output sample", |
2893 false); | 2794 false); |
(...skipping 14 matching lines...) |
2908 false); | 2809 false); |
2909 D3DSURFACE_DESC surface_desc; | 2810 D3DSURFACE_DESC surface_desc; |
2910 hr = surface->GetDesc(&surface_desc); | 2811 hr = surface->GetDesc(&surface_desc); |
2911 RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false); | 2812 RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false); |
2912 *width = surface_desc.Width; | 2813 *width = surface_desc.Width; |
2913 *height = surface_desc.Height; | 2814 *height = surface_desc.Height; |
2914 } | 2815 } |
2915 return true; | 2816 return true; |
2916 } | 2817 } |
2917 | 2818 |
2918 bool DXVAVideoDecodeAccelerator::SetTransformOutputType( | 2819 bool DXVAVideoDecodeAccelerator::SetTransformOutputType(IMFTransform* transform, |
2919 IMFTransform* transform, | 2820 const GUID& output_type, |
2920 const GUID& output_type, | 2821 int width, |
2921 int width, | 2822 int height) { |
2922 int height) { | |
2923 HRESULT hr = E_FAIL; | 2823 HRESULT hr = E_FAIL; |
2924 base::win::ScopedComPtr<IMFMediaType> media_type; | 2824 base::win::ScopedComPtr<IMFMediaType> media_type; |
2925 | 2825 |
2926 for (uint32_t i = 0; | 2826 for (uint32_t i = 0; |
2927 SUCCEEDED(transform->GetOutputAvailableType( | 2827 SUCCEEDED(transform->GetOutputAvailableType(0, i, media_type.Receive())); |
2928 0, i, media_type.Receive())); | |
2929 ++i) { | 2828 ++i) { |
2930 GUID out_subtype = {0}; | 2829 GUID out_subtype = {0}; |
2931 hr = media_type->GetGUID(MF_MT_SUBTYPE, &out_subtype); | 2830 hr = media_type->GetGUID(MF_MT_SUBTYPE, &out_subtype); |
2932 RETURN_ON_HR_FAILURE(hr, "Failed to get output major type", false); | 2831 RETURN_ON_HR_FAILURE(hr, "Failed to get output major type", false); |
2933 | 2832 |
2934 if (out_subtype == output_type) { | 2833 if (out_subtype == output_type) { |
2935 if (width && height) { | 2834 if (width && height) { |
2936 hr = MFSetAttributeSize(media_type.get(), MF_MT_FRAME_SIZE, width, | 2835 hr = MFSetAttributeSize(media_type.get(), MF_MT_FRAME_SIZE, width, |
2937 height); | 2836 height); |
2938 RETURN_ON_HR_FAILURE(hr, "Failed to set media type attributes", false); | 2837 RETURN_ON_HR_FAILURE(hr, "Failed to set media type attributes", false); |
2939 } | 2838 } |
2940 hr = transform->SetOutputType(0, media_type.get(), 0); // No flags | 2839 hr = transform->SetOutputType(0, media_type.get(), 0); // No flags |
2941 RETURN_ON_HR_FAILURE(hr, "Failed to set output type", false); | 2840 RETURN_ON_HR_FAILURE(hr, "Failed to set output type", false); |
2942 return true; | 2841 return true; |
2943 } | 2842 } |
2944 media_type.Release(); | 2843 media_type.Release(); |
2945 } | 2844 } |
2946 return false; | 2845 return false; |
2947 } | 2846 } |
2948 | 2847 |
2949 HRESULT DXVAVideoDecodeAccelerator::CheckConfigChanged( | 2848 HRESULT DXVAVideoDecodeAccelerator::CheckConfigChanged(IMFSample* sample, |
2950 IMFSample* sample, bool* config_changed) { | 2849 bool* config_changed) { |
2951 if (codec_ != media::kCodecH264) | 2850 if (codec_ != media::kCodecH264) |
2952 return S_FALSE; | 2851 return S_FALSE; |
2953 | 2852 |
2954 base::win::ScopedComPtr<IMFMediaBuffer> buffer; | 2853 base::win::ScopedComPtr<IMFMediaBuffer> buffer; |
2955 HRESULT hr = sample->GetBufferByIndex(0, buffer.Receive()); | 2854 HRESULT hr = sample->GetBufferByIndex(0, buffer.Receive()); |
2956 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from input sample", hr); | 2855 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from input sample", hr); |
2957 | 2856 |
2958 MediaBufferScopedPointer scoped_media_buffer(buffer.get()); | 2857 MediaBufferScopedPointer scoped_media_buffer(buffer.get()); |
2959 | 2858 |
2960 if (!config_change_detector_->DetectConfig( | 2859 if (!config_change_detector_->DetectConfig( |
2961 scoped_media_buffer.get(), | 2860 scoped_media_buffer.get(), scoped_media_buffer.current_length())) { |
2962 scoped_media_buffer.current_length())) { | |
2963 RETURN_ON_HR_FAILURE(E_FAIL, "Failed to detect H.264 stream config", | 2861 RETURN_ON_HR_FAILURE(E_FAIL, "Failed to detect H.264 stream config", |
2964 E_FAIL); | 2862 E_FAIL); |
2965 } | 2863 } |
2966 *config_changed = config_change_detector_->config_changed(); | 2864 *config_changed = config_change_detector_->config_changed(); |
2967 return S_OK; | 2865 return S_OK; |
2968 } | 2866 } |
2969 | 2867 |
2970 void DXVAVideoDecodeAccelerator::ConfigChanged( | 2868 void DXVAVideoDecodeAccelerator::ConfigChanged(const Config& config) { |
2971 const Config& config) { | |
2972 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 2869 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
2973 | 2870 |
2974 SetState(kConfigChange); | 2871 SetState(kConfigChange); |
2975 DismissStaleBuffers(true); | 2872 DismissStaleBuffers(true); |
2976 Invalidate(); | 2873 Invalidate(); |
2977 Initialize(config_, client_); | 2874 Initialize(config_, client_); |
2978 decoder_thread_task_runner_->PostTask( | 2875 decoder_thread_task_runner_->PostTask( |
2979 FROM_HERE, | 2876 FROM_HERE, |
2980 base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers, | 2877 base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers, |
2981 base::Unretained(this))); | 2878 base::Unretained(this))); |
2982 } | 2879 } |
2983 | 2880 |
2984 } // namespace content | 2881 } // namespace media |