OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/common/gpu/media/dxva_video_decode_accelerator_win.h" | 5 #include "media/gpu/dxva_video_decode_accelerator_win.h" |
6 | 6 |
7 #include <memory> | 7 #include <memory> |
8 | 8 |
9 #if !defined(OS_WIN) | 9 #if !defined(OS_WIN) |
10 #error This file should only be built on Windows. | 10 #error This file should only be built on Windows. |
11 #endif // !defined(OS_WIN) | 11 #endif // !defined(OS_WIN) |
12 | 12 |
13 #include <codecapi.h> | 13 #include <codecapi.h> |
14 #include <dxgi1_2.h> | 14 #include <dxgi1_2.h> |
15 #include <ks.h> | 15 #include <ks.h> |
16 #include <mfapi.h> | 16 #include <mfapi.h> |
17 #include <mferror.h> | 17 #include <mferror.h> |
18 #include <ntverp.h> | 18 #include <ntverp.h> |
19 #include <stddef.h> | 19 #include <stddef.h> |
20 #include <string.h> | 20 #include <string.h> |
21 #include <wmcodecdsp.h> | 21 #include <wmcodecdsp.h> |
(...skipping 21 matching lines...) |
43 #include "ui/gl/gl_fence.h" | 43 #include "ui/gl/gl_fence.h" |
44 #include "ui/gl/gl_surface_egl.h" | 44 #include "ui/gl/gl_surface_egl.h" |
45 | 45 |
46 namespace { | 46 namespace { |
47 | 47 |
48 // Path is appended on to the PROGRAM_FILES base path. | 48 // Path is appended on to the PROGRAM_FILES base path. |
49 const wchar_t kVPXDecoderDLLPath[] = L"Intel\\Media SDK\\"; | 49 const wchar_t kVPXDecoderDLLPath[] = L"Intel\\Media SDK\\"; |
50 | 50 |
51 const wchar_t kVP8DecoderDLLName[] = | 51 const wchar_t kVP8DecoderDLLName[] = |
52 #if defined(ARCH_CPU_X86) | 52 #if defined(ARCH_CPU_X86) |
53 L"mfx_mft_vp8vd_32.dll"; | 53 L"mfx_mft_vp8vd_32.dll"; |
54 #elif defined(ARCH_CPU_X86_64) | 54 #elif defined(ARCH_CPU_X86_64) |
55 L"mfx_mft_vp8vd_64.dll"; | 55 L"mfx_mft_vp8vd_64.dll"; |
56 #else | 56 #else |
57 #error Unsupported Windows CPU Architecture | 57 #error Unsupported Windows CPU Architecture |
58 #endif | 58 #endif |
59 | 59 |
60 const wchar_t kVP9DecoderDLLName[] = | 60 const wchar_t kVP9DecoderDLLName[] = |
61 #if defined(ARCH_CPU_X86) | 61 #if defined(ARCH_CPU_X86) |
62 L"mfx_mft_vp9vd_32.dll"; | 62 L"mfx_mft_vp9vd_32.dll"; |
63 #elif defined(ARCH_CPU_X86_64) | 63 #elif defined(ARCH_CPU_X86_64) |
64 L"mfx_mft_vp9vd_64.dll"; | 64 L"mfx_mft_vp9vd_64.dll"; |
65 #else | 65 #else |
66 #error Unsupported Windows CPU Architecture | 66 #error Unsupported Windows CPU Architecture |
67 #endif | 67 #endif |
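// Illustrative sketch, not part of this change: how the constants above can
// be combined into a full decoder DLL path (assumes base/files/file_path.h
// and base/path_service.h are available in this file).
//
//   base::FilePath dll_path;
//   if (PathService::Get(base::DIR_PROGRAM_FILES, &dll_path)) {
//     dll_path = dll_path.Append(kVPXDecoderDLLPath);
//     dll_path = dll_path.Append(kVP8DecoderDLLName);
//     HMODULE vp8_dll = ::LoadLibraryEx(dll_path.value().c_str(), NULL,
//                                       LOAD_WITH_ALTERED_SEARCH_PATH);
//   }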
68 | 68 |
69 const CLSID CLSID_WebmMfVp8Dec = { | 69 const CLSID CLSID_WebmMfVp8Dec = { |
70 0x451e3cb7, | 70 0x451e3cb7, |
71 0x2622, | 71 0x2622, |
72 0x4ba5, | 72 0x4ba5, |
73 { 0x8e, 0x1d, 0x44, 0xb3, 0xc4, 0x1d, 0x09, 0x24 } | 73 {0x8e, 0x1d, 0x44, 0xb3, 0xc4, 0x1d, 0x09, 0x24}}; |
74 }; | |
75 | 74 |
76 const CLSID CLSID_WebmMfVp9Dec = { | 75 const CLSID CLSID_WebmMfVp9Dec = { |
77 0x07ab4bd2, | 76 0x07ab4bd2, |
78 0x1979, | 77 0x1979, |
79 0x4fcd, | 78 0x4fcd, |
80 { 0xa6, 0x97, 0xdf, 0x9a, 0xd1, 0x5b, 0x34, 0xfe } | 79 {0xa6, 0x97, 0xdf, 0x9a, 0xd1, 0x5b, 0x34, 0xfe}}; |
81 }; | |
82 | 80 |
83 const CLSID MEDIASUBTYPE_VP80 = { | 81 const CLSID MEDIASUBTYPE_VP80 = { |
84 0x30385056, | 82 0x30385056, |
85 0x0000, | 83 0x0000, |
86 0x0010, | 84 0x0010, |
87 { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 } | 85 {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}}; |
88 }; | |
89 | 86 |
90 const CLSID MEDIASUBTYPE_VP90 = { | 87 const CLSID MEDIASUBTYPE_VP90 = { |
91 0x30395056, | 88 0x30395056, |
92 0x0000, | 89 0x0000, |
93 0x0010, | 90 0x0010, |
94 { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 } | 91 {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}}; |
95 }; | |
96 | 92 |
97 // The CLSID of the video processor media foundation transform which we use for | 93 // The CLSID of the video processor media foundation transform which we use for |
98 // texture color conversion in DX11. | 94 // texture color conversion in DX11. |
99 // Defined in mfidl.h in the Windows 10 SDK. ntverp.h provides VER_PRODUCTBUILD | 95 // Defined in mfidl.h in the Windows 10 SDK. ntverp.h provides VER_PRODUCTBUILD |
100 // to detect which SDK we are compiling with. | 96 // to detect which SDK we are compiling with. |
101 #if VER_PRODUCTBUILD < 10011 // VER_PRODUCTBUILD for 10.0.10158.0 SDK. | 97 #if VER_PRODUCTBUILD < 10011 // VER_PRODUCTBUILD for 10.0.10158.0 SDK. |
102 DEFINE_GUID(CLSID_VideoProcessorMFT, | 98 DEFINE_GUID(CLSID_VideoProcessorMFT, |
103 0x88753b26, 0x5b24, 0x49bd, 0xb2, 0xe7, 0xc, 0x44, 0x5c, 0x78, | 99 0x88753b26, |
104 0xc9, 0x82); | 100 0x5b24, |
| 101 0x49bd, |
| 102 0xb2, |
| 103 0xe7, |
| 104 0xc, |
| 105 0x44, |
| 106 0x5c, |
| 107 0x78, |
| 108 0xc9, |
| 109 0x82); |
105 #endif | 110 #endif |
106 | 111 |
107 // MF_XVP_PLAYBACK_MODE | 112 // MF_XVP_PLAYBACK_MODE |
108 // Data type: UINT32 (treat as BOOL) | 113 // Data type: UINT32 (treat as BOOL) |
109 // If this attribute is TRUE, the video processor will run in playback mode | 114 // If this attribute is TRUE, the video processor will run in playback mode |
110 // where it allows callers to allocate output samples and allows last frame | 115 // where it allows callers to allocate output samples and allows last frame |
111 // regeneration (repaint). | 116 // regeneration (repaint). |
112 DEFINE_GUID(MF_XVP_PLAYBACK_MODE, 0x3c5d293f, 0xad67, 0x4e29, 0xaf, 0x12, | 117 DEFINE_GUID(MF_XVP_PLAYBACK_MODE, |
113 0xcf, 0x3e, 0x23, 0x8a, 0xcc, 0xe9); | 118 0x3c5d293f, |
| 119 0xad67, |
| 120 0x4e29, |
| 121 0xaf, |
| 122 0x12, |
| 123 0xcf, |
| 124 0x3e, |
| 125 0x23, |
| 126 0x8a, |
| 127 0xcc, |
| 128 0xe9); |
114 | 129 |
115 // Defines the GUID for the Intel H264 DXVA device. | 130 // Defines the GUID for the Intel H264 DXVA device. |
116 static const GUID DXVA2_Intel_ModeH264_E = { | 131 static const GUID DXVA2_Intel_ModeH264_E = { |
117 0x604F8E68, 0x4951, 0x4c54,{ 0x88, 0xFE, 0xAB, 0xD2, 0x5C, 0x15, 0xB3, 0xD6} | 132 0x604F8E68, |
118 }; | 133 0x4951, |
| 134 0x4c54, |
| 135 {0x88, 0xFE, 0xAB, 0xD2, 0x5C, 0x15, 0xB3, 0xD6}}; |
119 | 136 |
120 // R600, R700, Evergreen and Cayman AMD cards. These support DXVA via UVD3 | 137 // R600, R700, Evergreen and Cayman AMD cards. These support DXVA via UVD3 |
121 // or earlier, and don't handle resolutions higher than 1920 x 1088 well. | 138 // or earlier, and don't handle resolutions higher than 1920 x 1088 well. |
122 static const DWORD g_AMDUVD3GPUList[] = { | 139 static const DWORD g_AMDUVD3GPUList[] = { |
123 0x9400, 0x9401, 0x9402, 0x9403, 0x9405, 0x940a, 0x940b, 0x940f, 0x94c0, | 140 0x9400, 0x9401, 0x9402, 0x9403, 0x9405, 0x940a, 0x940b, 0x940f, 0x94c0, |
124 0x94c1, 0x94c3, 0x94c4, 0x94c5, 0x94c6, 0x94c7, 0x94c8, 0x94c9, 0x94cb, | 141 0x94c1, 0x94c3, 0x94c4, 0x94c5, 0x94c6, 0x94c7, 0x94c8, 0x94c9, 0x94cb, |
125 0x94cc, 0x94cd, 0x9580, 0x9581, 0x9583, 0x9586, 0x9587, 0x9588, 0x9589, | 142 0x94cc, 0x94cd, 0x9580, 0x9581, 0x9583, 0x9586, 0x9587, 0x9588, 0x9589, |
126 0x958a, 0x958b, 0x958c, 0x958d, 0x958e, 0x958f, 0x9500, 0x9501, 0x9504, | 143 0x958a, 0x958b, 0x958c, 0x958d, 0x958e, 0x958f, 0x9500, 0x9501, 0x9504, |
127 0x9505, 0x9506, 0x9507, 0x9508, 0x9509, 0x950f, 0x9511, 0x9515, 0x9517, | 144 0x9505, 0x9506, 0x9507, 0x9508, 0x9509, 0x950f, 0x9511, 0x9515, 0x9517, |
128 0x9519, 0x95c0, 0x95c2, 0x95c4, 0x95c5, 0x95c6, 0x95c7, 0x95c9, 0x95cc, | 145 0x9519, 0x95c0, 0x95c2, 0x95c4, 0x95c5, 0x95c6, 0x95c7, 0x95c9, 0x95cc, |
129 0x95cd, 0x95ce, 0x95cf, 0x9590, 0x9591, 0x9593, 0x9595, 0x9596, 0x9597, | 146 0x95cd, 0x95ce, 0x95cf, 0x9590, 0x9591, 0x9593, 0x9595, 0x9596, 0x9597, |
130 0x9598, 0x9599, 0x959b, 0x9610, 0x9611, 0x9612, 0x9613, 0x9614, 0x9615, | 147 0x9598, 0x9599, 0x959b, 0x9610, 0x9611, 0x9612, 0x9613, 0x9614, 0x9615, |
131 0x9616, 0x9710, 0x9711, 0x9712, 0x9713, 0x9714, 0x9715, 0x9440, 0x9441, | 148 0x9616, 0x9710, 0x9711, 0x9712, 0x9713, 0x9714, 0x9715, 0x9440, 0x9441, |
132 0x9442, 0x9443, 0x9444, 0x9446, 0x944a, 0x944b, 0x944c, 0x944e, 0x9450, | 149 0x9442, 0x9443, 0x9444, 0x9446, 0x944a, 0x944b, 0x944c, 0x944e, 0x9450, |
133 0x9452, 0x9456, 0x945a, 0x945b, 0x945e, 0x9460, 0x9462, 0x946a, 0x946b, | 150 0x9452, 0x9456, 0x945a, 0x945b, 0x945e, 0x9460, 0x9462, 0x946a, 0x946b, |
134 0x947a, 0x947b, 0x9480, 0x9487, 0x9488, 0x9489, 0x948a, 0x948f, 0x9490, | 151 0x947a, 0x947b, 0x9480, 0x9487, 0x9488, 0x9489, 0x948a, 0x948f, 0x9490, |
135 0x9491, 0x9495, 0x9498, 0x949c, 0x949e, 0x949f, 0x9540, 0x9541, 0x9542, | 152 0x9491, 0x9495, 0x9498, 0x949c, 0x949e, 0x949f, 0x9540, 0x9541, 0x9542, |
136 0x954e, 0x954f, 0x9552, 0x9553, 0x9555, 0x9557, 0x955f, 0x94a0, 0x94a1, | 153 0x954e, 0x954f, 0x9552, 0x9553, 0x9555, 0x9557, 0x955f, 0x94a0, 0x94a1, |
137 0x94a3, 0x94b1, 0x94b3, 0x94b4, 0x94b5, 0x94b9, 0x68e0, 0x68e1, 0x68e4, | 154 0x94a3, 0x94b1, 0x94b3, 0x94b4, 0x94b5, 0x94b9, 0x68e0, 0x68e1, 0x68e4, |
138 0x68e5, 0x68e8, 0x68e9, 0x68f1, 0x68f2, 0x68f8, 0x68f9, 0x68fa, 0x68fe, | 155 0x68e5, 0x68e8, 0x68e9, 0x68f1, 0x68f2, 0x68f8, 0x68f9, 0x68fa, 0x68fe, |
139 0x68c0, 0x68c1, 0x68c7, 0x68c8, 0x68c9, 0x68d8, 0x68d9, 0x68da, 0x68de, | 156 0x68c0, 0x68c1, 0x68c7, 0x68c8, 0x68c9, 0x68d8, 0x68d9, 0x68da, 0x68de, |
140 0x68a0, 0x68a1, 0x68a8, 0x68a9, 0x68b0, 0x68b8, 0x68b9, 0x68ba, 0x68be, | 157 0x68a0, 0x68a1, 0x68a8, 0x68a9, 0x68b0, 0x68b8, 0x68b9, 0x68ba, 0x68be, |
141 0x68bf, 0x6880, 0x6888, 0x6889, 0x688a, 0x688c, 0x688d, 0x6898, 0x6899, | 158 0x68bf, 0x6880, 0x6888, 0x6889, 0x688a, 0x688c, 0x688d, 0x6898, 0x6899, |
142 0x689b, 0x689e, 0x689c, 0x689d, 0x9802, 0x9803, 0x9804, 0x9805, 0x9806, | 159 0x689b, 0x689e, 0x689c, 0x689d, 0x9802, 0x9803, 0x9804, 0x9805, 0x9806, |
143 0x9807, 0x9808, 0x9809, 0x980a, 0x9640, 0x9641, 0x9647, 0x9648, 0x964a, | 160 0x9807, 0x9808, 0x9809, 0x980a, 0x9640, 0x9641, 0x9647, 0x9648, 0x964a, |
144 0x964b, 0x964c, 0x964e, 0x964f, 0x9642, 0x9643, 0x9644, 0x9645, 0x9649, | 161 0x964b, 0x964c, 0x964e, 0x964f, 0x9642, 0x9643, 0x9644, 0x9645, 0x9649, |
145 0x6720, 0x6721, 0x6722, 0x6723, 0x6724, 0x6725, 0x6726, 0x6727, 0x6728, | 162 0x6720, 0x6721, 0x6722, 0x6723, 0x6724, 0x6725, 0x6726, 0x6727, 0x6728, |
146 0x6729, 0x6738, 0x6739, 0x673e, 0x6740, 0x6741, 0x6742, 0x6743, 0x6744, | 163 0x6729, 0x6738, 0x6739, 0x673e, 0x6740, 0x6741, 0x6742, 0x6743, 0x6744, |
147 0x6745, 0x6746, 0x6747, 0x6748, 0x6749, 0x674a, 0x6750, 0x6751, 0x6758, | 164 0x6745, 0x6746, 0x6747, 0x6748, 0x6749, 0x674a, 0x6750, 0x6751, 0x6758, |
148 0x6759, 0x675b, 0x675d, 0x675f, 0x6840, 0x6841, 0x6842, 0x6843, 0x6849, | 165 0x6759, 0x675b, 0x675d, 0x675f, 0x6840, 0x6841, 0x6842, 0x6843, 0x6849, |
149 0x6850, 0x6858, 0x6859, 0x6760, 0x6761, 0x6762, 0x6763, 0x6764, 0x6765, | 166 0x6850, 0x6858, 0x6859, 0x6760, 0x6761, 0x6762, 0x6763, 0x6764, 0x6765, |
150 0x6766, 0x6767, 0x6768, 0x6770, 0x6771, 0x6772, 0x6778, 0x6779, 0x677b, | 167 0x6766, 0x6767, 0x6768, 0x6770, 0x6771, 0x6772, 0x6778, 0x6779, 0x677b, |
151 0x6700, 0x6701, 0x6702, 0x6703, 0x6704, 0x6705, 0x6706, 0x6707, 0x6708, | 168 0x6700, 0x6701, 0x6702, 0x6703, 0x6704, 0x6705, 0x6706, 0x6707, 0x6708, |
152 0x6709, 0x6718, 0x6719, 0x671c, 0x671d, 0x671f, 0x683D, 0x9900, 0x9901, | 169 0x6709, 0x6718, 0x6719, 0x671c, 0x671d, 0x671f, 0x683D, 0x9900, 0x9901, |
153 0x9903, 0x9904, 0x9905, 0x9906, 0x9907, 0x9908, 0x9909, 0x990a, 0x990b, | 170 0x9903, 0x9904, 0x9905, 0x9906, 0x9907, 0x9908, 0x9909, 0x990a, 0x990b, |
154 0x990c, 0x990d, 0x990e, 0x990f, 0x9910, 0x9913, 0x9917, 0x9918, 0x9919, | 171 0x990c, 0x990d, 0x990e, 0x990f, 0x9910, 0x9913, 0x9917, 0x9918, 0x9919, |
155 0x9990, 0x9991, 0x9992, 0x9993, 0x9994, 0x9995, 0x9996, 0x9997, 0x9998, | 172 0x9990, 0x9991, 0x9992, 0x9993, 0x9994, 0x9995, 0x9996, 0x9997, 0x9998, |
156 0x9999, 0x999a, 0x999b, 0x999c, 0x999d, 0x99a0, 0x99a2, 0x99a4, | 173 0x9999, 0x999a, 0x999b, 0x999c, 0x999d, 0x99a0, 0x99a2, 0x99a4, |
157 }; | 174 }; |
158 | 175 |
159 // Legacy Intel GPUs (Second generation) which have trouble with resolutions | 176 // Legacy Intel GPUs (Second generation) which have trouble with resolutions |
160 // higher than 1920 x 1088 | 177 // higher than 1920 x 1088 |
161 static const DWORD g_IntelLegacyGPUList[] = { | 178 static const DWORD g_IntelLegacyGPUList[] = { |
162 0x102, 0x106, 0x116, 0x126, | 179 0x102, 0x106, 0x116, 0x126, |
163 }; | 180 }; |
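// Illustrative helper, not part of this change: the lists above are intended
// to be checked against the GPU's PCI device id (|device_id| below is a
// hypothetical value obtained elsewhere), e.g.:
//
//   static bool IsDeviceIdInList(const DWORD* list, size_t size, DWORD id) {
//     for (size_t i = 0; i < size; ++i) {
//       if (list[i] == id)
//         return true;
//     }
//     return false;
//   }
//
//   bool legacy_gpu =
//       IsDeviceIdInList(g_AMDUVD3GPUList, arraysize(g_AMDUVD3GPUList),
//                        device_id) ||
//       IsDeviceIdInList(g_IntelLegacyGPUList, arraysize(g_IntelLegacyGPUList),
//                        device_id);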
164 | 181 |
165 // Provides scoped access to the underlying buffer in an IMFMediaBuffer | 182 // Provides scoped access to the underlying buffer in an IMFMediaBuffer |
166 // instance. | 183 // instance. |
167 class MediaBufferScopedPointer { | 184 class MediaBufferScopedPointer { |
168 public: | 185 public: |
169 MediaBufferScopedPointer(IMFMediaBuffer* media_buffer) | 186 MediaBufferScopedPointer(IMFMediaBuffer* media_buffer) |
170 : media_buffer_(media_buffer), | 187 : media_buffer_(media_buffer), |
171 buffer_(nullptr), | 188 buffer_(nullptr), |
172 max_length_(0), | 189 max_length_(0), |
173 current_length_(0) { | 190 current_length_(0) { |
174 HRESULT hr = media_buffer_->Lock(&buffer_, &max_length_, ¤t_length_); | 191 HRESULT hr = media_buffer_->Lock(&buffer_, &max_length_, ¤t_length_); |
175 CHECK(SUCCEEDED(hr)); | 192 CHECK(SUCCEEDED(hr)); |
176 } | 193 } |
177 | 194 |
178 ~MediaBufferScopedPointer() { | 195 ~MediaBufferScopedPointer() { |
179 HRESULT hr = media_buffer_->Unlock(); | 196 HRESULT hr = media_buffer_->Unlock(); |
180 CHECK(SUCCEEDED(hr)); | 197 CHECK(SUCCEEDED(hr)); |
181 } | 198 } |
182 | 199 |
183 uint8_t* get() { | 200 uint8_t* get() { return buffer_; } |
184 return buffer_; | |
185 } | |
186 | 201 |
187 DWORD current_length() const { | 202 DWORD current_length() const { return current_length_; } |
188 return current_length_; | |
189 } | |
190 | 203 |
191 private: | 204 private: |
192 base::win::ScopedComPtr<IMFMediaBuffer> media_buffer_; | 205 base::win::ScopedComPtr<IMFMediaBuffer> media_buffer_; |
193 uint8_t* buffer_; | 206 uint8_t* buffer_; |
194 DWORD max_length_; | 207 DWORD max_length_; |
195 DWORD current_length_; | 208 DWORD current_length_; |
196 | 209 |
197 DISALLOW_COPY_AND_ASSIGN(MediaBufferScopedPointer); | 210 DISALLOW_COPY_AND_ASSIGN(MediaBufferScopedPointer); |
198 }; | 211 }; |
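// Usage sketch, illustrative only: MediaBufferScopedPointer keeps the
// IMFMediaBuffer locked for exactly the lifetime of the enclosing scope.
//
//   base::win::ScopedComPtr<IMFMediaBuffer> media_buffer;
//   if (SUCCEEDED(MFCreateMemoryBuffer(4096, media_buffer.Receive()))) {
//     MediaBufferScopedPointer scoped_buffer(media_buffer.get());
//     // scoped_buffer.get() is the locked byte pointer and
//     // scoped_buffer.current_length() the number of valid bytes.
//   }  // The destructor calls Unlock() here.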
199 | 212 |
200 } // namespace | 213 } // namespace |
201 | 214 |
202 namespace content { | 215 namespace media { |
203 | 216 |
204 static const media::VideoCodecProfile kSupportedProfiles[] = { | 217 static const media::VideoCodecProfile kSupportedProfiles[] = { |
205 media::H264PROFILE_BASELINE, | 218 media::H264PROFILE_BASELINE, media::H264PROFILE_MAIN, |
206 media::H264PROFILE_MAIN, | 219 media::H264PROFILE_HIGH, media::VP8PROFILE_ANY, |
207 media::H264PROFILE_HIGH, | 220 media::VP9PROFILE_PROFILE0, media::VP9PROFILE_PROFILE1, |
208 media::VP8PROFILE_ANY, | 221 media::VP9PROFILE_PROFILE2, media::VP9PROFILE_PROFILE3}; |
209 media::VP9PROFILE_PROFILE0, | |
210 media::VP9PROFILE_PROFILE1, | |
211 media::VP9PROFILE_PROFILE2, | |
212 media::VP9PROFILE_PROFILE3 | |
213 }; | |
214 | 222 |
215 CreateDXGIDeviceManager DXVAVideoDecodeAccelerator::create_dxgi_device_manager_ | 223 CreateDXGIDeviceManager |
216 = NULL; | 224 DXVAVideoDecodeAccelerator::create_dxgi_device_manager_ = NULL; |
217 | 225 |
218 #define RETURN_ON_FAILURE(result, log, ret) \ | 226 #define RETURN_ON_FAILURE(result, log, ret) \ |
219 do { \ | 227 do { \ |
220 if (!(result)) { \ | 228 if (!(result)) { \ |
221 DLOG(ERROR) << log; \ | 229 DLOG(ERROR) << log; \ |
222 return ret; \ | 230 return ret; \ |
223 } \ | 231 } \ |
224 } while (0) | 232 } while (0) |
225 | 233 |
226 #define RETURN_ON_HR_FAILURE(result, log, ret) \ | 234 #define RETURN_ON_HR_FAILURE(result, log, ret) \ |
227 RETURN_ON_FAILURE(SUCCEEDED(result), \ | 235 RETURN_ON_FAILURE(SUCCEEDED(result), \ |
228 log << ", HRESULT: 0x" << std::hex << result, \ | 236 log << ", HRESULT: 0x" << std::hex << result, ret); |
229 ret); | |
230 | 237 |
231 #define RETURN_AND_NOTIFY_ON_FAILURE(result, log, error_code, ret) \ | 238 #define RETURN_AND_NOTIFY_ON_FAILURE(result, log, error_code, ret) \ |
232 do { \ | 239 do { \ |
233 if (!(result)) { \ | 240 if (!(result)) { \ |
234 DVLOG(1) << log; \ | 241 DVLOG(1) << log; \ |
235 StopOnError(error_code); \ | 242 StopOnError(error_code); \ |
236 return ret; \ | 243 return ret; \ |
237 } \ | 244 } \ |
238 } while (0) | 245 } while (0) |
239 | 246 |
240 #define RETURN_AND_NOTIFY_ON_HR_FAILURE(result, log, error_code, ret) \ | 247 #define RETURN_AND_NOTIFY_ON_HR_FAILURE(result, log, error_code, ret) \ |
241 RETURN_AND_NOTIFY_ON_FAILURE(SUCCEEDED(result), \ | 248 RETURN_AND_NOTIFY_ON_FAILURE(SUCCEEDED(result), \ |
242 log << ", HRESULT: 0x" << std::hex << result, \ | 249 log << ", HRESULT: 0x" << std::hex << result, \ |
243 error_code, ret); | 250 error_code, ret); |
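// The do { ... } while (0) wrappers make these macros expand to a single
// statement, so they stay safe inside an unbraced if/else. Illustrative use
// (inside a function returning bool):
//
//   HRESULT hr = MFStartup(MF_VERSION, MFSTARTUP_FULL);
//   RETURN_ON_HR_FAILURE(hr, "MFStartup failed", false);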
244 | 251 |
245 enum { | 252 enum { |
246 // Maximum number of iterations we allow before aborting the attempt to flush | 253 // Maximum number of iterations we allow before aborting the attempt to flush |
247 // the batched queries to the driver and allow torn/corrupt frames to be | 254 // the batched queries to the driver and allow torn/corrupt frames to be |
248 // rendered. | 255 // rendered. |
249 kFlushDecoderSurfaceTimeoutMs = 1, | 256 kFlushDecoderSurfaceTimeoutMs = 1, |
250 // Maximum iterations where we try to flush the d3d device. | 257 // Maximum iterations where we try to flush the d3d device. |
251 kMaxIterationsForD3DFlush = 4, | 258 kMaxIterationsForD3DFlush = 4, |
(...skipping 25 matching lines...) |
277 base::win::ScopedComPtr<IMFSample> sample; | 284 base::win::ScopedComPtr<IMFSample> sample; |
278 sample.Attach(CreateEmptySample()); | 285 sample.Attach(CreateEmptySample()); |
279 | 286 |
280 base::win::ScopedComPtr<IMFMediaBuffer> buffer; | 287 base::win::ScopedComPtr<IMFMediaBuffer> buffer; |
281 HRESULT hr = E_FAIL; | 288 HRESULT hr = E_FAIL; |
282 if (align == 0) { | 289 if (align == 0) { |
283 // Note that MFCreateMemoryBuffer is the same as MFCreateAlignedMemoryBuffer | 290 // Note that MFCreateMemoryBuffer is the same as MFCreateAlignedMemoryBuffer |
284 // with the align argument being 0. | 291 // with the align argument being 0. |
285 hr = MFCreateMemoryBuffer(buffer_length, buffer.Receive()); | 292 hr = MFCreateMemoryBuffer(buffer_length, buffer.Receive()); |
286 } else { | 293 } else { |
287 hr = MFCreateAlignedMemoryBuffer(buffer_length, | 294 hr = |
288 align - 1, | 295 MFCreateAlignedMemoryBuffer(buffer_length, align - 1, buffer.Receive()); |
289 buffer.Receive()); | |
290 } | 296 } |
291 RETURN_ON_HR_FAILURE(hr, "Failed to create memory buffer for sample", NULL); | 297 RETURN_ON_HR_FAILURE(hr, "Failed to create memory buffer for sample", NULL); |
292 | 298 |
293 hr = sample->AddBuffer(buffer.get()); | 299 hr = sample->AddBuffer(buffer.get()); |
294 RETURN_ON_HR_FAILURE(hr, "Failed to add buffer to sample", NULL); | 300 RETURN_ON_HR_FAILURE(hr, "Failed to add buffer to sample", NULL); |
295 | 301 |
296 buffer->SetCurrentLength(0); | 302 buffer->SetCurrentLength(0); |
297 return sample.Detach(); | 303 return sample.Detach(); |
298 } | 304 } |
299 | 305 |
300 // Creates a Media Foundation sample with one buffer containing a copy of the | 306 // Creates a Media Foundation sample with one buffer containing a copy of the |
301 // given Annex B stream data. | 307 // given Annex B stream data. |
302 // If duration and sample time are not known, provide 0. | 308 // If duration and sample time are not known, provide 0. |
303 // |min_size| specifies the minimum size of the buffer (might be required by | 309 // |min_size| specifies the minimum size of the buffer (might be required by |
304 // the decoder for input). If no alignment is required, provide 0. | 310 // the decoder for input). If no alignment is required, provide 0. |
305 static IMFSample* CreateInputSample(const uint8_t* stream, | 311 static IMFSample* CreateInputSample(const uint8_t* stream, |
306 uint32_t size, | 312 uint32_t size, |
307 uint32_t min_size, | 313 uint32_t min_size, |
308 int alignment) { | 314 int alignment) { |
309 CHECK(stream); | 315 CHECK(stream); |
310 CHECK_GT(size, 0U); | 316 CHECK_GT(size, 0U); |
311 base::win::ScopedComPtr<IMFSample> sample; | 317 base::win::ScopedComPtr<IMFSample> sample; |
312 sample.Attach(CreateEmptySampleWithBuffer(std::max(min_size, size), | 318 sample.Attach( |
313 alignment)); | 319 CreateEmptySampleWithBuffer(std::max(min_size, size), alignment)); |
314 RETURN_ON_FAILURE(sample.get(), "Failed to create empty sample", NULL); | 320 RETURN_ON_FAILURE(sample.get(), "Failed to create empty sample", NULL); |
315 | 321 |
316 base::win::ScopedComPtr<IMFMediaBuffer> buffer; | 322 base::win::ScopedComPtr<IMFMediaBuffer> buffer; |
317 HRESULT hr = sample->GetBufferByIndex(0, buffer.Receive()); | 323 HRESULT hr = sample->GetBufferByIndex(0, buffer.Receive()); |
318 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from sample", NULL); | 324 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from sample", NULL); |
319 | 325 |
320 DWORD max_length = 0; | 326 DWORD max_length = 0; |
321 DWORD current_length = 0; | 327 DWORD current_length = 0; |
322 uint8_t* destination = NULL; | 328 uint8_t* destination = NULL; |
323 hr = buffer->Lock(&destination, &max_length, ¤t_length); | 329 hr = buffer->Lock(&destination, &max_length, ¤t_length); |
324 RETURN_ON_HR_FAILURE(hr, "Failed to lock buffer", NULL); | 330 RETURN_ON_HR_FAILURE(hr, "Failed to lock buffer", NULL); |
325 | 331 |
326 CHECK_EQ(current_length, 0u); | 332 CHECK_EQ(current_length, 0u); |
327 CHECK_GE(max_length, size); | 333 CHECK_GE(max_length, size); |
328 memcpy(destination, stream, size); | 334 memcpy(destination, stream, size); |
329 | 335 |
330 hr = buffer->SetCurrentLength(size); | 336 hr = buffer->SetCurrentLength(size); |
331 RETURN_ON_HR_FAILURE(hr, "Failed to set buffer length", NULL); | 337 RETURN_ON_HR_FAILURE(hr, "Failed to set buffer length", NULL); |
332 | 338 |
333 hr = buffer->Unlock(); | 339 hr = buffer->Unlock(); |
334 RETURN_ON_HR_FAILURE(hr, "Failed to unlock buffer", NULL); | 340 RETURN_ON_HR_FAILURE(hr, "Failed to unlock buffer", NULL); |
335 | 341 |
336 return sample.Detach(); | 342 return sample.Detach(); |
337 } | 343 } |
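// Usage sketch, illustrative only (|bitstream_data|, |bitstream_size| and
// |input_stream_info_| are assumed names; the latter stands for the decoder's
// MFT_INPUT_STREAM_INFO): wrapping an Annex B buffer for the MFT.
//
//   base::win::ScopedComPtr<IMFSample> sample;
//   sample.Attach(CreateInputSample(bitstream_data, bitstream_size,
//                                   input_stream_info_.cbSize,
//                                   input_stream_info_.cbAlignment));
//   RETURN_ON_FAILURE(sample.get(), "Failed to create input sample", false);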
338 | 344 |
339 // Helper function to create a COM object instance from a DLL. The alternative | 345 // Helper function to create a COM object instance from a DLL. The alternative |
340 // is to use the CoCreateInstance API which requires the COM apartment to be | 346 // is to use the CoCreateInstance API which requires the COM apartment to be |
341 // initialized which is not the case on the GPU main thread. We want to avoid | 347 // initialized which is not the case on the GPU main thread. We want to avoid |
342 // initializing COM as it may have side effects. | 348 // initializing COM as it may have side effects. |
343 HRESULT CreateCOMObjectFromDll(HMODULE dll, const CLSID& clsid, const IID& iid, | 349 HRESULT CreateCOMObjectFromDll(HMODULE dll, |
| 350 const CLSID& clsid, |
| 351 const IID& iid, |
344 void** object) { | 352 void** object) { |
345 if (!dll || !object) | 353 if (!dll || !object) |
346 return E_INVALIDARG; | 354 return E_INVALIDARG; |
347 | 355 |
348 using GetClassObject = HRESULT (WINAPI*)( | 356 using GetClassObject = |
349 const CLSID& clsid, const IID& iid, void** object); | 357 HRESULT(WINAPI*)(const CLSID& clsid, const IID& iid, void** object); |
350 | 358 |
351 GetClassObject get_class_object = reinterpret_cast<GetClassObject>( | 359 GetClassObject get_class_object = reinterpret_cast<GetClassObject>( |
352 GetProcAddress(dll, "DllGetClassObject")); | 360 GetProcAddress(dll, "DllGetClassObject")); |
353 RETURN_ON_FAILURE( | 361 RETURN_ON_FAILURE(get_class_object, "Failed to get DllGetClassObject pointer", |
354 get_class_object, "Failed to get DllGetClassObject pointer", E_FAIL); | 362 E_FAIL); |
355 | 363 |
356 base::win::ScopedComPtr<IClassFactory> factory; | 364 base::win::ScopedComPtr<IClassFactory> factory; |
357 HRESULT hr = get_class_object( | 365 HRESULT hr = |
358 clsid, | 366 get_class_object(clsid, __uuidof(IClassFactory), factory.ReceiveVoid()); |
359 __uuidof(IClassFactory), | |
360 factory.ReceiveVoid()); | |
361 RETURN_ON_HR_FAILURE(hr, "DllGetClassObject failed", hr); | 367 RETURN_ON_HR_FAILURE(hr, "DllGetClassObject failed", hr); |
362 | 368 |
363 hr = factory->CreateInstance(NULL, iid, object); | 369 hr = factory->CreateInstance(NULL, iid, object); |
364 return hr; | 370 return hr; |
365 } | 371 } |
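// Usage sketch, illustrative only (assumes the VP8 decoder DLL has already
// been loaded into the process): instantiating the Intel VP8 MFT without
// going through CoCreateInstance.
//
//   base::win::ScopedComPtr<IMFTransform> vp8_decoder;
//   HMODULE vp8_dll = ::GetModuleHandle(kVP8DecoderDLLName);
//   HRESULT hr = CreateCOMObjectFromDll(vp8_dll, CLSID_WebmMfVp8Dec,
//                                       __uuidof(IMFTransform),
//                                       vp8_decoder.ReceiveVoid());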
366 | 372 |
367 // Helper function to query the ANGLE device object. The template argument T | 373 // Helper function to query the ANGLE device object. The template argument T |
368 // identifies the device interface being queried. IDirect3DDevice9Ex for d3d9 | 374 // identifies the device interface being queried. IDirect3DDevice9Ex for d3d9 |
369 // and ID3D11Device for dx11. | 375 // and ID3D11Device for dx11. |
370 template<class T> | 376 template <class T> |
371 base::win::ScopedComPtr<T> QueryDeviceObjectFromANGLE(int object_type) { | 377 base::win::ScopedComPtr<T> QueryDeviceObjectFromANGLE(int object_type) { |
372 base::win::ScopedComPtr<T> device_object; | 378 base::win::ScopedComPtr<T> device_object; |
373 | 379 |
374 EGLDisplay egl_display = nullptr; | 380 EGLDisplay egl_display = nullptr; |
375 intptr_t egl_device = 0; | 381 intptr_t egl_device = 0; |
376 intptr_t device = 0; | 382 intptr_t device = 0; |
377 | 383 |
378 { | 384 { |
379 TRACE_EVENT0("gpu", "QueryDeviceObjectFromANGLE. GetHardwareDisplay"); | 385 TRACE_EVENT0("gpu", "QueryDeviceObjectFromANGLE. GetHardwareDisplay"); |
380 egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); | 386 egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); |
381 } | 387 } |
382 | 388 |
383 RETURN_ON_FAILURE( | 389 RETURN_ON_FAILURE(gfx::GLSurfaceEGL::HasEGLExtension("EGL_EXT_device_query"), |
384 gfx::GLSurfaceEGL::HasEGLExtension("EGL_EXT_device_query"), | 390 "EGL_EXT_device_query missing", device_object); |
385 "EGL_EXT_device_query missing", | |
386 device_object); | |
387 | 391 |
388 PFNEGLQUERYDISPLAYATTRIBEXTPROC QueryDisplayAttribEXT = nullptr; | 392 PFNEGLQUERYDISPLAYATTRIBEXTPROC QueryDisplayAttribEXT = nullptr; |
389 | 393 |
390 { | 394 { |
391 TRACE_EVENT0("gpu", "QueryDeviceObjectFromANGLE. eglGetProcAddress"); | 395 TRACE_EVENT0("gpu", "QueryDeviceObjectFromANGLE. eglGetProcAddress"); |
392 | 396 |
393 QueryDisplayAttribEXT = | 397 QueryDisplayAttribEXT = reinterpret_cast<PFNEGLQUERYDISPLAYATTRIBEXTPROC>( |
394 reinterpret_cast<PFNEGLQUERYDISPLAYATTRIBEXTPROC>(eglGetProcAddress( | 398 eglGetProcAddress("eglQueryDisplayAttribEXT")); |
395 "eglQueryDisplayAttribEXT")); | |
396 | 399 |
397 RETURN_ON_FAILURE( | 400 RETURN_ON_FAILURE( |
398 QueryDisplayAttribEXT, | 401 QueryDisplayAttribEXT, |
399 "Failed to get the eglQueryDisplayAttribEXT function from ANGLE", | 402 "Failed to get the eglQueryDisplayAttribEXT function from ANGLE", |
400 device_object); | 403 device_object); |
401 } | 404 } |
402 | 405 |
403 PFNEGLQUERYDEVICEATTRIBEXTPROC QueryDeviceAttribEXT = nullptr; | 406 PFNEGLQUERYDEVICEATTRIBEXTPROC QueryDeviceAttribEXT = nullptr; |
404 | 407 |
405 { | 408 { |
406 TRACE_EVENT0("gpu", "QueryDeviceObjectFromANGLE. eglGetProcAddress"); | 409 TRACE_EVENT0("gpu", "QueryDeviceObjectFromANGLE. eglGetProcAddress"); |
407 | 410 |
408 QueryDeviceAttribEXT = | 411 QueryDeviceAttribEXT = reinterpret_cast<PFNEGLQUERYDEVICEATTRIBEXTPROC>( |
409 reinterpret_cast<PFNEGLQUERYDEVICEATTRIBEXTPROC>(eglGetProcAddress( | 412 eglGetProcAddress("eglQueryDeviceAttribEXT")); |
410 "eglQueryDeviceAttribEXT")); | |
411 | 413 |
412 RETURN_ON_FAILURE( | 414 RETURN_ON_FAILURE( |
413 QueryDeviceAttribEXT, | 415 QueryDeviceAttribEXT, |
414 "Failed to get the eglQueryDeviceAttribEXT function from ANGLE", | 416 "Failed to get the eglQueryDeviceAttribEXT function from ANGLE", |
415 device_object); | 417 device_object); |
416 } | 418 } |
417 | 419 |
418 { | 420 { |
419 TRACE_EVENT0("gpu", "QueryDeviceObjectFromANGLE. QueryDisplayAttribEXT"); | 421 TRACE_EVENT0("gpu", "QueryDeviceObjectFromANGLE. QueryDisplayAttribEXT"); |
420 | 422 |
421 RETURN_ON_FAILURE( | 423 RETURN_ON_FAILURE( |
422 QueryDisplayAttribEXT(egl_display, EGL_DEVICE_EXT, &egl_device), | 424 QueryDisplayAttribEXT(egl_display, EGL_DEVICE_EXT, &egl_device), |
423 "The eglQueryDisplayAttribEXT function failed to get the EGL device", | 425 "The eglQueryDisplayAttribEXT function failed to get the EGL device", |
424 device_object); | 426 device_object); |
425 } | 427 } |
426 | 428 |
427 RETURN_ON_FAILURE( | 429 RETURN_ON_FAILURE(egl_device, "Failed to get the EGL device", device_object); |
428 egl_device, | |
429 "Failed to get the EGL device", | |
430 device_object); | |
431 | 430 |
432 { | 431 { |
433 TRACE_EVENT0("gpu", "QueryDeviceObjectFromANGLE. QueryDisplayAttribEXT"); | 432 TRACE_EVENT0("gpu", "QueryDeviceObjectFromANGLE. QueryDisplayAttribEXT"); |
434 | 433 |
435 RETURN_ON_FAILURE( | 434 RETURN_ON_FAILURE( |
436 QueryDeviceAttribEXT( | 435 QueryDeviceAttribEXT(reinterpret_cast<EGLDeviceEXT>(egl_device), |
437 reinterpret_cast<EGLDeviceEXT>(egl_device), object_type, &device), | 436 object_type, &device), |
438 "The eglQueryDeviceAttribEXT function failed to get the device", | 437 "The eglQueryDeviceAttribEXT function failed to get the device", |
439 device_object); | 438 device_object); |
440 | 439 |
441 RETURN_ON_FAILURE(device, "Failed to get the ANGLE device", device_object); | 440 RETURN_ON_FAILURE(device, "Failed to get the ANGLE device", device_object); |
442 } | 441 } |
443 | 442 |
444 device_object = reinterpret_cast<T*>(device); | 443 device_object = reinterpret_cast<T*>(device); |
445 return device_object; | 444 return device_object; |
446 } | 445 } |
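// Usage sketch, illustrative only: the template can be instantiated for
// either device type ANGLE may expose, e.g.
//
//   base::win::ScopedComPtr<IDirect3DDevice9> d3d9_device =
//       QueryDeviceObjectFromANGLE<IDirect3DDevice9>(EGL_D3D9_DEVICE_ANGLE);
//   base::win::ScopedComPtr<ID3D11Device> d3d11_device =
//       QueryDeviceObjectFromANGLE<ID3D11Device>(EGL_D3D11_DEVICE_ANGLE);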
447 | 446 |
448 H264ConfigChangeDetector::H264ConfigChangeDetector() | 447 H264ConfigChangeDetector::H264ConfigChangeDetector() |
449 : last_sps_id_(0), | 448 : last_sps_id_(0), |
450 last_pps_id_(0), | 449 last_pps_id_(0), |
451 config_changed_(false), | 450 config_changed_(false), |
452 pending_config_changed_(false) { | 451 pending_config_changed_(false) {} |
453 } | |
454 | 452 |
455 H264ConfigChangeDetector::~H264ConfigChangeDetector() { | 453 H264ConfigChangeDetector::~H264ConfigChangeDetector() {} |
456 } | |
457 | 454 |
458 bool H264ConfigChangeDetector::DetectConfig(const uint8_t* stream, | 455 bool H264ConfigChangeDetector::DetectConfig(const uint8_t* stream, |
459 unsigned int size) { | 456 unsigned int size) { |
460 std::vector<uint8_t> sps; | 457 std::vector<uint8_t> sps; |
461 std::vector<uint8_t> pps; | 458 std::vector<uint8_t> pps; |
462 media::H264NALU nalu; | 459 media::H264NALU nalu; |
463 bool idr_seen = false; | 460 bool idr_seen = false; |
464 | 461 |
465 if (!parser_.get()) | 462 if (!parser_.get()) |
466 parser_.reset(new media::H264Parser); | 463 parser_.reset(new media::H264Parser); |
(...skipping 98 matching lines...) |
565 ~DXVAPictureBuffer(); | 562 ~DXVAPictureBuffer(); |
566 | 563 |
567 bool InitializeTexture(const DXVAVideoDecodeAccelerator& decoder, | 564 bool InitializeTexture(const DXVAVideoDecodeAccelerator& decoder, |
568 bool use_rgb); | 565 bool use_rgb); |
569 | 566 |
570 bool ReusePictureBuffer(); | 567 bool ReusePictureBuffer(); |
571 void ResetReuseFence(); | 568 void ResetReuseFence(); |
572 // Copies the output sample data to the picture buffer provided by the | 569 // Copies the output sample data to the picture buffer provided by the |
573 // client. | 570 // client. |
574 // The dest_surface parameter contains the decoded bits. | 571 // The dest_surface parameter contains the decoded bits. |
575 bool CopyOutputSampleDataToPictureBuffer( | 572 bool CopyOutputSampleDataToPictureBuffer(DXVAVideoDecodeAccelerator* decoder, |
576 DXVAVideoDecodeAccelerator* decoder, | 573 IDirect3DSurface9* dest_surface, |
577 IDirect3DSurface9* dest_surface, | 574 ID3D11Texture2D* dx11_texture, |
578 ID3D11Texture2D* dx11_texture, | 575 int input_buffer_id); |
579 int input_buffer_id); | |
580 | 576 |
581 bool available() const { | 577 bool available() const { return available_; } |
582 return available_; | |
583 } | |
584 | 578 |
585 void set_available(bool available) { | 579 void set_available(bool available) { available_ = available; } |
586 available_ = available; | |
587 } | |
588 | 580 |
589 int id() const { | 581 int id() const { return picture_buffer_.id(); } |
590 return picture_buffer_.id(); | |
591 } | |
592 | 582 |
593 gfx::Size size() const { | 583 gfx::Size size() const { return picture_buffer_.size(); } |
594 return picture_buffer_.size(); | |
595 } | |
596 | 584 |
597 bool waiting_to_reuse() const { return waiting_to_reuse_; } | 585 bool waiting_to_reuse() const { return waiting_to_reuse_; } |
598 | 586 |
599 gfx::GLFence* reuse_fence() { return reuse_fence_.get(); } | 587 gfx::GLFence* reuse_fence() { return reuse_fence_.get(); } |
600 | 588 |
601 // Called when the source surface |src_surface| is copied to the destination | 589 // Called when the source surface |src_surface| is copied to the destination |
602 // |dest_surface| | 590 // |dest_surface| |
603 bool CopySurfaceComplete(IDirect3DSurface9* src_surface, | 591 bool CopySurfaceComplete(IDirect3DSurface9* src_surface, |
604 IDirect3DSurface9* dest_surface); | 592 IDirect3DSurface9* dest_surface); |
605 | 593 |
(...skipping 48 matching lines...) |
654 | 642 |
655 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); | 643 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); |
656 | 644 |
657 EGLint use_rgb = 1; | 645 EGLint use_rgb = 1; |
658 eglGetConfigAttrib(egl_display, egl_config, EGL_BIND_TO_TEXTURE_RGB, | 646 eglGetConfigAttrib(egl_display, egl_config, EGL_BIND_TO_TEXTURE_RGB, |
659 &use_rgb); | 647 &use_rgb); |
660 | 648 |
661 if (!picture_buffer->InitializeTexture(decoder, !!use_rgb)) | 649 if (!picture_buffer->InitializeTexture(decoder, !!use_rgb)) |
662 return linked_ptr<DXVAPictureBuffer>(nullptr); | 650 return linked_ptr<DXVAPictureBuffer>(nullptr); |
663 | 651 |
664 EGLint attrib_list[] = { | 652 EGLint attrib_list[] = {EGL_WIDTH, |
665 EGL_WIDTH, buffer.size().width(), | 653 buffer.size().width(), |
666 EGL_HEIGHT, buffer.size().height(), | 654 EGL_HEIGHT, |
667 EGL_TEXTURE_FORMAT, use_rgb ? EGL_TEXTURE_RGB : EGL_TEXTURE_RGBA, | 655 buffer.size().height(), |
668 EGL_TEXTURE_TARGET, EGL_TEXTURE_2D, | 656 EGL_TEXTURE_FORMAT, |
669 EGL_NONE | 657 use_rgb ? EGL_TEXTURE_RGB : EGL_TEXTURE_RGBA, |
670 }; | 658 EGL_TEXTURE_TARGET, |
| 659 EGL_TEXTURE_2D, |
| 660 EGL_NONE}; |
671 | 661 |
672 picture_buffer->decoding_surface_ = eglCreatePbufferFromClientBuffer( | 662 picture_buffer->decoding_surface_ = eglCreatePbufferFromClientBuffer( |
673 egl_display, EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE, | 663 egl_display, EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE, |
674 picture_buffer->texture_share_handle_, egl_config, attrib_list); | 664 picture_buffer->texture_share_handle_, egl_config, attrib_list); |
675 RETURN_ON_FAILURE(picture_buffer->decoding_surface_, | 665 RETURN_ON_FAILURE(picture_buffer->decoding_surface_, |
676 "Failed to create surface", | 666 "Failed to create surface", |
677 linked_ptr<DXVAPictureBuffer>(NULL)); | 667 linked_ptr<DXVAPictureBuffer>(NULL)); |
678 if (decoder.d3d11_device_ && decoder.use_keyed_mutex_) { | 668 if (decoder.d3d11_device_ && decoder.use_keyed_mutex_) { |
679 void* keyed_mutex = nullptr; | 669 void* keyed_mutex = nullptr; |
680 EGLBoolean ret = eglQuerySurfacePointerANGLE( | 670 EGLBoolean ret = eglQuerySurfacePointerANGLE( |
(...skipping 64 matching lines...) |
745 picture_buffer_(buffer), | 735 picture_buffer_(buffer), |
746 decoding_surface_(NULL), | 736 decoding_surface_(NULL), |
747 texture_share_handle_(nullptr), | 737 texture_share_handle_(nullptr), |
748 keyed_mutex_value_(0), | 738 keyed_mutex_value_(0), |
749 use_rgb_(true) {} | 739 use_rgb_(true) {} |
750 | 740 |
751 DXVAVideoDecodeAccelerator::DXVAPictureBuffer::~DXVAPictureBuffer() { | 741 DXVAVideoDecodeAccelerator::DXVAPictureBuffer::~DXVAPictureBuffer() { |
752 if (decoding_surface_) { | 742 if (decoding_surface_) { |
753 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); | 743 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); |
754 | 744 |
755 eglReleaseTexImage( | 745 eglReleaseTexImage(egl_display, decoding_surface_, EGL_BACK_BUFFER); |
756 egl_display, | |
757 decoding_surface_, | |
758 EGL_BACK_BUFFER); | |
759 | 746 |
760 eglDestroySurface( | 747 eglDestroySurface(egl_display, decoding_surface_); |
761 egl_display, | |
762 decoding_surface_); | |
763 decoding_surface_ = NULL; | 748 decoding_surface_ = NULL; |
764 } | 749 } |
765 } | 750 } |
766 | 751 |
767 bool DXVAVideoDecodeAccelerator::DXVAPictureBuffer::ReusePictureBuffer() { | 752 bool DXVAVideoDecodeAccelerator::DXVAPictureBuffer::ReusePictureBuffer() { |
768 DCHECK(decoding_surface_); | 753 DCHECK(decoding_surface_); |
769 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); | 754 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); |
770 eglReleaseTexImage( | 755 eglReleaseTexImage(egl_display, decoding_surface_, EGL_BACK_BUFFER); |
771 egl_display, | |
772 decoding_surface_, | |
773 EGL_BACK_BUFFER); | |
774 decoder_surface_.Release(); | 756 decoder_surface_.Release(); |
775 target_surface_.Release(); | 757 target_surface_.Release(); |
776 decoder_dx11_texture_.Release(); | 758 decoder_dx11_texture_.Release(); |
777 waiting_to_reuse_ = false; | 759 waiting_to_reuse_ = false; |
778 set_available(true); | 760 set_available(true); |
779 if (egl_keyed_mutex_) { | 761 if (egl_keyed_mutex_) { |
780 HRESULT hr = egl_keyed_mutex_->ReleaseSync(++keyed_mutex_value_); | 762 HRESULT hr = egl_keyed_mutex_->ReleaseSync(++keyed_mutex_value_); |
781 RETURN_ON_FAILURE(hr == S_OK, "Could not release sync mutex", false); | 763 RETURN_ON_FAILURE(hr == S_OK, "Could not release sync mutex", false); |
782 } | 764 } |
783 return true; | 765 return true; |
784 } | 766 } |
785 | 767 |
786 void DXVAVideoDecodeAccelerator::DXVAPictureBuffer::ResetReuseFence() { | 768 void DXVAVideoDecodeAccelerator::DXVAPictureBuffer::ResetReuseFence() { |
787 if (!reuse_fence_ || !reuse_fence_->ResetSupported()) | 769 if (!reuse_fence_ || !reuse_fence_->ResetSupported()) |
788 reuse_fence_.reset(gfx::GLFence::Create()); | 770 reuse_fence_.reset(gfx::GLFence::Create()); |
789 else | 771 else |
790 reuse_fence_->ResetState(); | 772 reuse_fence_->ResetState(); |
791 waiting_to_reuse_ = true; | 773 waiting_to_reuse_ = true; |
792 } | 774 } |
793 | 775 |
794 bool DXVAVideoDecodeAccelerator::DXVAPictureBuffer:: | 776 bool DXVAVideoDecodeAccelerator::DXVAPictureBuffer:: |
795 CopyOutputSampleDataToPictureBuffer( | 777 CopyOutputSampleDataToPictureBuffer(DXVAVideoDecodeAccelerator* decoder, |
796 DXVAVideoDecodeAccelerator* decoder, | 778 IDirect3DSurface9* dest_surface, |
797 IDirect3DSurface9* dest_surface, | 779 ID3D11Texture2D* dx11_texture, |
798 ID3D11Texture2D* dx11_texture, | 780 int input_buffer_id) { |
799 int input_buffer_id) { | |
800 DCHECK(dest_surface || dx11_texture); | 781 DCHECK(dest_surface || dx11_texture); |
801 if (dx11_texture) { | 782 if (dx11_texture) { |
802 // Grab a reference on the decoder texture. This reference will be released | 783 // Grab a reference on the decoder texture. This reference will be released |
803 // when we receive a notification that the copy was completed or when the | 784 // when we receive a notification that the copy was completed or when the |
804 // DXVAPictureBuffer instance is destroyed. | 785 // DXVAPictureBuffer instance is destroyed. |
805 decoder_dx11_texture_ = dx11_texture; | 786 decoder_dx11_texture_ = dx11_texture; |
806 decoder->CopyTexture(dx11_texture, dx11_decoding_texture_.get(), | 787 decoder->CopyTexture(dx11_texture, dx11_decoding_texture_.get(), |
807 dx11_keyed_mutex_, keyed_mutex_value_, NULL, id(), | 788 dx11_keyed_mutex_, keyed_mutex_value_, NULL, id(), |
808 input_buffer_id); | 789 input_buffer_id); |
809 return true; | 790 return true; |
(...skipping 57 matching lines...) |
867 decoder_dx11_texture_.Release(); | 848 decoder_dx11_texture_.Release(); |
868 } | 849 } |
869 if (egl_keyed_mutex_) { | 850 if (egl_keyed_mutex_) { |
870 keyed_mutex_value_++; | 851 keyed_mutex_value_++; |
871 HRESULT result = | 852 HRESULT result = |
872 egl_keyed_mutex_->AcquireSync(keyed_mutex_value_, kAcquireSyncWaitMs); | 853 egl_keyed_mutex_->AcquireSync(keyed_mutex_value_, kAcquireSyncWaitMs); |
873 RETURN_ON_FAILURE(result == S_OK, "Could not acquire sync mutex", false); | 854 RETURN_ON_FAILURE(result == S_OK, "Could not acquire sync mutex", false); |
874 } | 855 } |
875 | 856 |
876 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); | 857 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); |
877 eglBindTexImage( | 858 eglBindTexImage(egl_display, decoding_surface_, EGL_BACK_BUFFER); |
878 egl_display, | |
879 decoding_surface_, | |
880 EGL_BACK_BUFFER); | |
881 | 859 |
882 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); | 860 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); |
883 glBindTexture(GL_TEXTURE_2D, current_texture); | 861 glBindTexture(GL_TEXTURE_2D, current_texture); |
884 return true; | 862 return true; |
885 } | 863 } |
886 | 864 |
887 DXVAVideoDecodeAccelerator::PendingSampleInfo::PendingSampleInfo( | 865 DXVAVideoDecodeAccelerator::PendingSampleInfo::PendingSampleInfo( |
888 int32_t buffer_id, | 866 int32_t buffer_id, |
889 IMFSample* sample) | 867 IMFSample* sample) |
890 : input_buffer_id(buffer_id), picture_buffer_id(-1) { | 868 : input_buffer_id(buffer_id), picture_buffer_id(-1) { |
(...skipping 55 matching lines...) |
946 | 924 |
947 bool profile_supported = false; | 925 bool profile_supported = false; |
948 for (const auto& supported_profile : kSupportedProfiles) { | 926 for (const auto& supported_profile : kSupportedProfiles) { |
949 if (config.profile == supported_profile) { | 927 if (config.profile == supported_profile) { |
950 profile_supported = true; | 928 profile_supported = true; |
951 break; | 929 break; |
952 } | 930 } |
953 } | 931 } |
954 if (!profile_supported) { | 932 if (!profile_supported) { |
955 RETURN_AND_NOTIFY_ON_FAILURE(false, | 933 RETURN_AND_NOTIFY_ON_FAILURE(false, |
956 "Unsupported h.264, vp8, or vp9 profile", PLATFORM_FAILURE, false); | 934 "Unsupported h.264, vp8, or vp9 profile", |
| 935 PLATFORM_FAILURE, false); |
957 } | 936 } |
958 | 937 |
959 // Not all versions of Windows 7 and later include Media Foundation DLLs. | 938 // Not all versions of Windows 7 and later include Media Foundation DLLs. |
960 // Instead of crashing while delay loading the DLL when calling MFStartup() | 939 // Instead of crashing while delay loading the DLL when calling MFStartup() |
961 // below, probe whether we can successfully load the DLL now. | 940 // below, probe whether we can successfully load the DLL now. |
962 // See http://crbug.com/339678 for details. | 941 // See http://crbug.com/339678 for details. |
963 HMODULE dxgi_manager_dll = ::GetModuleHandle(L"MFPlat.dll"); | 942 HMODULE dxgi_manager_dll = ::GetModuleHandle(L"MFPlat.dll"); |
964 RETURN_ON_FAILURE(dxgi_manager_dll, "MFPlat.dll is required for decoding", | 943 RETURN_ON_FAILURE(dxgi_manager_dll, "MFPlat.dll is required for decoding", |
965 false); | 944 false); |
966 | 945 |
967 // On Windows 8+ mfplat.dll provides the MFCreateDXGIDeviceManager API. | 946 // On Windows 8+ mfplat.dll provides the MFCreateDXGIDeviceManager API. |
968 // On Windows 7 mshtmlmedia.dll provides it. | 947 // On Windows 7 mshtmlmedia.dll provides it. |
969 | 948 |
970 // TODO(ananta) | 949 // TODO(ananta) |
971 // The code below works, as in we can create the DX11 device manager for | 950 // The code below works, as in we can create the DX11 device manager for |
972 // Windows 7. However the IMFTransform we use for texture conversion and | 951 // Windows 7. However the IMFTransform we use for texture conversion and |
973 // copy does not exist on Windows 7. Look into an alternate approach | 952 // copy does not exist on Windows 7. Look into an alternate approach |
974 // and enable the code below. | 953 // and enable the code below. |
975 #if defined(ENABLE_DX11_FOR_WIN7) | 954 #if defined(ENABLE_DX11_FOR_WIN7) |
976 if (base::win::GetVersion() == base::win::VERSION_WIN7) { | 955 if (base::win::GetVersion() == base::win::VERSION_WIN7) { |
977 dxgi_manager_dll = ::GetModuleHandle(L"mshtmlmedia.dll"); | 956 dxgi_manager_dll = ::GetModuleHandle(L"mshtmlmedia.dll"); |
978 RETURN_ON_FAILURE(dxgi_manager_dll, | 957 RETURN_ON_FAILURE(dxgi_manager_dll, |
979 "mshtmlmedia.dll is required for decoding", false); | 958 "mshtmlmedia.dll is required for decoding", false); |
980 } | 959 } |
981 #endif | 960 #endif |
982 // If we don't find the MFCreateDXGIDeviceManager API we fallback to D3D9 | 961 // If we don't find the MFCreateDXGIDeviceManager API we fallback to D3D9 |
983 // decoding. | 962 // decoding. |
984 if (dxgi_manager_dll && !create_dxgi_device_manager_) { | 963 if (dxgi_manager_dll && !create_dxgi_device_manager_) { |
985 create_dxgi_device_manager_ = reinterpret_cast<CreateDXGIDeviceManager>( | 964 create_dxgi_device_manager_ = reinterpret_cast<CreateDXGIDeviceManager>( |
986 ::GetProcAddress(dxgi_manager_dll, "MFCreateDXGIDeviceManager")); | 965 ::GetProcAddress(dxgi_manager_dll, "MFCreateDXGIDeviceManager")); |
987 } | 966 } |
988 | 967 |
989 RETURN_AND_NOTIFY_ON_FAILURE( | 968 RETURN_AND_NOTIFY_ON_FAILURE( |
990 gfx::g_driver_egl.ext.b_EGL_ANGLE_surface_d3d_texture_2d_share_handle, | 969 gfx::g_driver_egl.ext.b_EGL_ANGLE_surface_d3d_texture_2d_share_handle, |
991 "EGL_ANGLE_surface_d3d_texture_2d_share_handle unavailable", | 970 "EGL_ANGLE_surface_d3d_texture_2d_share_handle unavailable", |
992 PLATFORM_FAILURE, | 971 PLATFORM_FAILURE, false); |
993 false); | |
994 | 972 |
995 RETURN_AND_NOTIFY_ON_FAILURE(gfx::GLFence::IsSupported(), | 973 RETURN_AND_NOTIFY_ON_FAILURE(gfx::GLFence::IsSupported(), |
996 "GL fences are unsupported", PLATFORM_FAILURE, | 974 "GL fences are unsupported", PLATFORM_FAILURE, |
997 false); | 975 false); |
998 | 976 |
999 State state = GetState(); | 977 State state = GetState(); |
1000 RETURN_AND_NOTIFY_ON_FAILURE((state == kUninitialized), | 978 RETURN_AND_NOTIFY_ON_FAILURE((state == kUninitialized), |
1001 "Initialize: invalid state: " << state, ILLEGAL_STATE, false); | 979 "Initialize: invalid state: " << state, |
| 980 ILLEGAL_STATE, false); |
1002 | 981 |
1003 media::InitializeMediaFoundation(); | 982 media::InitializeMediaFoundation(); |
1004 | 983 |
1005 RETURN_AND_NOTIFY_ON_FAILURE(InitDecoder(config.profile), | 984 RETURN_AND_NOTIFY_ON_FAILURE(InitDecoder(config.profile), |
1006 "Failed to initialize decoder", PLATFORM_FAILURE, false); | 985 "Failed to initialize decoder", PLATFORM_FAILURE, |
| 986 false); |
1007 | 987 |
1008 RETURN_AND_NOTIFY_ON_FAILURE(GetStreamsInfoAndBufferReqs(), | 988 RETURN_AND_NOTIFY_ON_FAILURE(GetStreamsInfoAndBufferReqs(), |
1009 "Failed to get input/output stream info.", PLATFORM_FAILURE, false); | 989 "Failed to get input/output stream info.", |
| 990 PLATFORM_FAILURE, false); |
1010 | 991 |
1011 RETURN_AND_NOTIFY_ON_FAILURE( | 992 RETURN_AND_NOTIFY_ON_FAILURE( |
1012 SendMFTMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0), | 993 SendMFTMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0), |
1013 "Send MFT_MESSAGE_NOTIFY_BEGIN_STREAMING notification failed", | 994 "Send MFT_MESSAGE_NOTIFY_BEGIN_STREAMING notification failed", |
1014 PLATFORM_FAILURE, false); | 995 PLATFORM_FAILURE, false); |
1015 | 996 |
1016 RETURN_AND_NOTIFY_ON_FAILURE( | 997 RETURN_AND_NOTIFY_ON_FAILURE( |
1017 SendMFTMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0), | 998 SendMFTMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0), |
1018 "Send MFT_MESSAGE_NOTIFY_START_OF_STREAM notification failed", | 999 "Send MFT_MESSAGE_NOTIFY_START_OF_STREAM notification failed", |
1019 PLATFORM_FAILURE, false); | 1000 PLATFORM_FAILURE, false); |
(...skipping 16 matching lines...) |
1036 hr = Direct3DCreate9Ex(D3D_SDK_VERSION, d3d9_.Receive()); | 1017 hr = Direct3DCreate9Ex(D3D_SDK_VERSION, d3d9_.Receive()); |
1037 RETURN_ON_HR_FAILURE(hr, "Direct3DCreate9Ex failed", false); | 1018 RETURN_ON_HR_FAILURE(hr, "Direct3DCreate9Ex failed", false); |
1038 | 1019 |
1039 base::win::ScopedComPtr<IDirect3DDevice9> angle_device = | 1020 base::win::ScopedComPtr<IDirect3DDevice9> angle_device = |
1040 QueryDeviceObjectFromANGLE<IDirect3DDevice9>(EGL_D3D9_DEVICE_ANGLE); | 1021 QueryDeviceObjectFromANGLE<IDirect3DDevice9>(EGL_D3D9_DEVICE_ANGLE); |
1041 if (angle_device.get()) | 1022 if (angle_device.get()) |
1042 using_angle_device_ = true; | 1023 using_angle_device_ = true; |
1043 | 1024 |
1044 if (using_angle_device_) { | 1025 if (using_angle_device_) { |
1045 hr = d3d9_device_ex_.QueryFrom(angle_device.get()); | 1026 hr = d3d9_device_ex_.QueryFrom(angle_device.get()); |
1046 RETURN_ON_HR_FAILURE(hr, | 1027 RETURN_ON_HR_FAILURE( |
1047 "QueryInterface for IDirect3DDevice9Ex from angle device failed", | 1028 hr, "QueryInterface for IDirect3DDevice9Ex from angle device failed", |
1048 false); | 1029 false); |
1049 } else { | 1030 } else { |
1050 D3DPRESENT_PARAMETERS present_params = {0}; | 1031 D3DPRESENT_PARAMETERS present_params = {0}; |
1051 present_params.BackBufferWidth = 1; | 1032 present_params.BackBufferWidth = 1; |
1052 present_params.BackBufferHeight = 1; | 1033 present_params.BackBufferHeight = 1; |
1053 present_params.BackBufferFormat = D3DFMT_UNKNOWN; | 1034 present_params.BackBufferFormat = D3DFMT_UNKNOWN; |
1054 present_params.BackBufferCount = 1; | 1035 present_params.BackBufferCount = 1; |
1055 present_params.SwapEffect = D3DSWAPEFFECT_DISCARD; | 1036 present_params.SwapEffect = D3DSWAPEFFECT_DISCARD; |
1056 present_params.hDeviceWindow = NULL; | 1037 present_params.hDeviceWindow = NULL; |
1057 present_params.Windowed = TRUE; | 1038 present_params.Windowed = TRUE; |
1058 present_params.Flags = D3DPRESENTFLAG_VIDEO; | 1039 present_params.Flags = D3DPRESENTFLAG_VIDEO; |
1059 present_params.FullScreen_RefreshRateInHz = 0; | 1040 present_params.FullScreen_RefreshRateInHz = 0; |
1060 present_params.PresentationInterval = 0; | 1041 present_params.PresentationInterval = 0; |
1061 | 1042 |
1062 hr = d3d9_->CreateDeviceEx(D3DADAPTER_DEFAULT, | 1043 hr = d3d9_->CreateDeviceEx( |
1063 D3DDEVTYPE_HAL, | 1044 D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, NULL, |
1064 NULL, | 1045 D3DCREATE_FPU_PRESERVE | D3DCREATE_HARDWARE_VERTEXPROCESSING | |
1065 D3DCREATE_FPU_PRESERVE | | 1046 D3DCREATE_DISABLE_PSGP_THREADING | D3DCREATE_MULTITHREADED, |
1066 D3DCREATE_HARDWARE_VERTEXPROCESSING | | 1047 &present_params, NULL, d3d9_device_ex_.Receive()); |
1067 D3DCREATE_DISABLE_PSGP_THREADING | | |
1068 D3DCREATE_MULTITHREADED, | |
1069 &present_params, | |
1070 NULL, | |
1071 d3d9_device_ex_.Receive()); | |
1072 RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device", false); | 1048 RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device", false); |
1073 } | 1049 } |
1074 | 1050 |
1075 hr = DXVA2CreateDirect3DDeviceManager9(&dev_manager_reset_token_, | 1051 hr = DXVA2CreateDirect3DDeviceManager9(&dev_manager_reset_token_, |
1076 device_manager_.Receive()); | 1052 device_manager_.Receive()); |
1077 RETURN_ON_HR_FAILURE(hr, "DXVA2CreateDirect3DDeviceManager9 failed", false); | 1053 RETURN_ON_HR_FAILURE(hr, "DXVA2CreateDirect3DDeviceManager9 failed", false); |
1078 | 1054 |
1079 hr = device_manager_->ResetDevice(d3d9_device_ex_.get(), | 1055 hr = device_manager_->ResetDevice(d3d9_device_ex_.get(), |
1080 dev_manager_reset_token_); | 1056 dev_manager_reset_token_); |
1081 RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false); | 1057 RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false); |
1082 | 1058 |
1083 hr = d3d9_device_ex_->CreateQuery(D3DQUERYTYPE_EVENT, query_.Receive()); | 1059 hr = d3d9_device_ex_->CreateQuery(D3DQUERYTYPE_EVENT, query_.Receive()); |
1084 RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device query", false); | 1060 RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device query", false); |
1085 // Ensure query_ API works (to avoid an infinite loop later in | 1061 // Ensure query_ API works (to avoid an infinite loop later in |
1086 // CopyOutputSampleDataToPictureBuffer). | 1062 // CopyOutputSampleDataToPictureBuffer). |
1087 hr = query_->Issue(D3DISSUE_END); | 1063 hr = query_->Issue(D3DISSUE_END); |
1088 RETURN_ON_HR_FAILURE(hr, "Failed to issue END test query", false); | 1064 RETURN_ON_HR_FAILURE(hr, "Failed to issue END test query", false); |
1089 return true; | 1065 return true; |
1090 } | 1066 } |
1091 | 1067 |
1092 bool DXVAVideoDecodeAccelerator::CreateDX11DevManager() { | 1068 bool DXVAVideoDecodeAccelerator::CreateDX11DevManager() { |
1093 HRESULT hr = create_dxgi_device_manager_(&dx11_dev_manager_reset_token_, | 1069 HRESULT hr = create_dxgi_device_manager_(&dx11_dev_manager_reset_token_, |
1094 d3d11_device_manager_.Receive()); | 1070 d3d11_device_manager_.Receive()); |
1095 RETURN_ON_HR_FAILURE(hr, "MFCreateDXGIDeviceManager failed", false); | 1071 RETURN_ON_HR_FAILURE(hr, "MFCreateDXGIDeviceManager failed", false); |
1096 | 1072 |
1097 // This array defines the set of DirectX hardware feature levels we support. | 1073 // This array defines the set of DirectX hardware feature levels we support. |
1098 // The ordering MUST be preserved. All applications are assumed to support | 1074 // The ordering MUST be preserved. All applications are assumed to support |
1099 // 9.1 unless otherwise stated by the application. | 1075 // 9.1 unless otherwise stated by the application. |
1100 D3D_FEATURE_LEVEL feature_levels[] = { | 1076 D3D_FEATURE_LEVEL feature_levels[] = { |
1101 D3D_FEATURE_LEVEL_11_1, | 1077 D3D_FEATURE_LEVEL_11_1, D3D_FEATURE_LEVEL_11_0, D3D_FEATURE_LEVEL_10_1, |
1102 D3D_FEATURE_LEVEL_11_0, | 1078 D3D_FEATURE_LEVEL_10_0, D3D_FEATURE_LEVEL_9_3, D3D_FEATURE_LEVEL_9_2, |
1103 D3D_FEATURE_LEVEL_10_1, | 1079 D3D_FEATURE_LEVEL_9_1}; |
1104 D3D_FEATURE_LEVEL_10_0, | |
1105 D3D_FEATURE_LEVEL_9_3, | |
1106 D3D_FEATURE_LEVEL_9_2, | |
1107 D3D_FEATURE_LEVEL_9_1 | |
1108 }; | |
1109 | 1080 |
1110 UINT flags = D3D11_CREATE_DEVICE_VIDEO_SUPPORT; | 1081 UINT flags = D3D11_CREATE_DEVICE_VIDEO_SUPPORT; |
1111 | 1082 |
1112 #if defined _DEBUG | 1083 #if defined _DEBUG |
1113 flags |= D3D11_CREATE_DEVICE_DEBUG; | 1084 flags |= D3D11_CREATE_DEVICE_DEBUG; |
1114 #endif | 1085 #endif |
1115 | 1086 |
1116 D3D_FEATURE_LEVEL feature_level_out = D3D_FEATURE_LEVEL_11_0; | 1087 D3D_FEATURE_LEVEL feature_level_out = D3D_FEATURE_LEVEL_11_0; |
1117 hr = D3D11CreateDevice(NULL, | 1088 hr = D3D11CreateDevice(NULL, D3D_DRIVER_TYPE_HARDWARE, NULL, flags, |
1118 D3D_DRIVER_TYPE_HARDWARE, | 1089 feature_levels, arraysize(feature_levels), |
1119 NULL, | 1090 D3D11_SDK_VERSION, d3d11_device_.Receive(), |
1120 flags, | 1091 &feature_level_out, d3d11_device_context_.Receive()); |
1121 feature_levels, | |
1122 arraysize(feature_levels), | |
1123 D3D11_SDK_VERSION, | |
1124 d3d11_device_.Receive(), | |
1125 &feature_level_out, | |
1126 d3d11_device_context_.Receive()); | |
1127 RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device", false); | 1092 RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device", false); |
1128 | 1093 |
1129 // Enable multithreaded mode on the device. This ensures that accesses to | 1094 // Enable multithreaded mode on the device. This ensures that accesses to |
1130 // context are synchronized across threads. We have multiple threads | 1095 // context are synchronized across threads. We have multiple threads |
1131 // accessing the context, the media foundation decoder threads and the | 1096 // accessing the context, the media foundation decoder threads and the |
1132 // decoder thread via the video format conversion transform. | 1097 // decoder thread via the video format conversion transform. |
1133 hr = multi_threaded_.QueryFrom(d3d11_device_.get()); | 1098 hr = multi_threaded_.QueryFrom(d3d11_device_.get()); |
1134 RETURN_ON_HR_FAILURE(hr, "Failed to query ID3D10Multithread", false); | 1099 RETURN_ON_HR_FAILURE(hr, "Failed to query ID3D10Multithread", false); |
1135 multi_threaded_->SetMultithreadProtected(TRUE); | 1100 multi_threaded_->SetMultithreadProtected(TRUE); |
1136 | 1101 |
1137 hr = d3d11_device_manager_->ResetDevice(d3d11_device_.get(), | 1102 hr = d3d11_device_manager_->ResetDevice(d3d11_device_.get(), |
1138 dx11_dev_manager_reset_token_); | 1103 dx11_dev_manager_reset_token_); |
1139 RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false); | 1104 RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false); |
1140 | 1105 |
1141 D3D11_QUERY_DESC query_desc; | 1106 D3D11_QUERY_DESC query_desc; |
1142 query_desc.Query = D3D11_QUERY_EVENT; | 1107 query_desc.Query = D3D11_QUERY_EVENT; |
1143 query_desc.MiscFlags = 0; | 1108 query_desc.MiscFlags = 0; |
1144 hr = d3d11_device_->CreateQuery( | 1109 hr = d3d11_device_->CreateQuery(&query_desc, d3d11_query_.Receive()); |
1145 &query_desc, | |
1146 d3d11_query_.Receive()); | |
1147 RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device query", false); | 1110 RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device query", false); |
1148 | 1111 |
1149 HMODULE video_processor_dll = ::GetModuleHandle(L"msvproc.dll"); | 1112 HMODULE video_processor_dll = ::GetModuleHandle(L"msvproc.dll"); |
1150 RETURN_ON_FAILURE(video_processor_dll, "Failed to load video processor", | 1113 RETURN_ON_FAILURE(video_processor_dll, "Failed to load video processor", |
1151 false); | 1114 false); |
1152 | 1115 |
1153 hr = CreateCOMObjectFromDll( | 1116 hr = CreateCOMObjectFromDll(video_processor_dll, CLSID_VideoProcessorMFT, |
1154 video_processor_dll, | 1117 __uuidof(IMFTransform), |
1155 CLSID_VideoProcessorMFT, | 1118 video_format_converter_mft_.ReceiveVoid()); |
1156 __uuidof(IMFTransform), | |
1157 video_format_converter_mft_.ReceiveVoid()); | |
1158 if (FAILED(hr)) { | 1119 if (FAILED(hr)) { |
1159 base::debug::Alias(&hr); | 1120 base::debug::Alias(&hr); |
1160 // TODO(ananta) | 1121 // TODO(ananta) |
1161 // Remove this CHECK when the change to use DX11 for H/W decoding | 1122 // Remove this CHECK when the change to use DX11 for H/W decoding |
1162 // stabilizes. | 1123 // stabilizes. |
1163 CHECK(false); | 1124 CHECK(false); |
1164 } | 1125 } |
1165 | 1126 |
1166 RETURN_ON_HR_FAILURE(hr, "Failed to create video format converter", false); | 1127 RETURN_ON_HR_FAILURE(hr, "Failed to create video format converter", false); |
1167 | 1128 |
1168 base::win::ScopedComPtr<IMFAttributes> converter_attributes; | 1129 base::win::ScopedComPtr<IMFAttributes> converter_attributes; |
1169 hr = video_format_converter_mft_->GetAttributes( | 1130 hr = video_format_converter_mft_->GetAttributes( |
1170 converter_attributes.Receive()); | 1131 converter_attributes.Receive()); |
1171 RETURN_ON_HR_FAILURE(hr, "Failed to get converter attributes", false); | 1132 RETURN_ON_HR_FAILURE(hr, "Failed to get converter attributes", false); |
1172 | 1133 |
1173 hr = converter_attributes->SetUINT32(MF_XVP_PLAYBACK_MODE, TRUE); | 1134 hr = converter_attributes->SetUINT32(MF_XVP_PLAYBACK_MODE, TRUE); |
1174 RETURN_ON_HR_FAILURE( | 1135 RETURN_ON_HR_FAILURE( |
1175 hr, | 1136 hr, "Failed to set MF_XVP_PLAYBACK_MODE attribute on converter", false); |
1176 "Failed to set MF_XVP_PLAYBACK_MODE attribute on converter", | |
1177 false); | |
1178 | 1137 |
1179 hr = converter_attributes->SetUINT32(MF_LOW_LATENCY, FALSE); | 1138 hr = converter_attributes->SetUINT32(MF_LOW_LATENCY, FALSE); |
1180 RETURN_ON_HR_FAILURE( | 1139 RETURN_ON_HR_FAILURE( |
1181 hr, | 1140 hr, "Failed to set MF_LOW_LATENCY attribute on converter", false); |
1182 "Failed to set MF_LOW_LATENCY attribute on converter", | |
1183 false); | |
1184 return true; | 1141 return true; |
1185 } | 1142 } |
1186 | 1143 |
1187 void DXVAVideoDecodeAccelerator::Decode( | 1144 void DXVAVideoDecodeAccelerator::Decode( |
1188 const media::BitstreamBuffer& bitstream_buffer) { | 1145 const media::BitstreamBuffer& bitstream_buffer) { |
1189 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 1146 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
1190 | 1147 |
1191 // SharedMemory will take over ownership of the handle. | 1148 // SharedMemory will take over ownership of the handle. |
1192 base::SharedMemory shm(bitstream_buffer.handle(), true); | 1149 base::SharedMemory shm(bitstream_buffer.handle(), true); |
1193 | 1150 |
1194 State state = GetState(); | 1151 State state = GetState(); |
1195 RETURN_AND_NOTIFY_ON_FAILURE((state == kNormal || state == kStopped || | 1152 RETURN_AND_NOTIFY_ON_FAILURE( |
1196 state == kFlushing), | 1153 (state == kNormal || state == kStopped || state == kFlushing), |
1197 "Invalid state: " << state, ILLEGAL_STATE,); | 1154 "Invalid state: " << state, ILLEGAL_STATE, ); |
1198 if (bitstream_buffer.id() < 0) { | 1155 if (bitstream_buffer.id() < 0) { |
1199 RETURN_AND_NOTIFY_ON_FAILURE( | 1156 RETURN_AND_NOTIFY_ON_FAILURE( |
1200 false, "Invalid bitstream_buffer, id: " << bitstream_buffer.id(), | 1157 false, "Invalid bitstream_buffer, id: " << bitstream_buffer.id(), |
1201 INVALID_ARGUMENT, ); | 1158 INVALID_ARGUMENT, ); |
1202 } | 1159 } |
1203 | 1160 |
1204 base::win::ScopedComPtr<IMFSample> sample; | 1161 base::win::ScopedComPtr<IMFSample> sample; |
1205 RETURN_AND_NOTIFY_ON_FAILURE(shm.Map(bitstream_buffer.size()), | 1162 RETURN_AND_NOTIFY_ON_FAILURE(shm.Map(bitstream_buffer.size()), |
1206 "Failed in base::SharedMemory::Map", | 1163 "Failed in base::SharedMemory::Map", |
1207 PLATFORM_FAILURE, ); | 1164 PLATFORM_FAILURE, ); |
1208 | 1165 |
1209 sample.Attach(CreateInputSample( | 1166 sample.Attach(CreateInputSample( |
1210 reinterpret_cast<const uint8_t*>(shm.memory()), bitstream_buffer.size(), | 1167 reinterpret_cast<const uint8_t*>(shm.memory()), bitstream_buffer.size(), |
1211 std::min<uint32_t>(bitstream_buffer.size(), input_stream_info_.cbSize), | 1168 std::min<uint32_t>(bitstream_buffer.size(), input_stream_info_.cbSize), |
1212 input_stream_info_.cbAlignment)); | 1169 input_stream_info_.cbAlignment)); |
1213 RETURN_AND_NOTIFY_ON_FAILURE(sample.get(), "Failed to create input sample", | 1170 RETURN_AND_NOTIFY_ON_FAILURE(sample.get(), "Failed to create input sample", |
1214 PLATFORM_FAILURE, ); | 1171 PLATFORM_FAILURE, ); |
1215 | 1172 |
1216 RETURN_AND_NOTIFY_ON_HR_FAILURE(sample->SetSampleTime(bitstream_buffer.id()), | 1173 RETURN_AND_NOTIFY_ON_HR_FAILURE( |
1217 "Failed to associate input buffer id with sample", PLATFORM_FAILURE,); | 1174 sample->SetSampleTime(bitstream_buffer.id()), |
| 1175 "Failed to associate input buffer id with sample", PLATFORM_FAILURE, ); |
1218 | 1176 |
1219 decoder_thread_task_runner_->PostTask( | 1177 decoder_thread_task_runner_->PostTask( |
1220 FROM_HERE, | 1178 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::DecodeInternal, |
1221 base::Bind(&DXVAVideoDecodeAccelerator::DecodeInternal, | 1179 base::Unretained(this), sample)); |
1222 base::Unretained(this), sample)); | |
1223 } | 1180 } |
1224 | 1181 |
1225 void DXVAVideoDecodeAccelerator::AssignPictureBuffers( | 1182 void DXVAVideoDecodeAccelerator::AssignPictureBuffers( |
1226 const std::vector<media::PictureBuffer>& buffers) { | 1183 const std::vector<media::PictureBuffer>& buffers) { |
1227 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 1184 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
1228 | 1185 |
1229 State state = GetState(); | 1186 State state = GetState(); |
1230 RETURN_AND_NOTIFY_ON_FAILURE((state != kUninitialized), | 1187 RETURN_AND_NOTIFY_ON_FAILURE((state != kUninitialized), |
1231 "Invalid state: " << state, ILLEGAL_STATE,); | 1188 "Invalid state: " << state, ILLEGAL_STATE, ); |
1232 RETURN_AND_NOTIFY_ON_FAILURE((kNumPictureBuffers >= buffers.size()), | 1189 RETURN_AND_NOTIFY_ON_FAILURE( |
1233 "Failed to provide requested picture buffers. (Got " << buffers.size() << | 1190 (kNumPictureBuffers >= buffers.size()), |
1234 ", requested " << kNumPictureBuffers << ")", INVALID_ARGUMENT,); | 1191 "Failed to provide requested picture buffers. (Got " |
| 1192 << buffers.size() << ", requested " << kNumPictureBuffers << ")", |
| 1193 INVALID_ARGUMENT, ); |
1235 | 1194 |
1236 // Copy the picture buffers provided by the client to the available list, | 1195 // Copy the picture buffers provided by the client to the available list, |
1237 // and mark these buffers as available for use. | 1196 // and mark these buffers as available for use. |
1238 for (size_t buffer_index = 0; buffer_index < buffers.size(); | 1197 for (size_t buffer_index = 0; buffer_index < buffers.size(); ++buffer_index) { |
1239 ++buffer_index) { | |
1240 DCHECK_LE(1u, buffers[buffer_index].texture_ids().size()); | 1198 DCHECK_LE(1u, buffers[buffer_index].texture_ids().size()); |
1241 linked_ptr<DXVAPictureBuffer> picture_buffer = | 1199 linked_ptr<DXVAPictureBuffer> picture_buffer = |
1242 DXVAPictureBuffer::Create(*this, buffers[buffer_index], egl_config_); | 1200 DXVAPictureBuffer::Create(*this, buffers[buffer_index], egl_config_); |
1243 RETURN_AND_NOTIFY_ON_FAILURE(picture_buffer.get(), | 1201 RETURN_AND_NOTIFY_ON_FAILURE(picture_buffer.get(), |
1244 "Failed to allocate picture buffer", PLATFORM_FAILURE,); | 1202 "Failed to allocate picture buffer", |
| 1203 PLATFORM_FAILURE, ); |
1245 | 1204 |
1246 bool inserted = output_picture_buffers_.insert(std::make_pair( | 1205 bool inserted = |
1247 buffers[buffer_index].id(), picture_buffer)).second; | 1206 output_picture_buffers_ |
| 1207 .insert(std::make_pair(buffers[buffer_index].id(), picture_buffer)) |
| 1208 .second; |
1248 DCHECK(inserted); | 1209 DCHECK(inserted); |
1249 } | 1210 } |
1250 | 1211 |
1251 ProcessPendingSamples(); | 1212 ProcessPendingSamples(); |
1252 if (pending_flush_) { | 1213 if (pending_flush_) { |
1253 decoder_thread_task_runner_->PostTask( | 1214 decoder_thread_task_runner_->PostTask( |
1254 FROM_HERE, | 1215 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal, |
1255 base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal, | 1216 base::Unretained(this))); |
1256 base::Unretained(this))); | |
1257 } | 1217 } |
1258 } | 1218 } |
1259 | 1219 |
1260 void DXVAVideoDecodeAccelerator::ReusePictureBuffer(int32_t picture_buffer_id) { | 1220 void DXVAVideoDecodeAccelerator::ReusePictureBuffer(int32_t picture_buffer_id) { |
1261 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 1221 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
1262 | 1222 |
1263 State state = GetState(); | 1223 State state = GetState(); |
1264 RETURN_AND_NOTIFY_ON_FAILURE((state != kUninitialized), | 1224 RETURN_AND_NOTIFY_ON_FAILURE((state != kUninitialized), |
1265 "Invalid state: " << state, ILLEGAL_STATE,); | 1225 "Invalid state: " << state, ILLEGAL_STATE, ); |
1266 | 1226 |
1267 if (output_picture_buffers_.empty() && stale_output_picture_buffers_.empty()) | 1227 if (output_picture_buffers_.empty() && stale_output_picture_buffers_.empty()) |
1268 return; | 1228 return; |
1269 | 1229 |
1270 OutputBuffers::iterator it = output_picture_buffers_.find(picture_buffer_id); | 1230 OutputBuffers::iterator it = output_picture_buffers_.find(picture_buffer_id); |
1271 // If we didn't find the picture id in the |output_picture_buffers_| map we | 1231 // If we didn't find the picture id in the |output_picture_buffers_| map we |
1272 // try the |stale_output_picture_buffers_| map, as this may have been an | 1232 // try the |stale_output_picture_buffers_| map, as this may have been an |
1273 // output picture buffer from before a resolution change that had yet to be | 1233 // output picture buffer from before a resolution change that had yet to be |
1274 // displayed at the time of the change. The client is calling us back to tell | 1234 // displayed at the time of the change. The client is calling us back to tell |
1275 // us that we can now recycle this picture buffer, so if we were waiting to | 1235 // us that we can now recycle this picture buffer, so if we were waiting to |
1276 // dispose of it we now can. | 1236 // dispose of it we now can. |
1277 if (it == output_picture_buffers_.end()) { | 1237 if (it == output_picture_buffers_.end()) { |
1278 if (!stale_output_picture_buffers_.empty()) { | 1238 if (!stale_output_picture_buffers_.empty()) { |
1279 it = stale_output_picture_buffers_.find(picture_buffer_id); | 1239 it = stale_output_picture_buffers_.find(picture_buffer_id); |
1280 RETURN_AND_NOTIFY_ON_FAILURE(it != stale_output_picture_buffers_.end(), | 1240 RETURN_AND_NOTIFY_ON_FAILURE(it != stale_output_picture_buffers_.end(), |
1281 "Invalid picture id: " << picture_buffer_id, INVALID_ARGUMENT,); | 1241 "Invalid picture id: " << picture_buffer_id, |
| 1242 INVALID_ARGUMENT, ); |
1282 main_thread_task_runner_->PostTask( | 1243 main_thread_task_runner_->PostTask( |
1283 FROM_HERE, | 1244 FROM_HERE, |
1284 base::Bind(&DXVAVideoDecodeAccelerator::DeferredDismissStaleBuffer, | 1245 base::Bind(&DXVAVideoDecodeAccelerator::DeferredDismissStaleBuffer, |
1285 weak_this_factory_.GetWeakPtr(), picture_buffer_id)); | 1246 weak_this_factory_.GetWeakPtr(), picture_buffer_id)); |
1286 } | 1247 } |
1287 return; | 1248 return; |
1288 } | 1249 } |
1289 | 1250 |
1290 if (it->second->available() || it->second->waiting_to_reuse()) | 1251 if (it->second->available() || it->second->waiting_to_reuse()) |
1291 return; | 1252 return; |
(...skipping 43 matching lines...) |
1335 base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs)); | 1296 base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs)); |
1336 return; | 1297 return; |
1337 } | 1298 } |
1338 RETURN_AND_NOTIFY_ON_FAILURE(picture_buffer->ReusePictureBuffer(), | 1299 RETURN_AND_NOTIFY_ON_FAILURE(picture_buffer->ReusePictureBuffer(), |
1339 "Failed to reuse picture buffer", | 1300 "Failed to reuse picture buffer", |
1340 PLATFORM_FAILURE, ); | 1301 PLATFORM_FAILURE, ); |
1341 | 1302 |
1342 ProcessPendingSamples(); | 1303 ProcessPendingSamples(); |
1343 if (pending_flush_) { | 1304 if (pending_flush_) { |
1344 decoder_thread_task_runner_->PostTask( | 1305 decoder_thread_task_runner_->PostTask( |
1345 FROM_HERE, | 1306 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal, |
1346 base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal, | 1307 base::Unretained(this))); |
1347 base::Unretained(this))); | |
1348 } | 1308 } |
1349 } | 1309 } |
1350 | 1310 |
1351 void DXVAVideoDecodeAccelerator::Flush() { | 1311 void DXVAVideoDecodeAccelerator::Flush() { |
1352 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 1312 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
1353 | 1313 |
1354 DVLOG(1) << "DXVAVideoDecodeAccelerator::Flush"; | 1314 DVLOG(1) << "DXVAVideoDecodeAccelerator::Flush"; |
1355 | 1315 |
1356 State state = GetState(); | 1316 State state = GetState(); |
1357 RETURN_AND_NOTIFY_ON_FAILURE((state == kNormal || state == kStopped), | 1317 RETURN_AND_NOTIFY_ON_FAILURE((state == kNormal || state == kStopped), |
1358 "Unexpected decoder state: " << state, ILLEGAL_STATE,); | 1318 "Unexpected decoder state: " << state, |
| 1319 ILLEGAL_STATE, ); |
1359 | 1320 |
1360 SetState(kFlushing); | 1321 SetState(kFlushing); |
1361 | 1322 |
1362 pending_flush_ = true; | 1323 pending_flush_ = true; |
1363 | 1324 |
1364 decoder_thread_task_runner_->PostTask( | 1325 decoder_thread_task_runner_->PostTask( |
1365 FROM_HERE, | 1326 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal, |
1366 base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal, | 1327 base::Unretained(this))); |
1367 base::Unretained(this))); | |
1368 } | 1328 } |
1369 | 1329 |
1370 void DXVAVideoDecodeAccelerator::Reset() { | 1330 void DXVAVideoDecodeAccelerator::Reset() { |
1371 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 1331 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
1372 | 1332 |
1373 DVLOG(1) << "DXVAVideoDecodeAccelerator::Reset"; | 1333 DVLOG(1) << "DXVAVideoDecodeAccelerator::Reset"; |
1374 | 1334 |
1375 State state = GetState(); | 1335 State state = GetState(); |
1376 RETURN_AND_NOTIFY_ON_FAILURE((state == kNormal || state == kStopped), | 1336 RETURN_AND_NOTIFY_ON_FAILURE((state == kNormal || state == kStopped), |
1377 "Reset: invalid state: " << state, ILLEGAL_STATE,); | 1337 "Reset: invalid state: " << state, |
| 1338 ILLEGAL_STATE, ); |
1378 | 1339 |
1379 decoder_thread_.Stop(); | 1340 decoder_thread_.Stop(); |
1380 | 1341 |
1381 SetState(kResetting); | 1342 SetState(kResetting); |
1382 | 1343 |
1383 // If we have pending output frames waiting for display, we drop those | 1344 // If we have pending output frames waiting for display, we drop those |
1384 // frames and mark the corresponding picture buffers as available. | 1345 // frames and mark the corresponding picture buffers as available. |
1385 PendingOutputSamples::iterator index; | 1346 PendingOutputSamples::iterator index; |
1386 for (index = pending_output_samples_.begin(); | 1347 for (index = pending_output_samples_.begin(); |
1387 index != pending_output_samples_.end(); | 1348 index != pending_output_samples_.end(); ++index) { |
1388 ++index) { | |
1389 if (index->picture_buffer_id != -1) { | 1349 if (index->picture_buffer_id != -1) { |
1390 OutputBuffers::iterator it = output_picture_buffers_.find( | 1350 OutputBuffers::iterator it = |
1391 index->picture_buffer_id); | 1351 output_picture_buffers_.find(index->picture_buffer_id); |
1392 if (it != output_picture_buffers_.end()) { | 1352 if (it != output_picture_buffers_.end()) { |
1393 DXVAPictureBuffer* picture_buffer = it->second.get(); | 1353 DXVAPictureBuffer* picture_buffer = it->second.get(); |
1394 picture_buffer->ReusePictureBuffer(); | 1354 picture_buffer->ReusePictureBuffer(); |
1395 } | 1355 } |
1396 } | 1356 } |
1397 } | 1357 } |
1398 | 1358 |
1399 pending_output_samples_.clear(); | 1359 pending_output_samples_.clear(); |
1400 | 1360 |
1401 NotifyInputBuffersDropped(); | 1361 NotifyInputBuffersDropped(); |
1402 | 1362 |
1403 RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_FLUSH, 0), | 1363 RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_FLUSH, 0), |
1404 "Reset: Failed to send message.", PLATFORM_FAILURE,); | 1364 "Reset: Failed to send message.", |
| 1365 PLATFORM_FAILURE, ); |
1405 | 1366 |
1406 main_thread_task_runner_->PostTask( | 1367 main_thread_task_runner_->PostTask( |
1407 FROM_HERE, | 1368 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::NotifyResetDone, |
1408 base::Bind(&DXVAVideoDecodeAccelerator::NotifyResetDone, | 1369 weak_this_factory_.GetWeakPtr())); |
1409 weak_this_factory_.GetWeakPtr())); | |
1410 | 1370 |
1411 StartDecoderThread(); | 1371 StartDecoderThread(); |
1412 SetState(kNormal); | 1372 SetState(kNormal); |
1413 } | 1373 } |
1414 | 1374 |
1415 void DXVAVideoDecodeAccelerator::Destroy() { | 1375 void DXVAVideoDecodeAccelerator::Destroy() { |
1416 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 1376 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
1417 Invalidate(); | 1377 Invalidate(); |
1418 delete this; | 1378 delete this; |
1419 } | 1379 } |
1420 | 1380 |
1421 bool DXVAVideoDecodeAccelerator::TryToSetupDecodeOnSeparateThread( | 1381 bool DXVAVideoDecodeAccelerator::TryToSetupDecodeOnSeparateThread( |
1422 const base::WeakPtr<Client>& decode_client, | 1382 const base::WeakPtr<Client>& decode_client, |
1423 const scoped_refptr<base::SingleThreadTaskRunner>& decode_task_runner) { | 1383 const scoped_refptr<base::SingleThreadTaskRunner>& decode_task_runner) { |
1424 return false; | 1384 return false; |
1425 } | 1385 } |
1426 | 1386 |
1427 GLenum DXVAVideoDecodeAccelerator::GetSurfaceInternalFormat() const { | 1387 GLenum DXVAVideoDecodeAccelerator::GetSurfaceInternalFormat() const { |
1428 return GL_BGRA_EXT; | 1388 return GL_BGRA_EXT; |
1429 } | 1389 } |
1430 | 1390 |
1431 // static | 1391 // static |
1432 media::VideoDecodeAccelerator::SupportedProfiles | 1392 media::VideoDecodeAccelerator::SupportedProfiles |
1433 DXVAVideoDecodeAccelerator::GetSupportedProfiles() { | 1393 DXVAVideoDecodeAccelerator::GetSupportedProfiles() { |
1434 TRACE_EVENT0("gpu,startup", | 1394 TRACE_EVENT0("gpu,startup", |
1435 "DXVAVideoDecodeAccelerator::GetSupportedProfiles"); | 1395 "DXVAVideoDecodeAccelerator::GetSupportedProfiles"); |
1436 | 1396 |
1437 // TODO(henryhsu): Need to ensure the profiles are actually supported. | 1397 // TODO(henryhsu): Need to ensure the profiles are actually supported. |
1438 SupportedProfiles profiles; | 1398 SupportedProfiles profiles; |
1439 for (const auto& supported_profile : kSupportedProfiles) { | 1399 for (const auto& supported_profile : kSupportedProfiles) { |
1440 std::pair<int, int> min_resolution = GetMinResolution(supported_profile); | 1400 std::pair<int, int> min_resolution = GetMinResolution(supported_profile); |
1441 std::pair<int, int> max_resolution = GetMaxResolution(supported_profile); | 1401 std::pair<int, int> max_resolution = GetMaxResolution(supported_profile); |
1442 | 1402 |
1443 SupportedProfile profile; | 1403 SupportedProfile profile; |
1444 profile.profile = supported_profile; | 1404 profile.profile = supported_profile; |
1445 profile.min_resolution.SetSize(min_resolution.first, min_resolution.second); | 1405 profile.min_resolution.SetSize(min_resolution.first, min_resolution.second); |
(...skipping 15 matching lines...) |
1461 } else { | 1421 } else { |
1462 #if defined(ENABLE_DX11_FOR_WIN7) | 1422 #if defined(ENABLE_DX11_FOR_WIN7) |
1463 LoadLibrary(L"mshtmlmedia.dll"); | 1423 LoadLibrary(L"mshtmlmedia.dll"); |
1464 #endif | 1424 #endif |
1465 } | 1425 } |
1466 } | 1426 } |
1467 | 1427 |
1468 // static | 1428 // static |
1469 std::pair<int, int> DXVAVideoDecodeAccelerator::GetMinResolution( | 1429 std::pair<int, int> DXVAVideoDecodeAccelerator::GetMinResolution( |
1470 media::VideoCodecProfile profile) { | 1430 media::VideoCodecProfile profile) { |
1471 TRACE_EVENT0("gpu,startup", | 1431 TRACE_EVENT0("gpu,startup", "DXVAVideoDecodeAccelerator::GetMinResolution"); |
1472 "DXVAVideoDecodeAccelerator::GetMinResolution"); | |
1473 std::pair<int, int> min_resolution; | 1432 std::pair<int, int> min_resolution; |
1474 if (profile >= media::H264PROFILE_BASELINE && | 1433 if (profile >= media::H264PROFILE_BASELINE && |
1475 profile <= media::H264PROFILE_HIGH) { | 1434 profile <= media::H264PROFILE_HIGH) { |
1476 // Windows Media Foundation H.264 decoding does not support decoding videos | 1435 // Windows Media Foundation H.264 decoding does not support decoding videos |
1477 // with any dimension smaller than 48 pixels: | 1436 // with any dimension smaller than 48 pixels: |
1478 // http://msdn.microsoft.com/en-us/library/windows/desktop/dd797815 | 1437 // http://msdn.microsoft.com/en-us/library/windows/desktop/dd797815 |
1479 min_resolution = std::make_pair(48, 48); | 1438 min_resolution = std::make_pair(48, 48); |
1480 } else { | 1439 } else { |
1481 // TODO(ananta) | 1440 // TODO(ananta) |
1482 // Detect this properly for VP8/VP9 profiles. | 1441 // Detect this properly for VP8/VP9 profiles. |
1483 min_resolution = std::make_pair(16, 16); | 1442 min_resolution = std::make_pair(16, 16); |
1484 } | 1443 } |
1485 return min_resolution; | 1444 return min_resolution; |
1486 } | 1445 } |
1487 | 1446 |
1488 // static | 1447 // static |
1489 std::pair<int, int> DXVAVideoDecodeAccelerator::GetMaxResolution( | 1448 std::pair<int, int> DXVAVideoDecodeAccelerator::GetMaxResolution( |
1490 const media::VideoCodecProfile profile) { | 1449 const media::VideoCodecProfile profile) { |
1491 TRACE_EVENT0("gpu,startup", | 1450 TRACE_EVENT0("gpu,startup", "DXVAVideoDecodeAccelerator::GetMaxResolution"); |
1492 "DXVAVideoDecodeAccelerator::GetMaxResolution"); | |
1493 std::pair<int, int> max_resolution; | 1451 std::pair<int, int> max_resolution; |
1494 if (profile >= media::H264PROFILE_BASELINE && | 1452 if (profile >= media::H264PROFILE_BASELINE && |
1495 profile <= media::H264PROFILE_HIGH) { | 1453 profile <= media::H264PROFILE_HIGH) { |
1496 max_resolution = GetMaxH264Resolution(); | 1454 max_resolution = GetMaxH264Resolution(); |
1497 } else { | 1455 } else { |
1498 // TODO(ananta) | 1456 // TODO(ananta) |
1499 // Detect this properly for VP8/VP9 profiles. | 1457 // Detect this properly for VP8/VP9 profiles. |
1500 max_resolution = std::make_pair(4096, 2160); | 1458 max_resolution = std::make_pair(4096, 2160); |
1501 } | 1459 } |
1502 return max_resolution; | 1460 return max_resolution; |
(...skipping 18 matching lines...) |
1521 return max_resolution; | 1479 return max_resolution; |
1522 | 1480 |
1523 // To detect if a driver supports the desired resolutions, we try to create | 1481 // To detect if a driver supports the desired resolutions, we try to create |
1524 // a DXVA decoder instance for that resolution and profile. If that succeeds, | 1482 // a DXVA decoder instance for that resolution and profile. If that succeeds, |
1525 // we assume that the driver supports H/W H.264 decoding for that resolution. | 1483 // we assume that the driver supports H/W H.264 decoding for that resolution. |
1526 HRESULT hr = E_FAIL; | 1484 HRESULT hr = E_FAIL; |
1527 base::win::ScopedComPtr<ID3D11Device> device; | 1485 base::win::ScopedComPtr<ID3D11Device> device; |
1528 | 1486 |
1529 { | 1487 { |
1530 TRACE_EVENT0("gpu,startup", | 1488 TRACE_EVENT0("gpu,startup", |
1531 "GetMaxH264Resolution. QueryDeviceObjectFromANGLE"); | 1489 "GetMaxH264Resolution. QueryDeviceObjectFromANGLE"); |
1532 | 1490 |
1533 device = QueryDeviceObjectFromANGLE<ID3D11Device>(EGL_D3D11_DEVICE_ANGLE); | 1491 device = QueryDeviceObjectFromANGLE<ID3D11Device>(EGL_D3D11_DEVICE_ANGLE); |
1534 if (!device.get()) | 1492 if (!device.get()) |
1535 return max_resolution; | 1493 return max_resolution; |
1536 } | 1494 } |
1537 | 1495 |
1538 base::win::ScopedComPtr<ID3D11VideoDevice> video_device; | 1496 base::win::ScopedComPtr<ID3D11VideoDevice> video_device; |
1539 hr = device.QueryInterface(IID_ID3D11VideoDevice, | 1497 hr = device.QueryInterface(IID_ID3D11VideoDevice, video_device.ReceiveVoid()); |
1540 video_device.ReceiveVoid()); | |
1541 if (FAILED(hr)) | 1498 if (FAILED(hr)) |
1542 return max_resolution; | 1499 return max_resolution; |
1543 | 1500 |
1544 GUID decoder_guid = {}; | 1501 GUID decoder_guid = {}; |
1545 | 1502 |
1546 { | 1503 { |
1547 TRACE_EVENT0("gpu,startup", | 1504 TRACE_EVENT0("gpu,startup", |
1548 "GetMaxH264Resolution. H.264 guid search begin"); | 1505 "GetMaxH264Resolution. H.264 guid search begin"); |
1549 // Enumerate supported video profiles and look for the H264 profile. | 1506 // Enumerate supported video profiles and look for the H264 profile. |
1550 bool found = false; | 1507 bool found = false; |
1551 UINT profile_count = video_device->GetVideoDecoderProfileCount(); | 1508 UINT profile_count = video_device->GetVideoDecoderProfileCount(); |
1552 for (UINT profile_idx = 0; profile_idx < profile_count; profile_idx++) { | 1509 for (UINT profile_idx = 0; profile_idx < profile_count; profile_idx++) { |
1553 GUID profile_id = {}; | 1510 GUID profile_id = {}; |
1554 hr = video_device->GetVideoDecoderProfile(profile_idx, &profile_id); | 1511 hr = video_device->GetVideoDecoderProfile(profile_idx, &profile_id); |
1555 if (SUCCEEDED(hr) && | 1512 if (SUCCEEDED(hr) && (profile_id == DXVA2_ModeH264_E || |
1556 (profile_id == DXVA2_ModeH264_E || | 1513 profile_id == DXVA2_Intel_ModeH264_E)) { |
1557 profile_id == DXVA2_Intel_ModeH264_E)) { | |
1558 decoder_guid = profile_id; | 1514 decoder_guid = profile_id; |
1559 found = true; | 1515 found = true; |
1560 break; | 1516 break; |
1561 } | 1517 } |
1562 } | 1518 } |
1563 if (!found) | 1519 if (!found) |
1564 return max_resolution; | 1520 return max_resolution; |
1565 } | 1521 } |
1566 | 1522 |
1567 // Legacy AMD drivers with UVD3 or earlier and some Intel GPUs crash while | 1523 // Legacy AMD drivers with UVD3 or earlier and some Intel GPUs crash while |
1568 // creating surfaces larger than 1920 x 1088. | 1524 // creating surfaces larger than 1920 x 1088. |
1569 if (IsLegacyGPU(device.get())) | 1525 if (IsLegacyGPU(device.get())) |
1570 return max_resolution; | 1526 return max_resolution; |
1571 | 1527 |
1572 // We look for the following resolutions in the driver. | 1528 // We look for the following resolutions in the driver. |
1573 // TODO(ananta) | 1529 // TODO(ananta) |
1574 // Look into whether this list needs to be expanded. | 1530 // Look into whether this list needs to be expanded. |
1575 static std::pair<int, int> resolution_array[] = { | 1531 static std::pair<int, int> resolution_array[] = { |
1576 // Use 1088 to account for 16x16 macroblocks. | 1532 // Use 1088 to account for 16x16 macroblocks. |
1577 std::make_pair(1920, 1088), | 1533 std::make_pair(1920, 1088), std::make_pair(2560, 1440), |
1578 std::make_pair(2560, 1440), | 1534 std::make_pair(3840, 2160), std::make_pair(4096, 2160), |
1579 std::make_pair(3840, 2160), | 1535 std::make_pair(4096, 2304), |
1580 std::make_pair(4096, 2160), | |
1581 std::make_pair(4096, 2304), | |
1582 }; | 1536 }; |
1583 | 1537 |
1584 { | 1538 { |
1585 TRACE_EVENT0("gpu,startup", | 1539 TRACE_EVENT0("gpu,startup", |
1586 "GetMaxH264Resolution. Resolution search begin"); | 1540 "GetMaxH264Resolution. Resolution search begin"); |
1587 | 1541 |
1588 for (size_t res_idx = 0; res_idx < arraysize(resolution_array); | 1542 for (size_t res_idx = 0; res_idx < arraysize(resolution_array); res_idx++) { |
1589 res_idx++) { | |
1590 D3D11_VIDEO_DECODER_DESC desc = {}; | 1543 D3D11_VIDEO_DECODER_DESC desc = {}; |
1591 desc.Guid = decoder_guid; | 1544 desc.Guid = decoder_guid; |
1592 desc.SampleWidth = resolution_array[res_idx].first; | 1545 desc.SampleWidth = resolution_array[res_idx].first; |
1593 desc.SampleHeight = resolution_array[res_idx].second; | 1546 desc.SampleHeight = resolution_array[res_idx].second; |
1594 desc.OutputFormat = DXGI_FORMAT_NV12; | 1547 desc.OutputFormat = DXGI_FORMAT_NV12; |
1595 UINT config_count = 0; | 1548 UINT config_count = 0; |
1596 hr = video_device->GetVideoDecoderConfigCount(&desc, &config_count); | 1549 hr = video_device->GetVideoDecoderConfigCount(&desc, &config_count); |
1597 if (FAILED(hr) || config_count == 0) | 1550 if (FAILED(hr) || config_count == 0) |
1598 return max_resolution; | 1551 return max_resolution; |
1599 | 1552 |
1600 D3D11_VIDEO_DECODER_CONFIG config = {}; | 1553 D3D11_VIDEO_DECODER_CONFIG config = {}; |
1601 hr = video_device->GetVideoDecoderConfig(&desc, 0, &config); | 1554 hr = video_device->GetVideoDecoderConfig(&desc, 0, &config); |
1602 if (FAILED(hr)) | 1555 if (FAILED(hr)) |
1603 return max_resolution; | 1556 return max_resolution; |
1604 | 1557 |
1605 base::win::ScopedComPtr<ID3D11VideoDecoder> video_decoder; | 1558 base::win::ScopedComPtr<ID3D11VideoDecoder> video_decoder; |
1606 hr = video_device->CreateVideoDecoder(&desc, &config, | 1559 hr = video_device->CreateVideoDecoder(&desc, &config, |
1607 video_decoder.Receive()); | 1560 video_decoder.Receive()); |
1608 if (!video_decoder.get()) | 1561 if (!video_decoder.get()) |
1609 return max_resolution; | 1562 return max_resolution; |
1610 | 1563 |
1611 max_resolution = resolution_array[res_idx]; | 1564 max_resolution = resolution_array[res_idx]; |
1612 } | 1565 } |
1613 } | 1566 } |
1614 return max_resolution; | 1567 return max_resolution; |
1615 } | 1568 } |
1616 | 1569 |
1617 // static | 1570 // static |
(...skipping 23 matching lines...) |
1641 | 1594 |
1642 DXGI_ADAPTER_DESC adapter_desc = {}; | 1595 DXGI_ADAPTER_DESC adapter_desc = {}; |
1643 hr = adapter->GetDesc(&adapter_desc); | 1596 hr = adapter->GetDesc(&adapter_desc); |
1644 if (FAILED(hr)) | 1597 if (FAILED(hr)) |
1645 return legacy_gpu; | 1598 return legacy_gpu; |
1646 | 1599 |
1647 // We check if the device is an Intel or an AMD device and whether it is in | 1600 // We check if the device is an Intel or an AMD device and whether it is in |
1648 // the global lists defined by the g_AMDUVD3GPUList and g_IntelLegacyGPUList | 1601 // the global lists defined by the g_AMDUVD3GPUList and g_IntelLegacyGPUList |
1649 // arrays above. If so, the device is treated as a legacy device. | 1602 // arrays above. If so, the device is treated as a legacy device. |
1650 if ((adapter_desc.VendorId == kAMDGPUId1) || | 1603 if ((adapter_desc.VendorId == kAMDGPUId1) || |
1651 adapter_desc.VendorId == kAMDGPUId2) { | 1604 adapter_desc.VendorId == kAMDGPUId2) { |
1652 { | 1605 { |
1653 TRACE_EVENT0("gpu,startup", | 1606 TRACE_EVENT0("gpu,startup", |
1654 "DXVAVideoDecodeAccelerator::IsLegacyGPU. AMD check"); | 1607 "DXVAVideoDecodeAccelerator::IsLegacyGPU. AMD check"); |
1655 for (size_t i = 0; i < arraysize(g_AMDUVD3GPUList); i++) { | 1608 for (size_t i = 0; i < arraysize(g_AMDUVD3GPUList); i++) { |
1656 if (adapter_desc.DeviceId == g_AMDUVD3GPUList[i]) | 1609 if (adapter_desc.DeviceId == g_AMDUVD3GPUList[i]) |
1657 return legacy_gpu; | 1610 return legacy_gpu; |
1658 } | 1611 } |
1659 } | 1612 } |
1660 } else if (adapter_desc.VendorId == kIntelGPU) { | 1613 } else if (adapter_desc.VendorId == kIntelGPU) { |
1661 { | 1614 { |
1662 TRACE_EVENT0("gpu,startup", | 1615 TRACE_EVENT0("gpu,startup", |
1663 "DXVAVideoDecodeAccelerator::IsLegacyGPU. Intel check"); | 1616 "DXVAVideoDecodeAccelerator::IsLegacyGPU. Intel check"); |
1664 for (size_t i = 0; i < arraysize(g_IntelLegacyGPUList); i++) { | 1617 for (size_t i = 0; i < arraysize(g_IntelLegacyGPUList); i++) { |
1665 if (adapter_desc.DeviceId == g_IntelLegacyGPUList[i]) | 1618 if (adapter_desc.DeviceId == g_IntelLegacyGPUList[i]) |
1666 return legacy_gpu; | 1619 return legacy_gpu; |
1667 } | 1620 } |
1668 } | 1621 } |
1669 } | 1622 } |
1670 legacy_gpu = false; | 1623 legacy_gpu = false; |
1671 return legacy_gpu; | 1624 return legacy_gpu; |
1672 } | 1625 } |
1673 | 1626 |
(...skipping 11 matching lines...) |
1685 decoder_dll = ::GetModuleHandle(L"msmpeg2vdec.dll"); | 1638 decoder_dll = ::GetModuleHandle(L"msmpeg2vdec.dll"); |
1686 RETURN_ON_FAILURE(decoder_dll, | 1639 RETURN_ON_FAILURE(decoder_dll, |
1687 "msmpeg2vdec.dll required for decoding is not loaded", | 1640 "msmpeg2vdec.dll required for decoding is not loaded", |
1688 false); | 1641 false); |
1689 | 1642 |
1690 // Check the version of the DLL; version 6.1.7140 is blacklisted due to high | 1643 // Check the version of the DLL; version 6.1.7140 is blacklisted due to high |
1691 // crash rates in browsers loading that DLL. If that is the version installed, | 1644 // crash rates in browsers loading that DLL. If that is the version installed, |
1692 // we fall back to software decoding. See crbug/403440. | 1645 // we fall back to software decoding. See crbug/403440. |
1693 std::unique_ptr<FileVersionInfo> version_info( | 1646 std::unique_ptr<FileVersionInfo> version_info( |
1694 FileVersionInfo::CreateFileVersionInfoForModule(decoder_dll)); | 1647 FileVersionInfo::CreateFileVersionInfoForModule(decoder_dll)); |
1695 RETURN_ON_FAILURE(version_info, | 1648 RETURN_ON_FAILURE(version_info, "unable to get version of msmpeg2vdec.dll", |
1696 "unable to get version of msmpeg2vdec.dll", | |
1697 false); | 1649 false); |
1698 base::string16 file_version = version_info->file_version(); | 1650 base::string16 file_version = version_info->file_version(); |
1699 RETURN_ON_FAILURE(file_version.find(L"6.1.7140") == base::string16::npos, | 1651 RETURN_ON_FAILURE(file_version.find(L"6.1.7140") == base::string16::npos, |
1700 "blacklisted version of msmpeg2vdec.dll 6.1.7140", | 1652 "blacklisted version of msmpeg2vdec.dll 6.1.7140", false); |
1701 false); | |
1702 codec_ = media::kCodecH264; | 1653 codec_ = media::kCodecH264; |
1703 clsid = __uuidof(CMSH264DecoderMFT); | 1654 clsid = __uuidof(CMSH264DecoderMFT); |
1704 } else if (enable_accelerated_vpx_decode_ && | 1655 } else if (enable_accelerated_vpx_decode_ && |
1705 (profile == media::VP8PROFILE_ANY || | 1656 (profile == media::VP8PROFILE_ANY || |
1706 profile == media::VP9PROFILE_PROFILE0 || | 1657 profile == media::VP9PROFILE_PROFILE0 || |
1707 profile == media::VP9PROFILE_PROFILE1 || | 1658 profile == media::VP9PROFILE_PROFILE1 || |
1708 profile == media::VP9PROFILE_PROFILE2 || | 1659 profile == media::VP9PROFILE_PROFILE2 || |
1709 profile == media::VP9PROFILE_PROFILE3)) { | 1660 profile == media::VP9PROFILE_PROFILE3)) { |
1710 int program_files_key = base::DIR_PROGRAM_FILES; | 1661 int program_files_key = base::DIR_PROGRAM_FILES; |
1711 if (base::win::OSInfo::GetInstance()->wow64_status() == | 1662 if (base::win::OSInfo::GetInstance()->wow64_status() == |
1712 base::win::OSInfo::WOW64_ENABLED) { | 1663 base::win::OSInfo::WOW64_ENABLED) { |
1713 program_files_key = base::DIR_PROGRAM_FILES6432; | 1664 program_files_key = base::DIR_PROGRAM_FILES6432; |
1714 } | 1665 } |
1715 | 1666 |
1716 base::FilePath dll_path; | 1667 base::FilePath dll_path; |
1717 RETURN_ON_FAILURE(PathService::Get(program_files_key, &dll_path), | 1668 RETURN_ON_FAILURE(PathService::Get(program_files_key, &dll_path), |
1718 "failed to get path for Program Files", false); | 1669 "failed to get path for Program Files", false); |
1719 | 1670 |
1720 dll_path = dll_path.Append(kVPXDecoderDLLPath); | 1671 dll_path = dll_path.Append(kVPXDecoderDLLPath); |
1721 if (profile == media::VP8PROFILE_ANY) { | 1672 if (profile == media::VP8PROFILE_ANY) { |
1722 codec_ = media::kCodecVP8; | 1673 codec_ = media::kCodecVP8; |
1723 dll_path = dll_path.Append(kVP8DecoderDLLName); | 1674 dll_path = dll_path.Append(kVP8DecoderDLLName); |
1724 clsid = CLSID_WebmMfVp8Dec; | 1675 clsid = CLSID_WebmMfVp8Dec; |
1725 } else { | 1676 } else { |
1726 codec_ = media::kCodecVP9; | 1677 codec_ = media::kCodecVP9; |
1727 dll_path = dll_path.Append(kVP9DecoderDLLName); | 1678 dll_path = dll_path.Append(kVP9DecoderDLLName); |
1728 clsid = CLSID_WebmMfVp9Dec; | 1679 clsid = CLSID_WebmMfVp9Dec; |
1729 } | 1680 } |
1730 decoder_dll = ::LoadLibraryEx(dll_path.value().data(), NULL, | 1681 decoder_dll = ::LoadLibraryEx(dll_path.value().data(), NULL, |
1731 LOAD_WITH_ALTERED_SEARCH_PATH); | 1682 LOAD_WITH_ALTERED_SEARCH_PATH); |
1732 RETURN_ON_FAILURE(decoder_dll, "vpx decoder dll is not loaded", false); | 1683 RETURN_ON_FAILURE(decoder_dll, "vpx decoder dll is not loaded", false); |
1733 } else { | 1684 } else { |
1734 RETURN_ON_FAILURE(false, "Unsupported codec.", false); | 1685 RETURN_ON_FAILURE(false, "Unsupported codec.", false); |
1735 } | 1686 } |
1736 | 1687 |
1737 HRESULT hr = CreateCOMObjectFromDll(decoder_dll, | 1688 HRESULT hr = CreateCOMObjectFromDll( |
1738 clsid, | 1689 decoder_dll, clsid, __uuidof(IMFTransform), decoder_.ReceiveVoid()); |
1739 __uuidof(IMFTransform), | |
1740 decoder_.ReceiveVoid()); | |
1741 RETURN_ON_HR_FAILURE(hr, "Failed to create decoder instance", false); | 1690 RETURN_ON_HR_FAILURE(hr, "Failed to create decoder instance", false); |
1742 | 1691 |
1743 RETURN_ON_FAILURE(CheckDecoderDxvaSupport(), | 1692 RETURN_ON_FAILURE(CheckDecoderDxvaSupport(), |
1744 "Failed to check decoder DXVA support", false); | 1693 "Failed to check decoder DXVA support", false); |
1745 | 1694 |
1746 ULONG_PTR device_manager_to_use = NULL; | 1695 ULONG_PTR device_manager_to_use = NULL; |
1747 if (use_dx11_) { | 1696 if (use_dx11_) { |
1748 CHECK(create_dxgi_device_manager_); | 1697 CHECK(create_dxgi_device_manager_); |
1749 RETURN_AND_NOTIFY_ON_FAILURE(CreateDX11DevManager(), | 1698 RETURN_AND_NOTIFY_ON_FAILURE(CreateDX11DevManager(), |
1750 "Failed to initialize DX11 device and manager", | 1699 "Failed to initialize DX11 device and manager", |
1751 PLATFORM_FAILURE, | 1700 PLATFORM_FAILURE, false); |
1752 false); | 1701 device_manager_to_use = |
1753 device_manager_to_use = reinterpret_cast<ULONG_PTR>( | 1702 reinterpret_cast<ULONG_PTR>(d3d11_device_manager_.get()); |
1754 d3d11_device_manager_.get()); | |
1755 } else { | 1703 } else { |
1756 RETURN_AND_NOTIFY_ON_FAILURE(CreateD3DDevManager(), | 1704 RETURN_AND_NOTIFY_ON_FAILURE(CreateD3DDevManager(), |
1757 "Failed to initialize D3D device and manager", | 1705 "Failed to initialize D3D device and manager", |
1758 PLATFORM_FAILURE, | 1706 PLATFORM_FAILURE, false); |
1759 false); | |
1760 device_manager_to_use = reinterpret_cast<ULONG_PTR>(device_manager_.get()); | 1707 device_manager_to_use = reinterpret_cast<ULONG_PTR>(device_manager_.get()); |
1761 } | 1708 } |
1762 | 1709 |
1763 hr = decoder_->ProcessMessage( | 1710 hr = decoder_->ProcessMessage(MFT_MESSAGE_SET_D3D_MANAGER, |
1764 MFT_MESSAGE_SET_D3D_MANAGER, | 1711 device_manager_to_use); |
1765 device_manager_to_use); | |
1766 if (use_dx11_) { | 1712 if (use_dx11_) { |
1767 RETURN_ON_HR_FAILURE(hr, "Failed to pass DX11 manager to decoder", false); | 1713 RETURN_ON_HR_FAILURE(hr, "Failed to pass DX11 manager to decoder", false); |
1768 } else { | 1714 } else { |
1769 RETURN_ON_HR_FAILURE(hr, "Failed to pass D3D manager to decoder", false); | 1715 RETURN_ON_HR_FAILURE(hr, "Failed to pass D3D manager to decoder", false); |
1770 } | 1716 } |
1771 | 1717 |
1772 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); | 1718 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); |
1773 | 1719 |
1774 EGLint config_attribs[] = { | 1720 EGLint config_attribs[] = {EGL_BUFFER_SIZE, 32, |
1775 EGL_BUFFER_SIZE, 32, | 1721 EGL_RED_SIZE, 8, |
1776 EGL_RED_SIZE, 8, | 1722 EGL_GREEN_SIZE, 8, |
1777 EGL_GREEN_SIZE, 8, | 1723 EGL_BLUE_SIZE, 8, |
1778 EGL_BLUE_SIZE, 8, | 1724 EGL_SURFACE_TYPE, EGL_PBUFFER_BIT, |
1779 EGL_SURFACE_TYPE, EGL_PBUFFER_BIT, | 1725 EGL_ALPHA_SIZE, 0, |
1780 EGL_ALPHA_SIZE, 0, | 1726 EGL_NONE}; |
1781 EGL_NONE | |
1782 }; | |
1783 | 1727 |
1784 EGLint num_configs; | 1728 EGLint num_configs; |
1785 | 1729 |
1786 if (!eglChooseConfig( | 1730 if (!eglChooseConfig(egl_display, config_attribs, &egl_config_, 1, |
1787 egl_display, | 1731 &num_configs)) |
1788 config_attribs, | |
1789 &egl_config_, | |
1790 1, | |
1791 &num_configs)) | |
1792 return false; | 1732 return false; |
1793 | 1733 |
1794 return SetDecoderMediaTypes(); | 1734 return SetDecoderMediaTypes(); |
1795 } | 1735 } |
1796 | 1736 |
1797 bool DXVAVideoDecodeAccelerator::CheckDecoderDxvaSupport() { | 1737 bool DXVAVideoDecodeAccelerator::CheckDecoderDxvaSupport() { |
1798 base::win::ScopedComPtr<IMFAttributes> attributes; | 1738 base::win::ScopedComPtr<IMFAttributes> attributes; |
1799 HRESULT hr = decoder_->GetAttributes(attributes.Receive()); | 1739 HRESULT hr = decoder_->GetAttributes(attributes.Receive()); |
1800 RETURN_ON_HR_FAILURE(hr, "Failed to get decoder attributes", false); | 1740 RETURN_ON_HR_FAILURE(hr, "Failed to get decoder attributes", false); |
1801 | 1741 |
(...skipping 102 matching lines...) |
1904 } | 1844 } |
1905 | 1845 |
1906 DVLOG(1) << "Min buffer size: " << input_stream_info_.cbSize; | 1846 DVLOG(1) << "Min buffer size: " << input_stream_info_.cbSize; |
1907 DVLOG(1) << "Max lookahead: " << input_stream_info_.cbMaxLookahead; | 1847 DVLOG(1) << "Max lookahead: " << input_stream_info_.cbMaxLookahead; |
1908 DVLOG(1) << "Alignment: " << input_stream_info_.cbAlignment; | 1848 DVLOG(1) << "Alignment: " << input_stream_info_.cbAlignment; |
1909 | 1849 |
1910 DVLOG(1) << "Output stream info: "; | 1850 DVLOG(1) << "Output stream info: "; |
1911 // The flags here should be the same and mean the same thing, except when | 1851 // The flags here should be the same and mean the same thing, except when |
1912 // DXVA is enabled, there is an extra 0x100 flag meaning the decoder will | 1852 // DXVA is enabled, there is an extra 0x100 flag meaning the decoder will |
1913 // allocate its own sample. | 1853 // allocate its own sample. |
1914 DVLOG(1) << "Flags: " | 1854 DVLOG(1) << "Flags: " << std::hex << std::showbase |
1915 << std::hex << std::showbase << output_stream_info_.dwFlags; | 1855 << output_stream_info_.dwFlags; |
1916 if (codec_ == media::kCodecH264) { | 1856 if (codec_ == media::kCodecH264) { |
1917 CHECK_EQ(output_stream_info_.dwFlags, 0x107u); | 1857 CHECK_EQ(output_stream_info_.dwFlags, 0x107u); |
1918 } | 1858 } |
1919 DVLOG(1) << "Min buffer size: " << output_stream_info_.cbSize; | 1859 DVLOG(1) << "Min buffer size: " << output_stream_info_.cbSize; |
1920 DVLOG(1) << "Alignment: " << output_stream_info_.cbAlignment; | 1860 DVLOG(1) << "Alignment: " << output_stream_info_.cbAlignment; |
1921 return true; | 1861 return true; |
1922 } | 1862 } |
1923 | 1863 |
1924 void DXVAVideoDecodeAccelerator::DoDecode() { | 1864 void DXVAVideoDecodeAccelerator::DoDecode() { |
1925 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 1865 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
1926 // This function is also called from FlushInternal in a loop which could | 1866 // This function is also called from FlushInternal in a loop which could |
1927 // result in the state transitioning to kStopped due to no decoded output. | 1867 // result in the state transitioning to kStopped due to no decoded output. |
1928 State state = GetState(); | 1868 State state = GetState(); |
1929 RETURN_AND_NOTIFY_ON_FAILURE( | 1869 RETURN_AND_NOTIFY_ON_FAILURE( |
1930 (state == kNormal || state == kFlushing || state == kStopped), | 1870 (state == kNormal || state == kFlushing || state == kStopped), |
1931 "DoDecode: not in normal/flushing/stopped state", ILLEGAL_STATE,); | 1871 "DoDecode: not in normal/flushing/stopped state", ILLEGAL_STATE, ); |
1932 | 1872 |
1933 MFT_OUTPUT_DATA_BUFFER output_data_buffer = {0}; | 1873 MFT_OUTPUT_DATA_BUFFER output_data_buffer = {0}; |
1934 DWORD status = 0; | 1874 DWORD status = 0; |
1935 | 1875 |
1936 HRESULT hr = decoder_->ProcessOutput(0, // No flags | 1876 HRESULT hr = decoder_->ProcessOutput(0, // No flags |
1937 1, // # of out streams to pull from | 1877 1, // # of out streams to pull from |
1938 &output_data_buffer, | 1878 &output_data_buffer, &status); |
1939 &status); | |
1940 IMFCollection* events = output_data_buffer.pEvents; | 1879 IMFCollection* events = output_data_buffer.pEvents; |
1941 if (events != NULL) { | 1880 if (events != NULL) { |
1942 DVLOG(1) << "Got events from ProcessOutput, but discarding"; | 1881 DVLOG(1) << "Got events from ProcessOutput, but discarding"; |
1943 events->Release(); | 1882 events->Release(); |
1944 } | 1883 } |
1945 if (FAILED(hr)) { | 1884 if (FAILED(hr)) { |
1946 // A stream change needs further ProcessInput calls to get back decoder | 1885 // A stream change needs further ProcessInput calls to get back decoder |
1947 // output, which is why we need to set the state to stopped. | 1886 // output, which is why we need to set the state to stopped. |
1948 if (hr == MF_E_TRANSFORM_STREAM_CHANGE) { | 1887 if (hr == MF_E_TRANSFORM_STREAM_CHANGE) { |
1949 if (!SetDecoderOutputMediaType(MFVideoFormat_NV12)) { | 1888 if (!SetDecoderOutputMediaType(MFVideoFormat_NV12)) { |
(...skipping 17 matching lines...) Expand all Loading... |
1967 } | 1906 } |
1968 } | 1907 } |
1969 TRACE_EVENT_ASYNC_END0("gpu", "DXVAVideoDecodeAccelerator.Decoding", this); | 1908 TRACE_EVENT_ASYNC_END0("gpu", "DXVAVideoDecodeAccelerator.Decoding", this); |
1970 | 1909 |
1971 TRACE_COUNTER1("DXVA Decoding", "TotalPacketsBeforeDecode", | 1910 TRACE_COUNTER1("DXVA Decoding", "TotalPacketsBeforeDecode", |
1972 inputs_before_decode_); | 1911 inputs_before_decode_); |
1973 | 1912 |
1974 inputs_before_decode_ = 0; | 1913 inputs_before_decode_ = 0; |
1975 | 1914 |
1976 RETURN_AND_NOTIFY_ON_FAILURE(ProcessOutputSample(output_data_buffer.pSample), | 1915 RETURN_AND_NOTIFY_ON_FAILURE(ProcessOutputSample(output_data_buffer.pSample), |
1977 "Failed to process output sample.", PLATFORM_FAILURE,); | 1916 "Failed to process output sample.", |
| 1917 PLATFORM_FAILURE, ); |
1978 } | 1918 } |
1979 | 1919 |
1980 bool DXVAVideoDecodeAccelerator::ProcessOutputSample(IMFSample* sample) { | 1920 bool DXVAVideoDecodeAccelerator::ProcessOutputSample(IMFSample* sample) { |
1981 RETURN_ON_FAILURE(sample, "Decode succeeded with NULL output sample", false); | 1921 RETURN_ON_FAILURE(sample, "Decode succeeded with NULL output sample", false); |
1982 | 1922 |
1983 LONGLONG input_buffer_id = 0; | 1923 LONGLONG input_buffer_id = 0; |
1984 RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id), | 1924 RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id), |
1985 "Failed to get input buffer id associated with sample", | 1925 "Failed to get input buffer id associated with sample", |
1986 false); | 1926 false); |
1987 | 1927 |
(...skipping 15 matching lines...) |
2003 | 1943 |
2004 int width = 0; | 1944 int width = 0; |
2005 int height = 0; | 1945 int height = 0; |
2006 if (!GetVideoFrameDimensions(sample, &width, &height)) { | 1946 if (!GetVideoFrameDimensions(sample, &width, &height)) { |
2007 RETURN_ON_FAILURE(false, "Failed to get D3D surface from output sample", | 1947 RETURN_ON_FAILURE(false, "Failed to get D3D surface from output sample", |
2008 false); | 1948 false); |
2009 } | 1949 } |
2010 | 1950 |
2011 // Go ahead and request picture buffers. | 1951 // Go ahead and request picture buffers. |
2012 main_thread_task_runner_->PostTask( | 1952 main_thread_task_runner_->PostTask( |
2013 FROM_HERE, | 1953 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers, |
2014 base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers, | 1954 weak_this_factory_.GetWeakPtr(), width, height)); |
2015 weak_this_factory_.GetWeakPtr(), | |
2016 width, | |
2017 height)); | |
2018 | 1955 |
2019 pictures_requested_ = true; | 1956 pictures_requested_ = true; |
2020 return true; | 1957 return true; |
2021 } | 1958 } |
2022 | 1959 |
2023 void DXVAVideoDecodeAccelerator::ProcessPendingSamples() { | 1960 void DXVAVideoDecodeAccelerator::ProcessPendingSamples() { |
2024 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 1961 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
2025 | 1962 |
2026 if (!output_picture_buffers_.size()) | 1963 if (!output_picture_buffers_.size()) |
2027 return; | 1964 return; |
2028 | 1965 |
2029 RETURN_AND_NOTIFY_ON_FAILURE(make_context_current_cb_.Run(), | 1966 RETURN_AND_NOTIFY_ON_FAILURE(make_context_current_cb_.Run(), |
2030 "Failed to make context current", | 1967 "Failed to make context current", |
2031 PLATFORM_FAILURE, ); | 1968 PLATFORM_FAILURE, ); |
2032 | 1969 |
2033 OutputBuffers::iterator index; | 1970 OutputBuffers::iterator index; |
2034 | 1971 |
2035 for (index = output_picture_buffers_.begin(); | 1972 for (index = output_picture_buffers_.begin(); |
2036 index != output_picture_buffers_.end() && | 1973 index != output_picture_buffers_.end() && OutputSamplesPresent(); |
2037 OutputSamplesPresent(); | |
2038 ++index) { | 1974 ++index) { |
2039 if (index->second->available()) { | 1975 if (index->second->available()) { |
2040 PendingSampleInfo* pending_sample = NULL; | 1976 PendingSampleInfo* pending_sample = NULL; |
2041 { | 1977 { |
2042 base::AutoLock lock(decoder_lock_); | 1978 base::AutoLock lock(decoder_lock_); |
2043 PendingSampleInfo& sample_info = pending_output_samples_.front(); | 1979 PendingSampleInfo& sample_info = pending_output_samples_.front(); |
2044 if (sample_info.picture_buffer_id != -1) | 1980 if (sample_info.picture_buffer_id != -1) |
2045 continue; | 1981 continue; |
2046 pending_sample = &sample_info; | 1982 pending_sample = &sample_info; |
2047 } | 1983 } |
2048 | 1984 |
2049 int width = 0; | 1985 int width = 0; |
2050 int height = 0; | 1986 int height = 0; |
2051 if (!GetVideoFrameDimensions(pending_sample->output_sample.get(), | 1987 if (!GetVideoFrameDimensions(pending_sample->output_sample.get(), &width, |
2052 &width, &height)) { | 1988 &height)) { |
2053 RETURN_AND_NOTIFY_ON_FAILURE(false, | 1989 RETURN_AND_NOTIFY_ON_FAILURE( |
2054 "Failed to get D3D surface from output sample", PLATFORM_FAILURE,); | 1990 false, "Failed to get D3D surface from output sample", |
| 1991 PLATFORM_FAILURE, ); |
2055 } | 1992 } |
2056 | 1993 |
2057 if (width != index->second->size().width() || | 1994 if (width != index->second->size().width() || |
2058 height != index->second->size().height()) { | 1995 height != index->second->size().height()) { |
2059 HandleResolutionChanged(width, height); | 1996 HandleResolutionChanged(width, height); |
2060 return; | 1997 return; |
2061 } | 1998 } |
2062 | 1999 |
2063 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; | 2000 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; |
2064 HRESULT hr = pending_sample->output_sample->GetBufferByIndex( | 2001 HRESULT hr = pending_sample->output_sample->GetBufferByIndex( |
2065 0, output_buffer.Receive()); | 2002 0, output_buffer.Receive()); |
2066 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | 2003 RETURN_AND_NOTIFY_ON_HR_FAILURE( |
2067 "Failed to get buffer from output sample", PLATFORM_FAILURE,); | 2004 hr, "Failed to get buffer from output sample", PLATFORM_FAILURE, ); |
2068 | 2005 |
2069 base::win::ScopedComPtr<IDirect3DSurface9> surface; | 2006 base::win::ScopedComPtr<IDirect3DSurface9> surface; |
2070 base::win::ScopedComPtr<ID3D11Texture2D> d3d11_texture; | 2007 base::win::ScopedComPtr<ID3D11Texture2D> d3d11_texture; |
2071 | 2008 |
2072 if (use_dx11_) { | 2009 if (use_dx11_) { |
2073 base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer; | 2010 base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer; |
2074 hr = dxgi_buffer.QueryFrom(output_buffer.get()); | 2011 hr = dxgi_buffer.QueryFrom(output_buffer.get()); |
2075 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | 2012 RETURN_AND_NOTIFY_ON_HR_FAILURE( |
2076 "Failed to get DXGIBuffer from output sample", PLATFORM_FAILURE,); | 2013 hr, "Failed to get DXGIBuffer from output sample", |
| 2014 PLATFORM_FAILURE, ); |
2077 hr = dxgi_buffer->GetResource( | 2015 hr = dxgi_buffer->GetResource( |
2078 __uuidof(ID3D11Texture2D), | 2016 __uuidof(ID3D11Texture2D), |
2079 reinterpret_cast<void**>(d3d11_texture.Receive())); | 2017 reinterpret_cast<void**>(d3d11_texture.Receive())); |
2080 } else { | 2018 } else { |
2081 hr = MFGetService(output_buffer.get(), MR_BUFFER_SERVICE, | 2019 hr = MFGetService(output_buffer.get(), MR_BUFFER_SERVICE, |
2082 IID_PPV_ARGS(surface.Receive())); | 2020 IID_PPV_ARGS(surface.Receive())); |
2083 } | 2021 } |
2084 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | 2022 RETURN_AND_NOTIFY_ON_HR_FAILURE( |
2085 "Failed to get surface from output sample", PLATFORM_FAILURE,); | 2023 hr, "Failed to get surface from output sample", PLATFORM_FAILURE, ); |
2086 | 2024 |
2087 pending_sample->picture_buffer_id = index->second->id(); | 2025 pending_sample->picture_buffer_id = index->second->id(); |
2088 | 2026 |
2089 RETURN_AND_NOTIFY_ON_FAILURE( | 2027 RETURN_AND_NOTIFY_ON_FAILURE( |
2090 index->second->CopyOutputSampleDataToPictureBuffer( | 2028 index->second->CopyOutputSampleDataToPictureBuffer( |
2091 this, | 2029 this, surface.get(), d3d11_texture.get(), |
2092 surface.get(), | |
2093 d3d11_texture.get(), | |
2094 pending_sample->input_buffer_id), | 2030 pending_sample->input_buffer_id), |
2095 "Failed to copy output sample", PLATFORM_FAILURE,); | 2031 "Failed to copy output sample", PLATFORM_FAILURE, ); |
2096 | 2032 |
2097 index->second->set_available(false); | 2033 index->second->set_available(false); |
2098 } | 2034 } |
2099 } | 2035 } |
2100 } | 2036 } |
2101 | 2037 |
2102 void DXVAVideoDecodeAccelerator::StopOnError( | 2038 void DXVAVideoDecodeAccelerator::StopOnError( |
2103 media::VideoDecodeAccelerator::Error error) { | 2039 media::VideoDecodeAccelerator::Error error) { |
2104 if (!main_thread_task_runner_->BelongsToCurrentThread()) { | 2040 if (!main_thread_task_runner_->BelongsToCurrentThread()) { |
2105 main_thread_task_runner_->PostTask( | 2041 main_thread_task_runner_->PostTask( |
2106 FROM_HERE, | 2042 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::StopOnError, |
2107 base::Bind(&DXVAVideoDecodeAccelerator::StopOnError, | 2043 weak_this_factory_.GetWeakPtr(), error)); |
2108 weak_this_factory_.GetWeakPtr(), | |
2109 error)); | |
2110 return; | 2044 return; |
2111 } | 2045 } |
2112 | 2046 |
2113 if (client_) | 2047 if (client_) |
2114 client_->NotifyError(error); | 2048 client_->NotifyError(error); |
2115 client_ = NULL; | 2049 client_ = NULL; |
2116 | 2050 |
2117 if (GetState() != kUninitialized) { | 2051 if (GetState() != kUninitialized) { |
2118 Invalidate(); | 2052 Invalidate(); |
2119 } | 2053 } |
(...skipping 69 matching lines...) |
2189 | 2123 |
2190 void DXVAVideoDecodeAccelerator::RequestPictureBuffers(int width, int height) { | 2124 void DXVAVideoDecodeAccelerator::RequestPictureBuffers(int width, int height) { |
2191 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 2125 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
2192 // This task could execute after the decoder has been torn down. | 2126 // This task could execute after the decoder has been torn down. |
2193 if (GetState() != kUninitialized && client_) { | 2127 if (GetState() != kUninitialized && client_) { |
2194 client_->ProvidePictureBuffers(kNumPictureBuffers, 1, | 2128 client_->ProvidePictureBuffers(kNumPictureBuffers, 1, |
2195 gfx::Size(width, height), GL_TEXTURE_2D); | 2129 gfx::Size(width, height), GL_TEXTURE_2D); |
2196 } | 2130 } |
2197 } | 2131 } |
2198 | 2132 |
2199 void DXVAVideoDecodeAccelerator::NotifyPictureReady( | 2133 void DXVAVideoDecodeAccelerator::NotifyPictureReady(int picture_buffer_id, |
2200 int picture_buffer_id, | 2134 int input_buffer_id) { |
2201 int input_buffer_id) { | |
2202 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 2135 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
2203 // This task could execute after the decoder has been torn down. | 2136 // This task could execute after the decoder has been torn down. |
2204 if (GetState() != kUninitialized && client_) { | 2137 if (GetState() != kUninitialized && client_) { |
2205 // TODO(henryhsu): Use correct visible size instead of (0, 0). We can't use | 2138 // TODO(henryhsu): Use correct visible size instead of (0, 0). We can't use |
2206 // coded size here so use (0, 0) intentionally to have the client choose. | 2139 // coded size here so use (0, 0) intentionally to have the client choose. |
2207 media::Picture picture(picture_buffer_id, input_buffer_id, | 2140 media::Picture picture(picture_buffer_id, input_buffer_id, gfx::Rect(0, 0), |
2208 gfx::Rect(0, 0), false); | 2141 false); |
2209 client_->PictureReady(picture); | 2142 client_->PictureReady(picture); |
2210 } | 2143 } |
2211 } | 2144 } |
2212 | 2145 |
2213 void DXVAVideoDecodeAccelerator::NotifyInputBuffersDropped() { | 2146 void DXVAVideoDecodeAccelerator::NotifyInputBuffersDropped() { |
2214 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 2147 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
2215 if (!client_) | 2148 if (!client_) |
2216 return; | 2149 return; |
2217 | 2150 |
2218 for (PendingInputs::iterator it = pending_input_buffers_.begin(); | 2151 for (PendingInputs::iterator it = pending_input_buffers_.begin(); |
2219 it != pending_input_buffers_.end(); ++it) { | 2152 it != pending_input_buffers_.end(); ++it) { |
2220 LONGLONG input_buffer_id = 0; | 2153 LONGLONG input_buffer_id = 0; |
2221 RETURN_ON_HR_FAILURE((*it)->GetSampleTime(&input_buffer_id), | 2154 RETURN_ON_HR_FAILURE((*it)->GetSampleTime(&input_buffer_id), |
2222 "Failed to get buffer id associated with sample",); | 2155 "Failed to get buffer id associated with sample", ); |
2223 client_->NotifyEndOfBitstreamBuffer(input_buffer_id); | 2156 client_->NotifyEndOfBitstreamBuffer(input_buffer_id); |
2224 } | 2157 } |
2225 pending_input_buffers_.clear(); | 2158 pending_input_buffers_.clear(); |
2226 } | 2159 } |
2227 | 2160 |
2228 void DXVAVideoDecodeAccelerator::DecodePendingInputBuffers() { | 2161 void DXVAVideoDecodeAccelerator::DecodePendingInputBuffers() { |
2229 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 2162 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
2230 State state = GetState(); | 2163 State state = GetState(); |
2231 RETURN_AND_NOTIFY_ON_FAILURE((state != kUninitialized), | 2164 RETURN_AND_NOTIFY_ON_FAILURE((state != kUninitialized), |
2232 "Invalid state: " << state, ILLEGAL_STATE,); | 2165 "Invalid state: " << state, ILLEGAL_STATE, ); |
2233 | 2166 |
2234 if (pending_input_buffers_.empty() || OutputSamplesPresent()) | 2167 if (pending_input_buffers_.empty() || OutputSamplesPresent()) |
2235 return; | 2168 return; |
2236 | 2169 |
2237 PendingInputs pending_input_buffers_copy; | 2170 PendingInputs pending_input_buffers_copy; |
2238 std::swap(pending_input_buffers_, pending_input_buffers_copy); | 2171 std::swap(pending_input_buffers_, pending_input_buffers_copy); |
2239 | 2172 |
2240 for (PendingInputs::iterator it = pending_input_buffers_copy.begin(); | 2173 for (PendingInputs::iterator it = pending_input_buffers_copy.begin(); |
2241 it != pending_input_buffers_copy.end(); ++it) { | 2174 it != pending_input_buffers_copy.end(); ++it) { |
2242 DecodeInternal(*it); | 2175 DecodeInternal(*it); |
2243 } | 2176 } |
2244 } | 2177 } |
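For reference, a minimal standalone sketch of the swap-and-drain pattern DecodePendingInputBuffers relies on: the pending queue is swapped into a local copy before iterating, so DecodeInternal can push samples back onto the real queue without invalidating the loop. The std::deque, g_pending, HandleOne and DrainPending names below are illustrative stand-ins, not Chromium types.

    // Swap-and-drain: take ownership of the queued work first so the per-item
    // handler can safely append new work while we iterate the local copy.
    #include <deque>
    #include <iostream>
    #include <utility>

    std::deque<int> g_pending;  // stands in for pending_input_buffers_

    void HandleOne(int sample) {
      std::cout << "decoding sample " << sample << "\n";
      if (sample == 2)  // pretend the decoder pushed this one back
        g_pending.push_back(sample + 10);
    }

    void DrainPending() {
      std::deque<int> local;
      std::swap(g_pending, local);  // real queue is now empty for re-adds
      for (int sample : local)
        HandleOne(sample);
    }

    int main() {
      g_pending = {1, 2, 3};
      DrainPending();
      std::cout << "left in queue: " << g_pending.size() << "\n";  // prints 1
      return 0;
    }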
2245 | 2178 |
2246 void DXVAVideoDecodeAccelerator::FlushInternal() { | 2179 void DXVAVideoDecodeAccelerator::FlushInternal() { |
2247 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 2180 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
2248 | 2181 |
2249 // We allow only one output frame to be present at any given time. If we have | 2182 // We allow only one output frame to be present at any given time. If we have |
2250 // an output frame, then we cannot complete the flush at this time. | 2183 // an output frame, then we cannot complete the flush at this time. |
2251 if (OutputSamplesPresent()) | 2184 if (OutputSamplesPresent()) |
2252 return; | 2185 return; |
2253 | 2186 |
2254 // First drain the pending input because once the drain message is sent below, | 2187 // First drain the pending input because once the drain message is sent below, |
2255 // the decoder will ignore further input until it's drained. | 2188 // the decoder will ignore further input until it's drained. |
2256 if (!pending_input_buffers_.empty()) { | 2189 if (!pending_input_buffers_.empty()) { |
2257 decoder_thread_task_runner_->PostTask( | 2190 decoder_thread_task_runner_->PostTask( |
2258 FROM_HERE, | 2191 FROM_HERE, |
2259 base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers, | 2192 base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers, |
2260 base::Unretained(this))); | 2193 base::Unretained(this))); |
2261 decoder_thread_task_runner_->PostTask( | 2194 decoder_thread_task_runner_->PostTask( |
2262 FROM_HERE, | 2195 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal, |
2263 base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal, | 2196 base::Unretained(this))); |
2264 base::Unretained(this))); | |
2265 return; | 2197 return; |
2266 } | 2198 } |
2267 | 2199 |
2268 { | 2200 { |
2269 base::AutoLock lock(decoder_lock_); | 2201 base::AutoLock lock(decoder_lock_); |
2270 if (!sent_drain_message_) { | 2202 if (!sent_drain_message_) { |
2271 RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_DRAIN, 0), | 2203 RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_DRAIN, 0), |
2272 "Failed to send drain message", | 2204 "Failed to send drain message", |
2273 PLATFORM_FAILURE,); | 2205 PLATFORM_FAILURE, ); |
2274 sent_drain_message_ = true; | 2206 sent_drain_message_ = true; |
2275 } | 2207 } |
2276 } | 2208 } |
2277 | 2209 |
2278 // Attempt to retrieve an output frame from the decoder. If we have one, | 2210 // Attempt to retrieve an output frame from the decoder. If we have one, |
2279 // return and proceed when the output frame is processed. If we don't have a | 2211 // return and proceed when the output frame is processed. If we don't have a |
2280 // frame then we are done. | 2212 // frame then we are done. |
2281 DoDecode(); | 2213 DoDecode(); |
2282 if (OutputSamplesPresent()) | 2214 if (OutputSamplesPresent()) |
2283 return; | 2215 return; |
2284 | 2216 |
2285 SetState(kFlushing); | 2217 SetState(kFlushing); |
2286 | 2218 |
2287 main_thread_task_runner_->PostTask( | 2219 main_thread_task_runner_->PostTask( |
2288 FROM_HERE, | 2220 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::NotifyFlushDone, |
2289 base::Bind(&DXVAVideoDecodeAccelerator::NotifyFlushDone, | 2221 weak_this_factory_.GetWeakPtr())); |
2290 weak_this_factory_.GetWeakPtr())); | |
2291 | 2222 |
2292 SetState(kNormal); | 2223 SetState(kNormal); |
2293 } | 2224 } |
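A small sketch of the send-drain-once guard used in FlushInternal above: the MFT drain command must be issued at most once per flush, so sent_drain_message_ is tested and set under the decoder lock. std::mutex and the SendDrainCommand stub below are illustrative substitutes for base::AutoLock and SendMFTMessage; this is the control flow only, not the real decoder call.

    // Guarded one-shot: send the drain command at most once per flush.
    #include <iostream>
    #include <mutex>

    std::mutex g_decoder_lock;          // stands in for decoder_lock_
    bool g_sent_drain_message = false;  // stands in for sent_drain_message_

    bool SendDrainCommand() {  // stands in for SendMFTMessage(...DRAIN, 0)
      std::cout << "MFT_MESSAGE_COMMAND_DRAIN sent\n";
      return true;
    }

    void MaybeSendDrain() {
      std::lock_guard<std::mutex> lock(g_decoder_lock);
      if (!g_sent_drain_message && SendDrainCommand())
        g_sent_drain_message = true;
    }

    int main() {
      MaybeSendDrain();  // issues the drain command once
      MaybeSendDrain();  // second call is a no-op
      return 0;
    }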
2294 | 2225 |
2295 void DXVAVideoDecodeAccelerator::DecodeInternal( | 2226 void DXVAVideoDecodeAccelerator::DecodeInternal( |
2296 const base::win::ScopedComPtr<IMFSample>& sample) { | 2227 const base::win::ScopedComPtr<IMFSample>& sample) { |
2297 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 2228 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
2298 | 2229 |
2299 if (GetState() == kUninitialized) | 2230 if (GetState() == kUninitialized) |
2300 return; | 2231 return; |
2301 | 2232 |
2302 if (OutputSamplesPresent() || !pending_input_buffers_.empty()) { | 2233 if (OutputSamplesPresent() || !pending_input_buffers_.empty()) { |
2303 pending_input_buffers_.push_back(sample); | 2234 pending_input_buffers_.push_back(sample); |
2304 return; | 2235 return; |
2305 } | 2236 } |
2306 | 2237 |
2307 // Check if the resolution, bit rate, etc changed in the stream. If yes we | 2238 // Check if the resolution, bit rate, etc changed in the stream. If yes we |
2308 // reinitialize the decoder to ensure that the stream decodes correctly. | 2239 // reinitialize the decoder to ensure that the stream decodes correctly. |
2309 bool config_changed = false; | 2240 bool config_changed = false; |
2310 | 2241 |
2311 HRESULT hr = CheckConfigChanged(sample.get(), &config_changed); | 2242 HRESULT hr = CheckConfigChanged(sample.get(), &config_changed); |
2312 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to check video stream config", | 2243 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to check video stream config", |
2313 PLATFORM_FAILURE,); | 2244 PLATFORM_FAILURE, ); |
2314 | 2245 |
2315 if (config_changed) { | 2246 if (config_changed) { |
2316 pending_input_buffers_.push_back(sample); | 2247 pending_input_buffers_.push_back(sample); |
2317 main_thread_task_runner_->PostTask( | 2248 main_thread_task_runner_->PostTask( |
2318 FROM_HERE, | 2249 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::ConfigChanged, |
2319 base::Bind(&DXVAVideoDecodeAccelerator::ConfigChanged, | 2250 weak_this_factory_.GetWeakPtr(), config_)); |
2320 weak_this_factory_.GetWeakPtr(), | |
2321 config_)); | |
2322 return; | 2251 return; |
2323 } | 2252 } |
2324 | 2253 |
2325 if (!inputs_before_decode_) { | 2254 if (!inputs_before_decode_) { |
2326 TRACE_EVENT_ASYNC_BEGIN0("gpu", "DXVAVideoDecodeAccelerator.Decoding", | 2255 TRACE_EVENT_ASYNC_BEGIN0("gpu", "DXVAVideoDecodeAccelerator.Decoding", |
2327 this); | 2256 this); |
2328 } | 2257 } |
2329 inputs_before_decode_++; | 2258 inputs_before_decode_++; |
2330 | 2259 |
2331 hr = decoder_->ProcessInput(0, sample.get(), 0); | 2260 hr = decoder_->ProcessInput(0, sample.get(), 0); |
2332 // As per msdn if the decoder returns MF_E_NOTACCEPTING then it means that it | 2261 // As per msdn if the decoder returns MF_E_NOTACCEPTING then it means that it |
2333 // has enough data to produce one or more output samples. In this case the | 2262 // has enough data to produce one or more output samples. In this case the |
2334 // recommended options are to | 2263 // recommended options are to |
2335 // 1. Generate new output by calling IMFTransform::ProcessOutput until it | 2264 // 1. Generate new output by calling IMFTransform::ProcessOutput until it |
2336 // returns MF_E_TRANSFORM_NEED_MORE_INPUT. | 2265 // returns MF_E_TRANSFORM_NEED_MORE_INPUT. |
2337 // 2. Flush the input data | 2266 // 2. Flush the input data |
2338 // We implement the first option, i.e to retrieve the output sample and then | 2267 // We implement the first option, i.e to retrieve the output sample and then |
2339 // process the input again. Failure in either of these steps is treated as a | 2268 // process the input again. Failure in either of these steps is treated as a |
2340 // decoder failure. | 2269 // decoder failure. |
2341 if (hr == MF_E_NOTACCEPTING) { | 2270 if (hr == MF_E_NOTACCEPTING) { |
2342 DoDecode(); | 2271 DoDecode(); |
2343 // If the DoDecode call resulted in an output frame then we should not | 2272 // If the DoDecode call resulted in an output frame then we should not |
2344 // process any more input until that frame is copied to the target surface. | 2273 // process any more input until that frame is copied to the target surface. |
2345 if (!OutputSamplesPresent()) { | 2274 if (!OutputSamplesPresent()) { |
2346 State state = GetState(); | 2275 State state = GetState(); |
2347 RETURN_AND_NOTIFY_ON_FAILURE((state == kStopped || state == kNormal || | 2276 RETURN_AND_NOTIFY_ON_FAILURE( |
2348 state == kFlushing), | 2277 (state == kStopped || state == kNormal || state == kFlushing), |
2349 "Failed to process output. Unexpected decoder state: " << state, | 2278 "Failed to process output. Unexpected decoder state: " << state, |
2350 PLATFORM_FAILURE,); | 2279 PLATFORM_FAILURE, ); |
2351 hr = decoder_->ProcessInput(0, sample.get(), 0); | 2280 hr = decoder_->ProcessInput(0, sample.get(), 0); |
2352 } | 2281 } |
2353 // If we continue to get the MF_E_NOTACCEPTING error we do the following:- | 2282 // If we continue to get the MF_E_NOTACCEPTING error we do the following:- |
2354 // 1. Add the input sample to the pending queue. | 2283 // 1. Add the input sample to the pending queue. |
2355 // 2. If we don't have any output samples we post the | 2284 // 2. If we don't have any output samples we post the |
2356 // DecodePendingInputBuffers task to process the pending input samples. | 2285 // DecodePendingInputBuffers task to process the pending input samples. |
2357 // If we have an output sample then the above task is posted when the | 2286 // If we have an output sample then the above task is posted when the |
2358 // output samples are sent to the client. | 2287 // output samples are sent to the client. |
2359 // This is because we only support 1 pending output sample at any | 2288 // This is because we only support 1 pending output sample at any |
2360 // given time due to the limitation with the Microsoft media foundation | 2289 // given time due to the limitation with the Microsoft media foundation |
2361 // decoder where it recycles the output Decoder surfaces. | 2290 // decoder where it recycles the output Decoder surfaces. |
2362 if (hr == MF_E_NOTACCEPTING) { | 2291 if (hr == MF_E_NOTACCEPTING) { |
2363 pending_input_buffers_.push_back(sample); | 2292 pending_input_buffers_.push_back(sample); |
2364 decoder_thread_task_runner_->PostTask( | 2293 decoder_thread_task_runner_->PostTask( |
2365 FROM_HERE, | 2294 FROM_HERE, |
2366 base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers, | 2295 base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers, |
2367 base::Unretained(this))); | 2296 base::Unretained(this))); |
2368 return; | 2297 return; |
2369 } | 2298 } |
2370 } | 2299 } |
2371 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to process input sample", | 2300 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to process input sample", |
2372 PLATFORM_FAILURE,); | 2301 PLATFORM_FAILURE, ); |
2373 | 2302 |
2374 DoDecode(); | 2303 DoDecode(); |
2375 | 2304 |
2376 State state = GetState(); | 2305 State state = GetState(); |
2377 RETURN_AND_NOTIFY_ON_FAILURE((state == kStopped || state == kNormal || | 2306 RETURN_AND_NOTIFY_ON_FAILURE( |
2378 state == kFlushing), | 2307 (state == kStopped || state == kNormal || state == kFlushing), |
2379 "Failed to process output. Unexpected decoder state: " << state, | 2308 "Failed to process output. Unexpected decoder state: " << state, |
2380 ILLEGAL_STATE,); | 2309 ILLEGAL_STATE, ); |
2381 | 2310 |
2382 LONGLONG input_buffer_id = 0; | 2311 LONGLONG input_buffer_id = 0; |
2383 RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id), | 2312 RETURN_ON_HR_FAILURE( |
2384 "Failed to get input buffer id associated with sample",); | 2313 sample->GetSampleTime(&input_buffer_id), |
| 2314 "Failed to get input buffer id associated with sample", ); |
2385 // The Microsoft Media foundation decoder internally buffers up to 30 frames | 2315 // The Microsoft Media foundation decoder internally buffers up to 30 frames |
2386 // before returning a decoded frame. We need to inform the client that this | 2316 // before returning a decoded frame. We need to inform the client that this |
2387 // input buffer is processed as it may stop sending us further input. | 2317 // input buffer is processed as it may stop sending us further input. |
2388 // Note: This may break clients which expect every input buffer to be | 2318 // Note: This may break clients which expect every input buffer to be |
2389 // associated with a decoded output buffer. | 2319 // associated with a decoded output buffer. |
2390 // TODO(ananta) | 2320 // TODO(ananta) |
2391 // Do some more investigation into whether it is possible to get the MFT | 2321 // Do some more investigation into whether it is possible to get the MFT |
2392 // decoder to emit an output packet for every input packet. | 2322 // decoder to emit an output packet for every input packet. |
2393 // http://code.google.com/p/chromium/issues/detail?id=108121 | 2323 // http://code.google.com/p/chromium/issues/detail?id=108121 |
2394 // http://code.google.com/p/chromium/issues/detail?id=150925 | 2324 // http://code.google.com/p/chromium/issues/detail?id=150925 |
2395 main_thread_task_runner_->PostTask( | 2325 main_thread_task_runner_->PostTask( |
2396 FROM_HERE, | 2326 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::NotifyInputBufferRead, |
2397 base::Bind(&DXVAVideoDecodeAccelerator::NotifyInputBufferRead, | 2327 weak_this_factory_.GetWeakPtr(), input_buffer_id)); |
2398 weak_this_factory_.GetWeakPtr(), | |
2399 input_buffer_id)); | |
2400 } | 2328 } |
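Since DecodeInternal recovers the bitstream buffer id from the sample timestamp (GetSampleTime) that was stashed when the sample was created, here is a hedged, standalone round-trip of that trick using real Media Foundation calls. The id value and the trimmed error handling are illustrative; it assumes Windows with mfplat.lib.

    // Stash an input-buffer id in an IMFSample's timestamp and read it back.
    #include <mfapi.h>
    #include <mfidl.h>
    #include <iostream>
    #pragma comment(lib, "mfplat.lib")

    int main() {
      if (FAILED(MFStartup(MF_VERSION, MFSTARTUP_FULL)))
        return 1;

      IMFSample* sample = nullptr;
      HRESULT hr = MFCreateSample(&sample);
      if (SUCCEEDED(hr)) {
        const LONGLONG kInputBufferId = 42;  // illustrative id
        hr = sample->SetSampleTime(kInputBufferId);

        LONGLONG recovered = 0;
        if (SUCCEEDED(hr))
          hr = sample->GetSampleTime(&recovered);
        if (SUCCEEDED(hr))
          std::cout << "input buffer id: " << recovered << "\n";  // prints 42
        sample->Release();
      }

      MFShutdown();
      return SUCCEEDED(hr) ? 0 : 1;
    }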
2401 | 2329 |
2402 void DXVAVideoDecodeAccelerator::HandleResolutionChanged(int width, | 2330 void DXVAVideoDecodeAccelerator::HandleResolutionChanged(int width, |
2403 int height) { | 2331 int height) { |
2404 dx11_video_format_converter_media_type_needs_init_ = true; | 2332 dx11_video_format_converter_media_type_needs_init_ = true; |
2405 | 2333 |
2406 main_thread_task_runner_->PostTask( | 2334 main_thread_task_runner_->PostTask( |
2407 FROM_HERE, | 2335 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::DismissStaleBuffers, |
2408 base::Bind(&DXVAVideoDecodeAccelerator::DismissStaleBuffers, | 2336 weak_this_factory_.GetWeakPtr(), false)); |
2409 weak_this_factory_.GetWeakPtr(), false)); | |
2410 | 2337 |
2411 main_thread_task_runner_->PostTask( | 2338 main_thread_task_runner_->PostTask( |
2412 FROM_HERE, | 2339 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers, |
2413 base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers, | 2340 weak_this_factory_.GetWeakPtr(), width, height)); |
2414 weak_this_factory_.GetWeakPtr(), | |
2415 width, | |
2416 height)); | |
2417 } | 2341 } |
2418 | 2342 |
2419 void DXVAVideoDecodeAccelerator::DismissStaleBuffers(bool force) { | 2343 void DXVAVideoDecodeAccelerator::DismissStaleBuffers(bool force) { |
2420 RETURN_AND_NOTIFY_ON_FAILURE(make_context_current_cb_.Run(), | 2344 RETURN_AND_NOTIFY_ON_FAILURE(make_context_current_cb_.Run(), |
2421 "Failed to make context current", | 2345 "Failed to make context current", |
2422 PLATFORM_FAILURE, ); | 2346 PLATFORM_FAILURE, ); |
2423 | 2347 |
2424 OutputBuffers::iterator index; | 2348 OutputBuffers::iterator index; |
2425 | 2349 |
2426 for (index = output_picture_buffers_.begin(); | 2350 for (index = output_picture_buffers_.begin(); |
2427 index != output_picture_buffers_.end(); | 2351 index != output_picture_buffers_.end(); ++index) { |
2428 ++index) { | |
2429 if (force || index->second->available()) { | 2352 if (force || index->second->available()) { |
2430 DVLOG(1) << "Dismissing picture id: " << index->second->id(); | 2353 DVLOG(1) << "Dismissing picture id: " << index->second->id(); |
2431 client_->DismissPictureBuffer(index->second->id()); | 2354 client_->DismissPictureBuffer(index->second->id()); |
2432 } else { | 2355 } else { |
2433 // Move to |stale_output_picture_buffers_| for deferred deletion. | 2356 // Move to |stale_output_picture_buffers_| for deferred deletion. |
2434 stale_output_picture_buffers_.insert( | 2357 stale_output_picture_buffers_.insert( |
2435 std::make_pair(index->first, index->second)); | 2358 std::make_pair(index->first, index->second)); |
2436 } | 2359 } |
2437 } | 2360 } |
2438 | 2361 |
2439 output_picture_buffers_.clear(); | 2362 output_picture_buffers_.clear(); |
2440 } | 2363 } |
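A compact sketch of the deferred-dismissal bookkeeping above: available picture buffers are dismissed immediately, while in-use ones are parked in a stale map and only dismissed once the client hands them back (the DeferredDismissStaleBuffer path). The PictureBuffer struct, global maps and DismissNow below are illustrative stand-ins for the Chromium types.

    // Defer dismissal of buffers that are still in use by the client.
    #include <iostream>
    #include <map>

    struct PictureBuffer {
      int id;
      bool available;
    };

    std::map<int, PictureBuffer> g_active;  // output_picture_buffers_
    std::map<int, PictureBuffer> g_stale;   // stale_output_picture_buffers_

    void DismissNow(int id) { std::cout << "dismissed " << id << "\n"; }

    void DismissStaleBuffers(bool force) {
      for (auto& entry : g_active) {
        if (force || entry.second.available)
          DismissNow(entry.second.id);
        else
          g_stale.insert(entry);  // defer until the client returns it
      }
      g_active.clear();
    }

    void OnBufferReturned(int id) {  // analogous to DeferredDismissStaleBuffer
      auto it = g_stale.find(id);
      if (it != g_stale.end()) {
        DismissNow(it->second.id);
        g_stale.erase(it);
      }
    }

    int main() {
      g_active = {{1, {1, true}}, {2, {2, false}}};
      DismissStaleBuffers(false);  // dismisses 1, parks 2
      OnBufferReturned(2);         // dismisses 2 later
      return 0;
    }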
2441 | 2364 |
2442 void DXVAVideoDecodeAccelerator::DeferredDismissStaleBuffer( | 2365 void DXVAVideoDecodeAccelerator::DeferredDismissStaleBuffer( |
2443 int32_t picture_buffer_id) { | 2366 int32_t picture_buffer_id) { |
2444 RETURN_AND_NOTIFY_ON_FAILURE(make_context_current_cb_.Run(), | 2367 RETURN_AND_NOTIFY_ON_FAILURE(make_context_current_cb_.Run(), |
2445 "Failed to make context current", | 2368 "Failed to make context current", |
2446 PLATFORM_FAILURE, ); | 2369 PLATFORM_FAILURE, ); |
2447 | 2370 |
2448 OutputBuffers::iterator it = stale_output_picture_buffers_.find( | 2371 OutputBuffers::iterator it = |
2449 picture_buffer_id); | 2372 stale_output_picture_buffers_.find(picture_buffer_id); |
2450 DCHECK(it != stale_output_picture_buffers_.end()); | 2373 DCHECK(it != stale_output_picture_buffers_.end()); |
2451 DVLOG(1) << "Dismissing picture id: " << it->second->id(); | 2374 DVLOG(1) << "Dismissing picture id: " << it->second->id(); |
2452 client_->DismissPictureBuffer(it->second->id()); | 2375 client_->DismissPictureBuffer(it->second->id()); |
2453 stale_output_picture_buffers_.erase(it); | 2376 stale_output_picture_buffers_.erase(it); |
2454 } | 2377 } |
2455 | 2378 |
2456 DXVAVideoDecodeAccelerator::State | 2379 DXVAVideoDecodeAccelerator::State DXVAVideoDecodeAccelerator::GetState() { |
2457 DXVAVideoDecodeAccelerator::GetState() { | |
2458 static_assert(sizeof(State) == sizeof(long), "mismatched type sizes"); | 2380 static_assert(sizeof(State) == sizeof(long), "mismatched type sizes"); |
2459 State state = static_cast<State>( | 2381 State state = static_cast<State>( |
2460 InterlockedAdd(reinterpret_cast<volatile long*>(&state_), 0)); | 2382 InterlockedAdd(reinterpret_cast<volatile long*>(&state_), 0)); |
2461 return state; | 2383 return state; |
2462 } | 2384 } |
2463 | 2385 |
2464 void DXVAVideoDecodeAccelerator::SetState(State new_state) { | 2386 void DXVAVideoDecodeAccelerator::SetState(State new_state) { |
2465 if (!main_thread_task_runner_->BelongsToCurrentThread()) { | 2387 if (!main_thread_task_runner_->BelongsToCurrentThread()) { |
2466 main_thread_task_runner_->PostTask( | 2388 main_thread_task_runner_->PostTask( |
2467 FROM_HERE, | 2389 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::SetState, |
2468 base::Bind(&DXVAVideoDecodeAccelerator::SetState, | 2390 weak_this_factory_.GetWeakPtr(), new_state)); |
2469 weak_this_factory_.GetWeakPtr(), | |
2470 new_state)); | |
2471 return; | 2391 return; |
2472 } | 2392 } |
2473 | 2393 |
2474 static_assert(sizeof(State) == sizeof(long), "mismatched type sizes"); | 2394 static_assert(sizeof(State) == sizeof(long), "mismatched type sizes"); |
2475 ::InterlockedExchange(reinterpret_cast<volatile long*>(&state_), | 2395 ::InterlockedExchange(reinterpret_cast<volatile long*>(&state_), new_state); |
2476 new_state); | |
2477 DCHECK_EQ(state_, new_state); | 2396 DCHECK_EQ(state_, new_state); |
2478 } | 2397 } |
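GetState reads the state word atomically via InterlockedAdd(ptr, 0) and SetState writes it with InterlockedExchange. Expressed with std::atomic, the same read/write pair looks like the sketch below; it is a sketch only (the real SetState also hops to the main thread before writing), and the enum values are illustrative.

    // std::atomic equivalent of the Interlocked-based GetState()/SetState().
    #include <atomic>
    #include <iostream>

    enum State : long { kUninitialized, kNormal, kFlushing, kStopped };

    std::atomic<long> g_state{kUninitialized};

    State GetState() {
      // Atomic read, the role InterlockedAdd(&state_, 0) plays in the decoder.
      return static_cast<State>(g_state.load(std::memory_order_seq_cst));
    }

    void SetState(State new_state) {
      // Atomic write, the role InterlockedExchange plays.
      g_state.exchange(new_state, std::memory_order_seq_cst);
    }

    int main() {
      SetState(kNormal);
      std::cout << "state = " << GetState() << "\n";  // prints 1
      return 0;
    }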
2479 | 2398 |
2480 void DXVAVideoDecodeAccelerator::StartDecoderThread() { | 2399 void DXVAVideoDecodeAccelerator::StartDecoderThread() { |
2481 decoder_thread_.init_com_with_mta(false); | 2400 decoder_thread_.init_com_with_mta(false); |
2482 decoder_thread_.Start(); | 2401 decoder_thread_.Start(); |
2483 decoder_thread_task_runner_ = decoder_thread_.task_runner(); | 2402 decoder_thread_task_runner_ = decoder_thread_.task_runner(); |
2484 } | 2403 } |
2485 | 2404 |
2486 bool DXVAVideoDecodeAccelerator::OutputSamplesPresent() { | 2405 bool DXVAVideoDecodeAccelerator::OutputSamplesPresent() { |
2487 base::AutoLock lock(decoder_lock_); | 2406 base::AutoLock lock(decoder_lock_); |
2488 return !pending_output_samples_.empty(); | 2407 return !pending_output_samples_.empty(); |
2489 } | 2408 } |
2490 | 2409 |
2491 void DXVAVideoDecodeAccelerator::CopySurface(IDirect3DSurface9* src_surface, | 2410 void DXVAVideoDecodeAccelerator::CopySurface(IDirect3DSurface9* src_surface, |
2492 IDirect3DSurface9* dest_surface, | 2411 IDirect3DSurface9* dest_surface, |
2493 int picture_buffer_id, | 2412 int picture_buffer_id, |
2494 int input_buffer_id) { | 2413 int input_buffer_id) { |
2495 if (!decoder_thread_task_runner_->BelongsToCurrentThread()) { | 2414 if (!decoder_thread_task_runner_->BelongsToCurrentThread()) { |
2496 decoder_thread_task_runner_->PostTask( | 2415 decoder_thread_task_runner_->PostTask( |
2497 FROM_HERE, | 2416 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::CopySurface, |
2498 base::Bind(&DXVAVideoDecodeAccelerator::CopySurface, | 2417 base::Unretained(this), src_surface, dest_surface, |
2499 base::Unretained(this), | 2418 picture_buffer_id, input_buffer_id)); |
2500 src_surface, | |
2501 dest_surface, | |
2502 picture_buffer_id, | |
2503 input_buffer_id)); | |
2504 return; | 2419 return; |
2505 } | 2420 } |
2506 | 2421 |
2507 HRESULT hr = d3d9_device_ex_->StretchRect(src_surface, NULL, dest_surface, | 2422 HRESULT hr = d3d9_device_ex_->StretchRect(src_surface, NULL, dest_surface, |
2508 NULL, D3DTEXF_NONE); | 2423 NULL, D3DTEXF_NONE); |
2509 RETURN_ON_HR_FAILURE(hr, "Colorspace conversion via StretchRect failed",); | 2424 RETURN_ON_HR_FAILURE(hr, "Colorspace conversion via StretchRect failed", ); |
2510 | 2425 |
2511 // Ideally, this should be done immediately before the draw call that uses | 2426 // Ideally, this should be done immediately before the draw call that uses |
2512 // the texture. Flush it once here though. | 2427 // the texture. Flush it once here though. |
2513 hr = query_->Issue(D3DISSUE_END); | 2428 hr = query_->Issue(D3DISSUE_END); |
2514 RETURN_ON_HR_FAILURE(hr, "Failed to issue END",); | 2429 RETURN_ON_HR_FAILURE(hr, "Failed to issue END", ); |
2515 | 2430 |
2516 // If we are sharing the ANGLE device we don't need to wait for the Flush to | 2431 // If we are sharing the ANGLE device we don't need to wait for the Flush to |
2517 // complete. | 2432 // complete. |
2518 if (using_angle_device_) { | 2433 if (using_angle_device_) { |
2519 main_thread_task_runner_->PostTask( | 2434 main_thread_task_runner_->PostTask( |
2520 FROM_HERE, | 2435 FROM_HERE, |
2521 base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, | 2436 base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, |
2522 weak_this_factory_.GetWeakPtr(), | 2437 weak_this_factory_.GetWeakPtr(), src_surface, dest_surface, |
2523 src_surface, | 2438 picture_buffer_id, input_buffer_id)); |
2524 dest_surface, | |
2525 picture_buffer_id, | |
2526 input_buffer_id)); | |
2527 return; | 2439 return; |
2528 } | 2440 } |
2529 | 2441 |
2530 // Flush the decoder device to ensure that the decoded frame is copied to the | 2442 // Flush the decoder device to ensure that the decoded frame is copied to the |
2531 // target surface. | 2443 // target surface. |
2532 decoder_thread_task_runner_->PostDelayedTask( | 2444 decoder_thread_task_runner_->PostDelayedTask( |
2533 FROM_HERE, | 2445 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder, |
2534 base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder, | 2446 base::Unretained(this), 0, src_surface, |
2535 base::Unretained(this), 0, src_surface, dest_surface, | 2447 dest_surface, picture_buffer_id, input_buffer_id), |
2536 picture_buffer_id, input_buffer_id), | |
2537 base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs)); | 2448 base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs)); |
2538 } | 2449 } |
2539 | 2450 |
2540 void DXVAVideoDecodeAccelerator::CopySurfaceComplete( | 2451 void DXVAVideoDecodeAccelerator::CopySurfaceComplete( |
2541 IDirect3DSurface9* src_surface, | 2452 IDirect3DSurface9* src_surface, |
2542 IDirect3DSurface9* dest_surface, | 2453 IDirect3DSurface9* dest_surface, |
2543 int picture_buffer_id, | 2454 int picture_buffer_id, |
2544 int input_buffer_id) { | 2455 int input_buffer_id) { |
2545 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 2456 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
2546 | 2457 |
(...skipping 25 matching lines...) |
2572 NotifyPictureReady(picture_buffer->id(), input_buffer_id); | 2483 NotifyPictureReady(picture_buffer->id(), input_buffer_id); |
2573 | 2484 |
2574 { | 2485 { |
2575 base::AutoLock lock(decoder_lock_); | 2486 base::AutoLock lock(decoder_lock_); |
2576 if (!pending_output_samples_.empty()) | 2487 if (!pending_output_samples_.empty()) |
2577 pending_output_samples_.pop_front(); | 2488 pending_output_samples_.pop_front(); |
2578 } | 2489 } |
2579 | 2490 |
2580 if (pending_flush_) { | 2491 if (pending_flush_) { |
2581 decoder_thread_task_runner_->PostTask( | 2492 decoder_thread_task_runner_->PostTask( |
2582 FROM_HERE, | 2493 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal, |
2583 base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal, | 2494 base::Unretained(this))); |
2584 base::Unretained(this))); | |
2585 return; | 2495 return; |
2586 } | 2496 } |
2587 decoder_thread_task_runner_->PostTask( | 2497 decoder_thread_task_runner_->PostTask( |
2588 FROM_HERE, | 2498 FROM_HERE, |
2589 base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers, | 2499 base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers, |
2590 base::Unretained(this))); | 2500 base::Unretained(this))); |
2591 } | 2501 } |
2592 | 2502 |
2593 void DXVAVideoDecodeAccelerator::CopyTexture( | 2503 void DXVAVideoDecodeAccelerator::CopyTexture( |
2594 ID3D11Texture2D* src_texture, | 2504 ID3D11Texture2D* src_texture, |
(...skipping 20 matching lines...) |
2615 // conversion as per msdn is done in the GPU. | 2525 // conversion as per msdn is done in the GPU. |
2616 | 2526 |
2617 D3D11_TEXTURE2D_DESC source_desc; | 2527 D3D11_TEXTURE2D_DESC source_desc; |
2618 src_texture->GetDesc(&source_desc); | 2528 src_texture->GetDesc(&source_desc); |
2619 | 2529 |
2620 // Set up the input and output types for the video processor MFT. | 2530 // Set up the input and output types for the video processor MFT. |
2621 if (!InitializeDX11VideoFormatConverterMediaType(source_desc.Width, | 2531 if (!InitializeDX11VideoFormatConverterMediaType(source_desc.Width, |
2622 source_desc.Height)) { | 2532 source_desc.Height)) { |
2623 RETURN_AND_NOTIFY_ON_FAILURE( | 2533 RETURN_AND_NOTIFY_ON_FAILURE( |
2624 false, "Failed to initialize media types for conversion.", | 2534 false, "Failed to initialize media types for conversion.", |
2625 PLATFORM_FAILURE,); | 2535 PLATFORM_FAILURE, ); |
2626 } | 2536 } |
2627 | 2537 |
2628 // The input to the video processor is the output sample. | 2538 // The input to the video processor is the output sample. |
2629 base::win::ScopedComPtr<IMFSample> input_sample_for_conversion; | 2539 base::win::ScopedComPtr<IMFSample> input_sample_for_conversion; |
2630 { | 2540 { |
2631 base::AutoLock lock(decoder_lock_); | 2541 base::AutoLock lock(decoder_lock_); |
2632 PendingSampleInfo& sample_info = pending_output_samples_.front(); | 2542 PendingSampleInfo& sample_info = pending_output_samples_.front(); |
2633 input_sample_for_conversion = sample_info.output_sample; | 2543 input_sample_for_conversion = sample_info.output_sample; |
2634 } | 2544 } |
2635 | 2545 |
(...skipping 21 matching lines...) |
2657 PLATFORM_FAILURE, ); | 2567 PLATFORM_FAILURE, ); |
2658 } | 2568 } |
2659 // The video processor MFT requires output samples to be allocated by the | 2569 // The video processor MFT requires output samples to be allocated by the |
2660 // caller. We create a sample with a buffer backed with the ID3D11Texture2D | 2570 // caller. We create a sample with a buffer backed with the ID3D11Texture2D |
2661 // interface exposed by ANGLE. This works nicely as this ensures that the | 2571 // interface exposed by ANGLE. This works nicely as this ensures that the |
2662 // video processor converts the color space of the output frame and copies | 2572 // video processor converts the color space of the output frame and copies |
2663 // the result into the ANGLE texture. | 2573 // the result into the ANGLE texture. |
2664 base::win::ScopedComPtr<IMFSample> output_sample; | 2574 base::win::ScopedComPtr<IMFSample> output_sample; |
2665 hr = MFCreateSample(output_sample.Receive()); | 2575 hr = MFCreateSample(output_sample.Receive()); |
2666 if (FAILED(hr)) { | 2576 if (FAILED(hr)) { |
2667 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | 2577 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to create output sample.", |
2668 "Failed to create output sample.", PLATFORM_FAILURE,); | 2578 PLATFORM_FAILURE, ); |
2669 } | 2579 } |
2670 | 2580 |
2671 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; | 2581 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; |
2672 hr = MFCreateDXGISurfaceBuffer( | 2582 hr = MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D), dest_texture, 0, |
2673 __uuidof(ID3D11Texture2D), dest_texture, 0, FALSE, | 2583 FALSE, output_buffer.Receive()); |
2674 output_buffer.Receive()); | |
2675 if (FAILED(hr)) { | 2584 if (FAILED(hr)) { |
2676 base::debug::Alias(&hr); | 2585 base::debug::Alias(&hr); |
2677 // TODO(ananta) | 2586 // TODO(ananta) |
2678 // Remove this CHECK when the change to use DX11 for H/W decoding | 2587 // Remove this CHECK when the change to use DX11 for H/W decoding |
2679 // stabilizes. | 2588 // stabilizes. |
2680 CHECK(false); | 2589 CHECK(false); |
2681 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | 2590 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to create output sample.", |
2682 "Failed to create output sample.", PLATFORM_FAILURE,); | 2591 PLATFORM_FAILURE, ); |
2683 } | 2592 } |
2684 | 2593 |
2685 output_sample->AddBuffer(output_buffer.get()); | 2594 output_sample->AddBuffer(output_buffer.get()); |
2686 | 2595 |
2687 hr = video_format_converter_mft_->ProcessInput(0, video_frame, 0); | 2596 hr = video_format_converter_mft_->ProcessInput(0, video_frame, 0); |
2688 if (FAILED(hr)) { | 2597 if (FAILED(hr)) { |
2689 DCHECK(false); | 2598 DCHECK(false); |
2690 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | 2599 RETURN_AND_NOTIFY_ON_HR_FAILURE( |
2691 "Failed to convert output sample format.", PLATFORM_FAILURE,); | 2600 hr, "Failed to convert output sample format.", PLATFORM_FAILURE, ); |
2692 } | 2601 } |
2693 | 2602 |
2694 DWORD status = 0; | 2603 DWORD status = 0; |
2695 MFT_OUTPUT_DATA_BUFFER format_converter_output = {}; | 2604 MFT_OUTPUT_DATA_BUFFER format_converter_output = {}; |
2696 format_converter_output.pSample = output_sample.get(); | 2605 format_converter_output.pSample = output_sample.get(); |
2697 hr = video_format_converter_mft_->ProcessOutput( | 2606 hr = video_format_converter_mft_->ProcessOutput( |
2698 0, // No flags | 2607 0, // No flags |
2699 1, // # of out streams to pull from | 2608 1, // # of out streams to pull from |
2700 &format_converter_output, | 2609 &format_converter_output, &status); |
2701 &status); | |
2702 | 2610 |
2703 if (FAILED(hr)) { | 2611 if (FAILED(hr)) { |
2704 base::debug::Alias(&hr); | 2612 base::debug::Alias(&hr); |
2705 // TODO(ananta) | 2613 // TODO(ananta) |
2706 // Remove this CHECK when the change to use DX11 for H/W decoding | 2614 // Remove this CHECK when the change to use DX11 for H/W decoding |
2707 // stabilizes. | 2615 // stabilizes. |
2708 CHECK(false); | 2616 CHECK(false); |
2709 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | 2617 RETURN_AND_NOTIFY_ON_HR_FAILURE( |
2710 "Failed to convert output sample format.", PLATFORM_FAILURE,); | 2618 hr, "Failed to convert output sample format.", PLATFORM_FAILURE, ); |
2711 } | 2619 } |
2712 | 2620 |
2713 if (dest_keyed_mutex) { | 2621 if (dest_keyed_mutex) { |
2714 HRESULT hr = dest_keyed_mutex->ReleaseSync(keyed_mutex_value + 1); | 2622 HRESULT hr = dest_keyed_mutex->ReleaseSync(keyed_mutex_value + 1); |
2715 RETURN_AND_NOTIFY_ON_FAILURE(hr == S_OK, "Failed to release keyed mutex.", | 2623 RETURN_AND_NOTIFY_ON_FAILURE(hr == S_OK, "Failed to release keyed mutex.", |
2716 PLATFORM_FAILURE, ); | 2624 PLATFORM_FAILURE, ); |
2717 | 2625 |
2718 main_thread_task_runner_->PostTask( | 2626 main_thread_task_runner_->PostTask( |
2719 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, | 2627 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, |
2720 weak_this_factory_.GetWeakPtr(), nullptr, nullptr, | 2628 weak_this_factory_.GetWeakPtr(), nullptr, nullptr, |
2721 picture_buffer_id, input_buffer_id)); | 2629 picture_buffer_id, input_buffer_id)); |
2722 } else { | 2630 } else { |
2723 d3d11_device_context_->Flush(); | 2631 d3d11_device_context_->Flush(); |
2724 d3d11_device_context_->End(d3d11_query_.get()); | 2632 d3d11_device_context_->End(d3d11_query_.get()); |
2725 | 2633 |
2726 decoder_thread_task_runner_->PostDelayedTask( | 2634 decoder_thread_task_runner_->PostDelayedTask( |
2727 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder, | 2635 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder, |
2728 base::Unretained(this), 0, | 2636 base::Unretained(this), 0, |
2729 reinterpret_cast<IDirect3DSurface9*>(NULL), | 2637 reinterpret_cast<IDirect3DSurface9*>(NULL), |
2730 reinterpret_cast<IDirect3DSurface9*>(NULL), | 2638 reinterpret_cast<IDirect3DSurface9*>(NULL), |
2731 picture_buffer_id, input_buffer_id), | 2639 picture_buffer_id, input_buffer_id), |
2732 base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs)); | 2640 base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs)); |
2733 } | 2641 } |
2734 } | 2642 } |
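The output sample fed to the video processor MFT above is backed by the ANGLE-owned ID3D11Texture2D through MFCreateDXGISurfaceBuffer. Below is a hedged, self-contained sketch of just that wrapping step; the D3D11 device and the 64x64 BGRA texture are illustrative scaffolding standing in for the ANGLE texture, and it assumes Windows 8+ with d3d11.lib and mfplat.lib.

    // Wrap a D3D11 texture in an IMFMediaBuffer and attach it to a sample,
    // mirroring how CopyTexture builds the video processor's output sample.
    #include <d3d11.h>
    #include <mfapi.h>
    #include <mfidl.h>
    #include <wrl/client.h>
    #pragma comment(lib, "d3d11.lib")
    #pragma comment(lib, "mfplat.lib")

    using Microsoft::WRL::ComPtr;

    int main() {
      // Illustrative device; the accelerator uses the ANGLE/decoder devices.
      ComPtr<ID3D11Device> device;
      ComPtr<ID3D11DeviceContext> context;
      HRESULT hr = D3D11CreateDevice(nullptr, D3D_DRIVER_TYPE_HARDWARE, nullptr,
                                     0, nullptr, 0, D3D11_SDK_VERSION, &device,
                                     nullptr, &context);
      if (FAILED(hr))
        return 1;

      // Small BGRA texture standing in for the ANGLE-backed destination.
      D3D11_TEXTURE2D_DESC desc = {};
      desc.Width = 64;
      desc.Height = 64;
      desc.MipLevels = 1;
      desc.ArraySize = 1;
      desc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
      desc.SampleDesc.Count = 1;
      desc.Usage = D3D11_USAGE_DEFAULT;
      desc.BindFlags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE;
      ComPtr<ID3D11Texture2D> texture;
      hr = device->CreateTexture2D(&desc, nullptr, &texture);
      if (FAILED(hr))
        return 1;

      if (FAILED(MFStartup(MF_VERSION, MFSTARTUP_FULL)))
        return 1;

      ComPtr<IMFMediaBuffer> buffer;
      hr = MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D), texture.Get(),
                                     0, FALSE, &buffer);
      ComPtr<IMFSample> sample;
      if (SUCCEEDED(hr))
        hr = MFCreateSample(&sample);
      if (SUCCEEDED(hr))
        hr = sample->AddBuffer(buffer.Get());

      MFShutdown();
      return SUCCEEDED(hr) ? 0 : 1;
    }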
2735 | 2643 |
2736 void DXVAVideoDecodeAccelerator::FlushDecoder( | 2644 void DXVAVideoDecodeAccelerator::FlushDecoder(int iterations, |
2737 int iterations, | 2645 IDirect3DSurface9* src_surface, |
2738 IDirect3DSurface9* src_surface, | 2646 IDirect3DSurface9* dest_surface, |
2739 IDirect3DSurface9* dest_surface, | 2647 int picture_buffer_id, |
2740 int picture_buffer_id, | 2648 int input_buffer_id) { |
2741 int input_buffer_id) { | |
2742 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 2649 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
2743 | 2650 |
2744 // The DXVA decoder has its own device which it uses for decoding. ANGLE | 2651 // The DXVA decoder has its own device which it uses for decoding. ANGLE |
2745 // has its own device which we don't have access to. | 2652 // has its own device which we don't have access to. |
2746 // The above code attempts to copy the decoded picture into a surface | 2653 // The above code attempts to copy the decoded picture into a surface |
2747 // which is owned by ANGLE. As there are multiple devices involved in | 2654 // which is owned by ANGLE. As there are multiple devices involved in |
2748 // this, the StretchRect call above is not synchronous. | 2655 // this, the StretchRect call above is not synchronous. |
2749 // We attempt to flush the batched operations to ensure that the picture is | 2656 // We attempt to flush the batched operations to ensure that the picture is |
2750 // copied to the surface owned by ANGLE. | 2657 // copied to the surface owned by ANGLE. |
2751 // We need to do this in a loop and call flush multiple times. | 2658 // We need to do this in a loop and call flush multiple times. |
(...skipping 14 matching lines...) |
2766 // Remove this CHECK when the change to use DX11 for H/W decoding | 2673 // Remove this CHECK when the change to use DX11 for H/W decoding |
2767 // stabilizes. | 2674 // stabilizes. |
2768 CHECK(false); | 2675 CHECK(false); |
2769 } | 2676 } |
2770 } else { | 2677 } else { |
2771 hr = query_->GetData(NULL, 0, D3DGETDATA_FLUSH); | 2678 hr = query_->GetData(NULL, 0, D3DGETDATA_FLUSH); |
2772 } | 2679 } |
2773 | 2680 |
2774 if ((hr == S_FALSE) && (++iterations < kMaxIterationsForD3DFlush)) { | 2681 if ((hr == S_FALSE) && (++iterations < kMaxIterationsForD3DFlush)) { |
2775 decoder_thread_task_runner_->PostDelayedTask( | 2682 decoder_thread_task_runner_->PostDelayedTask( |
2776 FROM_HERE, | 2683 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder, |
2777 base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder, | 2684 base::Unretained(this), iterations, src_surface, |
2778 base::Unretained(this), iterations, src_surface, | 2685 dest_surface, picture_buffer_id, input_buffer_id), |
2779 dest_surface, picture_buffer_id, input_buffer_id), | |
2780 base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs)); | 2686 base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs)); |
2781 return; | 2687 return; |
2782 } | 2688 } |
2783 | 2689 |
2784 main_thread_task_runner_->PostTask( | 2690 main_thread_task_runner_->PostTask( |
2785 FROM_HERE, | 2691 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, |
2786 base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, | 2692 weak_this_factory_.GetWeakPtr(), src_surface, |
2787 weak_this_factory_.GetWeakPtr(), | 2693 dest_surface, picture_buffer_id, input_buffer_id)); |
2788 src_surface, | |
2789 dest_surface, | |
2790 picture_buffer_id, | |
2791 input_buffer_id)); | |
2792 } | 2694 } |
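FlushDecoder keeps polling the D3D event query and, while GetData returns S_FALSE, re-posts itself with a delay up to kMaxIterationsForD3DFlush before completing the copy anyway. The std::-only sketch below shows just that bounded poll-with-delay control flow; no D3D is involved, and the names and constants are illustrative (the real code re-posts a delayed task instead of sleeping).

    // Bounded poll-with-delay: check for completion a limited number of times,
    // then proceed regardless, mirroring FlushDecoder's retry loop.
    #include <chrono>
    #include <functional>
    #include <iostream>
    #include <thread>

    constexpr int kMaxIterations = 4;                            // illustrative
    constexpr auto kRetryDelay = std::chrono::milliseconds(30);  // illustrative

    void PollUntilDone(const std::function<bool()>& is_done,
                       const std::function<void()>& on_complete) {
      for (int iterations = 0; iterations < kMaxIterations; ++iterations) {
        if (is_done())
          break;
        std::this_thread::sleep_for(kRetryDelay);  // real code re-posts a task
      }
      on_complete();  // CopySurfaceComplete in the accelerator
    }

    int main() {
      int checks = 0;
      PollUntilDone([&] { return ++checks >= 3; },
                    [] { std::cout << "copy complete\n"; });
      return 0;
    }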
2793 | 2695 |
2794 bool DXVAVideoDecodeAccelerator::InitializeDX11VideoFormatConverterMediaType( | 2696 bool DXVAVideoDecodeAccelerator::InitializeDX11VideoFormatConverterMediaType( |
2795 int width, int height) { | 2697 int width, |
| 2698 int height) { |
2796 if (!dx11_video_format_converter_media_type_needs_init_) | 2699 if (!dx11_video_format_converter_media_type_needs_init_) |
2797 return true; | 2700 return true; |
2798 | 2701 |
2799 CHECK(video_format_converter_mft_.get()); | 2702 CHECK(video_format_converter_mft_.get()); |
2800 | 2703 |
2801 HRESULT hr = video_format_converter_mft_->ProcessMessage( | 2704 HRESULT hr = video_format_converter_mft_->ProcessMessage( |
2802 MFT_MESSAGE_SET_D3D_MANAGER, | 2705 MFT_MESSAGE_SET_D3D_MANAGER, |
2803 reinterpret_cast<ULONG_PTR>( | 2706 reinterpret_cast<ULONG_PTR>(d3d11_device_manager_.get())); |
2804 d3d11_device_manager_.get())); | |
2805 | 2707 |
2806 if (FAILED(hr)) { | 2708 if (FAILED(hr)) { |
2807 base::debug::Alias(&hr); | 2709 base::debug::Alias(&hr); |
2808 // TODO(ananta) | 2710 // TODO(ananta) |
2809 // Remove this CHECK when the change to use DX11 for H/W decoding | 2711 // Remove this CHECK when the change to use DX11 for H/W decoding |
2810 // stabilizes. | 2712 // stabilizes. |
2811 CHECK(false); | 2713 CHECK(false); |
2812 } | 2714 } |
2813 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | 2715 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, |
2814 "Failed to initialize video format converter", PLATFORM_FAILURE, false); | 2716 "Failed to initialize video format converter", |
| 2717 PLATFORM_FAILURE, false); |
2815 | 2718 |
2816 video_format_converter_mft_->ProcessMessage( | 2719 video_format_converter_mft_->ProcessMessage(MFT_MESSAGE_NOTIFY_END_STREAMING, |
2817 MFT_MESSAGE_NOTIFY_END_STREAMING, 0); | 2720 0); |
2818 | 2721 |
2819 base::win::ScopedComPtr<IMFMediaType> media_type; | 2722 base::win::ScopedComPtr<IMFMediaType> media_type; |
2820 hr = MFCreateMediaType(media_type.Receive()); | 2723 hr = MFCreateMediaType(media_type.Receive()); |
2821 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFCreateMediaType failed", | 2724 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFCreateMediaType failed", |
2822 PLATFORM_FAILURE, false); | 2725 PLATFORM_FAILURE, false); |
2823 | 2726 |
2824 hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); | 2727 hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); |
2825 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set major input type", | 2728 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set major input type", |
2826 PLATFORM_FAILURE, false); | 2729 PLATFORM_FAILURE, false); |
2827 | 2730 |
2828 hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12); | 2731 hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12); |
2829 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set input sub type", | 2732 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set input sub type", |
2830 PLATFORM_FAILURE, false); | 2733 PLATFORM_FAILURE, false); |
2831 | 2734 |
2832 hr = MFSetAttributeSize(media_type.get(), MF_MT_FRAME_SIZE, width, height); | 2735 hr = MFSetAttributeSize(media_type.get(), MF_MT_FRAME_SIZE, width, height); |
2833 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set media type attributes", | 2736 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set media type attributes", |
2834 PLATFORM_FAILURE, false); | 2737 PLATFORM_FAILURE, false); |
2835 | 2738 |
2836 hr = video_format_converter_mft_->SetInputType(0, media_type.get(), 0); | 2739 hr = video_format_converter_mft_->SetInputType(0, media_type.get(), 0); |
2837 if (FAILED(hr)) { | 2740 if (FAILED(hr)) { |
2838 base::debug::Alias(&hr); | 2741 base::debug::Alias(&hr); |
2839 // TODO(ananta) | 2742 // TODO(ananta) |
2840 // Remove this CHECK when the change to use DX11 for H/W decoding | 2743 // Remove this CHECK when the change to use DX11 for H/W decoding |
2841 // stabilizes. | 2744 // stabilizes. |
2842 CHECK(false); | 2745 CHECK(false); |
2843 } | 2746 } |
2844 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter input type", | 2747 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter input type", |
2845 PLATFORM_FAILURE, false); | 2748 PLATFORM_FAILURE, false); |
2846 | 2749 |
2847 // It appears that we fail to set MFVideoFormat_ARGB32 as the output media | 2750 // It appears that we fail to set MFVideoFormat_ARGB32 as the output media |
2848 // type in certain configurations. Try to fallback to MFVideoFormat_RGB32 | 2751 // type in certain configurations. Try to fallback to MFVideoFormat_RGB32 |
2849 // in such cases. If both fail, then bail. | 2752 // in such cases. If both fail, then bail. |
2850 bool media_type_set = | 2753 bool media_type_set = SetTransformOutputType( |
2851 SetTransformOutputType(video_format_converter_mft_.get(), | 2754 video_format_converter_mft_.get(), MFVideoFormat_ARGB32, width, height); |
2852 MFVideoFormat_ARGB32, | |
2853 width, | |
2854 height); | |
2855 if (!media_type_set) { | 2755 if (!media_type_set) { |
2856 media_type_set = | 2756 media_type_set = SetTransformOutputType(video_format_converter_mft_.get(), |
2857 SetTransformOutputType(video_format_converter_mft_.get(), | 2757 MFVideoFormat_RGB32, width, height); |
2858 MFVideoFormat_RGB32, | |
2859 width, | |
2860 height); | |
2861 } | 2758 } |
2862 | 2759 |
2863 if (!media_type_set) { | 2760 if (!media_type_set) { |
2864 // Remove this once this stabilizes in the field. | 2761 // Remove this once this stabilizes in the field. |
2865 CHECK(false); | 2762 CHECK(false); |
2866 LOG(ERROR) << "Failed to find a matching RGB output type in the converter"; | 2763 LOG(ERROR) << "Failed to find a matching RGB output type in the converter"; |
2867 return false; | 2764 return false; |
2868 } | 2765 } |
2869 | 2766 |
2870 dx11_video_format_converter_media_type_needs_init_ = false; | 2767 dx11_video_format_converter_media_type_needs_init_ = false; |
2871 return true; | 2768 return true; |
2872 } | 2769 } |
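For context, building the NV12 input media type handed to the converter above boils down to MFCreateMediaType plus the major type, subtype and MF_MT_FRAME_SIZE attributes. The standalone sketch below uses real Media Foundation calls; the 1920x1080 size is illustrative and error handling is collapsed (Windows, mfplat.lib, mfuuid.lib).

    // Build an NV12 video media type suitable for IMFTransform::SetInputType().
    #include <mfapi.h>
    #include <mfidl.h>
    #pragma comment(lib, "mfplat.lib")
    #pragma comment(lib, "mfuuid.lib")

    int main() {
      if (FAILED(MFStartup(MF_VERSION, MFSTARTUP_FULL)))
        return 1;

      IMFMediaType* media_type = nullptr;
      HRESULT hr = MFCreateMediaType(&media_type);
      if (SUCCEEDED(hr))
        hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
      if (SUCCEEDED(hr))
        hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12);
      if (SUCCEEDED(hr))
        hr = MFSetAttributeSize(media_type, MF_MT_FRAME_SIZE, 1920, 1080);

      // At this point the type could be passed to SetInputType() on the MFT.
      if (media_type)
        media_type->Release();
      MFShutdown();
      return SUCCEEDED(hr) ? 0 : 1;
    }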
2873 | 2770 |
2874 bool DXVAVideoDecodeAccelerator::GetVideoFrameDimensions( | 2771 bool DXVAVideoDecodeAccelerator::GetVideoFrameDimensions(IMFSample* sample, |
2875 IMFSample* sample, | 2772 int* width, |
2876 int* width, | 2773 int* height) { |
2877 int* height) { | |
2878 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; | 2774 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; |
2879 HRESULT hr = sample->GetBufferByIndex(0, output_buffer.Receive()); | 2775 HRESULT hr = sample->GetBufferByIndex(0, output_buffer.Receive()); |
2880 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", false); | 2776 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", false); |
2881 | 2777 |
2882 if (use_dx11_) { | 2778 if (use_dx11_) { |
2883 base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer; | 2779 base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer; |
2884 base::win::ScopedComPtr<ID3D11Texture2D> d3d11_texture; | 2780 base::win::ScopedComPtr<ID3D11Texture2D> d3d11_texture; |
2885 hr = dxgi_buffer.QueryFrom(output_buffer.get()); | 2781 hr = dxgi_buffer.QueryFrom(output_buffer.get()); |
2886 RETURN_ON_HR_FAILURE(hr, "Failed to get DXGIBuffer from output sample", | 2782 RETURN_ON_HR_FAILURE(hr, "Failed to get DXGIBuffer from output sample", |
2887 false); | 2783 false); |
(...skipping 14 matching lines...) |
2902 false); | 2798 false); |
2903 D3DSURFACE_DESC surface_desc; | 2799 D3DSURFACE_DESC surface_desc; |
2904 hr = surface->GetDesc(&surface_desc); | 2800 hr = surface->GetDesc(&surface_desc); |
2905 RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false); | 2801 RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false); |
2906 *width = surface_desc.Width; | 2802 *width = surface_desc.Width; |
2907 *height = surface_desc.Height; | 2803 *height = surface_desc.Height; |
2908 } | 2804 } |
2909 return true; | 2805 return true; |
2910 } | 2806 } |
2911 | 2807 |
2912 bool DXVAVideoDecodeAccelerator::SetTransformOutputType( | 2808 bool DXVAVideoDecodeAccelerator::SetTransformOutputType(IMFTransform* transform, |
2913 IMFTransform* transform, | 2809 const GUID& output_type, |
2914 const GUID& output_type, | 2810 int width, |
2915 int width, | 2811 int height) { |
2916 int height) { | |
2917 HRESULT hr = E_FAIL; | 2812 HRESULT hr = E_FAIL; |
2918 base::win::ScopedComPtr<IMFMediaType> media_type; | 2813 base::win::ScopedComPtr<IMFMediaType> media_type; |
2919 | 2814 |
2920 for (uint32_t i = 0; | 2815 for (uint32_t i = 0; |
2921 SUCCEEDED(transform->GetOutputAvailableType( | 2816 SUCCEEDED(transform->GetOutputAvailableType(0, i, media_type.Receive())); |
2922 0, i, media_type.Receive())); | |
2923 ++i) { | 2817 ++i) { |
2924 GUID out_subtype = {0}; | 2818 GUID out_subtype = {0}; |
2925 hr = media_type->GetGUID(MF_MT_SUBTYPE, &out_subtype); | 2819 hr = media_type->GetGUID(MF_MT_SUBTYPE, &out_subtype); |
2926 RETURN_ON_HR_FAILURE(hr, "Failed to get output major type", false); | 2820 RETURN_ON_HR_FAILURE(hr, "Failed to get output major type", false); |
2927 | 2821 |
2928 if (out_subtype == output_type) { | 2822 if (out_subtype == output_type) { |
2929 if (width && height) { | 2823 if (width && height) { |
2930 hr = MFSetAttributeSize(media_type.get(), MF_MT_FRAME_SIZE, width, | 2824 hr = MFSetAttributeSize(media_type.get(), MF_MT_FRAME_SIZE, width, |
2931 height); | 2825 height); |
2932 RETURN_ON_HR_FAILURE(hr, "Failed to set media type attributes", false); | 2826 RETURN_ON_HR_FAILURE(hr, "Failed to set media type attributes", false); |
2933 } | 2827 } |
2934 hr = transform->SetOutputType(0, media_type.get(), 0); // No flags | 2828 hr = transform->SetOutputType(0, media_type.get(), 0); // No flags |
2935 RETURN_ON_HR_FAILURE(hr, "Failed to set output type", false); | 2829 RETURN_ON_HR_FAILURE(hr, "Failed to set output type", false); |
2936 return true; | 2830 return true; |
2937 } | 2831 } |
2938 media_type.Release(); | 2832 media_type.Release(); |
2939 } | 2833 } |
2940 return false; | 2834 return false; |
2941 } | 2835 } |
2942 | 2836 |
2943 HRESULT DXVAVideoDecodeAccelerator::CheckConfigChanged( | 2837 HRESULT DXVAVideoDecodeAccelerator::CheckConfigChanged(IMFSample* sample, |
2944 IMFSample* sample, bool* config_changed) { | 2838 bool* config_changed) { |
2945 if (codec_ != media::kCodecH264) | 2839 if (codec_ != media::kCodecH264) |
2946 return S_FALSE; | 2840 return S_FALSE; |
2947 | 2841 |
2948 base::win::ScopedComPtr<IMFMediaBuffer> buffer; | 2842 base::win::ScopedComPtr<IMFMediaBuffer> buffer; |
2949 HRESULT hr = sample->GetBufferByIndex(0, buffer.Receive()); | 2843 HRESULT hr = sample->GetBufferByIndex(0, buffer.Receive()); |
2950 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from input sample", hr); | 2844 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from input sample", hr); |
2951 | 2845 |
2952 MediaBufferScopedPointer scoped_media_buffer(buffer.get()); | 2846 MediaBufferScopedPointer scoped_media_buffer(buffer.get()); |
2953 | 2847 |
2954 if (!config_change_detector_->DetectConfig( | 2848 if (!config_change_detector_->DetectConfig( |
2955 scoped_media_buffer.get(), | 2849 scoped_media_buffer.get(), scoped_media_buffer.current_length())) { |
2956 scoped_media_buffer.current_length())) { | |
2957 RETURN_ON_HR_FAILURE(E_FAIL, "Failed to detect H.264 stream config", | 2850 RETURN_ON_HR_FAILURE(E_FAIL, "Failed to detect H.264 stream config", |
2958 E_FAIL); | 2851 E_FAIL); |
2959 } | 2852 } |
2960 *config_changed = config_change_detector_->config_changed(); | 2853 *config_changed = config_change_detector_->config_changed(); |
2961 return S_OK; | 2854 return S_OK; |
2962 } | 2855 } |
2963 | 2856 |
2964 void DXVAVideoDecodeAccelerator::ConfigChanged( | 2857 void DXVAVideoDecodeAccelerator::ConfigChanged(const Config& config) { |
2965 const Config& config) { | |
2966 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 2858 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
2967 | 2859 |
2968 SetState(kConfigChange); | 2860 SetState(kConfigChange); |
2969 DismissStaleBuffers(true); | 2861 DismissStaleBuffers(true); |
2970 Invalidate(); | 2862 Invalidate(); |
2971 Initialize(config_, client_); | 2863 Initialize(config_, client_); |
2972 decoder_thread_task_runner_->PostTask( | 2864 decoder_thread_task_runner_->PostTask( |
2973 FROM_HERE, | 2865 FROM_HERE, |
2974 base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers, | 2866 base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers, |
2975 base::Unretained(this))); | 2867 base::Unretained(this))); |
2976 } | 2868 } |
2977 | 2869 |
2978 } // namespace content | 2870 } // namespace media |