| OLD | NEW |
| (Empty) |
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 #include "content/common/gpu/media/dxva_video_decode_accelerator.h" | |
| 6 | |
| 7 #if !defined(OS_WIN) | |
| 8 #error This file should only be built on Windows. | |
| 9 #endif // !defined(OS_WIN) | |
| 10 | |
| 11 #include <ks.h> | |
| 12 #include <codecapi.h> | |
| 13 #include <dxgi1_2.h> | |
| 14 #include <mfapi.h> | |
| 15 #include <mferror.h> | |
| 16 #include <ntverp.h> | |
| 17 #include <wmcodecdsp.h> | |
| 18 | |
| 19 #include "base/base_paths_win.h" | |
| 20 #include "base/bind.h" | |
| 21 #include "base/callback.h" | |
| 22 #include "base/command_line.h" | |
| 23 #include "base/debug/alias.h" | |
| 24 #include "base/file_version_info.h" | |
| 25 #include "base/files/file_path.h" | |
| 26 #include "base/logging.h" | |
| 27 #include "base/memory/scoped_ptr.h" | |
| 28 #include "base/memory/shared_memory.h" | |
| 29 #include "base/message_loop/message_loop.h" | |
| 30 #include "base/path_service.h" | |
| 31 #include "base/trace_event/trace_event.h" | |
| 32 #include "base/win/windows_version.h" | |
| 33 #include "content/public/common/content_switches.h" | |
| 34 #include "media/base/win/mf_initializer.h" | |
| 35 #include "media/video/video_decode_accelerator.h" | |
| 36 #include "third_party/angle/include/EGL/egl.h" | |
| 37 #include "third_party/angle/include/EGL/eglext.h" | |
| 38 #include "ui/gl/gl_bindings.h" | |
| 39 #include "ui/gl/gl_context.h" | |
| 40 #include "ui/gl/gl_surface_egl.h" | |
| 41 #include "ui/gl/gl_switches.h" | |
| 42 | |
namespace {

// Path is appended on to the PROGRAM_FILES base path.
const wchar_t kVPXDecoderDLLPath[] = L"Intel\\Media SDK\\";

// File name of the Intel Media SDK VP8 decoder MFT, selected to match the
// CPU architecture this binary is compiled for.
const wchar_t kVP8DecoderDLLName[] =
#if defined(ARCH_CPU_X86)
    L"mfx_mft_vp8vd_32.dll";
#elif defined(ARCH_CPU_X86_64)
    L"mfx_mft_vp8vd_64.dll";
#else
#error Unsupported Windows CPU Architecture
#endif

// File name of the Intel Media SDK VP9 decoder MFT (see kVP8DecoderDLLName).
const wchar_t kVP9DecoderDLLName[] =
#if defined(ARCH_CPU_X86)
    L"mfx_mft_vp9vd_32.dll";
#elif defined(ARCH_CPU_X86_64)
    L"mfx_mft_vp9vd_64.dll";
#else
#error Unsupported Windows CPU Architecture
#endif

// Class ID of the Intel Media SDK VP8 decoder transform.
const CLSID CLSID_WebmMfVp8Dec = {
  0x451e3cb7,
  0x2622,
  0x4ba5,
  { 0x8e, 0x1d, 0x44, 0xb3, 0xc4, 0x1d, 0x09, 0x24 }
};

// Class ID of the Intel Media SDK VP9 decoder transform.
const CLSID CLSID_WebmMfVp9Dec = {
  0x07ab4bd2,
  0x1979,
  0x4fcd,
  { 0xa6, 0x97, 0xdf, 0x9a, 0xd1, 0x5b, 0x34, 0xfe }
};

// Media subtype GUID for VP8 bitstreams; 0x30385056 is the FOURCC 'VP80'.
const CLSID MEDIASUBTYPE_VP80 = {
  0x30385056,
  0x0000,
  0x0010,
  { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }
};

// Media subtype GUID for VP9 bitstreams; 0x30395056 is the FOURCC 'VP90'.
const CLSID MEDIASUBTYPE_VP90 = {
  0x30395056,
  0x0000,
  0x0010,
  { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }
};

// The CLSID of the video processor media foundation transform which we use for
// texture color conversion in DX11.
// Defined in mfidl.h in the Windows 10 SDK. ntverp.h provides VER_PRODUCTBUILD
// to detect which SDK we are compiling with.
#if VER_PRODUCTBUILD < 10011  // VER_PRODUCTBUILD for 10.0.10158.0 SDK.
DEFINE_GUID(CLSID_VideoProcessorMFT,
            0x88753b26, 0x5b24, 0x49bd, 0xb2, 0xe7, 0xc, 0x44, 0x5c, 0x78,
            0xc9, 0x82);
#endif

// MF_XVP_PLAYBACK_MODE
// Data type: UINT32 (treat as BOOL)
// If this attribute is TRUE, the video processor will run in playback mode
// where it allows callers to allocate output samples and allows last frame
// regeneration (repaint).
DEFINE_GUID(MF_XVP_PLAYBACK_MODE, 0x3c5d293f, 0xad67, 0x4e29, 0xaf, 0x12,
            0xcf, 0x3e, 0x23, 0x8a, 0xcc, 0xe9);

}  // namespace
| 113 | |
| 114 namespace content { | |
| 115 | |
// Codec profiles this accelerator can decode; Initialize() rejects any
// profile not listed here.
static const media::VideoCodecProfile kSupportedProfiles[] = {
  media::H264PROFILE_BASELINE,
  media::H264PROFILE_MAIN,
  media::H264PROFILE_HIGH,
  media::VP8PROFILE_ANY,
  media::VP9PROFILE_ANY
};
| 123 | |
// Function pointer for MFCreateDXGIDeviceManager, resolved at runtime in
// Initialize() because the export lives in different DLLs depending on the
// Windows version (see the probing logic there).
CreateDXGIDeviceManager DXVAVideoDecodeAccelerator::create_dxgi_device_manager_
    = NULL;
| 126 | |
// Logs |log| and returns |ret| from the enclosing function when |result| is
// falsy. The do/while(0) wrapper makes the macro expand to one statement.
#define RETURN_ON_FAILURE(result, log, ret)  \
  do {                                       \
    if (!(result)) {                         \
      DLOG(ERROR) << log;                    \
      return ret;                            \
    }                                        \
  } while (0)

// Variant of RETURN_ON_FAILURE for HRESULT-returning calls; appends the
// failing HRESULT to the log message.
#define RETURN_ON_HR_FAILURE(result, log, ret)                     \
  RETURN_ON_FAILURE(SUCCEEDED(result),                             \
                    log << ", HRESULT: 0x" << std::hex << result,  \
                    ret);

// Like RETURN_ON_FAILURE, but also reports |error_code| to the client via
// StopOnError() before returning, so failures surface as decoder errors.
#define RETURN_AND_NOTIFY_ON_FAILURE(result, log, error_code, ret)  \
  do {                                                              \
    if (!(result)) {                                                \
      DVLOG(1) << log;                                              \
      StopOnError(error_code);                                      \
      return ret;                                                   \
    }                                                               \
  } while (0)

// HRESULT-checking variant of RETURN_AND_NOTIFY_ON_FAILURE.
#define RETURN_AND_NOTIFY_ON_HR_FAILURE(result, log, error_code, ret)  \
  RETURN_AND_NOTIFY_ON_FAILURE(SUCCEEDED(result),                      \
      log << ", HRESULT: 0x" << std::hex << result,                    \
      error_code, ret);
| 153 | |
enum {
  // Timeout, in milliseconds, for one attempt to flush the batched queries
  // to the driver before giving up and allowing torn/corrupt frames to be
  // rendered.
  // NOTE(review): the original comment described an iteration count, but the
  // name says milliseconds — confirm against the polling loop that uses it.
  kFlushDecoderSurfaceTimeoutMs = 1,
  // Maximum iterations where we try to flush the d3d device.
  kMaxIterationsForD3DFlush = 4,
  // We only request 5 picture buffers from the client which are used to hold
  // the decoded samples. These buffers are then reused when the client tells
  // us that it is done with the buffer.
  kNumPictureBuffers = 5,
};
| 166 | |
| 167 static IMFSample* CreateEmptySample() { | |
| 168 base::win::ScopedComPtr<IMFSample> sample; | |
| 169 HRESULT hr = MFCreateSample(sample.Receive()); | |
| 170 RETURN_ON_HR_FAILURE(hr, "MFCreateSample failed", NULL); | |
| 171 return sample.Detach(); | |
| 172 } | |
| 173 | |
| 174 // Creates a Media Foundation sample with one buffer of length |buffer_length| | |
| 175 // on a |align|-byte boundary. Alignment must be a perfect power of 2 or 0. | |
| 176 static IMFSample* CreateEmptySampleWithBuffer(int buffer_length, int align) { | |
| 177 CHECK_GT(buffer_length, 0); | |
| 178 | |
| 179 base::win::ScopedComPtr<IMFSample> sample; | |
| 180 sample.Attach(CreateEmptySample()); | |
| 181 | |
| 182 base::win::ScopedComPtr<IMFMediaBuffer> buffer; | |
| 183 HRESULT hr = E_FAIL; | |
| 184 if (align == 0) { | |
| 185 // Note that MFCreateMemoryBuffer is same as MFCreateAlignedMemoryBuffer | |
| 186 // with the align argument being 0. | |
| 187 hr = MFCreateMemoryBuffer(buffer_length, buffer.Receive()); | |
| 188 } else { | |
| 189 hr = MFCreateAlignedMemoryBuffer(buffer_length, | |
| 190 align - 1, | |
| 191 buffer.Receive()); | |
| 192 } | |
| 193 RETURN_ON_HR_FAILURE(hr, "Failed to create memory buffer for sample", NULL); | |
| 194 | |
| 195 hr = sample->AddBuffer(buffer.get()); | |
| 196 RETURN_ON_HR_FAILURE(hr, "Failed to add buffer to sample", NULL); | |
| 197 | |
| 198 buffer->SetCurrentLength(0); | |
| 199 return sample.Detach(); | |
| 200 } | |
| 201 | |
| 202 // Creates a Media Foundation sample with one buffer containing a copy of the | |
| 203 // given Annex B stream data. | |
| 204 // If duration and sample time are not known, provide 0. | |
| 205 // |min_size| specifies the minimum size of the buffer (might be required by | |
| 206 // the decoder for input). If no alignment is required, provide 0. | |
| 207 static IMFSample* CreateInputSample(const uint8* stream, int size, | |
| 208 int min_size, int alignment) { | |
| 209 CHECK(stream); | |
| 210 CHECK_GT(size, 0); | |
| 211 base::win::ScopedComPtr<IMFSample> sample; | |
| 212 sample.Attach(CreateEmptySampleWithBuffer(std::max(min_size, size), | |
| 213 alignment)); | |
| 214 RETURN_ON_FAILURE(sample.get(), "Failed to create empty sample", NULL); | |
| 215 | |
| 216 base::win::ScopedComPtr<IMFMediaBuffer> buffer; | |
| 217 HRESULT hr = sample->GetBufferByIndex(0, buffer.Receive()); | |
| 218 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from sample", NULL); | |
| 219 | |
| 220 DWORD max_length = 0; | |
| 221 DWORD current_length = 0; | |
| 222 uint8* destination = NULL; | |
| 223 hr = buffer->Lock(&destination, &max_length, ¤t_length); | |
| 224 RETURN_ON_HR_FAILURE(hr, "Failed to lock buffer", NULL); | |
| 225 | |
| 226 CHECK_EQ(current_length, 0u); | |
| 227 CHECK_GE(static_cast<int>(max_length), size); | |
| 228 memcpy(destination, stream, size); | |
| 229 | |
| 230 hr = buffer->Unlock(); | |
| 231 RETURN_ON_HR_FAILURE(hr, "Failed to unlock buffer", NULL); | |
| 232 | |
| 233 hr = buffer->SetCurrentLength(size); | |
| 234 RETURN_ON_HR_FAILURE(hr, "Failed to set buffer length", NULL); | |
| 235 | |
| 236 return sample.Detach(); | |
| 237 } | |
| 238 | |
| 239 static IMFSample* CreateSampleFromInputBuffer( | |
| 240 const media::BitstreamBuffer& bitstream_buffer, | |
| 241 DWORD stream_size, | |
| 242 DWORD alignment) { | |
| 243 base::SharedMemory shm(bitstream_buffer.handle(), true); | |
| 244 RETURN_ON_FAILURE(shm.Map(bitstream_buffer.size()), | |
| 245 "Failed in base::SharedMemory::Map", NULL); | |
| 246 | |
| 247 return CreateInputSample(reinterpret_cast<const uint8*>(shm.memory()), | |
| 248 bitstream_buffer.size(), | |
| 249 stream_size, | |
| 250 alignment); | |
| 251 } | |
| 252 | |
| 253 // Helper function to create a COM object instance from a DLL. The alternative | |
| 254 // is to use the CoCreateInstance API which requires the COM apartment to be | |
| 255 // initialized which is not the case on the GPU main thread. We want to avoid | |
| 256 // initializing COM as it may have sideeffects. | |
| 257 HRESULT CreateCOMObjectFromDll(HMODULE dll, const CLSID& clsid, const IID& iid, | |
| 258 void** object) { | |
| 259 if (!dll || !object) | |
| 260 return E_INVALIDARG; | |
| 261 | |
| 262 using GetClassObject = HRESULT (WINAPI*)( | |
| 263 const CLSID& clsid, const IID& iid, void** object); | |
| 264 | |
| 265 GetClassObject get_class_object = reinterpret_cast<GetClassObject>( | |
| 266 GetProcAddress(dll, "DllGetClassObject")); | |
| 267 RETURN_ON_FAILURE( | |
| 268 get_class_object, "Failed to get DllGetClassObject pointer", E_FAIL); | |
| 269 | |
| 270 base::win::ScopedComPtr<IClassFactory> factory; | |
| 271 HRESULT hr = get_class_object( | |
| 272 clsid, | |
| 273 __uuidof(IClassFactory), | |
| 274 factory.ReceiveVoid()); | |
| 275 RETURN_ON_HR_FAILURE(hr, "DllGetClassObject failed", hr); | |
| 276 | |
| 277 hr = factory->CreateInstance(NULL, iid, object); | |
| 278 return hr; | |
| 279 } | |
| 280 | |
// Helper function to query the ANGLE device object. The template argument T
// identifies the device interface being queried. IDirect3DDevice9Ex for d3d9
// and ID3D11Device for dx11.
template<class T>
base::win::ScopedComPtr<T> QueryDeviceObjectFromANGLE(int object_type) {
  // All failure paths return |device_object| empty, signaling to the caller
  // that no device could be obtained from ANGLE.
  base::win::ScopedComPtr<T> device_object;

  EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();
  intptr_t egl_device = 0;
  intptr_t device = 0;

  RETURN_ON_FAILURE(
      gfx::GLSurfaceEGL::HasEGLExtension("EGL_EXT_device_query"),
      "EGL_EXT_device_query missing",
      device_object);

  // Extension entry points are not exported directly by the EGL library and
  // must be resolved at runtime via eglGetProcAddress.
  PFNEGLQUERYDISPLAYATTRIBEXTPROC QueryDisplayAttribEXT =
      reinterpret_cast<PFNEGLQUERYDISPLAYATTRIBEXTPROC>(eglGetProcAddress(
          "eglQueryDisplayAttribEXT"));

  RETURN_ON_FAILURE(
      QueryDisplayAttribEXT,
      "Failed to get the eglQueryDisplayAttribEXT function from ANGLE",
      device_object);

  PFNEGLQUERYDEVICEATTRIBEXTPROC QueryDeviceAttribEXT =
      reinterpret_cast<PFNEGLQUERYDEVICEATTRIBEXTPROC>(eglGetProcAddress(
          "eglQueryDeviceAttribEXT"));

  RETURN_ON_FAILURE(
      QueryDeviceAttribEXT,
      "Failed to get the eglQueryDeviceAttribEXT function from ANGLE",
      device_object);

  // First obtain the display's EGLDeviceEXT handle, then query that device
  // for the underlying D3D device pointer selected by |object_type|.
  RETURN_ON_FAILURE(
      QueryDisplayAttribEXT(egl_display, EGL_DEVICE_EXT, &egl_device),
      "The eglQueryDisplayAttribEXT function failed to get the EGL device",
      device_object);

  RETURN_ON_FAILURE(
      egl_device,
      "Failed to get the EGL device",
      device_object);

  RETURN_ON_FAILURE(
      QueryDeviceAttribEXT(
          reinterpret_cast<EGLDeviceEXT>(egl_device), object_type, &device),
      "The eglQueryDeviceAttribEXT function failed to get the device",
      device_object);

  RETURN_ON_FAILURE(device, "Failed to get the ANGLE device", device_object);

  // Assigning the raw pointer into the ScopedComPtr takes a reference on the
  // returned device object.
  device_object = reinterpret_cast<T*>(device);
  return device_object;
}
| 336 | |
| 337 | |
// Maintains information about a DXVA picture buffer, i.e. whether it is
// available for rendering, the texture information, etc.
struct DXVAVideoDecodeAccelerator::DXVAPictureBuffer {
 public:
  // Creates a picture buffer whose EGL pbuffer surface is linked via a D3D
  // share handle to a D3D9 or DX11 texture, depending on whether |decoder|
  // has a d3d11 device. Returns a NULL linked_ptr on failure.
  static linked_ptr<DXVAPictureBuffer> Create(
      const DXVAVideoDecodeAccelerator& decoder,
      const media::PictureBuffer& buffer,
      EGLConfig egl_config);
  ~DXVAPictureBuffer();

  // Releases the surface/texture references held for the last copy and
  // marks the buffer as available for the next decoded frame.
  void ReusePictureBuffer();
  // Copies the output sample data to the picture buffer provided by the
  // client.
  // The dest_surface parameter contains the decoded bits.
  bool CopyOutputSampleDataToPictureBuffer(
      DXVAVideoDecodeAccelerator* decoder,
      IDirect3DSurface9* dest_surface,
      ID3D11Texture2D* dx11_texture,
      int input_buffer_id);

  // True when no decoded frame is currently attached to this buffer.
  bool available() const {
    return available_;
  }

  void set_available(bool available) {
    available_ = available;
  }

  // Client-assigned picture buffer id.
  int id() const {
    return picture_buffer_.id();
  }

  gfx::Size size() const {
    return picture_buffer_.size();
  }

  // Called when the source surface |src_surface| is copied to the destination
  // |dest_surface|
  void CopySurfaceComplete(IDirect3DSurface9* src_surface,
                           IDirect3DSurface9* dest_surface);

 private:
  explicit DXVAPictureBuffer(const media::PictureBuffer& buffer);

  // Whether the buffer is free to receive a new decoded frame.
  bool available_;
  media::PictureBuffer picture_buffer_;
  // EGL pbuffer surface bound to the client's GL texture; its backing D3D
  // texture is shared with the decoder device (see Create()).
  EGLSurface decoding_surface_;
  base::win::ScopedComPtr<IDirect3DTexture9> decoding_texture_;
  base::win::ScopedComPtr<ID3D11Texture2D> dx11_decoding_texture_;

  // The following |IDirect3DSurface9| interface pointers are used to hold
  // references on the surfaces during the course of a StretchRect operation
  // to copy the source surface to the target. The references are released
  // when the StretchRect operation i.e. the copy completes.
  base::win::ScopedComPtr<IDirect3DSurface9> decoder_surface_;
  base::win::ScopedComPtr<IDirect3DSurface9> target_surface_;

  // This ID3D11Texture2D interface pointer is used to hold a reference to the
  // decoder texture during the course of a copy operation. This reference is
  // released when the copy completes.
  base::win::ScopedComPtr<ID3D11Texture2D> decoder_dx11_texture_;

  // Set to true if RGB is supported by the texture.
  // Defaults to true.
  bool use_rgb_;

  DISALLOW_COPY_AND_ASSIGN(DXVAPictureBuffer);
};
| 406 | |
// static
linked_ptr<DXVAVideoDecodeAccelerator::DXVAPictureBuffer>
DXVAVideoDecodeAccelerator::DXVAPictureBuffer::Create(
    const DXVAVideoDecodeAccelerator& decoder,
    const media::PictureBuffer& buffer,
    EGLConfig egl_config) {
  linked_ptr<DXVAPictureBuffer> picture_buffer(new DXVAPictureBuffer(buffer));

  EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();

  // Prefer an RGB texture binding when the config supports it, RGBA
  // otherwise. |use_rgb| keeps its default of 1 if the query fails.
  EGLint use_rgb = 1;
  eglGetConfigAttrib(egl_display, egl_config, EGL_BIND_TO_TEXTURE_RGB,
                     &use_rgb);

  EGLint attrib_list[] = {
    EGL_WIDTH, buffer.size().width(),
    EGL_HEIGHT, buffer.size().height(),
    EGL_TEXTURE_FORMAT, use_rgb ? EGL_TEXTURE_RGB : EGL_TEXTURE_RGBA,
    EGL_TEXTURE_TARGET, EGL_TEXTURE_2D,
    EGL_NONE
  };

  // Create a pbuffer that ANGLE backs with a shareable D3D texture.
  picture_buffer->decoding_surface_ = eglCreatePbufferSurface(
      egl_display,
      egl_config,
      attrib_list);
  RETURN_ON_FAILURE(picture_buffer->decoding_surface_,
                    "Failed to create surface",
                    linked_ptr<DXVAPictureBuffer>(NULL));

  // Query the D3D share handle of the texture backing the pbuffer so the
  // decoder's device can render into it.
  HANDLE share_handle = NULL;
  EGLBoolean ret = eglQuerySurfacePointerANGLE(
      egl_display,
      picture_buffer->decoding_surface_,
      EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE,
      &share_handle);

  RETURN_ON_FAILURE(share_handle && ret == EGL_TRUE,
                    "Failed to query ANGLE surface pointer",
                    linked_ptr<DXVAPictureBuffer>(NULL));

  HRESULT hr = E_FAIL;
  if (decoder.d3d11_device_) {
    // DX11 path: open the shared handle on the decoder device and keep the
    // resulting texture interface for later copies.
    base::win::ScopedComPtr<ID3D11Resource> resource;
    hr = decoder.d3d11_device_->OpenSharedResource(
        share_handle,
        __uuidof(ID3D11Resource),
        reinterpret_cast<void**>(resource.Receive()));
    RETURN_ON_HR_FAILURE(hr, "Failed to open shared resource",
                         linked_ptr<DXVAPictureBuffer>(NULL));
    hr = picture_buffer->dx11_decoding_texture_.QueryFrom(resource.get());
  } else {
    // D3D9 path: create a render-target texture aliased to the shared
    // handle.
    hr = decoder.d3d9_device_ex_->CreateTexture(
        buffer.size().width(),
        buffer.size().height(),
        1,
        D3DUSAGE_RENDERTARGET,
        use_rgb ? D3DFMT_X8R8G8B8 : D3DFMT_A8R8G8B8,
        D3DPOOL_DEFAULT,
        picture_buffer->decoding_texture_.Receive(),
        &share_handle);
  }
  // Covers QueryFrom() on the DX11 path and CreateTexture() on the D3D9
  // path.
  RETURN_ON_HR_FAILURE(hr, "Failed to create texture",
                       linked_ptr<DXVAPictureBuffer>(NULL));
  picture_buffer->use_rgb_ = !!use_rgb;
  return picture_buffer;
}
| 474 | |
// Buffers start out available (no decoded frame attached) and with no EGL
// surface; Create() fills in decoding_surface_ and the texture members.
DXVAVideoDecodeAccelerator::DXVAPictureBuffer::DXVAPictureBuffer(
    const media::PictureBuffer& buffer)
    : available_(true),
      picture_buffer_(buffer),
      decoding_surface_(NULL),
      use_rgb_(true) {
}
| 482 | |
DXVAVideoDecodeAccelerator::DXVAPictureBuffer::~DXVAPictureBuffer() {
  // decoding_surface_ is only non-NULL once Create() succeeded; unbind it
  // from the client texture and destroy it. The ScopedComPtr members
  // release their references automatically.
  if (decoding_surface_) {
    EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();

    eglReleaseTexImage(
        egl_display,
        decoding_surface_,
        EGL_BACK_BUFFER);

    eglDestroySurface(
        egl_display,
        decoding_surface_);
    decoding_surface_ = NULL;
  }
}
| 498 | |
void DXVAVideoDecodeAccelerator::DXVAPictureBuffer::ReusePictureBuffer() {
  DCHECK(decoding_surface_);
  EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();
  // Unbind the pbuffer from the client texture so the next copy can bind a
  // fresh frame.
  eglReleaseTexImage(
      egl_display,
      decoding_surface_,
      EGL_BACK_BUFFER);
  // Drop the references taken for the previous copy operation (see
  // CopyOutputSampleDataToPictureBuffer).
  decoder_surface_.Release();
  target_surface_.Release();
  decoder_dx11_texture_.Release();
  set_available(true);
}
| 511 | |
| 512 bool DXVAVideoDecodeAccelerator::DXVAPictureBuffer:: | |
| 513 CopyOutputSampleDataToPictureBuffer( | |
| 514 DXVAVideoDecodeAccelerator* decoder, | |
| 515 IDirect3DSurface9* dest_surface, | |
| 516 ID3D11Texture2D* dx11_texture, | |
| 517 int input_buffer_id) { | |
| 518 DCHECK(dest_surface || dx11_texture); | |
| 519 if (dx11_texture) { | |
| 520 // Grab a reference on the decoder texture. This reference will be released | |
| 521 // when we receive a notification that the copy was completed or when the | |
| 522 // DXVAPictureBuffer instance is destroyed. | |
| 523 decoder_dx11_texture_ = dx11_texture; | |
| 524 decoder->CopyTexture(dx11_texture, dx11_decoding_texture_.get(), NULL, | |
| 525 id(), input_buffer_id); | |
| 526 return true; | |
| 527 } | |
| 528 D3DSURFACE_DESC surface_desc; | |
| 529 HRESULT hr = dest_surface->GetDesc(&surface_desc); | |
| 530 RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false); | |
| 531 | |
| 532 D3DSURFACE_DESC texture_desc; | |
| 533 decoding_texture_->GetLevelDesc(0, &texture_desc); | |
| 534 | |
| 535 if (texture_desc.Width != surface_desc.Width || | |
| 536 texture_desc.Height != surface_desc.Height) { | |
| 537 NOTREACHED() << "Decode surface of different dimension than texture"; | |
| 538 return false; | |
| 539 } | |
| 540 | |
| 541 hr = decoder->d3d9_->CheckDeviceFormatConversion( | |
| 542 D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, surface_desc.Format, | |
| 543 use_rgb_ ? D3DFMT_X8R8G8B8 : D3DFMT_A8R8G8B8); | |
| 544 RETURN_ON_HR_FAILURE(hr, "Device does not support format converision", false); | |
| 545 | |
| 546 // The same picture buffer can be reused for a different frame. Release the | |
| 547 // target surface and the decoder references here. | |
| 548 target_surface_.Release(); | |
| 549 decoder_surface_.Release(); | |
| 550 | |
| 551 // Grab a reference on the decoder surface and the target surface. These | |
| 552 // references will be released when we receive a notification that the | |
| 553 // copy was completed or when the DXVAPictureBuffer instance is destroyed. | |
| 554 // We hold references here as it is easier to manage their lifetimes. | |
| 555 hr = decoding_texture_->GetSurfaceLevel(0, target_surface_.Receive()); | |
| 556 RETURN_ON_HR_FAILURE(hr, "Failed to get surface from texture", false); | |
| 557 | |
| 558 decoder_surface_ = dest_surface; | |
| 559 | |
| 560 decoder->CopySurface(decoder_surface_.get(), target_surface_.get(), id(), | |
| 561 input_buffer_id); | |
| 562 return true; | |
| 563 } | |
| 564 | |
void DXVAVideoDecodeAccelerator::DXVAPictureBuffer::CopySurfaceComplete(
    IDirect3DSurface9* src_surface,
    IDirect3DSurface9* dest_surface) {
  DCHECK(!available());

  // Save the caller's texture binding so it can be restored below.
  GLint current_texture = 0;
  glGetIntegerv(GL_TEXTURE_BINDING_2D, &current_texture);

  glBindTexture(GL_TEXTURE_2D, picture_buffer_.texture_id());

  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);

  if (src_surface && dest_surface) {
    // D3D9 path: both surfaces were retained in
    // CopyOutputSampleDataToPictureBuffer; the copy is done, so release
    // them.
    DCHECK_EQ(src_surface, decoder_surface_.get());
    DCHECK_EQ(dest_surface, target_surface_.get());
    decoder_surface_.Release();
    target_surface_.Release();
  } else {
    // DX11 path: release the decoder texture retained for the copy.
    DCHECK(decoder_dx11_texture_.get());
    decoder_dx11_texture_.Release();
  }

  // Bind the pbuffer (which now holds the decoded frame) to the client's
  // texture.
  EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();
  eglBindTexImage(
      egl_display,
      decoding_surface_,
      EGL_BACK_BUFFER);

  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glBindTexture(GL_TEXTURE_2D, current_texture);
}
| 596 | |
// Tracks an output sample for |buffer_id| until a picture buffer is
// assigned; picture_buffer_id starts at -1, meaning "not yet assigned".
DXVAVideoDecodeAccelerator::PendingSampleInfo::PendingSampleInfo(
    int32 buffer_id, IMFSample* sample)
    : input_buffer_id(buffer_id),
      picture_buffer_id(-1) {
  // Attach() adopts the caller's reference without AddRef'ing.
  output_sample.Attach(sample);
}
| 603 | |
// output_sample's ScopedComPtr releases the sample reference automatically.
DXVAVideoDecodeAccelerator::PendingSampleInfo::~PendingSampleInfo() {}
| 605 | |
// Constructs the accelerator in the kUninitialized state. |make_context_
// current| is invoked whenever the decoder needs the GL context bound on
// the calling thread; real setup happens in Initialize().
DXVAVideoDecodeAccelerator::DXVAVideoDecodeAccelerator(
    const base::Callback<bool(void)>& make_context_current,
    gfx::GLContext* gl_context)
    : client_(NULL),
      dev_manager_reset_token_(0),
      dx11_dev_manager_reset_token_(0),
      egl_config_(NULL),
      state_(kUninitialized),
      pictures_requested_(false),
      inputs_before_decode_(0),
      sent_drain_message_(false),
      make_context_current_(make_context_current),
      codec_(media::kUnknownVideoCodec),
      decoder_thread_("DXVAVideoDecoderThread"),
      pending_flush_(false),
      use_dx11_(false),
      dx11_video_format_converter_media_type_needs_init_(true),
      gl_context_(gl_context),
      using_angle_device_(false),
      weak_this_factory_(this) {
  // Cache a weak pointer up front so it can be handed to other threads
  // without touching the factory from those threads.
  weak_ptr_ = weak_this_factory_.GetWeakPtr();
  // Zero the MFT stream info structs; GetStreamsInfoAndBufferReqs() fills
  // them in during Initialize().
  memset(&input_stream_info_, 0, sizeof(input_stream_info_));
  memset(&output_stream_info_, 0, sizeof(output_stream_info_));
}
| 630 | |
DXVAVideoDecodeAccelerator::~DXVAVideoDecodeAccelerator() {
  // Clear the client pointer so no notifications are issued during teardown.
  client_ = NULL;
}
| 634 | |
// Initializes the decoder for |profile|. Fails — notifying |client| through
// the RETURN_AND_NOTIFY_* macros — if the profile is unsupported, required
// system DLLs or ANGLE extensions are missing, or MFT setup fails.
bool DXVAVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile,
                                            Client* client) {
  client_ = client;

  main_thread_task_runner_ = base::MessageLoop::current()->task_runner();

  // Reject any profile not in kSupportedProfiles.
  bool profile_supported = false;
  for (const auto& supported_profile : kSupportedProfiles) {
    if (profile == supported_profile) {
      profile_supported = true;
      break;
    }
  }
  if (!profile_supported) {
    RETURN_AND_NOTIFY_ON_FAILURE(false,
        "Unsupported h.264, vp8, or vp9 profile", PLATFORM_FAILURE, false);
  }

  // Not all versions of Windows 7 and later include Media Foundation DLLs.
  // Instead of crashing while delay loading the DLL when calling MFStartup()
  // below, probe whether we can successfully load the DLL now.
  // See http://crbug.com/339678 for details.
  HMODULE dxgi_manager_dll = ::GetModuleHandle(L"MFPlat.dll");
  RETURN_ON_FAILURE(dxgi_manager_dll, "MFPlat.dll is required for decoding",
                    false);

  // On Windows 8+ mfplat.dll provides the MFCreateDXGIDeviceManager API.
  // On Windows 7 mshtmlmedia.dll provides it.

  // TODO(ananta)
  // The code below works, as in we can create the DX11 device manager for
  // Windows 7. However the IMFTransform we use for texture conversion and
  // copy does not exist on Windows 7. Look into an alternate approach
  // and enable the code below.
#if defined(ENABLE_DX11_FOR_WIN7)
  if (base::win::GetVersion() == base::win::VERSION_WIN7) {
    dxgi_manager_dll = ::GetModuleHandle(L"mshtmlmedia.dll");
    RETURN_ON_FAILURE(dxgi_manager_dll,
                      "mshtmlmedia.dll is required for decoding", false);
  }
#endif
  // If we don't find the MFCreateDXGIDeviceManager API we fallback to D3D9
  // decoding.
  if (dxgi_manager_dll && !create_dxgi_device_manager_) {
    create_dxgi_device_manager_ = reinterpret_cast<CreateDXGIDeviceManager>(
        ::GetProcAddress(dxgi_manager_dll, "MFCreateDXGIDeviceManager"));
  }

  // Decoded frames are handed to the client through D3D/EGL shared
  // surfaces, so this ANGLE extension is mandatory.
  RETURN_AND_NOTIFY_ON_FAILURE(
      gfx::g_driver_egl.ext.b_EGL_ANGLE_surface_d3d_texture_2d_share_handle,
      "EGL_ANGLE_surface_d3d_texture_2d_share_handle unavailable",
      PLATFORM_FAILURE,
      false);

  // Initialize() may only be called once, from the kUninitialized state.
  State state = GetState();
  RETURN_AND_NOTIFY_ON_FAILURE((state == kUninitialized),
      "Initialize: invalid state: " << state, ILLEGAL_STATE, false);

  media::InitializeMediaFoundation();

  RETURN_AND_NOTIFY_ON_FAILURE(InitDecoder(profile),
      "Failed to initialize decoder", PLATFORM_FAILURE, false);

  RETURN_AND_NOTIFY_ON_FAILURE(GetStreamsInfoAndBufferReqs(),
      "Failed to get input/output stream info.", PLATFORM_FAILURE, false);

  // Tell the MFT that streaming is about to start before the first input
  // sample is delivered.
  RETURN_AND_NOTIFY_ON_FAILURE(
      SendMFTMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0),
      "Send MFT_MESSAGE_NOTIFY_BEGIN_STREAMING notification failed",
      PLATFORM_FAILURE, false);

  RETURN_AND_NOTIFY_ON_FAILURE(
      SendMFTMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0),
      "Send MFT_MESSAGE_NOTIFY_START_OF_STREAM notification failed",
      PLATFORM_FAILURE, false);

  SetState(kNormal);

  StartDecoderThread();
  return true;
}
| 716 | |
// Creates the D3D9Ex device — reusing ANGLE's device when available so
// decode output and GL share a device — plus the DXVA device manager and an
// event query used to flush batched work.
bool DXVAVideoDecodeAccelerator::CreateD3DDevManager() {
  TRACE_EVENT0("gpu", "DXVAVideoDecodeAccelerator_CreateD3DDevManager");

  HRESULT hr = E_FAIL;

  hr = Direct3DCreate9Ex(D3D_SDK_VERSION, d3d9_.Receive());
  RETURN_ON_HR_FAILURE(hr, "Direct3DCreate9Ex failed", false);

  base::win::ScopedComPtr<IDirect3DDevice9> angle_device =
      QueryDeviceObjectFromANGLE<IDirect3DDevice9>(EGL_D3D9_DEVICE_ANGLE);
  if (angle_device.get())
    using_angle_device_ = true;

  if (using_angle_device_) {
    hr = d3d9_device_ex_.QueryFrom(angle_device.get());
    RETURN_ON_HR_FAILURE(hr,
        "QueryInterface for IDirect3DDevice9Ex from angle device failed",
        false);
  } else {
    // No ANGLE device available: create our own minimal windowed device.
    // The 1x1 backbuffer is a placeholder; decoding renders to DXVA
    // surfaces, not the backbuffer.
    D3DPRESENT_PARAMETERS present_params = {0};
    present_params.BackBufferWidth = 1;
    present_params.BackBufferHeight = 1;
    present_params.BackBufferFormat = D3DFMT_UNKNOWN;
    present_params.BackBufferCount = 1;
    present_params.SwapEffect = D3DSWAPEFFECT_DISCARD;
    present_params.hDeviceWindow = NULL;
    present_params.Windowed = TRUE;
    present_params.Flags = D3DPRESENTFLAG_VIDEO;
    present_params.FullScreen_RefreshRateInHz = 0;
    present_params.PresentationInterval = 0;

    // D3DCREATE_MULTITHREADED because the device is accessed from more than
    // one thread.
    hr = d3d9_->CreateDeviceEx(D3DADAPTER_DEFAULT,
                               D3DDEVTYPE_HAL,
                               NULL,
                               D3DCREATE_FPU_PRESERVE |
                               D3DCREATE_HARDWARE_VERTEXPROCESSING |
                               D3DCREATE_DISABLE_PSGP_THREADING |
                               D3DCREATE_MULTITHREADED,
                               &present_params,
                               NULL,
                               d3d9_device_ex_.Receive());
    RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device", false);
  }

  // Create the DXVA device manager and hand it the device; the decoder MFT
  // obtains the device through this manager.
  hr = DXVA2CreateDirect3DDeviceManager9(&dev_manager_reset_token_,
                                         device_manager_.Receive());
  RETURN_ON_HR_FAILURE(hr, "DXVA2CreateDirect3DDeviceManager9 failed", false);

  hr = device_manager_->ResetDevice(d3d9_device_ex_.get(),
                                    dev_manager_reset_token_);
  RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false);

  hr = d3d9_device_ex_->CreateQuery(D3DQUERYTYPE_EVENT, query_.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device query", false);
  // Ensure query_ API works (to avoid an infinite loop later in
  // CopyOutputSampleDataToPictureBuffer).
  hr = query_->Issue(D3DISSUE_END);
  RETURN_ON_HR_FAILURE(hr, "Failed to issue END test query", false);
  return true;
}
| 777 | |
// Creates the DXGI device manager used to share a D3D11 device with the
// Media Foundation pipeline. The device itself is queried from ANGLE so that
// decoded frames can be consumed by the GL stack without a cross-device copy.
// Also instantiates the video processor MFT used for format conversion on
// the DX11 path. Returns false on failure.
bool DXVAVideoDecodeAccelerator::CreateDX11DevManager() {
  HRESULT hr = create_dxgi_device_manager_(&dx11_dev_manager_reset_token_,
                                           d3d11_device_manager_.Receive());
  RETURN_ON_HR_FAILURE(hr, "MFCreateDXGIDeviceManager failed", false);

  // Reuse ANGLE's D3D11 device rather than creating a separate one.
  base::win::ScopedComPtr<ID3D11Device> angle_device =
      QueryDeviceObjectFromANGLE<ID3D11Device>(EGL_D3D11_DEVICE_ANGLE);
  RETURN_ON_FAILURE(
      angle_device.get(),
      "Failed to query DX11 device object from ANGLE",
      false);

  using_angle_device_ = true;
  d3d11_device_ = angle_device;
  d3d11_device_->GetImmediateContext(d3d11_device_context_.Receive());
  RETURN_ON_FAILURE(
      d3d11_device_context_.get(),
      "Failed to query DX11 device context from ANGLE device",
      false);

  // Enable multithreaded mode on the context. This ensures that accesses to
  // context are synchronized across threads. We have multiple threads
  // accessing the context, the media foundation decoder threads and the
  // decoder thread via the video format conversion transform.
  hr = multi_threaded_.QueryFrom(d3d11_device_context_.get());
  RETURN_ON_HR_FAILURE(hr, "Failed to query ID3D10Multithread", false);
  multi_threaded_->SetMultithreadProtected(TRUE);

  hr = d3d11_device_manager_->ResetDevice(d3d11_device_.get(),
                                          dx11_dev_manager_reset_token_);
  RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false);

  // An event query is used later to wait for pending GPU work (frame copies)
  // to complete.
  D3D11_QUERY_DESC query_desc;
  query_desc.Query = D3D11_QUERY_EVENT;
  query_desc.MiscFlags = 0;
  hr = d3d11_device_->CreateQuery(
      &query_desc,
      d3d11_query_.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device query", false);

  // msvproc.dll is preloaded in PreSandboxInitialization(); GetModuleHandle
  // only looks up the already-loaded module (required under the sandbox).
  HMODULE video_processor_dll = ::GetModuleHandle(L"msvproc.dll");
  RETURN_ON_FAILURE(video_processor_dll, "Failed to load video processor",
                    false);

  hr = CreateCOMObjectFromDll(
      video_processor_dll,
      CLSID_VideoProcessorMFT,
      __uuidof(IMFTransform),
      video_format_converter_mft_.ReceiveVoid());
  if (FAILED(hr)) {
    // Keep |hr| alive in the minidump for crash diagnosis.
    base::debug::Alias(&hr);
    // TODO(ananta)
    // Remove this CHECK when the change to use DX11 for H/W decoding
    // stablizes.
    CHECK(false);
  }

  RETURN_ON_HR_FAILURE(hr, "Failed to create video format converter", false);
  return true;
}
| 838 | |
// Client entry point: queues one compressed bitstream buffer for decoding.
// Runs on the main thread; the actual decode happens on the decoder thread
// via DecodeInternal().
void DXVAVideoDecodeAccelerator::Decode(
    const media::BitstreamBuffer& bitstream_buffer) {
  DCHECK(main_thread_task_runner_->BelongsToCurrentThread());

  State state = GetState();
  RETURN_AND_NOTIFY_ON_FAILURE((state == kNormal || state == kStopped ||
                                state == kFlushing),
      "Invalid state: " << state, ILLEGAL_STATE,);

  base::win::ScopedComPtr<IMFSample> sample;
  sample.Attach(CreateSampleFromInputBuffer(bitstream_buffer,
                                            input_stream_info_.cbSize,
                                            input_stream_info_.cbAlignment));
  RETURN_AND_NOTIFY_ON_FAILURE(sample.get(), "Failed to create input sample",
                               PLATFORM_FAILURE, );

  // The sample timestamp is repurposed to carry the bitstream buffer id so
  // it can be recovered from the matching output sample later.
  RETURN_AND_NOTIFY_ON_HR_FAILURE(sample->SetSampleTime(bitstream_buffer.id()),
      "Failed to associate input buffer id with sample", PLATFORM_FAILURE,);

  decoder_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::DecodeInternal,
                 base::Unretained(this), sample));
}
| 863 | |
// Client entry point: receives the picture buffers allocated in response to
// RequestPictureBuffers() and makes them available for output. If a flush is
// pending, resumes it now that buffers exist.
void DXVAVideoDecodeAccelerator::AssignPictureBuffers(
    const std::vector<media::PictureBuffer>& buffers) {
  DCHECK(main_thread_task_runner_->BelongsToCurrentThread());

  State state = GetState();
  RETURN_AND_NOTIFY_ON_FAILURE((state != kUninitialized),
      "Invalid state: " << state, ILLEGAL_STATE,);
  RETURN_AND_NOTIFY_ON_FAILURE((kNumPictureBuffers >= buffers.size()),
      "Failed to provide requested picture buffers. (Got " << buffers.size() <<
      ", requested " << kNumPictureBuffers << ")", INVALID_ARGUMENT,);

  // Copy the picture buffers provided by the client to the available list,
  // and mark these buffers as available for use.
  for (size_t buffer_index = 0; buffer_index < buffers.size();
       ++buffer_index) {
    linked_ptr<DXVAPictureBuffer> picture_buffer =
        DXVAPictureBuffer::Create(*this, buffers[buffer_index], egl_config_);
    RETURN_AND_NOTIFY_ON_FAILURE(picture_buffer.get(),
        "Failed to allocate picture buffer", PLATFORM_FAILURE,);

    bool inserted = output_picture_buffers_.insert(std::make_pair(
        buffers[buffer_index].id(), picture_buffer)).second;
    // Picture buffer ids from the client are expected to be unique.
    DCHECK(inserted);
  }

  // Drain any decoded samples that were waiting for buffers to be assigned.
  ProcessPendingSamples();
  if (pending_flush_) {
    decoder_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal,
                   base::Unretained(this)));
  }
}
| 897 | |
// Client entry point: the client is done displaying |picture_buffer_id| and
// returns it for reuse. Stale buffers (from before a resolution change) are
// dismissed instead of being recycled.
void DXVAVideoDecodeAccelerator::ReusePictureBuffer(
    int32 picture_buffer_id) {
  DCHECK(main_thread_task_runner_->BelongsToCurrentThread());

  State state = GetState();
  RETURN_AND_NOTIFY_ON_FAILURE((state != kUninitialized),
      "Invalid state: " << state, ILLEGAL_STATE,);

  // Nothing to do if all buffers have already been torn down (e.g. during
  // shutdown/reset).
  if (output_picture_buffers_.empty() && stale_output_picture_buffers_.empty())
    return;

  OutputBuffers::iterator it = output_picture_buffers_.find(picture_buffer_id);
  // If we didn't find the picture id in the |output_picture_buffers_| map we
  // try the |stale_output_picture_buffers_| map, as this may have been an
  // output picture buffer from before a resolution change, that at resolution
  // change time had yet to be displayed. The client is calling us back to tell
  // us that we can now recycle this picture buffer, so if we were waiting to
  // dispose of it we now can.
  if (it == output_picture_buffers_.end()) {
    it = stale_output_picture_buffers_.find(picture_buffer_id);
    RETURN_AND_NOTIFY_ON_FAILURE(it != stale_output_picture_buffers_.end(),
        "Invalid picture id: " << picture_buffer_id, INVALID_ARGUMENT,);
    main_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::DeferredDismissStaleBuffer,
                   weak_this_factory_.GetWeakPtr(), picture_buffer_id));
    return;
  }

  // Mark the buffer available again and drain any samples waiting for one.
  it->second->ReusePictureBuffer();
  ProcessPendingSamples();
  if (pending_flush_) {
    decoder_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal,
                   base::Unretained(this)));
  }
}
| 936 | |
// Client entry point: requests that all pending input be decoded and all
// resulting pictures delivered. Completion is signalled asynchronously via
// NotifyFlushDone() once FlushInternal() drains the decoder.
void DXVAVideoDecodeAccelerator::Flush() {
  DCHECK(main_thread_task_runner_->BelongsToCurrentThread());

  DVLOG(1) << "DXVAVideoDecodeAccelerator::Flush";

  State state = GetState();
  RETURN_AND_NOTIFY_ON_FAILURE((state == kNormal || state == kStopped),
      "Unexpected decoder state: " << state, ILLEGAL_STATE,);

  SetState(kFlushing);

  // Cleared in NotifyFlushDone() once the drain completes.
  pending_flush_ = true;

  decoder_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal,
                 base::Unretained(this)));
}
| 955 | |
// Client entry point: drops all queued input and undisplayed output and
// returns the decoder to a clean kNormal state without recreating it.
// Completion is signalled asynchronously via NotifyResetDone().
void DXVAVideoDecodeAccelerator::Reset() {
  DCHECK(main_thread_task_runner_->BelongsToCurrentThread());

  DVLOG(1) << "DXVAVideoDecodeAccelerator::Reset";

  State state = GetState();
  RETURN_AND_NOTIFY_ON_FAILURE((state == kNormal || state == kStopped),
      "Reset: invalid state: " << state, ILLEGAL_STATE,);

  // Stop the decoder thread so no decode work races with the teardown below.
  decoder_thread_.Stop();

  SetState(kResetting);

  // If we have pending output frames waiting for display then we drop those
  // frames and set the corresponding picture buffer as available.
  PendingOutputSamples::iterator index;
  for (index = pending_output_samples_.begin();
       index != pending_output_samples_.end();
       ++index) {
    if (index->picture_buffer_id != -1) {
      OutputBuffers::iterator it = output_picture_buffers_.find(
          index->picture_buffer_id);
      if (it != output_picture_buffers_.end()) {
        DXVAPictureBuffer* picture_buffer = it->second.get();
        picture_buffer->ReusePictureBuffer();
      }
    }
  }

  pending_output_samples_.clear();

  NotifyInputBuffersDropped();

  // Tell the MFT to discard any internally buffered frames.
  RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_FLUSH, 0),
      "Reset: Failed to send message.", PLATFORM_FAILURE,);

  main_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::NotifyResetDone,
                 weak_this_factory_.GetWeakPtr()));

  StartDecoderThread();
  SetState(kNormal);
}
| 1000 | |
// Client entry point: tears down the decoder and deletes |this|. Must be the
// last call the client makes on this object.
void DXVAVideoDecodeAccelerator::Destroy() {
  DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
  Invalidate();
  delete this;
}
| 1006 | |
// Decode calls are always marshalled to the decoder thread; they are never
// serviced directly on the GPU IO thread.
bool DXVAVideoDecodeAccelerator::CanDecodeOnIOThread() {
  return false;
}
| 1010 | |
// Output textures are BGRA, matching the EGL pbuffer config chosen in
// InitDecoder().
GLenum DXVAVideoDecodeAccelerator::GetSurfaceInternalFormat() const {
  return GL_BGRA_EXT;
}
| 1014 | |
| 1015 // static | |
| 1016 media::VideoDecodeAccelerator::SupportedProfiles | |
| 1017 DXVAVideoDecodeAccelerator::GetSupportedProfiles() { | |
| 1018 // TODO(henryhsu): Need to ensure the profiles are actually supported. | |
| 1019 SupportedProfiles profiles; | |
| 1020 for (const auto& supported_profile : kSupportedProfiles) { | |
| 1021 SupportedProfile profile; | |
| 1022 profile.profile = supported_profile; | |
| 1023 // Windows Media Foundation H.264 decoding does not support decoding videos | |
| 1024 // with any dimension smaller than 48 pixels: | |
| 1025 // http://msdn.microsoft.com/en-us/library/windows/desktop/dd797815 | |
| 1026 profile.min_resolution.SetSize(48, 48); | |
| 1027 // Use 1088 to account for 16x16 macroblocks. | |
| 1028 profile.max_resolution.SetSize(1920, 1088); | |
| 1029 profiles.push_back(profile); | |
| 1030 } | |
| 1031 return profiles; | |
| 1032 } | |
| 1033 | |
| 1034 // static | |
| 1035 void DXVAVideoDecodeAccelerator::PreSandboxInitialization() { | |
| 1036 ::LoadLibrary(L"MFPlat.dll"); | |
| 1037 ::LoadLibrary(L"msmpeg2vdec.dll"); | |
| 1038 | |
| 1039 if (base::win::GetVersion() > base::win::VERSION_WIN7) { | |
| 1040 LoadLibrary(L"msvproc.dll"); | |
| 1041 } else { | |
| 1042 LoadLibrary(L"dxva2.dll"); | |
| 1043 #if defined(ENABLE_DX11_FOR_WIN7) | |
| 1044 LoadLibrary(L"mshtmlmedia.dll"); | |
| 1045 #endif | |
| 1046 } | |
| 1047 } | |
| 1048 | |
| 1049 bool DXVAVideoDecodeAccelerator::InitDecoder(media::VideoCodecProfile profile) { | |
| 1050 HMODULE decoder_dll = NULL; | |
| 1051 | |
| 1052 CLSID clsid = {}; | |
| 1053 | |
| 1054 // Profile must fall within the valid range for one of the supported codecs. | |
| 1055 if (profile >= media::H264PROFILE_MIN && profile <= media::H264PROFILE_MAX) { | |
| 1056 // We mimic the steps CoCreateInstance uses to instantiate the object. This | |
| 1057 // was previously done because it failed inside the sandbox, and now is done | |
| 1058 // as a more minimal approach to avoid other side-effects CCI might have (as | |
| 1059 // we are still in a reduced sandbox). | |
| 1060 decoder_dll = ::GetModuleHandle(L"msmpeg2vdec.dll"); | |
| 1061 RETURN_ON_FAILURE(decoder_dll, | |
| 1062 "msmpeg2vdec.dll required for decoding is not loaded", | |
| 1063 false); | |
| 1064 | |
| 1065 // Check version of DLL, version 6.7.7140 is blacklisted due to high crash | |
| 1066 // rates in browsers loading that DLL. If that is the version installed we | |
| 1067 // fall back to software decoding. See crbug/403440. | |
| 1068 FileVersionInfo* version_info = | |
| 1069 FileVersionInfo::CreateFileVersionInfoForModule(decoder_dll); | |
| 1070 RETURN_ON_FAILURE(version_info, | |
| 1071 "unable to get version of msmpeg2vdec.dll", | |
| 1072 false); | |
| 1073 base::string16 file_version = version_info->file_version(); | |
| 1074 RETURN_ON_FAILURE(file_version.find(L"6.1.7140") == base::string16::npos, | |
| 1075 "blacklisted version of msmpeg2vdec.dll 6.7.7140", | |
| 1076 false); | |
| 1077 codec_ = media::kCodecH264; | |
| 1078 clsid = __uuidof(CMSH264DecoderMFT); | |
| 1079 } else if ((profile == media::VP8PROFILE_ANY || | |
| 1080 profile == media::VP9PROFILE_ANY) && | |
| 1081 base::CommandLine::ForCurrentProcess()->HasSwitch( | |
| 1082 switches::kEnableAcceleratedVpxDecode)) { | |
| 1083 int program_files_key = base::DIR_PROGRAM_FILES; | |
| 1084 if (base::win::OSInfo::GetInstance()->wow64_status() == | |
| 1085 base::win::OSInfo::WOW64_ENABLED) { | |
| 1086 program_files_key = base::DIR_PROGRAM_FILES6432; | |
| 1087 } | |
| 1088 | |
| 1089 base::FilePath dll_path; | |
| 1090 RETURN_ON_FAILURE(PathService::Get(program_files_key, &dll_path), | |
| 1091 "failed to get path for Program Files", false); | |
| 1092 | |
| 1093 dll_path = dll_path.Append(kVPXDecoderDLLPath); | |
| 1094 if (profile == media::VP8PROFILE_ANY) { | |
| 1095 codec_ = media::kCodecVP8; | |
| 1096 dll_path = dll_path.Append(kVP8DecoderDLLName); | |
| 1097 clsid = CLSID_WebmMfVp8Dec; | |
| 1098 } else { | |
| 1099 codec_ = media::kCodecVP9; | |
| 1100 dll_path = dll_path.Append(kVP9DecoderDLLName); | |
| 1101 clsid = CLSID_WebmMfVp9Dec; | |
| 1102 } | |
| 1103 decoder_dll = ::LoadLibraryEx(dll_path.value().data(), NULL, | |
| 1104 LOAD_WITH_ALTERED_SEARCH_PATH); | |
| 1105 RETURN_ON_FAILURE(decoder_dll, "vpx decoder dll is not loaded", false); | |
| 1106 } else { | |
| 1107 RETURN_ON_FAILURE(false, "Unsupported codec.", false); | |
| 1108 } | |
| 1109 | |
| 1110 HRESULT hr = CreateCOMObjectFromDll(decoder_dll, | |
| 1111 clsid, | |
| 1112 __uuidof(IMFTransform), | |
| 1113 decoder_.ReceiveVoid()); | |
| 1114 RETURN_ON_HR_FAILURE(hr, "Failed to create decoder instance", false); | |
| 1115 | |
| 1116 RETURN_ON_FAILURE(CheckDecoderDxvaSupport(), | |
| 1117 "Failed to check decoder DXVA support", false); | |
| 1118 | |
| 1119 ULONG_PTR device_manager_to_use = NULL; | |
| 1120 if (use_dx11_) { | |
| 1121 CHECK(create_dxgi_device_manager_); | |
| 1122 RETURN_AND_NOTIFY_ON_FAILURE(CreateDX11DevManager(), | |
| 1123 "Failed to initialize DX11 device and manager", | |
| 1124 PLATFORM_FAILURE, | |
| 1125 false); | |
| 1126 device_manager_to_use = reinterpret_cast<ULONG_PTR>( | |
| 1127 d3d11_device_manager_.get()); | |
| 1128 } else { | |
| 1129 RETURN_AND_NOTIFY_ON_FAILURE(CreateD3DDevManager(), | |
| 1130 "Failed to initialize D3D device and manager", | |
| 1131 PLATFORM_FAILURE, | |
| 1132 false); | |
| 1133 device_manager_to_use = reinterpret_cast<ULONG_PTR>(device_manager_.get()); | |
| 1134 } | |
| 1135 | |
| 1136 hr = decoder_->ProcessMessage( | |
| 1137 MFT_MESSAGE_SET_D3D_MANAGER, | |
| 1138 device_manager_to_use); | |
| 1139 if (use_dx11_) { | |
| 1140 RETURN_ON_HR_FAILURE(hr, "Failed to pass DX11 manager to decoder", false); | |
| 1141 } else { | |
| 1142 RETURN_ON_HR_FAILURE(hr, "Failed to pass D3D manager to decoder", false); | |
| 1143 } | |
| 1144 | |
| 1145 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); | |
| 1146 | |
| 1147 EGLint config_attribs[] = { | |
| 1148 EGL_BUFFER_SIZE, 32, | |
| 1149 EGL_RED_SIZE, 8, | |
| 1150 EGL_GREEN_SIZE, 8, | |
| 1151 EGL_BLUE_SIZE, 8, | |
| 1152 EGL_SURFACE_TYPE, EGL_PBUFFER_BIT, | |
| 1153 EGL_ALPHA_SIZE, 0, | |
| 1154 EGL_NONE | |
| 1155 }; | |
| 1156 | |
| 1157 EGLint num_configs; | |
| 1158 | |
| 1159 if (!eglChooseConfig( | |
| 1160 egl_display, | |
| 1161 config_attribs, | |
| 1162 &egl_config_, | |
| 1163 1, | |
| 1164 &num_configs)) | |
| 1165 return false; | |
| 1166 | |
| 1167 return SetDecoderMediaTypes(); | |
| 1168 } | |
| 1169 | |
// Verifies the MFT decoder is D3D-aware, enables hardware acceleration and
// low-latency mode where possible, and decides whether the DX11 path should
// be used (sets |use_dx11_|).
bool DXVAVideoDecodeAccelerator::CheckDecoderDxvaSupport() {
  base::win::ScopedComPtr<IMFAttributes> attributes;
  HRESULT hr = decoder_->GetAttributes(attributes.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to get decoder attributes", false);

  UINT32 dxva = 0;
  hr = attributes->GetUINT32(MF_SA_D3D_AWARE, &dxva);
  RETURN_ON_HR_FAILURE(hr, "Failed to check if decoder supports DXVA", false);

  if (codec_ == media::kCodecH264) {
    hr = attributes->SetUINT32(CODECAPI_AVDecVideoAcceleration_H264, TRUE);
    RETURN_ON_HR_FAILURE(hr, "Failed to enable DXVA H/W decoding", false);
  }

  // Low latency is best-effort: failure here is logged but not fatal.
  hr = attributes->SetUINT32(CODECAPI_AVLowLatencyMode, TRUE);
  if (SUCCEEDED(hr)) {
    DVLOG(1) << "Successfully set Low latency mode on decoder.";
  } else {
    DVLOG(1) << "Failed to set Low latency mode on decoder. Error: " << hr;
  }

  // The decoder should use DX11 iff
  // 1. The underlying H/W decoder supports it.
  // 2. We have a pointer to the MFCreateDXGIDeviceManager function needed for
  //    this. This should always be true for Windows 8+.
  // 3. ANGLE is using DX11.
  DCHECK(gl_context_);
  if (create_dxgi_device_manager_ &&
      (gl_context_->GetGLRenderer().find("Direct3D11") !=
           std::string::npos)) {
    UINT32 dx11_aware = 0;
    attributes->GetUINT32(MF_SA_D3D11_AWARE, &dx11_aware);
    use_dx11_ = !!dx11_aware;
  }
  return true;
}
| 1206 | |
// Configures the decoder's input type for the current codec and negotiates
// NV12 as the output format.
bool DXVAVideoDecodeAccelerator::SetDecoderMediaTypes() {
  RETURN_ON_FAILURE(SetDecoderInputMediaType(),
                    "Failed to set decoder input media type", false);
  return SetDecoderOutputMediaType(MFVideoFormat_NV12);
}
| 1212 | |
| 1213 bool DXVAVideoDecodeAccelerator::SetDecoderInputMediaType() { | |
| 1214 base::win::ScopedComPtr<IMFMediaType> media_type; | |
| 1215 HRESULT hr = MFCreateMediaType(media_type.Receive()); | |
| 1216 RETURN_ON_HR_FAILURE(hr, "MFCreateMediaType failed", false); | |
| 1217 | |
| 1218 hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); | |
| 1219 RETURN_ON_HR_FAILURE(hr, "Failed to set major input type", false); | |
| 1220 | |
| 1221 if (codec_ == media::kCodecH264) { | |
| 1222 hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264); | |
| 1223 } else if (codec_ == media::kCodecVP8) { | |
| 1224 hr = media_type->SetGUID(MF_MT_SUBTYPE, MEDIASUBTYPE_VP80); | |
| 1225 } else if (codec_ == media::kCodecVP9) { | |
| 1226 hr = media_type->SetGUID(MF_MT_SUBTYPE, MEDIASUBTYPE_VP90); | |
| 1227 } else { | |
| 1228 NOTREACHED(); | |
| 1229 RETURN_ON_FAILURE(false, "Unsupported codec on input media type.", false); | |
| 1230 } | |
| 1231 RETURN_ON_HR_FAILURE(hr, "Failed to set subtype", false); | |
| 1232 | |
| 1233 // Not sure about this. msdn recommends setting this value on the input | |
| 1234 // media type. | |
| 1235 hr = media_type->SetUINT32(MF_MT_INTERLACE_MODE, | |
| 1236 MFVideoInterlace_MixedInterlaceOrProgressive); | |
| 1237 RETURN_ON_HR_FAILURE(hr, "Failed to set interlace mode", false); | |
| 1238 | |
| 1239 hr = decoder_->SetInputType(0, media_type.get(), 0); // No flags | |
| 1240 RETURN_ON_HR_FAILURE(hr, "Failed to set decoder input type", false); | |
| 1241 return true; | |
| 1242 } | |
| 1243 | |
| 1244 bool DXVAVideoDecodeAccelerator::SetDecoderOutputMediaType( | |
| 1245 const GUID& subtype) { | |
| 1246 base::win::ScopedComPtr<IMFMediaType> out_media_type; | |
| 1247 | |
| 1248 for (uint32 i = 0; | |
| 1249 SUCCEEDED(decoder_->GetOutputAvailableType(0, i, | |
| 1250 out_media_type.Receive())); | |
| 1251 ++i) { | |
| 1252 GUID out_subtype = {0}; | |
| 1253 HRESULT hr = out_media_type->GetGUID(MF_MT_SUBTYPE, &out_subtype); | |
| 1254 RETURN_ON_HR_FAILURE(hr, "Failed to get output major type", false); | |
| 1255 | |
| 1256 if (out_subtype == subtype) { | |
| 1257 hr = decoder_->SetOutputType(0, out_media_type.get(), 0); // No flags | |
| 1258 RETURN_ON_HR_FAILURE(hr, "Failed to set decoder output type", false); | |
| 1259 return true; | |
| 1260 } | |
| 1261 out_media_type.Release(); | |
| 1262 } | |
| 1263 return false; | |
| 1264 } | |
| 1265 | |
| 1266 bool DXVAVideoDecodeAccelerator::SendMFTMessage(MFT_MESSAGE_TYPE msg, | |
| 1267 int32 param) { | |
| 1268 HRESULT hr = decoder_->ProcessMessage(msg, param); | |
| 1269 return SUCCEEDED(hr); | |
| 1270 } | |
| 1271 | |
// Gets the minimum buffer sizes for input and output samples. The MFT will not
// allocate buffer for input nor output, so we have to do it ourselves and make
// sure they're the correct size. We only provide decoding if DXVA is enabled.
bool DXVAVideoDecodeAccelerator::GetStreamsInfoAndBufferReqs() {
  HRESULT hr = decoder_->GetInputStreamInfo(0, &input_stream_info_);
  RETURN_ON_HR_FAILURE(hr, "Failed to get input stream info", false);

  hr = decoder_->GetOutputStreamInfo(0, &output_stream_info_);
  RETURN_ON_HR_FAILURE(hr, "Failed to get decoder output stream info", false);

  DVLOG(1) << "Input stream info: ";
  DVLOG(1) << "Max latency: " << input_stream_info_.hnsMaxLatency;
  if (codec_ == media::kCodecH264) {
    // There should be three flags, one for requiring a whole frame be in a
    // single sample, one for requiring there be one buffer only in a single
    // sample, and one that specifies a fixed sample size. (as in cbSize)
    CHECK_EQ(input_stream_info_.dwFlags, 0x7u);
  }

  DVLOG(1) << "Min buffer size: " << input_stream_info_.cbSize;
  DVLOG(1) << "Max lookahead: " << input_stream_info_.cbMaxLookahead;
  DVLOG(1) << "Alignment: " << input_stream_info_.cbAlignment;

  DVLOG(1) << "Output stream info: ";
  // The flags here should be the same and mean the same thing, except when
  // DXVA is enabled, there is an extra 0x100 flag meaning decoder will
  // allocate its own sample.
  DVLOG(1) << "Flags: "
          << std::hex << std::showbase << output_stream_info_.dwFlags;
  if (codec_ == media::kCodecH264) {
    CHECK_EQ(output_stream_info_.dwFlags, 0x107u);
  }
  DVLOG(1) << "Min buffer size: " << output_stream_info_.cbSize;
  DVLOG(1) << "Alignment: " << output_stream_info_.cbAlignment;
  return true;
}
| 1308 | |
// Pulls one decoded frame out of the MFT. Handles the three interesting
// ProcessOutput outcomes: a stream/format change (renegotiate NV12 and
// recurse), need-more-input (transition to kStopped), or success (hand the
// sample to ProcessOutputSample). Runs on the decoder thread.
void DXVAVideoDecodeAccelerator::DoDecode() {
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
  // This function is also called from FlushInternal in a loop which could
  // result in the state transitioning to kStopped due to no decoded output.
  State state = GetState();
  RETURN_AND_NOTIFY_ON_FAILURE(
      (state == kNormal || state == kFlushing || state == kStopped),
          "DoDecode: not in normal/flushing/stopped state", ILLEGAL_STATE,);

  MFT_OUTPUT_DATA_BUFFER output_data_buffer = {0};
  DWORD status = 0;

  HRESULT hr = decoder_->ProcessOutput(0,  // No flags
                                       1,  // # of out streams to pull from
                                       &output_data_buffer,
                                       &status);
  // ProcessOutput may hand back an event collection we must release even
  // though we don't consume the events.
  IMFCollection* events = output_data_buffer.pEvents;
  if (events != NULL) {
    DVLOG(1) << "Got events from ProcessOuput, but discarding";
    events->Release();
  }
  if (FAILED(hr)) {
    // A stream change needs further ProcessInput calls to get back decoder
    // output which is why we need to set the state to stopped.
    if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
      if (!SetDecoderOutputMediaType(MFVideoFormat_NV12)) {
        // Decoder didn't let us set NV12 output format. Not sure as to why
        // this can happen. Give up in disgust.
        NOTREACHED() << "Failed to set decoder output media type to NV12";
        SetState(kStopped);
      } else {
        DVLOG(1) << "Received output format change from the decoder."
                    " Recursively invoking DoDecode";
        DoDecode();
      }
      return;
    } else if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
      // No more output from the decoder. Stop playback.
      SetState(kStopped);
      return;
    } else {
      NOTREACHED() << "Unhandled error in DoDecode()";
      return;
    }
  }
  TRACE_EVENT_ASYNC_END0("gpu", "DXVAVideoDecodeAccelerator.Decoding", this);

  TRACE_COUNTER1("DXVA Decoding", "TotalPacketsBeforeDecode",
                 inputs_before_decode_);

  inputs_before_decode_ = 0;

  RETURN_AND_NOTIFY_ON_FAILURE(ProcessOutputSample(output_data_buffer.pSample),
      "Failed to process output sample.", PLATFORM_FAILURE,);
}
| 1364 | |
// Records a decoded output sample. If picture buffers already exist (or were
// requested), schedules the sample for copying on the main thread; otherwise
// asks the client to allocate picture buffers sized to the frame.
bool DXVAVideoDecodeAccelerator::ProcessOutputSample(IMFSample* sample) {
  RETURN_ON_FAILURE(sample, "Decode succeeded with NULL output sample", false);

  // The sample time carries the bitstream buffer id set in Decode().
  LONGLONG input_buffer_id = 0;
  RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id),
                       "Failed to get input buffer id associated with sample",
                       false);

  {
    base::AutoLock lock(decoder_lock_);
    DCHECK(pending_output_samples_.empty());
    pending_output_samples_.push_back(
        PendingSampleInfo(input_buffer_id, sample));
  }

  if (pictures_requested_) {
    DVLOG(1) << "Waiting for picture slots from the client.";
    main_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::ProcessPendingSamples,
                   weak_this_factory_.GetWeakPtr()));
    return true;
  }

  int width = 0;
  int height = 0;
  if (!GetVideoFrameDimensions(sample, &width, &height)) {
    RETURN_ON_FAILURE(false, "Failed to get D3D surface from output sample",
                      false);
  }

  // Go ahead and request picture buffers.
  main_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers,
                 weak_this_factory_.GetWeakPtr(),
                 width,
                 height));

  pictures_requested_ = true;
  return true;
}
| 1407 | |
// Pairs pending decoded samples with available picture buffers and kicks off
// the GPU copy of each sample's surface/texture into its buffer. Detects
// resolution changes by comparing the sample size against the buffer size.
// Runs on the main thread.
void DXVAVideoDecodeAccelerator::ProcessPendingSamples() {
  DCHECK(main_thread_task_runner_->BelongsToCurrentThread());

  // Nothing to do until the client has assigned picture buffers.
  if (!output_picture_buffers_.size())
    return;

  RETURN_AND_NOTIFY_ON_FAILURE(make_context_current_.Run(),
      "Failed to make context current", PLATFORM_FAILURE,);

  OutputBuffers::iterator index;

  for (index = output_picture_buffers_.begin();
       index != output_picture_buffers_.end() &&
       OutputSamplesPresent();
       ++index) {
    if (index->second->available()) {
      PendingSampleInfo* pending_sample = NULL;
      {
        base::AutoLock lock(decoder_lock_);

        // Skip samples already paired with a picture buffer (copy in flight).
        PendingSampleInfo& sample_info = pending_output_samples_.front();
        if (sample_info.picture_buffer_id != -1)
          continue;
        pending_sample = &sample_info;
      }

      int width = 0;
      int height = 0;
      if (!GetVideoFrameDimensions(pending_sample->output_sample.get(),
          &width, &height)) {
        RETURN_AND_NOTIFY_ON_FAILURE(false,
            "Failed to get D3D surface from output sample", PLATFORM_FAILURE,);
      }

      // A size mismatch means the stream resolution changed; reallocate
      // picture buffers before consuming further samples.
      if (width != index->second->size().width() ||
          height != index->second->size().height()) {
        HandleResolutionChanged(width, height);
        return;
      }

      base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
      HRESULT hr = pending_sample->output_sample->GetBufferByIndex(
          0, output_buffer.Receive());
      RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
          "Failed to get buffer from output sample", PLATFORM_FAILURE,);

      base::win::ScopedComPtr<IDirect3DSurface9> surface;
      base::win::ScopedComPtr<ID3D11Texture2D> d3d11_texture;

      // The underlying resource type depends on the decode path: a DXGI
      // texture on DX11, a D3D9 surface otherwise.
      if (use_dx11_) {
        base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer;
        hr = dxgi_buffer.QueryFrom(output_buffer.get());
        RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
            "Failed to get DXGIBuffer from output sample", PLATFORM_FAILURE,);
        hr = dxgi_buffer->GetResource(
            __uuidof(ID3D11Texture2D),
            reinterpret_cast<void**>(d3d11_texture.Receive()));
      } else {
        hr = MFGetService(output_buffer.get(), MR_BUFFER_SERVICE,
                          IID_PPV_ARGS(surface.Receive()));
      }
      RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
          "Failed to get surface from output sample", PLATFORM_FAILURE,);

      pending_sample->picture_buffer_id = index->second->id();

      RETURN_AND_NOTIFY_ON_FAILURE(
          index->second->CopyOutputSampleDataToPictureBuffer(
              this,
              surface.get(),
              d3d11_texture.get(),
              pending_sample->input_buffer_id),
          "Failed to copy output sample", PLATFORM_FAILURE,);

      index->second->set_available(false);
    }
  }
}
| 1486 | |
// Reports |error| to the client and tears the decoder down. Safe to call
// from any thread; re-posts itself to the main thread if needed.
void DXVAVideoDecodeAccelerator::StopOnError(
    media::VideoDecodeAccelerator::Error error) {
  if (!main_thread_task_runner_->BelongsToCurrentThread()) {
    main_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::StopOnError,
                   weak_this_factory_.GetWeakPtr(),
                   error));
    return;
  }

  if (client_)
    client_->NotifyError(error);
  // Drop the client so no further callbacks are delivered after the error.
  client_ = NULL;

  if (GetState() != kUninitialized) {
    Invalidate();
  }
}
| 1506 | |
// Releases all decoder state, COM objects and D3D resources and moves the
// accelerator back to kUninitialized. Idempotent: returns immediately if
// already uninitialized.
void DXVAVideoDecodeAccelerator::Invalidate() {
  if (GetState() == kUninitialized)
    return;

  // Stop the worker thread before releasing anything it may be using.
  decoder_thread_.Stop();
  weak_this_factory_.InvalidateWeakPtrs();
  output_picture_buffers_.clear();
  stale_output_picture_buffers_.clear();
  pending_output_samples_.clear();
  pending_input_buffers_.clear();
  decoder_.Release();

  if (use_dx11_) {
    if (video_format_converter_mft_.get()) {
      // Tell the converter MFT streaming has ended before releasing it.
      video_format_converter_mft_->ProcessMessage(
          MFT_MESSAGE_NOTIFY_END_STREAMING, 0);
      video_format_converter_mft_.Release();
    }
    d3d11_device_context_.Release();
    d3d11_device_.Release();
    d3d11_device_manager_.Release();
    d3d11_query_.Release();
    dx11_video_format_converter_media_type_needs_init_ = true;
  } else {
    d3d9_.Release();
    d3d9_device_ex_.Release();
    device_manager_.Release();
    query_.Release();
  }

  SetState(kUninitialized);
}
| 1539 | |
| 1540 void DXVAVideoDecodeAccelerator::NotifyInputBufferRead(int input_buffer_id) { | |
| 1541 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | |
| 1542 if (client_) | |
| 1543 client_->NotifyEndOfBitstreamBuffer(input_buffer_id); | |
| 1544 } | |
| 1545 | |
| 1546 void DXVAVideoDecodeAccelerator::NotifyFlushDone() { | |
| 1547 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | |
| 1548 if (client_ && pending_flush_) { | |
| 1549 pending_flush_ = false; | |
| 1550 { | |
| 1551 base::AutoLock lock(decoder_lock_); | |
| 1552 sent_drain_message_ = false; | |
| 1553 } | |
| 1554 | |
| 1555 client_->NotifyFlushDone(); | |
| 1556 } | |
| 1557 } | |
| 1558 | |
| 1559 void DXVAVideoDecodeAccelerator::NotifyResetDone() { | |
| 1560 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | |
| 1561 if (client_) | |
| 1562 client_->NotifyResetDone(); | |
| 1563 } | |
| 1564 | |
| 1565 void DXVAVideoDecodeAccelerator::RequestPictureBuffers(int width, int height) { | |
| 1566 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | |
| 1567 // This task could execute after the decoder has been torn down. | |
| 1568 if (GetState() != kUninitialized && client_) { | |
| 1569 client_->ProvidePictureBuffers( | |
| 1570 kNumPictureBuffers, | |
| 1571 gfx::Size(width, height), | |
| 1572 GL_TEXTURE_2D); | |
| 1573 } | |
| 1574 } | |
| 1575 | |
| 1576 void DXVAVideoDecodeAccelerator::NotifyPictureReady( | |
| 1577 int picture_buffer_id, | |
| 1578 int input_buffer_id) { | |
| 1579 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | |
| 1580 // This task could execute after the decoder has been torn down. | |
| 1581 if (GetState() != kUninitialized && client_) { | |
| 1582 // TODO(henryhsu): Use correct visible size instead of (0, 0). We can't use | |
| 1583 // coded size here so use (0, 0) intentionally to have the client choose. | |
| 1584 media::Picture picture(picture_buffer_id, input_buffer_id, | |
| 1585 gfx::Rect(0, 0), false); | |
| 1586 client_->PictureReady(picture); | |
| 1587 } | |
| 1588 } | |
| 1589 | |
| 1590 void DXVAVideoDecodeAccelerator::NotifyInputBuffersDropped() { | |
| 1591 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | |
| 1592 if (!client_) | |
| 1593 return; | |
| 1594 | |
| 1595 for (PendingInputs::iterator it = pending_input_buffers_.begin(); | |
| 1596 it != pending_input_buffers_.end(); ++it) { | |
| 1597 LONGLONG input_buffer_id = 0; | |
| 1598 RETURN_ON_HR_FAILURE((*it)->GetSampleTime(&input_buffer_id), | |
| 1599 "Failed to get buffer id associated with sample",); | |
| 1600 client_->NotifyEndOfBitstreamBuffer(input_buffer_id); | |
| 1601 } | |
| 1602 pending_input_buffers_.clear(); | |
| 1603 } | |
| 1604 | |
| 1605 void DXVAVideoDecodeAccelerator::DecodePendingInputBuffers() { | |
| 1606 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | |
| 1607 State state = GetState(); | |
| 1608 RETURN_AND_NOTIFY_ON_FAILURE((state != kUninitialized), | |
| 1609 "Invalid state: " << state, ILLEGAL_STATE,); | |
| 1610 | |
| 1611 if (pending_input_buffers_.empty() || OutputSamplesPresent()) | |
| 1612 return; | |
| 1613 | |
| 1614 PendingInputs pending_input_buffers_copy; | |
| 1615 std::swap(pending_input_buffers_, pending_input_buffers_copy); | |
| 1616 | |
| 1617 for (PendingInputs::iterator it = pending_input_buffers_copy.begin(); | |
| 1618 it != pending_input_buffers_copy.end(); ++it) { | |
| 1619 DecodeInternal(*it); | |
| 1620 } | |
| 1621 } | |
| 1622 | |
// Advances a pending Flush() on the decoder thread. The flush completes only
// after the queued input has been decoded, the MFT has been sent a drain
// message, and no output frame remains outstanding; until then this method
// re-posts itself and returns.
void DXVAVideoDecodeAccelerator::FlushInternal() {
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());

  // We allow only one output frame to be present at any given time. If we have
  // an output frame, then we cannot complete the flush at this time.
  if (OutputSamplesPresent())
    return;

  // First drain the pending input because once the drain message is sent below,
  // the decoder will ignore further input until it's drained.
  if (!pending_input_buffers_.empty()) {
    decoder_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers,
                   base::Unretained(this)));
    decoder_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal,
                   base::Unretained(this)));
    return;
  }

  {
    base::AutoLock lock(decoder_lock_);
    // Send the drain message at most once per flush; the flag is cleared in
    // NotifyFlushDone() on the main thread.
    if (!sent_drain_message_) {
      RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_DRAIN, 0),
                                   "Failed to send drain message",
                                   PLATFORM_FAILURE,);
      sent_drain_message_ = true;
    }
  }

  // Attempt to retrieve an output frame from the decoder. If we have one,
  // return and proceed when the output frame is processed. If we don't have a
  // frame then we are done.
  DoDecode();
  if (OutputSamplesPresent())
    return;

  SetState(kFlushing);

  main_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::NotifyFlushDone,
                 weak_this_factory_.GetWeakPtr()));

  SetState(kNormal);
}
| 1671 | |
// Submits one input sample to the decoder MFT on the decoder thread,
// retrieving output when the MFT reports MF_E_NOTACCEPTING. Samples that
// still cannot be accepted are queued in |pending_input_buffers_|. The
// sample's timestamp carries the bitstream buffer id.
void DXVAVideoDecodeAccelerator::DecodeInternal(
    const base::win::ScopedComPtr<IMFSample>& sample) {
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());

  if (GetState() == kUninitialized)
    return;

  // Preserve submission order: if output is pending or earlier input is
  // queued, this sample must wait its turn.
  if (OutputSamplesPresent() || !pending_input_buffers_.empty()) {
    pending_input_buffers_.push_back(sample);
    return;
  }

  // Trace the span from first input until the first decoded output emerges
  // (the matching ASYNC_END is emitted elsewhere).
  if (!inputs_before_decode_) {
    TRACE_EVENT_ASYNC_BEGIN0("gpu", "DXVAVideoDecodeAccelerator.Decoding",
                             this);
  }
  inputs_before_decode_++;

  HRESULT hr = decoder_->ProcessInput(0, sample.get(), 0);
  // As per msdn if the decoder returns MF_E_NOTACCEPTING then it means that it
  // has enough data to produce one or more output samples. In this case the
  // recommended options are to
  // 1. Generate new output by calling IMFTransform::ProcessOutput until it
  //    returns MF_E_TRANSFORM_NEED_MORE_INPUT.
  // 2. Flush the input data
  // We implement the first option, i.e to retrieve the output sample and then
  // process the input again. Failure in either of these steps is treated as a
  // decoder failure.
  if (hr == MF_E_NOTACCEPTING) {
    DoDecode();
    // If the DoDecode call resulted in an output frame then we should not
    // process any more input until that frame is copied to the target surface.
    if (!OutputSamplesPresent()) {
      State state = GetState();
      RETURN_AND_NOTIFY_ON_FAILURE((state == kStopped || state == kNormal ||
                                    state == kFlushing),
          "Failed to process output. Unexpected decoder state: " << state,
          PLATFORM_FAILURE,);
      hr = decoder_->ProcessInput(0, sample.get(), 0);
    }
    // If we continue to get the MF_E_NOTACCEPTING error we do the following:-
    // 1. Add the input sample to the pending queue.
    // 2. If we don't have any output samples we post the
    //    DecodePendingInputBuffers task to process the pending input samples.
    //    If we have an output sample then the above task is posted when the
    //    output samples are sent to the client.
    // This is because we only support 1 pending output sample at any
    // given time due to the limitation with the Microsoft media foundation
    // decoder where it recycles the output Decoder surfaces.
    if (hr == MF_E_NOTACCEPTING) {
      pending_input_buffers_.push_back(sample);
      decoder_thread_task_runner_->PostTask(
          FROM_HERE,
          base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers,
                     base::Unretained(this)));
      return;
    }
  }
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to process input sample",
                                  PLATFORM_FAILURE,);

  DoDecode();

  State state = GetState();
  RETURN_AND_NOTIFY_ON_FAILURE((state == kStopped || state == kNormal ||
                                state == kFlushing),
      "Failed to process output. Unexpected decoder state: " << state,
      ILLEGAL_STATE,);

  LONGLONG input_buffer_id = 0;
  RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id),
                       "Failed to get input buffer id associated with sample",);
  // The Microsoft Media foundation decoder internally buffers up to 30 frames
  // before returning a decoded frame. We need to inform the client that this
  // input buffer is processed as it may stop sending us further input.
  // Note: This may break clients which expect every input buffer to be
  // associated with a decoded output buffer.
  // TODO(ananta)
  // Do some more investigation into whether it is possible to get the MFT
  // decoder to emit an output packet for every input packet.
  // http://code.google.com/p/chromium/issues/detail?id=108121
  // http://code.google.com/p/chromium/issues/detail?id=150925
  main_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::NotifyInputBufferRead,
                 weak_this_factory_.GetWeakPtr(),
                 input_buffer_id));
}
| 1760 | |
| 1761 void DXVAVideoDecodeAccelerator::HandleResolutionChanged(int width, | |
| 1762 int height) { | |
| 1763 dx11_video_format_converter_media_type_needs_init_ = true; | |
| 1764 | |
| 1765 main_thread_task_runner_->PostTask( | |
| 1766 FROM_HERE, | |
| 1767 base::Bind(&DXVAVideoDecodeAccelerator::DismissStaleBuffers, | |
| 1768 weak_this_factory_.GetWeakPtr())); | |
| 1769 | |
| 1770 main_thread_task_runner_->PostTask( | |
| 1771 FROM_HERE, | |
| 1772 base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers, | |
| 1773 weak_this_factory_.GetWeakPtr(), | |
| 1774 width, | |
| 1775 height)); | |
| 1776 } | |
| 1777 | |
| 1778 void DXVAVideoDecodeAccelerator::DismissStaleBuffers() { | |
| 1779 OutputBuffers::iterator index; | |
| 1780 | |
| 1781 for (index = output_picture_buffers_.begin(); | |
| 1782 index != output_picture_buffers_.end(); | |
| 1783 ++index) { | |
| 1784 if (index->second->available()) { | |
| 1785 DVLOG(1) << "Dismissing picture id: " << index->second->id(); | |
| 1786 client_->DismissPictureBuffer(index->second->id()); | |
| 1787 } else { | |
| 1788 // Move to |stale_output_picture_buffers_| for deferred deletion. | |
| 1789 stale_output_picture_buffers_.insert( | |
| 1790 std::make_pair(index->first, index->second)); | |
| 1791 } | |
| 1792 } | |
| 1793 | |
| 1794 output_picture_buffers_.clear(); | |
| 1795 } | |
| 1796 | |
| 1797 void DXVAVideoDecodeAccelerator::DeferredDismissStaleBuffer( | |
| 1798 int32 picture_buffer_id) { | |
| 1799 OutputBuffers::iterator it = stale_output_picture_buffers_.find( | |
| 1800 picture_buffer_id); | |
| 1801 DCHECK(it != stale_output_picture_buffers_.end()); | |
| 1802 DVLOG(1) << "Dismissing picture id: " << it->second->id(); | |
| 1803 client_->DismissPictureBuffer(it->second->id()); | |
| 1804 stale_output_picture_buffers_.erase(it); | |
| 1805 } | |
| 1806 | |
// Returns the current decoder state. May be called from any thread; the
// InterlockedAdd of zero performs an atomic read of |state_|.
DXVAVideoDecodeAccelerator::State
DXVAVideoDecodeAccelerator::GetState() {
  static_assert(sizeof(State) == sizeof(long), "mismatched type sizes");
  State state = static_cast<State>(
      InterlockedAdd(reinterpret_cast<volatile long*>(&state_), 0));
  return state;
}
| 1814 | |
// Sets the decoder state. State changes are always marshaled to the main
// thread so transitions are serialized; the write itself is atomic so
// GetState() remains safe on other threads.
void DXVAVideoDecodeAccelerator::SetState(State new_state) {
  if (!main_thread_task_runner_->BelongsToCurrentThread()) {
    main_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::SetState,
                   weak_this_factory_.GetWeakPtr(),
                   new_state));
    return;
  }

  static_assert(sizeof(State) == sizeof(long), "mismatched type sizes");
  ::InterlockedExchange(reinterpret_cast<volatile long*>(&state_),
                        new_state);
  DCHECK_EQ(state_, new_state);
}
| 1830 | |
// Starts the dedicated decoder thread, with COM initialized for it
// (init_com_with_mta(false), i.e. not multi-threaded apartment), and caches
// its task runner for posting decode work.
void DXVAVideoDecodeAccelerator::StartDecoderThread() {
  decoder_thread_.init_com_with_mta(false);
  decoder_thread_.Start();
  decoder_thread_task_runner_ = decoder_thread_.task_runner();
}
| 1836 | |
// Returns true if a decoded output sample is awaiting delivery to the
// client. Thread-safe: |pending_output_samples_| is guarded by
// |decoder_lock_|.
bool DXVAVideoDecodeAccelerator::OutputSamplesPresent() {
  base::AutoLock lock(decoder_lock_);
  return !pending_output_samples_.empty();
}
| 1841 | |
// Copies a decoded D3D9 frame from |src_surface| into the ANGLE-owned
// |dest_surface| via StretchRect. Always runs on the decoder thread (calls
// from elsewhere are re-posted). Completion is signaled to the main thread
// either immediately, when the ANGLE device is shared, or after the D3D
// command queue has drained via FlushDecoder().
void DXVAVideoDecodeAccelerator::CopySurface(IDirect3DSurface9* src_surface,
                                             IDirect3DSurface9* dest_surface,
                                             int picture_buffer_id,
                                             int input_buffer_id) {
  if (!decoder_thread_task_runner_->BelongsToCurrentThread()) {
    decoder_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::CopySurface,
                   base::Unretained(this),
                   src_surface,
                   dest_surface,
                   picture_buffer_id,
                   input_buffer_id));
    return;
  }

  HRESULT hr = d3d9_device_ex_->StretchRect(src_surface, NULL, dest_surface,
                                            NULL, D3DTEXF_NONE);
  RETURN_ON_HR_FAILURE(hr, "Colorspace conversion via StretchRect failed",);

  // Ideally, this should be done immediately before the draw call that uses
  // the texture. Flush it once here though.
  hr = query_->Issue(D3DISSUE_END);
  RETURN_ON_HR_FAILURE(hr, "Failed to issue END",);

  // If we are sharing the ANGLE device we don't need to wait for the Flush to
  // complete.
  if (using_angle_device_) {
    main_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete,
                   weak_this_factory_.GetWeakPtr(),
                   src_surface,
                   dest_surface,
                   picture_buffer_id,
                   input_buffer_id));
    return;
  }

  // Flush the decoder device to ensure that the decoded frame is copied to the
  // target surface.
  decoder_thread_task_runner_->PostDelayedTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder,
                 base::Unretained(this), 0, src_surface, dest_surface,
                 picture_buffer_id, input_buffer_id),
      base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs));
}
| 1890 | |
// Runs on the main thread once a decoded frame has been copied into the
// picture buffer's surface (D3D9 path) or texture (DX11 path, where the
// surface pointers are null). Hands the picture to the client and resumes
// the flush or pending-input pipeline on the decoder thread.
void DXVAVideoDecodeAccelerator::CopySurfaceComplete(
    IDirect3DSurface9* src_surface,
    IDirect3DSurface9* dest_surface,
    int picture_buffer_id,
    int input_buffer_id) {
  DCHECK(main_thread_task_runner_->BelongsToCurrentThread());

  // The output buffers may have changed in the following scenarios:-
  // 1. A resolution change.
  // 2. Decoder instance was destroyed.
  // Ignore copy surface notifications for such buffers.
  OutputBuffers::iterator it = output_picture_buffers_.find(picture_buffer_id);
  if (it == output_picture_buffers_.end())
    return;

  // If the picture buffer is marked as available it probably means that there
  // was a Reset operation which dropped the output frame.
  DXVAPictureBuffer* picture_buffer = it->second.get();
  if (picture_buffer->available())
    return;

  RETURN_AND_NOTIFY_ON_FAILURE(make_context_current_.Run(),
      "Failed to make context current", PLATFORM_FAILURE,);

  DCHECK(!output_picture_buffers_.empty());

  picture_buffer->CopySurfaceComplete(src_surface,
                                      dest_surface);

  NotifyPictureReady(picture_buffer->id(), input_buffer_id);

  // The output slot is free now; release it so the decoder can produce the
  // next frame.
  {
    base::AutoLock lock(decoder_lock_);
    if (!pending_output_samples_.empty())
      pending_output_samples_.pop_front();
  }

  // Resume the flush if one is in progress, otherwise drain any input that
  // was queued while the output slot was occupied.
  if (pending_flush_) {
    decoder_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal,
                   base::Unretained(this)));
    return;
  }
  decoder_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers,
                 base::Unretained(this)));
}
| 1941 | |
// DX11 path: converts the decoded frame in |src_texture| and writes the
// result into the ANGLE-owned |dest_texture| using the video processor MFT.
// The first invocation arrives on the main thread with |video_frame| null;
// it snapshots the pending output sample and re-posts to the decoder thread,
// transferring sample ownership via Detach()/Attach(). Signals completion to
// the main thread via CopySurfaceComplete() with null surfaces.
void DXVAVideoDecodeAccelerator::CopyTexture(ID3D11Texture2D* src_texture,
                                             ID3D11Texture2D* dest_texture,
                                             IMFSample* video_frame,
                                             int picture_buffer_id,
                                             int input_buffer_id) {
  HRESULT hr = E_FAIL;

  DCHECK(use_dx11_);

  if (!decoder_thread_task_runner_->BelongsToCurrentThread()) {
    // The media foundation H.264 decoder outputs YUV12 textures which we
    // cannot copy into ANGLE as they expect ARGB textures. In D3D land
    // the StretchRect API in the IDirect3DDevice9Ex interface did the color
    // space conversion for us. Sadly in DX11 land the API does not provide
    // a straightforward way to do this.
    // We use the video processor MFT.
    // https://msdn.microsoft.com/en-us/library/hh162913(v=vs.85).aspx
    // This object implements a media foundation transform (IMFTransform)
    // which follows the same contract as the decoder. The color space
    // conversion as per msdn is done in the GPU.

    D3D11_TEXTURE2D_DESC source_desc;
    src_texture->GetDesc(&source_desc);

    // Set up the input and output types for the video processor MFT.
    if (!InitializeDX11VideoFormatConverterMediaType(source_desc.Width,
                                                     source_desc.Height)) {
      RETURN_AND_NOTIFY_ON_FAILURE(
          false, "Failed to initialize media types for convesion.",
          PLATFORM_FAILURE,);
    }

    // The input to the video processor is the output sample.
    base::win::ScopedComPtr<IMFSample> input_sample_for_conversion;
    {
      base::AutoLock lock(decoder_lock_);
      PendingSampleInfo& sample_info = pending_output_samples_.front();
      input_sample_for_conversion = sample_info.output_sample;
    }

    // Detach() hands the sample's reference to the bound task; it is
    // re-adopted below via Attach() on the decoder thread.
    decoder_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::CopyTexture,
                   base::Unretained(this),
                   src_texture,
                   dest_texture,
                   input_sample_for_conversion.Detach(),
                   picture_buffer_id,
                   input_buffer_id));
    return;
  }

  DCHECK(video_frame);

  // Re-adopt the reference detached on the main thread so it is released
  // when this function returns.
  base::win::ScopedComPtr<IMFSample> input_sample;
  input_sample.Attach(video_frame);

  DCHECK(video_format_converter_mft_.get());

  // d3d11_device_context_->Begin(d3d11_query_.get());

  hr = video_format_converter_mft_->ProcessInput(0, video_frame, 0);
  if (FAILED(hr)) {
    DCHECK(false);
    RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
        "Failed to convert output sample format.", PLATFORM_FAILURE,);
  }

  // The video processor MFT requires output samples to be allocated by the
  // caller. We create a sample with a buffer backed with the ID3D11Texture2D
  // interface exposed by ANGLE. This works nicely as this ensures that the
  // video processor coverts the color space of the output frame and copies
  // the result into the ANGLE texture.
  base::win::ScopedComPtr<IMFSample> output_sample;
  hr = MFCreateSample(output_sample.Receive());
  if (FAILED(hr)) {
    RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
        "Failed to create output sample.", PLATFORM_FAILURE,);
  }

  base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
  hr = MFCreateDXGISurfaceBuffer(
      __uuidof(ID3D11Texture2D), dest_texture, 0, FALSE,
      output_buffer.Receive());
  if (FAILED(hr)) {
    base::debug::Alias(&hr);
    // TODO(ananta)
    // Remove this CHECK when the change to use DX11 for H/W decoding
    // stabilizes.
    CHECK(false);
    RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
        "Failed to create output sample.", PLATFORM_FAILURE,);
  }

  output_sample->AddBuffer(output_buffer.get());

  // Lock the device here as we are accessing the destination texture created
  // on the main thread.
  multi_threaded_->Enter();

  DWORD status = 0;
  MFT_OUTPUT_DATA_BUFFER format_converter_output = {};
  format_converter_output.pSample = output_sample.get();
  hr = video_format_converter_mft_->ProcessOutput(
      0,  // No flags
      1,  // # of out streams to pull from
      &format_converter_output,
      &status);

  d3d11_device_context_->Flush();
  d3d11_device_context_->End(d3d11_query_.get());

  multi_threaded_->Leave();

  if (FAILED(hr)) {
    base::debug::Alias(&hr);
    // TODO(ananta)
    // Remove this CHECK when the change to use DX11 for H/W decoding
    // stabilizes.
    CHECK(false);
    RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
        "Failed to convert output sample format.", PLATFORM_FAILURE,);
  }

  // Null surfaces mark this as the DX11 path for CopySurfaceComplete().
  main_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete,
                 weak_this_factory_.GetWeakPtr(),
                 nullptr,
                 nullptr,
                 picture_buffer_id,
                 input_buffer_id));
}
| 2075 | |
// D3D9 path only: polls the event query until the StretchRect issued by
// CopySurface() has reached the GPU, re-posting itself with a delay up to
// kMaxIterationsForD3DFlush times, then signals CopySurfaceComplete() on the
// main thread.
void DXVAVideoDecodeAccelerator::FlushDecoder(
    int iterations,
    IDirect3DSurface9* src_surface,
    IDirect3DSurface9* dest_surface,
    int picture_buffer_id,
    int input_buffer_id) {
  DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());

  // The DXVA decoder has its own device which it uses for decoding. ANGLE
  // has its own device which we don't have access to.
  // The above code attempts to copy the decoded picture into a surface
  // which is owned by ANGLE. As there are multiple devices involved in
  // this, the StretchRect call above is not synchronous.
  // We attempt to flush the batched operations to ensure that the picture is
  // copied to the surface owned by ANGLE.
  // We need to do this in a loop and call flush multiple times.
  // We have seen the GetData call for flushing the command buffer fail to
  // return success occassionally on multi core machines, leading to an
  // infinite loop.
  // Workaround is to have an upper limit of 4 on the number of iterations to
  // wait for the Flush to finish.
  DCHECK(!use_dx11_);

  HRESULT hr = E_FAIL;

  hr = query_->GetData(NULL, 0, D3DGETDATA_FLUSH);

  // S_FALSE means the GPU has not finished yet; retry after a delay, bounded
  // by kMaxIterationsForD3DFlush as described above.
  if ((hr == S_FALSE) && (++iterations < kMaxIterationsForD3DFlush)) {
    decoder_thread_task_runner_->PostDelayedTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder,
                   base::Unretained(this), iterations, src_surface,
                   dest_surface, picture_buffer_id, input_buffer_id),
        base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs));
    return;
  }

  main_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete,
                 weak_this_factory_.GetWeakPtr(),
                 src_surface,
                 dest_surface,
                 picture_buffer_id,
                 input_buffer_id));
}
| 2122 | |
// Configures the video processor MFT for NV12 -> ARGB32 conversion at
// |width| x |height|: attaches the DX11 device manager, sets the NV12 input
// type, then walks the available output types until ARGB32 is found and
// configured. Returns false if no suitable output type exists. A no-op
// returning true while
// |dx11_video_format_converter_media_type_needs_init_| is false.
bool DXVAVideoDecodeAccelerator::InitializeDX11VideoFormatConverterMediaType(
    int width, int height) {
  if (!dx11_video_format_converter_media_type_needs_init_)
    return true;

  CHECK(video_format_converter_mft_.get());

  HRESULT hr = video_format_converter_mft_->ProcessMessage(
      MFT_MESSAGE_SET_D3D_MANAGER,
      reinterpret_cast<ULONG_PTR>(
          d3d11_device_manager_.get()));

  if (FAILED(hr)) {
    base::debug::Alias(&hr);
    // TODO(ananta)
    // Remove this CHECK when the change to use DX11 for H/W decoding
    // stabilizes.
    CHECK(false);
  }
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
      "Failed to initialize video format converter", PLATFORM_FAILURE, false);

  // End any previous streaming session before renegotiating media types
  // (e.g. after a resolution change).
  video_format_converter_mft_->ProcessMessage(
      MFT_MESSAGE_NOTIFY_END_STREAMING, 0);

  base::win::ScopedComPtr<IMFMediaType> media_type;
  hr = MFCreateMediaType(media_type.Receive());
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFCreateMediaType failed",
      PLATFORM_FAILURE, false);

  hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set major input type",
      PLATFORM_FAILURE, false);

  hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12);
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set input sub type",
      PLATFORM_FAILURE, false);

  hr = media_type->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
      "Failed to set attributes on media type", PLATFORM_FAILURE, false);

  hr = media_type->SetUINT32(MF_MT_INTERLACE_MODE,
                             MFVideoInterlace_Progressive);
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
      "Failed to set attributes on media type", PLATFORM_FAILURE, false);

  base::win::ScopedComPtr<IMFAttributes> converter_attributes;
  hr = video_format_converter_mft_->GetAttributes(
      converter_attributes.Receive());
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get converter attributes",
      PLATFORM_FAILURE, false);

  hr = converter_attributes->SetUINT32(MF_XVP_PLAYBACK_MODE, TRUE);
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter attributes",
      PLATFORM_FAILURE, false);

  hr = converter_attributes->SetUINT32(MF_LOW_LATENCY, FALSE);
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter attributes",
      PLATFORM_FAILURE, false);

  hr = MFSetAttributeSize(media_type.get(), MF_MT_FRAME_SIZE, width, height);
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set media type attributes",
      PLATFORM_FAILURE, false);

  hr = video_format_converter_mft_->SetInputType(0, media_type.get(), 0);
  if (FAILED(hr)) {
    base::debug::Alias(&hr);
    // TODO(ananta)
    // Remove this CHECK when the change to use DX11 for H/W decoding
    // stabilizes.
    CHECK(false);
  }
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter input type",
      PLATFORM_FAILURE, false);

  // Enumerate the converter's available output types until we find ARGB32,
  // then mirror the frame size / interlacing attributes onto it.
  base::win::ScopedComPtr<IMFMediaType> out_media_type;

  for (uint32 i = 0;
       SUCCEEDED(video_format_converter_mft_->GetOutputAvailableType(0, i,
                     out_media_type.Receive()));
       ++i) {
    GUID out_subtype = {0};
    hr = out_media_type->GetGUID(MF_MT_SUBTYPE, &out_subtype);
    RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get output major type",
        PLATFORM_FAILURE, false);

    if (out_subtype == MFVideoFormat_ARGB32) {
      hr = out_media_type->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
      RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
          "Failed to set attributes on media type", PLATFORM_FAILURE, false);

      hr = out_media_type->SetUINT32(MF_MT_INTERLACE_MODE,
                                     MFVideoInterlace_Progressive);
      RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
          "Failed to set attributes on media type", PLATFORM_FAILURE, false);

      hr = MFSetAttributeSize(out_media_type.get(), MF_MT_FRAME_SIZE, width,
                              height);
      RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
          "Failed to set media type attributes", PLATFORM_FAILURE, false);

      hr = video_format_converter_mft_->SetOutputType(
          0, out_media_type.get(), 0);  // No flags
      if (FAILED(hr)) {
        base::debug::Alias(&hr);
        // TODO(ananta)
        // Remove this CHECK when the change to use DX11 for H/W decoding
        // stabilizes.
        CHECK(false);
      }
      RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
          "Failed to set converter output type", PLATFORM_FAILURE, false);

      hr = video_format_converter_mft_->ProcessMessage(
          MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0);
      if (FAILED(hr)) {
        // TODO(ananta)
        // Remove this CHECK when the change to use DX11 for H/W decoding
        // stabilizes.
        RETURN_AND_NOTIFY_ON_FAILURE(
            false, "Failed to initialize video converter.", PLATFORM_FAILURE,
            false);
      }
      dx11_video_format_converter_media_type_needs_init_ = false;
      return true;
    }
    out_media_type.Release();
  }
  return false;
}
| 2254 | |
// Extracts the width and height of the decoded frame carried by |sample|,
// from the DX11 texture description on the DX11 path or the D3D9 surface
// description otherwise. Returns false if any of the COM queries fail.
bool DXVAVideoDecodeAccelerator::GetVideoFrameDimensions(
    IMFSample* sample,
    int* width,
    int* height) {
  base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
  HRESULT hr = sample->GetBufferByIndex(0, output_buffer.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", false);

  if (use_dx11_) {
    // DX11: the buffer wraps a DXGI resource; query up to the underlying
    // ID3D11Texture2D and read its description.
    base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer;
    base::win::ScopedComPtr<ID3D11Texture2D> d3d11_texture;
    hr = dxgi_buffer.QueryFrom(output_buffer.get());
    RETURN_ON_HR_FAILURE(hr, "Failed to get DXGIBuffer from output sample",
                         false);
    hr = dxgi_buffer->GetResource(
        __uuidof(ID3D11Texture2D),
        reinterpret_cast<void**>(d3d11_texture.Receive()));
    RETURN_ON_HR_FAILURE(hr, "Failed to get D3D11Texture from output buffer",
                         false);
    D3D11_TEXTURE2D_DESC d3d11_texture_desc;
    d3d11_texture->GetDesc(&d3d11_texture_desc);
    *width = d3d11_texture_desc.Width;
    *height = d3d11_texture_desc.Height;
  } else {
    // D3D9: obtain the IDirect3DSurface9 via MFGetService and read its
    // description.
    base::win::ScopedComPtr<IDirect3DSurface9> surface;
    hr = MFGetService(output_buffer.get(), MR_BUFFER_SERVICE,
                      IID_PPV_ARGS(surface.Receive()));
    RETURN_ON_HR_FAILURE(hr, "Failed to get D3D surface from output sample",
                         false);
    D3DSURFACE_DESC surface_desc;
    hr = surface->GetDesc(&surface_desc);
    RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false);
    *width = surface_desc.Width;
    *height = surface_desc.Height;
  }
  return true;
}
| 2292 | |
| 2293 } // namespace content | |
| OLD | NEW |