OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/common/gpu/media/dxva_video_decode_accelerator.h" | 5 #include "content/common/gpu/media/dxva_video_decode_accelerator.h" |
6 | 6 |
7 #if !defined(OS_WIN) | 7 #if !defined(OS_WIN) |
8 #error This file should only be built on Windows. | 8 #error This file should only be built on Windows. |
9 #endif // !defined(OS_WIN) | 9 #endif // !defined(OS_WIN) |
10 | 10 |
11 #include <ks.h> | 11 #include <ks.h> |
12 #include <codecapi.h> | 12 #include <codecapi.h> |
| 13 #include <dxgi1_2.h> |
13 #include <mfapi.h> | 14 #include <mfapi.h> |
14 #include <mferror.h> | 15 #include <mferror.h> |
15 #include <wmcodecdsp.h> | 16 #include <wmcodecdsp.h> |
16 | 17 |
17 #include "base/base_paths_win.h" | 18 #include "base/base_paths_win.h" |
18 #include "base/bind.h" | 19 #include "base/bind.h" |
19 #include "base/callback.h" | 20 #include "base/callback.h" |
20 #include "base/command_line.h" | 21 #include "base/command_line.h" |
| 22 #include "base/debug/alias.h" |
21 #include "base/file_version_info.h" | 23 #include "base/file_version_info.h" |
22 #include "base/files/file_path.h" | 24 #include "base/files/file_path.h" |
23 #include "base/logging.h" | 25 #include "base/logging.h" |
24 #include "base/memory/scoped_ptr.h" | 26 #include "base/memory/scoped_ptr.h" |
25 #include "base/memory/shared_memory.h" | 27 #include "base/memory/shared_memory.h" |
26 #include "base/message_loop/message_loop.h" | 28 #include "base/message_loop/message_loop.h" |
27 #include "base/path_service.h" | 29 #include "base/path_service.h" |
28 #include "base/trace_event/trace_event.h" | 30 #include "base/trace_event/trace_event.h" |
29 #include "base/win/windows_version.h" | 31 #include "base/win/windows_version.h" |
30 #include "media/video/video_decode_accelerator.h" | 32 #include "media/video/video_decode_accelerator.h" |
31 #include "ui/gl/gl_bindings.h" | 33 #include "ui/gl/gl_bindings.h" |
| 34 #include "ui/gl/gl_context.h" |
32 #include "ui/gl/gl_surface_egl.h" | 35 #include "ui/gl/gl_surface_egl.h" |
33 #include "ui/gl/gl_switches.h" | 36 #include "ui/gl/gl_switches.h" |
34 | 37 |
35 namespace { | 38 namespace { |
36 | 39 |
37 // Path is appended on to the PROGRAM_FILES base path. | 40 // Path is appended on to the PROGRAM_FILES base path. |
38 const wchar_t kVPXDecoderDLLPath[] = L"Intel\\Media SDK\\"; | 41 const wchar_t kVPXDecoderDLLPath[] = L"Intel\\Media SDK\\"; |
39 | 42 |
40 const wchar_t kVP8DecoderDLLName[] = | 43 const wchar_t kVP8DecoderDLLName[] = |
41 #if defined(ARCH_CPU_X86) | 44 #if defined(ARCH_CPU_X86) |
(...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
76 { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 } | 79 { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 } |
77 }; | 80 }; |
78 | 81 |
79 const CLSID MEDIASUBTYPE_VP90 = { | 82 const CLSID MEDIASUBTYPE_VP90 = { |
80 0x30395056, | 83 0x30395056, |
81 0x0000, | 84 0x0000, |
82 0x0010, | 85 0x0010, |
83 { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 } | 86 { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 } |
84 }; | 87 }; |
85 | 88 |
| 89 // The CLSID of the video processor media foundation transform which we use for |
| 90 // texture color conversion in DX11. |
| 91 DEFINE_GUID(CLSID_VideoProcessorMFT, |
| 92 0x88753b26, 0x5b24, 0x49bd, 0xb2, 0xe7, 0xc, 0x44, 0x5c, 0x78, |
| 93 0xc9, 0x82); |
| 94 |
| 95 // MF_XVP_PLAYBACK_MODE |
| 96 // Data type: UINT32 (treat as BOOL) |
| 97 // If this attribute is TRUE, the video processor will run in playback mode |
| 98 // where it allows callers to allocate output samples and allows last frame |
| 99 // regeneration (repaint). |
| 100 DEFINE_GUID(MF_XVP_PLAYBACK_MODE, 0x3c5d293f, 0xad67, 0x4e29, 0xaf, 0x12, |
| 101 0xcf, 0x3e, 0x23, 0x8a, 0xcc, 0xe9); |
86 } | 102 } |
87 | 103 |
88 namespace content { | 104 namespace content { |
89 | 105 |
| 106 CreateDXGIDeviceManager DXVAVideoDecodeAccelerator::create_dxgi_device_manager_ |
| 107 = NULL; |
| 108 |
90 #define RETURN_ON_FAILURE(result, log, ret) \ | 109 #define RETURN_ON_FAILURE(result, log, ret) \ |
91 do { \ | 110 do { \ |
92 if (!(result)) { \ | 111 if (!(result)) { \ |
93 DLOG(ERROR) << log; \ | 112 DLOG(ERROR) << log; \ |
94 return ret; \ | 113 return ret; \ |
95 } \ | 114 } \ |
96 } while (0) | 115 } while (0) |
97 | 116 |
98 #define RETURN_ON_HR_FAILURE(result, log, ret) \ | 117 #define RETURN_ON_HR_FAILURE(result, log, ret) \ |
99 RETURN_ON_FAILURE(SUCCEEDED(result), \ | 118 RETURN_ON_FAILURE(SUCCEEDED(result), \ |
(...skipping 51 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
151 } else { | 170 } else { |
152 hr = MFCreateAlignedMemoryBuffer(buffer_length, | 171 hr = MFCreateAlignedMemoryBuffer(buffer_length, |
153 align - 1, | 172 align - 1, |
154 buffer.Receive()); | 173 buffer.Receive()); |
155 } | 174 } |
156 RETURN_ON_HR_FAILURE(hr, "Failed to create memory buffer for sample", NULL); | 175 RETURN_ON_HR_FAILURE(hr, "Failed to create memory buffer for sample", NULL); |
157 | 176 |
158 hr = sample->AddBuffer(buffer.get()); | 177 hr = sample->AddBuffer(buffer.get()); |
159 RETURN_ON_HR_FAILURE(hr, "Failed to add buffer to sample", NULL); | 178 RETURN_ON_HR_FAILURE(hr, "Failed to add buffer to sample", NULL); |
160 | 179 |
| 180 buffer->SetCurrentLength(0); |
161 return sample.Detach(); | 181 return sample.Detach(); |
162 } | 182 } |
163 | 183 |
164 // Creates a Media Foundation sample with one buffer containing a copy of the | 184 // Creates a Media Foundation sample with one buffer containing a copy of the |
165 // given Annex B stream data. | 185 // given Annex B stream data. |
166 // If duration and sample time are not known, provide 0. | 186 // If duration and sample time are not known, provide 0. |
167 // |min_size| specifies the minimum size of the buffer (might be required by | 187 // |min_size| specifies the minimum size of the buffer (might be required by |
168 // the decoder for input). If no alignment is required, provide 0. | 188 // the decoder for input). If no alignment is required, provide 0. |
169 static IMFSample* CreateInputSample(const uint8* stream, int size, | 189 static IMFSample* CreateInputSample(const uint8* stream, int size, |
170 int min_size, int alignment) { | 190 int min_size, int alignment) { |
(...skipping 51 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
222 EGLConfig egl_config); | 242 EGLConfig egl_config); |
223 ~DXVAPictureBuffer(); | 243 ~DXVAPictureBuffer(); |
224 | 244 |
225 void ReusePictureBuffer(); | 245 void ReusePictureBuffer(); |
226 // Copies the output sample data to the picture buffer provided by the | 246 // Copies the output sample data to the picture buffer provided by the |
227 // client. | 247 // client. |
228 // The dest_surface parameter contains the decoded bits. | 248 // The dest_surface parameter contains the decoded bits. |
229 bool CopyOutputSampleDataToPictureBuffer( | 249 bool CopyOutputSampleDataToPictureBuffer( |
230 DXVAVideoDecodeAccelerator* decoder, | 250 DXVAVideoDecodeAccelerator* decoder, |
231 IDirect3DSurface9* dest_surface, | 251 IDirect3DSurface9* dest_surface, |
| 252 ID3D11Texture2D* dx11_texture, |
232 int input_buffer_id); | 253 int input_buffer_id); |
233 | 254 |
234 bool available() const { | 255 bool available() const { |
235 return available_; | 256 return available_; |
236 } | 257 } |
237 | 258 |
238 void set_available(bool available) { | 259 void set_available(bool available) { |
239 available_ = available; | 260 available_ = available; |
240 } | 261 } |
241 | 262 |
(...skipping 10 matching lines...) Expand all Loading... |
252 void CopySurfaceComplete(IDirect3DSurface9* src_surface, | 273 void CopySurfaceComplete(IDirect3DSurface9* src_surface, |
253 IDirect3DSurface9* dest_surface); | 274 IDirect3DSurface9* dest_surface); |
254 | 275 |
255 private: | 276 private: |
256 explicit DXVAPictureBuffer(const media::PictureBuffer& buffer); | 277 explicit DXVAPictureBuffer(const media::PictureBuffer& buffer); |
257 | 278 |
258 bool available_; | 279 bool available_; |
259 media::PictureBuffer picture_buffer_; | 280 media::PictureBuffer picture_buffer_; |
260 EGLSurface decoding_surface_; | 281 EGLSurface decoding_surface_; |
261 base::win::ScopedComPtr<IDirect3DTexture9> decoding_texture_; | 282 base::win::ScopedComPtr<IDirect3DTexture9> decoding_texture_; |
| 283 base::win::ScopedComPtr<ID3D11Texture2D> dx11_decoding_texture_; |
262 | 284 |
263 // The following |IDirect3DSurface9| interface pointers are used to hold | 285 // The following |IDirect3DSurface9| interface pointers are used to hold |
264 // references on the surfaces during the course of a StretchRect operation | 286 // references on the surfaces during the course of a StretchRect operation |
265 // to copy the source surface to the target. The references are released | 287 // to copy the source surface to the target. The references are released |
266 // when the StretchRect operation i.e. the copy completes. | 288 // when the StretchRect operation i.e. the copy completes. |
267 base::win::ScopedComPtr<IDirect3DSurface9> decoder_surface_; | 289 base::win::ScopedComPtr<IDirect3DSurface9> decoder_surface_; |
268 base::win::ScopedComPtr<IDirect3DSurface9> target_surface_; | 290 base::win::ScopedComPtr<IDirect3DSurface9> target_surface_; |
269 | 291 |
| 292 // This ID3D11Texture2D interface pointer is used to hold a reference to the |
| 293 // decoder texture during the course of a copy operation. This reference is |
| 294 // released when the copy completes. |
| 295 base::win::ScopedComPtr<ID3D11Texture2D> decoder_dx11_texture_; |
| 296 |
270 // Set to true if RGB is supported by the texture. | 297 // Set to true if RGB is supported by the texture. |
271 // Defaults to true. | 298 // Defaults to true. |
272 bool use_rgb_; | 299 bool use_rgb_; |
273 | 300 |
274 DISALLOW_COPY_AND_ASSIGN(DXVAPictureBuffer); | 301 DISALLOW_COPY_AND_ASSIGN(DXVAPictureBuffer); |
275 }; | 302 }; |
276 | 303 |
277 // static | 304 // static |
278 linked_ptr<DXVAVideoDecodeAccelerator::DXVAPictureBuffer> | 305 linked_ptr<DXVAVideoDecodeAccelerator::DXVAPictureBuffer> |
279 DXVAVideoDecodeAccelerator::DXVAPictureBuffer::Create( | 306 DXVAVideoDecodeAccelerator::DXVAPictureBuffer::Create( |
(...skipping 28 matching lines...) Expand all Loading... |
308 EGLBoolean ret = eglQuerySurfacePointerANGLE( | 335 EGLBoolean ret = eglQuerySurfacePointerANGLE( |
309 egl_display, | 336 egl_display, |
310 picture_buffer->decoding_surface_, | 337 picture_buffer->decoding_surface_, |
311 EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE, | 338 EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE, |
312 &share_handle); | 339 &share_handle); |
313 | 340 |
314 RETURN_ON_FAILURE(share_handle && ret == EGL_TRUE, | 341 RETURN_ON_FAILURE(share_handle && ret == EGL_TRUE, |
315 "Failed to query ANGLE surface pointer", | 342 "Failed to query ANGLE surface pointer", |
316 linked_ptr<DXVAPictureBuffer>(NULL)); | 343 linked_ptr<DXVAPictureBuffer>(NULL)); |
317 | 344 |
318 // TODO(dshwang): after moving to D3D11, use RGBA surface. crbug.com/438691 | 345 HRESULT hr = E_FAIL; |
319 HRESULT hr = decoder.device_->CreateTexture( | 346 if (decoder.d3d11_device_) { |
320 buffer.size().width(), | 347 base::win::ScopedComPtr<ID3D11Resource> resource; |
321 buffer.size().height(), | 348 hr = decoder.d3d11_device_->OpenSharedResource( |
322 1, | 349 share_handle, |
323 D3DUSAGE_RENDERTARGET, | 350 __uuidof(ID3D11Resource), |
324 use_rgb ? D3DFMT_X8R8G8B8 : D3DFMT_A8R8G8B8, | 351 reinterpret_cast<void**>(resource.Receive())); |
325 D3DPOOL_DEFAULT, | 352 RETURN_ON_HR_FAILURE(hr, "Failed to open shared resource", |
326 picture_buffer->decoding_texture_.Receive(), | 353 linked_ptr<DXVAPictureBuffer>(NULL)); |
327 &share_handle); | 354 hr = picture_buffer->dx11_decoding_texture_.QueryFrom(resource.get()); |
328 | 355 } else { |
| 356 hr = decoder.d3d9_device_ex_->CreateTexture( |
| 357 buffer.size().width(), |
| 358 buffer.size().height(), |
| 359 1, |
| 360 D3DUSAGE_RENDERTARGET, |
| 361 use_rgb ? D3DFMT_X8R8G8B8 : D3DFMT_A8R8G8B8, |
| 362 D3DPOOL_DEFAULT, |
| 363 picture_buffer->decoding_texture_.Receive(), |
| 364 &share_handle); |
| 365 } |
329 RETURN_ON_HR_FAILURE(hr, "Failed to create texture", | 366 RETURN_ON_HR_FAILURE(hr, "Failed to create texture", |
330 linked_ptr<DXVAPictureBuffer>(NULL)); | 367 linked_ptr<DXVAPictureBuffer>(NULL)); |
331 picture_buffer->use_rgb_ = !!use_rgb; | 368 picture_buffer->use_rgb_ = !!use_rgb; |
332 return picture_buffer; | 369 return picture_buffer; |
333 } | 370 } |
334 | 371 |
335 DXVAVideoDecodeAccelerator::DXVAPictureBuffer::DXVAPictureBuffer( | 372 DXVAVideoDecodeAccelerator::DXVAPictureBuffer::DXVAPictureBuffer( |
336 const media::PictureBuffer& buffer) | 373 const media::PictureBuffer& buffer) |
337 : available_(true), | 374 : available_(true), |
338 picture_buffer_(buffer), | 375 picture_buffer_(buffer), |
(...skipping 19 matching lines...) Expand all Loading... |
358 | 395 |
359 void DXVAVideoDecodeAccelerator::DXVAPictureBuffer::ReusePictureBuffer() { | 396 void DXVAVideoDecodeAccelerator::DXVAPictureBuffer::ReusePictureBuffer() { |
360 DCHECK(decoding_surface_); | 397 DCHECK(decoding_surface_); |
361 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); | 398 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); |
362 eglReleaseTexImage( | 399 eglReleaseTexImage( |
363 egl_display, | 400 egl_display, |
364 decoding_surface_, | 401 decoding_surface_, |
365 EGL_BACK_BUFFER); | 402 EGL_BACK_BUFFER); |
366 decoder_surface_.Release(); | 403 decoder_surface_.Release(); |
367 target_surface_.Release(); | 404 target_surface_.Release(); |
| 405 decoder_dx11_texture_.Release(); |
368 set_available(true); | 406 set_available(true); |
369 } | 407 } |
370 | 408 |
371 bool DXVAVideoDecodeAccelerator::DXVAPictureBuffer:: | 409 bool DXVAVideoDecodeAccelerator::DXVAPictureBuffer:: |
372 CopyOutputSampleDataToPictureBuffer( | 410 CopyOutputSampleDataToPictureBuffer( |
373 DXVAVideoDecodeAccelerator* decoder, | 411 DXVAVideoDecodeAccelerator* decoder, |
374 IDirect3DSurface9* dest_surface, | 412 IDirect3DSurface9* dest_surface, |
| 413 ID3D11Texture2D* dx11_texture, |
375 int input_buffer_id) { | 414 int input_buffer_id) { |
376 DCHECK(dest_surface); | 415 DCHECK(dest_surface || dx11_texture); |
377 | 416 if (dx11_texture) { |
| 417 // Grab a reference on the decoder texture. This reference will be released |
| 418 // when we receive a notification that the copy was completed or when the |
| 419 // DXVAPictureBuffer instance is destroyed. |
| 420 decoder_dx11_texture_ = dx11_texture; |
| 421 decoder->CopyTexture(dx11_texture, dx11_decoding_texture_.get(), NULL, |
| 422 id(), input_buffer_id); |
| 423 return true; |
| 424 } |
378 D3DSURFACE_DESC surface_desc; | 425 D3DSURFACE_DESC surface_desc; |
379 HRESULT hr = dest_surface->GetDesc(&surface_desc); | 426 HRESULT hr = dest_surface->GetDesc(&surface_desc); |
380 RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false); | 427 RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false); |
381 | 428 |
382 D3DSURFACE_DESC texture_desc; | 429 D3DSURFACE_DESC texture_desc; |
383 decoding_texture_->GetLevelDesc(0, &texture_desc); | 430 decoding_texture_->GetLevelDesc(0, &texture_desc); |
384 | 431 |
385 if (texture_desc.Width != surface_desc.Width || | 432 if (texture_desc.Width != surface_desc.Width || |
386 texture_desc.Height != surface_desc.Height) { | 433 texture_desc.Height != surface_desc.Height) { |
387 NOTREACHED() << "Decode surface of different dimension than texture"; | 434 NOTREACHED() << "Decode surface of different dimension than texture"; |
(...skipping 29 matching lines...) Expand all Loading... |
417 IDirect3DSurface9* dest_surface) { | 464 IDirect3DSurface9* dest_surface) { |
418 DCHECK(!available()); | 465 DCHECK(!available()); |
419 | 466 |
420 GLint current_texture = 0; | 467 GLint current_texture = 0; |
421 glGetIntegerv(GL_TEXTURE_BINDING_2D, ¤t_texture); | 468 glGetIntegerv(GL_TEXTURE_BINDING_2D, ¤t_texture); |
422 | 469 |
423 glBindTexture(GL_TEXTURE_2D, picture_buffer_.texture_id()); | 470 glBindTexture(GL_TEXTURE_2D, picture_buffer_.texture_id()); |
424 | 471 |
425 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); | 472 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); |
426 | 473 |
427 DCHECK_EQ(src_surface, decoder_surface_.get()); | 474 if (src_surface && dest_surface) { |
428 DCHECK_EQ(dest_surface, target_surface_.get()); | 475 DCHECK_EQ(src_surface, decoder_surface_.get()); |
429 | 476 DCHECK_EQ(dest_surface, target_surface_.get()); |
430 decoder_surface_.Release(); | 477 decoder_surface_.Release(); |
431 target_surface_.Release(); | 478 target_surface_.Release(); |
| 479 } else { |
| 480 DCHECK(decoder_dx11_texture_.get()); |
| 481 decoder_dx11_texture_.Release(); |
| 482 } |
432 | 483 |
433 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); | 484 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); |
434 eglBindTexImage( | 485 eglBindTexImage( |
435 egl_display, | 486 egl_display, |
436 decoding_surface_, | 487 decoding_surface_, |
437 EGL_BACK_BUFFER); | 488 EGL_BACK_BUFFER); |
438 | 489 |
439 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); | 490 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); |
440 glBindTexture(GL_TEXTURE_2D, current_texture); | 491 glBindTexture(GL_TEXTURE_2D, current_texture); |
441 } | 492 } |
442 | 493 |
443 DXVAVideoDecodeAccelerator::PendingSampleInfo::PendingSampleInfo( | 494 DXVAVideoDecodeAccelerator::PendingSampleInfo::PendingSampleInfo( |
444 int32 buffer_id, IMFSample* sample) | 495 int32 buffer_id, IMFSample* sample) |
445 : input_buffer_id(buffer_id), | 496 : input_buffer_id(buffer_id), |
446 picture_buffer_id(-1) { | 497 picture_buffer_id(-1) { |
447 output_sample.Attach(sample); | 498 output_sample.Attach(sample); |
448 } | 499 } |
449 | 500 |
450 DXVAVideoDecodeAccelerator::PendingSampleInfo::~PendingSampleInfo() {} | 501 DXVAVideoDecodeAccelerator::PendingSampleInfo::~PendingSampleInfo() {} |
451 | 502 |
452 // static | |
453 bool DXVAVideoDecodeAccelerator::CreateD3DDevManager() { | |
454 TRACE_EVENT0("gpu", "DXVAVideoDecodeAccelerator_CreateD3DDevManager"); | |
455 | |
456 HRESULT hr = Direct3DCreate9Ex(D3D_SDK_VERSION, d3d9_.Receive()); | |
457 RETURN_ON_HR_FAILURE(hr, "Direct3DCreate9Ex failed", false); | |
458 | |
459 D3DPRESENT_PARAMETERS present_params = {0}; | |
460 present_params.BackBufferWidth = 1; | |
461 present_params.BackBufferHeight = 1; | |
462 present_params.BackBufferFormat = D3DFMT_UNKNOWN; | |
463 present_params.BackBufferCount = 1; | |
464 present_params.SwapEffect = D3DSWAPEFFECT_DISCARD; | |
465 present_params.hDeviceWindow = ::GetShellWindow(); | |
466 present_params.Windowed = TRUE; | |
467 present_params.Flags = D3DPRESENTFLAG_VIDEO; | |
468 present_params.FullScreen_RefreshRateInHz = 0; | |
469 present_params.PresentationInterval = 0; | |
470 | |
471 hr = d3d9_->CreateDeviceEx(D3DADAPTER_DEFAULT, | |
472 D3DDEVTYPE_HAL, | |
473 ::GetShellWindow(), | |
474 D3DCREATE_FPU_PRESERVE | | |
475 D3DCREATE_SOFTWARE_VERTEXPROCESSING | | |
476 D3DCREATE_DISABLE_PSGP_THREADING | | |
477 D3DCREATE_MULTITHREADED, | |
478 &present_params, | |
479 NULL, | |
480 device_.Receive()); | |
481 RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device", false); | |
482 | |
483 hr = DXVA2CreateDirect3DDeviceManager9(&dev_manager_reset_token_, | |
484 device_manager_.Receive()); | |
485 RETURN_ON_HR_FAILURE(hr, "DXVA2CreateDirect3DDeviceManager9 failed", false); | |
486 | |
487 hr = device_manager_->ResetDevice(device_.get(), dev_manager_reset_token_); | |
488 RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false); | |
489 | |
490 hr = device_->CreateQuery(D3DQUERYTYPE_EVENT, query_.Receive()); | |
491 RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device query", false); | |
492 // Ensure query_ API works (to avoid an infinite loop later in | |
493 // CopyOutputSampleDataToPictureBuffer). | |
494 hr = query_->Issue(D3DISSUE_END); | |
495 RETURN_ON_HR_FAILURE(hr, "Failed to issue END test query", false); | |
496 return true; | |
497 } | |
498 | |
499 DXVAVideoDecodeAccelerator::DXVAVideoDecodeAccelerator( | 503 DXVAVideoDecodeAccelerator::DXVAVideoDecodeAccelerator( |
500 const base::Callback<bool(void)>& make_context_current) | 504 const base::Callback<bool(void)>& make_context_current, |
| 505 gfx::GLContext* gl_context) |
501 : client_(NULL), | 506 : client_(NULL), |
502 dev_manager_reset_token_(0), | 507 dev_manager_reset_token_(0), |
| 508 dx11_dev_manager_reset_token_(0), |
503 egl_config_(NULL), | 509 egl_config_(NULL), |
504 state_(kUninitialized), | 510 state_(kUninitialized), |
505 pictures_requested_(false), | 511 pictures_requested_(false), |
506 inputs_before_decode_(0), | 512 inputs_before_decode_(0), |
507 sent_drain_message_(false), | 513 sent_drain_message_(false), |
508 make_context_current_(make_context_current), | 514 make_context_current_(make_context_current), |
509 codec_(media::kUnknownVideoCodec), | 515 codec_(media::kUnknownVideoCodec), |
510 decoder_thread_("DXVAVideoDecoderThread"), | 516 decoder_thread_("DXVAVideoDecoderThread"), |
511 weak_this_factory_(this), | 517 weak_this_factory_(this), |
512 weak_ptr_(weak_this_factory_.GetWeakPtr()), | 518 weak_ptr_(weak_this_factory_.GetWeakPtr()), |
513 pending_flush_(false) { | 519 pending_flush_(false), |
| 520 use_dx11_(false), |
| 521 dx11_video_format_converter_media_type_needs_init_(true), |
| 522 gl_context_(gl_context) { |
514 memset(&input_stream_info_, 0, sizeof(input_stream_info_)); | 523 memset(&input_stream_info_, 0, sizeof(input_stream_info_)); |
515 memset(&output_stream_info_, 0, sizeof(output_stream_info_)); | 524 memset(&output_stream_info_, 0, sizeof(output_stream_info_)); |
516 } | 525 } |
517 | 526 |
518 DXVAVideoDecodeAccelerator::~DXVAVideoDecodeAccelerator() { | 527 DXVAVideoDecodeAccelerator::~DXVAVideoDecodeAccelerator() { |
519 client_ = NULL; | 528 client_ = NULL; |
520 } | 529 } |
521 | 530 |
522 bool DXVAVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile, | 531 bool DXVAVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile, |
523 Client* client) { | 532 Client* client) { |
524 client_ = client; | 533 client_ = client; |
525 | 534 |
526 main_thread_task_runner_ = base::MessageLoop::current()->task_runner(); | 535 main_thread_task_runner_ = base::MessageLoop::current()->task_runner(); |
527 | 536 |
528 // Not all versions of Windows 7 and later include Media Foundation DLLs. | |
529 // Instead of crashing while delay loading the DLL when calling MFStartup() | |
530 // below, probe whether we can successfully load the DLL now. | |
531 // | |
532 // See http://crbug.com/339678 for details. | |
533 HMODULE mfplat_dll = ::LoadLibrary(L"MFPlat.dll"); | |
534 RETURN_ON_FAILURE(mfplat_dll, "MFPlat.dll is required for decoding", false); | |
535 | |
536 if (profile != media::H264PROFILE_BASELINE && | 537 if (profile != media::H264PROFILE_BASELINE && |
537 profile != media::H264PROFILE_MAIN && | 538 profile != media::H264PROFILE_MAIN && |
538 profile != media::H264PROFILE_HIGH && | 539 profile != media::H264PROFILE_HIGH && |
539 profile != media::VP8PROFILE_ANY && | 540 profile != media::VP8PROFILE_ANY && |
540 profile != media::VP9PROFILE_ANY) { | 541 profile != media::VP9PROFILE_ANY) { |
541 RETURN_AND_NOTIFY_ON_FAILURE(false, | 542 RETURN_AND_NOTIFY_ON_FAILURE(false, |
542 "Unsupported h.264, vp8, or vp9 profile", PLATFORM_FAILURE, false); | 543 "Unsupported h.264, vp8, or vp9 profile", PLATFORM_FAILURE, false); |
543 } | 544 } |
544 | 545 |
| 546 // Not all versions of Windows 7 and later include Media Foundation DLLs. |
| 547 // Instead of crashing while delay loading the DLL when calling MFStartup() |
| 548 // below, probe whether we can successfully load the DLL now. |
| 549 // See http://crbug.com/339678 for details. |
| 550 HMODULE dxgi_manager_dll = NULL; |
| 551 if ((dxgi_manager_dll = ::GetModuleHandle(L"MFPlat.dll")) == NULL) { |
| 552 HMODULE mfplat_dll = ::LoadLibrary(L"MFPlat.dll"); |
| 553 RETURN_ON_FAILURE(mfplat_dll, "MFPlat.dll is required for decoding", |
| 554 false); |
| 555 // On Windows 8+ mfplat.dll provides the MFCreateDXGIDeviceManager API. |
| 556 // On Windows 7 mshtmlmedia.dll provides it. |
| 557 dxgi_manager_dll = mfplat_dll; |
| 558 } |
| 559 |
| 560 // TODO(ananta) |
| 561 // The code below works, as in we can create the DX11 device manager for |
| 562 // Windows 7. However the IMFTransform we use for texture conversion and |
| 563 // copy does not exist on Windows 7. Look into an alternate approach |
| 564 // and enable the code below. |
| 565 #if defined ENABLE_DX11_FOR_WIN7 |
| 566 if ((base::win::GetVersion() == base::win::VERSION_WIN7) && |
| 567 ((dxgi_manager_dll = ::GetModuleHandle(L"mshtmlmedia.dll")) == NULL)) { |
| 568 HMODULE mshtml_media_dll = ::LoadLibrary(L"mshtmlmedia.dll"); |
| 569 if (mshtml_media_dll) |
| 570 dxgi_manager_dll = mshtml_media_dll; |
| 571 } |
| 572 #endif |
| 573 // If we don't find the MFCreateDXGIDeviceManager API we fallback to D3D9 |
| 574 // decoding. |
| 575 if (dxgi_manager_dll && !create_dxgi_device_manager_) { |
| 576 create_dxgi_device_manager_ = reinterpret_cast<CreateDXGIDeviceManager>( |
| 577 ::GetProcAddress(dxgi_manager_dll, "MFCreateDXGIDeviceManager")); |
| 578 } |
| 579 |
545 RETURN_AND_NOTIFY_ON_FAILURE( | 580 RETURN_AND_NOTIFY_ON_FAILURE( |
546 gfx::g_driver_egl.ext.b_EGL_ANGLE_surface_d3d_texture_2d_share_handle, | 581 gfx::g_driver_egl.ext.b_EGL_ANGLE_surface_d3d_texture_2d_share_handle, |
547 "EGL_ANGLE_surface_d3d_texture_2d_share_handle unavailable", | 582 "EGL_ANGLE_surface_d3d_texture_2d_share_handle unavailable", |
548 PLATFORM_FAILURE, | 583 PLATFORM_FAILURE, |
549 false); | 584 false); |
550 | 585 |
551 State state = GetState(); | 586 State state = GetState(); |
552 RETURN_AND_NOTIFY_ON_FAILURE((state == kUninitialized), | 587 RETURN_AND_NOTIFY_ON_FAILURE((state == kUninitialized), |
553 "Initialize: invalid state: " << state, ILLEGAL_STATE, false); | 588 "Initialize: invalid state: " << state, ILLEGAL_STATE, false); |
554 | 589 |
555 HRESULT hr = MFStartup(MF_VERSION, MFSTARTUP_FULL); | 590 HRESULT hr = MFStartup(MF_VERSION, MFSTARTUP_FULL); |
556 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFStartup failed.", PLATFORM_FAILURE, | 591 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFStartup failed.", PLATFORM_FAILURE, |
557 false); | 592 false); |
558 | 593 |
559 RETURN_AND_NOTIFY_ON_FAILURE(CreateD3DDevManager(), | |
560 "Failed to initialize D3D device and manager", | |
561 PLATFORM_FAILURE, | |
562 false); | |
563 | |
564 RETURN_AND_NOTIFY_ON_FAILURE(InitDecoder(profile), | 594 RETURN_AND_NOTIFY_ON_FAILURE(InitDecoder(profile), |
565 "Failed to initialize decoder", PLATFORM_FAILURE, false); | 595 "Failed to initialize decoder", PLATFORM_FAILURE, false); |
566 | 596 |
567 RETURN_AND_NOTIFY_ON_FAILURE(GetStreamsInfoAndBufferReqs(), | 597 RETURN_AND_NOTIFY_ON_FAILURE(GetStreamsInfoAndBufferReqs(), |
568 "Failed to get input/output stream info.", PLATFORM_FAILURE, false); | 598 "Failed to get input/output stream info.", PLATFORM_FAILURE, false); |
569 | 599 |
570 RETURN_AND_NOTIFY_ON_FAILURE( | 600 RETURN_AND_NOTIFY_ON_FAILURE( |
571 SendMFTMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0), | 601 SendMFTMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0), |
572 "Send MFT_MESSAGE_NOTIFY_BEGIN_STREAMING notification failed", | 602 "Send MFT_MESSAGE_NOTIFY_BEGIN_STREAMING notification failed", |
573 PLATFORM_FAILURE, false); | 603 PLATFORM_FAILURE, false); |
574 | 604 |
575 RETURN_AND_NOTIFY_ON_FAILURE( | 605 RETURN_AND_NOTIFY_ON_FAILURE( |
576 SendMFTMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0), | 606 SendMFTMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0), |
577 "Send MFT_MESSAGE_NOTIFY_START_OF_STREAM notification failed", | 607 "Send MFT_MESSAGE_NOTIFY_START_OF_STREAM notification failed", |
578 PLATFORM_FAILURE, false); | 608 PLATFORM_FAILURE, false); |
579 | 609 |
580 SetState(kNormal); | 610 SetState(kNormal); |
581 | 611 |
582 StartDecoderThread(); | 612 StartDecoderThread(); |
583 return true; | 613 return true; |
584 } | 614 } |
585 | 615 |
| 616 bool DXVAVideoDecodeAccelerator::CreateD3DDevManager() { |
| 617 TRACE_EVENT0("gpu", "DXVAVideoDecodeAccelerator_CreateD3DDevManager"); |
| 618 |
| 619 HRESULT hr = Direct3DCreate9Ex(D3D_SDK_VERSION, d3d9_.Receive()); |
| 620 RETURN_ON_HR_FAILURE(hr, "Direct3DCreate9Ex failed", false); |
| 621 |
| 622 D3DPRESENT_PARAMETERS present_params = {0}; |
| 623 present_params.BackBufferWidth = 1; |
| 624 present_params.BackBufferHeight = 1; |
| 625 present_params.BackBufferFormat = D3DFMT_UNKNOWN; |
| 626 present_params.BackBufferCount = 1; |
| 627 present_params.SwapEffect = D3DSWAPEFFECT_DISCARD; |
| 628 present_params.hDeviceWindow = ::GetShellWindow(); |
| 629 present_params.Windowed = TRUE; |
| 630 present_params.Flags = D3DPRESENTFLAG_VIDEO; |
| 631 present_params.FullScreen_RefreshRateInHz = 0; |
| 632 present_params.PresentationInterval = 0; |
| 633 |
| 634 hr = d3d9_->CreateDeviceEx(D3DADAPTER_DEFAULT, |
| 635 D3DDEVTYPE_HAL, |
| 636 ::GetShellWindow(), |
| 637 D3DCREATE_FPU_PRESERVE | |
| 638 D3DCREATE_SOFTWARE_VERTEXPROCESSING | |
| 639 D3DCREATE_DISABLE_PSGP_THREADING | |
| 640 D3DCREATE_MULTITHREADED, |
| 641 &present_params, |
| 642 NULL, |
| 643 d3d9_device_ex_.Receive()); |
| 644 RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device", false); |
| 645 |
| 646 hr = DXVA2CreateDirect3DDeviceManager9(&dev_manager_reset_token_, |
| 647 device_manager_.Receive()); |
| 648 RETURN_ON_HR_FAILURE(hr, "DXVA2CreateDirect3DDeviceManager9 failed", false); |
| 649 |
| 650 hr = device_manager_->ResetDevice(d3d9_device_ex_.get(), |
| 651 dev_manager_reset_token_); |
| 652 RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false); |
| 653 |
| 654 hr = d3d9_device_ex_->CreateQuery(D3DQUERYTYPE_EVENT, query_.Receive()); |
| 655 RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device query", false); |
| 656 // Ensure query_ API works (to avoid an infinite loop later in |
| 657 // CopyOutputSampleDataToPictureBuffer). |
| 658 hr = query_->Issue(D3DISSUE_END); |
| 659 RETURN_ON_HR_FAILURE(hr, "Failed to issue END test query", false); |
| 660 return true; |
| 661 } |
| 662 |
| 663 bool DXVAVideoDecodeAccelerator::CreateDX11DevManager() { |
| 664 HRESULT hr = create_dxgi_device_manager_(&dx11_dev_manager_reset_token_, |
| 665 d3d11_device_manager_.Receive()); |
| 666 RETURN_ON_HR_FAILURE(hr, "MFCreateDXGIDeviceManager failed", false); |
| 667 |
| 668 // This array defines the set of DirectX hardware feature levels we support. |
| 669 // The ordering MUST be preserved. All applications are assumed to support |
| 670 // 9.1 unless otherwise stated by the application, which is not our case. |
| 671 D3D_FEATURE_LEVEL feature_levels[] = { |
| 672 D3D_FEATURE_LEVEL_11_1, |
| 673 D3D_FEATURE_LEVEL_11_0, |
| 674 D3D_FEATURE_LEVEL_10_1, |
| 675 D3D_FEATURE_LEVEL_10_0, |
| 676 D3D_FEATURE_LEVEL_9_3, |
| 677 D3D_FEATURE_LEVEL_9_2, |
| 678 D3D_FEATURE_LEVEL_9_1 }; |
| 679 |
| 680 UINT flags = D3D11_CREATE_DEVICE_VIDEO_SUPPORT; |
| 681 |
| 682 #if defined _DEBUG |
| 683 flags |= D3D11_CREATE_DEVICE_DEBUG; |
| 684 #endif |
| 685 |
| 686 D3D_FEATURE_LEVEL feature_level_out = D3D_FEATURE_LEVEL_11_0; |
| 687 hr = D3D11CreateDevice(NULL, |
| 688 D3D_DRIVER_TYPE_HARDWARE, |
| 689 NULL, |
| 690 flags, |
| 691 feature_levels, |
| 692 arraysize(feature_levels), |
| 693 D3D11_SDK_VERSION, |
| 694 d3d11_device_.Receive(), |
| 695 &feature_level_out, |
| 696 d3d11_device_context_.Receive()); |
| 697 RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device", false); |
| 698 |
| 699 // Enable multithreaded mode on the context. This ensures that accesses to |
| 700 // context are synchronized across threads. We have multiple threads |
| 701 // accessing the context, the media foundation decoder threads and the |
| 702 // decoder thread via the video format conversion transform. |
| 703 base::win::ScopedComPtr<ID3D10Multithread> multi_threaded; |
| 704 hr = multi_threaded.QueryFrom(d3d11_device_context_.get()); |
| 705 RETURN_ON_HR_FAILURE(hr, "Failed to query ID3D10Multithread", false); |
| 706 multi_threaded->SetMultithreadProtected(TRUE); |
| 707 |
| 708 hr = d3d11_device_manager_->ResetDevice(d3d11_device_.get(), |
| 709 dx11_dev_manager_reset_token_); |
| 710 RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false); |
| 711 |
| 712 D3D11_QUERY_DESC query_desc; |
| 713 query_desc.Query = D3D11_QUERY_EVENT; |
| 714 query_desc.MiscFlags = 0; |
| 715 hr = d3d11_device_->CreateQuery( |
| 716 &query_desc, |
| 717 d3d11_query_.Receive()); |
| 718 RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device query", false); |
| 719 |
| 720 hr = ::CoCreateInstance( |
| 721 CLSID_VideoProcessorMFT, |
| 722 NULL, |
| 723 CLSCTX_INPROC_SERVER, |
| 724 IID_IMFTransform, |
| 725 reinterpret_cast<void**>(video_format_converter_mft_.Receive())); |
| 726 |
| 727 if (FAILED(hr)) { |
| 728 base::debug::Alias(&hr); |
| 729 // TODO(ananta) |
| 730 // Remove this CHECK when the change to use DX11 for H/W decoding |
 | 731 // stabilizes. |
| 732 CHECK(false); |
| 733 } |
| 734 RETURN_ON_HR_FAILURE(hr, "Failed to create video format converter", false); |
| 735 return true; |
| 736 } |
| 737 |
586 void DXVAVideoDecodeAccelerator::Decode( | 738 void DXVAVideoDecodeAccelerator::Decode( |
587 const media::BitstreamBuffer& bitstream_buffer) { | 739 const media::BitstreamBuffer& bitstream_buffer) { |
588 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 740 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
589 | 741 |
590 State state = GetState(); | 742 State state = GetState(); |
591 RETURN_AND_NOTIFY_ON_FAILURE((state == kNormal || state == kStopped || | 743 RETURN_AND_NOTIFY_ON_FAILURE((state == kNormal || state == kStopped || |
592 state == kFlushing), | 744 state == kFlushing), |
593 "Invalid state: " << state, ILLEGAL_STATE,); | 745 "Invalid state: " << state, ILLEGAL_STATE,); |
594 | 746 |
595 base::win::ScopedComPtr<IMFSample> sample; | 747 base::win::ScopedComPtr<IMFSample> sample; |
(...skipping 29 matching lines...) Expand all Loading... |
625 ++buffer_index) { | 777 ++buffer_index) { |
626 linked_ptr<DXVAPictureBuffer> picture_buffer = | 778 linked_ptr<DXVAPictureBuffer> picture_buffer = |
627 DXVAPictureBuffer::Create(*this, buffers[buffer_index], egl_config_); | 779 DXVAPictureBuffer::Create(*this, buffers[buffer_index], egl_config_); |
628 RETURN_AND_NOTIFY_ON_FAILURE(picture_buffer.get(), | 780 RETURN_AND_NOTIFY_ON_FAILURE(picture_buffer.get(), |
629 "Failed to allocate picture buffer", PLATFORM_FAILURE,); | 781 "Failed to allocate picture buffer", PLATFORM_FAILURE,); |
630 | 782 |
631 bool inserted = output_picture_buffers_.insert(std::make_pair( | 783 bool inserted = output_picture_buffers_.insert(std::make_pair( |
632 buffers[buffer_index].id(), picture_buffer)).second; | 784 buffers[buffer_index].id(), picture_buffer)).second; |
633 DCHECK(inserted); | 785 DCHECK(inserted); |
634 } | 786 } |
| 787 |
635 ProcessPendingSamples(); | 788 ProcessPendingSamples(); |
636 if (pending_flush_) { | 789 if (pending_flush_) { |
637 decoder_thread_task_runner_->PostTask( | 790 decoder_thread_task_runner_->PostTask( |
638 FROM_HERE, | 791 FROM_HERE, |
639 base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal, | 792 base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal, |
640 base::Unretained(this))); | 793 base::Unretained(this))); |
641 } | 794 } |
642 } | 795 } |
643 | 796 |
644 void DXVAVideoDecodeAccelerator::ReusePictureBuffer( | 797 void DXVAVideoDecodeAccelerator::ReusePictureBuffer( |
(...skipping 188 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
833 RETURN_ON_HR_FAILURE(hr, "DllGetClassObject for decoder failed", false); | 986 RETURN_ON_HR_FAILURE(hr, "DllGetClassObject for decoder failed", false); |
834 | 987 |
835 hr = factory->CreateInstance(NULL, | 988 hr = factory->CreateInstance(NULL, |
836 __uuidof(IMFTransform), | 989 __uuidof(IMFTransform), |
837 reinterpret_cast<void**>(decoder_.Receive())); | 990 reinterpret_cast<void**>(decoder_.Receive())); |
838 RETURN_ON_HR_FAILURE(hr, "Failed to create decoder instance", false); | 991 RETURN_ON_HR_FAILURE(hr, "Failed to create decoder instance", false); |
839 | 992 |
840 RETURN_ON_FAILURE(CheckDecoderDxvaSupport(), | 993 RETURN_ON_FAILURE(CheckDecoderDxvaSupport(), |
841 "Failed to check decoder DXVA support", false); | 994 "Failed to check decoder DXVA support", false); |
842 | 995 |
| 996 ULONG_PTR device_manager_to_use = NULL; |
| 997 if (use_dx11_) { |
| 998 CHECK(create_dxgi_device_manager_); |
| 999 RETURN_AND_NOTIFY_ON_FAILURE(CreateDX11DevManager(), |
| 1000 "Failed to initialize DX11 device and manager", |
| 1001 PLATFORM_FAILURE, |
| 1002 false); |
| 1003 device_manager_to_use = reinterpret_cast<ULONG_PTR>( |
| 1004 d3d11_device_manager_.get()); |
| 1005 } else { |
| 1006 RETURN_AND_NOTIFY_ON_FAILURE(CreateD3DDevManager(), |
| 1007 "Failed to initialize D3D device and manager", |
| 1008 PLATFORM_FAILURE, |
| 1009 false); |
| 1010 device_manager_to_use = reinterpret_cast<ULONG_PTR>(device_manager_.get()); |
| 1011 } |
| 1012 |
843 hr = decoder_->ProcessMessage( | 1013 hr = decoder_->ProcessMessage( |
844 MFT_MESSAGE_SET_D3D_MANAGER, | 1014 MFT_MESSAGE_SET_D3D_MANAGER, |
845 reinterpret_cast<ULONG_PTR>(device_manager_.get())); | 1015 device_manager_to_use); |
846 RETURN_ON_HR_FAILURE(hr, "Failed to pass D3D manager to decoder", false); | 1016 if (use_dx11_) { |
| 1017 RETURN_ON_HR_FAILURE(hr, "Failed to pass DX11 manager to decoder", false); |
| 1018 } else { |
| 1019 RETURN_ON_HR_FAILURE(hr, "Failed to pass D3D manager to decoder", false); |
| 1020 } |
847 | 1021 |
848 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); | 1022 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); |
849 | 1023 |
850 EGLint config_attribs[] = { | 1024 EGLint config_attribs[] = { |
851 EGL_BUFFER_SIZE, 32, | 1025 EGL_BUFFER_SIZE, 32, |
852 EGL_RED_SIZE, 8, | 1026 EGL_RED_SIZE, 8, |
853 EGL_GREEN_SIZE, 8, | 1027 EGL_GREEN_SIZE, 8, |
854 EGL_BLUE_SIZE, 8, | 1028 EGL_BLUE_SIZE, 8, |
855 EGL_SURFACE_TYPE, EGL_PBUFFER_BIT, | 1029 EGL_SURFACE_TYPE, EGL_PBUFFER_BIT, |
856 EGL_ALPHA_SIZE, 0, | 1030 EGL_ALPHA_SIZE, 0, |
(...skipping 26 matching lines...) Expand all Loading... |
883 hr = attributes->SetUINT32(CODECAPI_AVDecVideoAcceleration_H264, TRUE); | 1057 hr = attributes->SetUINT32(CODECAPI_AVDecVideoAcceleration_H264, TRUE); |
884 RETURN_ON_HR_FAILURE(hr, "Failed to enable DXVA H/W decoding", false); | 1058 RETURN_ON_HR_FAILURE(hr, "Failed to enable DXVA H/W decoding", false); |
885 } | 1059 } |
886 | 1060 |
887 hr = attributes->SetUINT32(CODECAPI_AVLowLatencyMode, TRUE); | 1061 hr = attributes->SetUINT32(CODECAPI_AVLowLatencyMode, TRUE); |
888 if (SUCCEEDED(hr)) { | 1062 if (SUCCEEDED(hr)) { |
889 DVLOG(1) << "Successfully set Low latency mode on decoder."; | 1063 DVLOG(1) << "Successfully set Low latency mode on decoder."; |
890 } else { | 1064 } else { |
891 DVLOG(1) << "Failed to set Low latency mode on decoder. Error: " << hr; | 1065 DVLOG(1) << "Failed to set Low latency mode on decoder. Error: " << hr; |
892 } | 1066 } |
| 1067 |
| 1068 // The decoder should use DX11 iff |
| 1069 // 1. The underlying H/W decoder supports it. |
| 1070 // 2. We have a pointer to the MFCreateDXGIDeviceManager function needed for |
| 1071 // this. This should always be true for Windows 8+. |
| 1072 // 3. ANGLE is using DX11. |
| 1073 DCHECK(gl_context_); |
| 1074 if (create_dxgi_device_manager_ && |
| 1075 (gl_context_->GetGLRenderer().find("Direct3D11") != |
| 1076 std::string::npos)) { |
| 1077 UINT32 dx11_aware = 0; |
| 1078 attributes->GetUINT32(MF_SA_D3D11_AWARE, &dx11_aware); |
| 1079 use_dx11_ = !!dx11_aware; |
| 1080 } |
893 return true; | 1081 return true; |
894 } | 1082 } |
895 | 1083 |
896 bool DXVAVideoDecodeAccelerator::SetDecoderMediaTypes() { | 1084 bool DXVAVideoDecodeAccelerator::SetDecoderMediaTypes() { |
897 RETURN_ON_FAILURE(SetDecoderInputMediaType(), | 1085 RETURN_ON_FAILURE(SetDecoderInputMediaType(), |
898 "Failed to set decoder input media type", false); | 1086 "Failed to set decoder input media type", false); |
899 return SetDecoderOutputMediaType(MFVideoFormat_NV12); | 1087 return SetDecoderOutputMediaType(MFVideoFormat_NV12); |
900 } | 1088 } |
901 | 1089 |
902 bool DXVAVideoDecodeAccelerator::SetDecoderInputMediaType() { | 1090 bool DXVAVideoDecodeAccelerator::SetDecoderInputMediaType() { |
(...skipping 144 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1047 | 1235 |
1048 inputs_before_decode_ = 0; | 1236 inputs_before_decode_ = 0; |
1049 | 1237 |
1050 RETURN_AND_NOTIFY_ON_FAILURE(ProcessOutputSample(output_data_buffer.pSample), | 1238 RETURN_AND_NOTIFY_ON_FAILURE(ProcessOutputSample(output_data_buffer.pSample), |
1051 "Failed to process output sample.", PLATFORM_FAILURE,); | 1239 "Failed to process output sample.", PLATFORM_FAILURE,); |
1052 } | 1240 } |
1053 | 1241 |
1054 bool DXVAVideoDecodeAccelerator::ProcessOutputSample(IMFSample* sample) { | 1242 bool DXVAVideoDecodeAccelerator::ProcessOutputSample(IMFSample* sample) { |
1055 RETURN_ON_FAILURE(sample, "Decode succeeded with NULL output sample", false); | 1243 RETURN_ON_FAILURE(sample, "Decode succeeded with NULL output sample", false); |
1056 | 1244 |
1057 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; | |
1058 HRESULT hr = sample->GetBufferByIndex(0, output_buffer.Receive()); | |
1059 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", false); | |
1060 | |
1061 base::win::ScopedComPtr<IDirect3DSurface9> surface; | |
1062 hr = MFGetService(output_buffer.get(), MR_BUFFER_SERVICE, | |
1063 IID_PPV_ARGS(surface.Receive())); | |
1064 RETURN_ON_HR_FAILURE(hr, "Failed to get D3D surface from output sample", | |
1065 false); | |
1066 | |
1067 LONGLONG input_buffer_id = 0; | 1245 LONGLONG input_buffer_id = 0; |
1068 RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id), | 1246 RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id), |
1069 "Failed to get input buffer id associated with sample", | 1247 "Failed to get input buffer id associated with sample", |
1070 false); | 1248 false); |
1071 | 1249 |
1072 { | 1250 { |
1073 base::AutoLock lock(decoder_lock_); | 1251 base::AutoLock lock(decoder_lock_); |
1074 DCHECK(pending_output_samples_.empty()); | 1252 DCHECK(pending_output_samples_.empty()); |
1075 pending_output_samples_.push_back( | 1253 pending_output_samples_.push_back( |
1076 PendingSampleInfo(input_buffer_id, sample)); | 1254 PendingSampleInfo(input_buffer_id, sample)); |
1077 } | 1255 } |
1078 | 1256 |
1079 if (pictures_requested_) { | 1257 if (pictures_requested_) { |
1080 DVLOG(1) << "Waiting for picture slots from the client."; | 1258 DVLOG(1) << "Waiting for picture slots from the client."; |
1081 main_thread_task_runner_->PostTask( | 1259 main_thread_task_runner_->PostTask( |
1082 FROM_HERE, | 1260 FROM_HERE, |
1083 base::Bind(&DXVAVideoDecodeAccelerator::ProcessPendingSamples, | 1261 base::Bind(&DXVAVideoDecodeAccelerator::ProcessPendingSamples, |
1084 weak_this_factory_.GetWeakPtr())); | 1262 weak_this_factory_.GetWeakPtr())); |
1085 return true; | 1263 return true; |
1086 } | 1264 } |
1087 | 1265 |
1088 // We only read the surface description, which contains its width/height when | 1266 int width = 0; |
1089 // we need the picture buffers from the client. Once we have those, then they | 1267 int height = 0; |
1090 // are reused. | 1268 if (!GetVideoFrameDimensions(sample, &width, &height)) { |
1091 D3DSURFACE_DESC surface_desc; | 1269 RETURN_ON_FAILURE(false, "Failed to get D3D surface from output sample", |
1092 hr = surface->GetDesc(&surface_desc); | 1270 false); |
1093 RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false); | 1271 } |
1094 | 1272 |
1095 // Go ahead and request picture buffers. | 1273 // Go ahead and request picture buffers. |
1096 main_thread_task_runner_->PostTask( | 1274 main_thread_task_runner_->PostTask( |
1097 FROM_HERE, | 1275 FROM_HERE, |
1098 base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers, | 1276 base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers, |
1099 weak_this_factory_.GetWeakPtr(), | 1277 weak_this_factory_.GetWeakPtr(), |
1100 surface_desc.Width, | 1278 width, |
1101 surface_desc.Height)); | 1279 height)); |
1102 | 1280 |
1103 pictures_requested_ = true; | 1281 pictures_requested_ = true; |
1104 return true; | 1282 return true; |
1105 } | 1283 } |
1106 | 1284 |
1107 void DXVAVideoDecodeAccelerator::ProcessPendingSamples() { | 1285 void DXVAVideoDecodeAccelerator::ProcessPendingSamples() { |
1108 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 1286 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
1109 | 1287 |
1110 if (!output_picture_buffers_.size()) | 1288 if (!output_picture_buffers_.size()) |
1111 return; | 1289 return; |
(...skipping 11 matching lines...) Expand all Loading... |
1123 PendingSampleInfo* pending_sample = NULL; | 1301 PendingSampleInfo* pending_sample = NULL; |
1124 { | 1302 { |
1125 base::AutoLock lock(decoder_lock_); | 1303 base::AutoLock lock(decoder_lock_); |
1126 | 1304 |
1127 PendingSampleInfo& sample_info = pending_output_samples_.front(); | 1305 PendingSampleInfo& sample_info = pending_output_samples_.front(); |
1128 if (sample_info.picture_buffer_id != -1) | 1306 if (sample_info.picture_buffer_id != -1) |
1129 continue; | 1307 continue; |
1130 pending_sample = &sample_info; | 1308 pending_sample = &sample_info; |
1131 } | 1309 } |
1132 | 1310 |
| 1311 int width = 0; |
| 1312 int height = 0; |
| 1313 if (!GetVideoFrameDimensions(pending_sample->output_sample.get(), |
| 1314 &width, &height)) { |
| 1315 RETURN_AND_NOTIFY_ON_FAILURE(false, |
| 1316 "Failed to get D3D surface from output sample", PLATFORM_FAILURE,); |
| 1317 } |
| 1318 |
| 1319 if (width != index->second->size().width() || |
| 1320 height != index->second->size().height()) { |
| 1321 HandleResolutionChanged(width, height); |
| 1322 return; |
| 1323 } |
| 1324 |
1133 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; | 1325 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; |
1134 HRESULT hr = pending_sample->output_sample->GetBufferByIndex( | 1326 HRESULT hr = pending_sample->output_sample->GetBufferByIndex( |
1135 0, output_buffer.Receive()); | 1327 0, output_buffer.Receive()); |
1136 RETURN_AND_NOTIFY_ON_HR_FAILURE( | 1328 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, |
1137 hr, "Failed to get buffer from output sample", PLATFORM_FAILURE,); | 1329 "Failed to get buffer from output sample", PLATFORM_FAILURE,); |
1138 | 1330 |
1139 base::win::ScopedComPtr<IDirect3DSurface9> surface; | 1331 base::win::ScopedComPtr<IDirect3DSurface9> surface; |
1140 hr = MFGetService(output_buffer.get(), MR_BUFFER_SERVICE, | 1332 base::win::ScopedComPtr<ID3D11Texture2D> d3d11_texture; |
1141 IID_PPV_ARGS(surface.Receive())); | |
1142 RETURN_AND_NOTIFY_ON_HR_FAILURE( | |
1143 hr, "Failed to get D3D surface from output sample", | |
1144 PLATFORM_FAILURE,); | |
1145 | 1333 |
1146 D3DSURFACE_DESC surface_desc; | 1334 if (use_dx11_) { |
1147 hr = surface->GetDesc(&surface_desc); | 1335 base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer; |
1148 RETURN_AND_NOTIFY_ON_HR_FAILURE( | 1336 hr = dxgi_buffer.QueryFrom(output_buffer.get()); |
1149 hr, "Failed to get surface description", PLATFORM_FAILURE,); | 1337 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, |
1150 | 1338 "Failed to get DXGIBuffer from output sample", PLATFORM_FAILURE,); |
1151 if (surface_desc.Width != | 1339 hr = dxgi_buffer->GetResource( |
1152 static_cast<uint32>(index->second->size().width()) || | 1340 __uuidof(ID3D11Texture2D), |
1153 surface_desc.Height != | 1341 reinterpret_cast<void**>(d3d11_texture.Receive())); |
1154 static_cast<uint32>(index->second->size().height())) { | 1342 } else { |
1155 HandleResolutionChanged(surface_desc.Width, surface_desc.Height); | 1343 hr = MFGetService(output_buffer.get(), MR_BUFFER_SERVICE, |
1156 return; | 1344 IID_PPV_ARGS(surface.Receive())); |
1157 } | 1345 } |
| 1346 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, |
| 1347 "Failed to get surface from output sample", PLATFORM_FAILURE,); |
1158 | 1348 |
1159 pending_sample->picture_buffer_id = index->second->id(); | 1349 pending_sample->picture_buffer_id = index->second->id(); |
1160 | 1350 |
1161 RETURN_AND_NOTIFY_ON_FAILURE( | 1351 RETURN_AND_NOTIFY_ON_FAILURE( |
1162 index->second->CopyOutputSampleDataToPictureBuffer( | 1352 index->second->CopyOutputSampleDataToPictureBuffer( |
1163 this, | 1353 this, |
1164 surface.get(), | 1354 surface.get(), |
| 1355 d3d11_texture.get(), |
1165 pending_sample->input_buffer_id), | 1356 pending_sample->input_buffer_id), |
1166 "Failed to copy output sample", PLATFORM_FAILURE, ); | 1357 "Failed to copy output sample", PLATFORM_FAILURE,); |
1167 | 1358 |
1168 index->second->set_available(false); | 1359 index->second->set_available(false); |
1169 } | 1360 } |
1170 } | 1361 } |
1171 } | 1362 } |
1172 | 1363 |
1173 void DXVAVideoDecodeAccelerator::StopOnError( | 1364 void DXVAVideoDecodeAccelerator::StopOnError( |
1174 media::VideoDecodeAccelerator::Error error) { | 1365 media::VideoDecodeAccelerator::Error error) { |
1175 if (!main_thread_task_runner_->BelongsToCurrentThread()) { | 1366 if (!main_thread_task_runner_->BelongsToCurrentThread()) { |
1176 main_thread_task_runner_->PostTask( | 1367 main_thread_task_runner_->PostTask( |
(...skipping 16 matching lines...) Expand all Loading... |
1193 void DXVAVideoDecodeAccelerator::Invalidate() { | 1384 void DXVAVideoDecodeAccelerator::Invalidate() { |
1194 if (GetState() == kUninitialized) | 1385 if (GetState() == kUninitialized) |
1195 return; | 1386 return; |
1196 decoder_thread_.Stop(); | 1387 decoder_thread_.Stop(); |
1197 weak_this_factory_.InvalidateWeakPtrs(); | 1388 weak_this_factory_.InvalidateWeakPtrs(); |
1198 output_picture_buffers_.clear(); | 1389 output_picture_buffers_.clear(); |
1199 stale_output_picture_buffers_.clear(); | 1390 stale_output_picture_buffers_.clear(); |
1200 pending_output_samples_.clear(); | 1391 pending_output_samples_.clear(); |
1201 pending_input_buffers_.clear(); | 1392 pending_input_buffers_.clear(); |
1202 decoder_.Release(); | 1393 decoder_.Release(); |
| 1394 if (video_format_converter_mft_.get()) { |
| 1395 video_format_converter_mft_->ProcessMessage( |
| 1396 MFT_MESSAGE_NOTIFY_END_STREAMING, 0); |
| 1397 video_format_converter_mft_.Release(); |
| 1398 } |
1203 MFShutdown(); | 1399 MFShutdown(); |
| 1400 dx11_video_format_converter_media_type_needs_init_ = true; |
1204 SetState(kUninitialized); | 1401 SetState(kUninitialized); |
1205 } | 1402 } |
1206 | 1403 |
1207 void DXVAVideoDecodeAccelerator::NotifyInputBufferRead(int input_buffer_id) { | 1404 void DXVAVideoDecodeAccelerator::NotifyInputBufferRead(int input_buffer_id) { |
1208 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 1405 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
1209 if (client_) | 1406 if (client_) |
1210 client_->NotifyEndOfBitstreamBuffer(input_buffer_id); | 1407 client_->NotifyEndOfBitstreamBuffer(input_buffer_id); |
1211 } | 1408 } |
1212 | 1409 |
1213 void DXVAVideoDecodeAccelerator::NotifyFlushDone() { | 1410 void DXVAVideoDecodeAccelerator::NotifyFlushDone() { |
(...skipping 204 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1418 // http://code.google.com/p/chromium/issues/detail?id=150925 | 1615 // http://code.google.com/p/chromium/issues/detail?id=150925 |
1419 main_thread_task_runner_->PostTask( | 1616 main_thread_task_runner_->PostTask( |
1420 FROM_HERE, | 1617 FROM_HERE, |
1421 base::Bind(&DXVAVideoDecodeAccelerator::NotifyInputBufferRead, | 1618 base::Bind(&DXVAVideoDecodeAccelerator::NotifyInputBufferRead, |
1422 weak_this_factory_.GetWeakPtr(), | 1619 weak_this_factory_.GetWeakPtr(), |
1423 input_buffer_id)); | 1620 input_buffer_id)); |
1424 } | 1621 } |
1425 | 1622 |
1426 void DXVAVideoDecodeAccelerator::HandleResolutionChanged(int width, | 1623 void DXVAVideoDecodeAccelerator::HandleResolutionChanged(int width, |
1427 int height) { | 1624 int height) { |
| 1625 dx11_video_format_converter_media_type_needs_init_ = true; |
| 1626 |
1428 main_thread_task_runner_->PostTask( | 1627 main_thread_task_runner_->PostTask( |
1429 FROM_HERE, | 1628 FROM_HERE, |
1430 base::Bind(&DXVAVideoDecodeAccelerator::DismissStaleBuffers, | 1629 base::Bind(&DXVAVideoDecodeAccelerator::DismissStaleBuffers, |
1431 weak_this_factory_.GetWeakPtr())); | 1630 weak_this_factory_.GetWeakPtr())); |
1432 | 1631 |
1433 main_thread_task_runner_->PostTask( | 1632 main_thread_task_runner_->PostTask( |
1434 FROM_HERE, | 1633 FROM_HERE, |
1435 base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers, | 1634 base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers, |
1436 weak_this_factory_.GetWeakPtr(), | 1635 weak_this_factory_.GetWeakPtr(), |
1437 width, | 1636 width, |
(...skipping 73 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1511 FROM_HERE, | 1710 FROM_HERE, |
1512 base::Bind(&DXVAVideoDecodeAccelerator::CopySurface, | 1711 base::Bind(&DXVAVideoDecodeAccelerator::CopySurface, |
1513 base::Unretained(this), | 1712 base::Unretained(this), |
1514 src_surface, | 1713 src_surface, |
1515 dest_surface, | 1714 dest_surface, |
1516 picture_buffer_id, | 1715 picture_buffer_id, |
1517 input_buffer_id)); | 1716 input_buffer_id)); |
1518 return; | 1717 return; |
1519 } | 1718 } |
1520 | 1719 |
1521 HRESULT hr = device_->StretchRect(src_surface, NULL, dest_surface, | 1720 HRESULT hr = d3d9_device_ex_->StretchRect(src_surface, NULL, dest_surface, |
1522 NULL, D3DTEXF_NONE); | 1721 NULL, D3DTEXF_NONE); |
1523 RETURN_ON_HR_FAILURE(hr, "Colorspace conversion via StretchRect failed",); | 1722 RETURN_ON_HR_FAILURE(hr, "Colorspace conversion via StretchRect failed",); |
1524 | 1723 |
1525 // Ideally, this should be done immediately before the draw call that uses | 1724 // Ideally, this should be done immediately before the draw call that uses |
1526 // the texture. Flush it once here though. | 1725 // the texture. Flush it once here though. |
1527 hr = query_->Issue(D3DISSUE_END); | 1726 hr = query_->Issue(D3DISSUE_END); |
1528 RETURN_ON_HR_FAILURE(hr, "Failed to issue END",); | 1727 RETURN_ON_HR_FAILURE(hr, "Failed to issue END",); |
1529 | 1728 |
1530 // Flush the decoder device to ensure that the decoded frame is copied to the | 1729 // Flush the decoder device to ensure that the decoded frame is copied to the |
1531 // target surface. | 1730 // target surface. |
1532 decoder_thread_task_runner_->PostDelayedTask( | 1731 decoder_thread_task_runner_->PostDelayedTask( |
(...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1583 base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal, | 1782 base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal, |
1584 base::Unretained(this))); | 1783 base::Unretained(this))); |
1585 return; | 1784 return; |
1586 } | 1785 } |
1587 decoder_thread_task_runner_->PostTask( | 1786 decoder_thread_task_runner_->PostTask( |
1588 FROM_HERE, | 1787 FROM_HERE, |
1589 base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers, | 1788 base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers, |
1590 base::Unretained(this))); | 1789 base::Unretained(this))); |
1591 } | 1790 } |
1592 | 1791 |
| 1792 void DXVAVideoDecodeAccelerator::CopyTexture(ID3D11Texture2D* src_texture, |
| 1793 ID3D11Texture2D* dest_texture, |
| 1794 IMFSample* video_frame, |
| 1795 int picture_buffer_id, |
| 1796 int input_buffer_id) { |
| 1797 HRESULT hr = E_FAIL; |
| 1798 |
| 1799 DCHECK(use_dx11_); |
| 1800 |
| 1801 if (!decoder_thread_task_runner_->BelongsToCurrentThread()) { |
| 1802 // The media foundation H.264 decoder outputs YUV12 textures which we |
| 1803 // cannot copy into ANGLE as they expect ARGB textures. In D3D land |
| 1804 // the StretchRect API in the IDirect3DDevice9Ex interface did the color |
| 1805 // space conversion for us. Sadly in DX11 land the API does not provide |
| 1806 // a straightforward way to do this. |
| 1807 // We use the video processor MFT. |
| 1808 // https://msdn.microsoft.com/en-us/library/hh162913(v=vs.85).aspx |
| 1809 // This object implements a media foundation transform (IMFTransform) |
| 1810 // which follows the same contract as the decoder. The color space |
| 1811 // conversion as per msdn is done in the GPU. |
| 1812 |
| 1813 D3D11_TEXTURE2D_DESC source_desc; |
| 1814 src_texture->GetDesc(&source_desc); |
| 1815 |
| 1816 // Set up the input and output types for the video processor MFT. |
| 1817 if (!InitializeDX11VideoFormatConverterMediaType(source_desc.Width, |
| 1818 source_desc.Height)) { |
| 1819 RETURN_AND_NOTIFY_ON_FAILURE( |
 | 1820 false, "Failed to initialize media types for conversion.", |
| 1821 PLATFORM_FAILURE,); |
| 1822 } |
| 1823 |
| 1824 // The input to the video processor is the output sample. |
| 1825 base::win::ScopedComPtr<IMFSample> input_sample_for_conversion; |
| 1826 { |
| 1827 base::AutoLock lock(decoder_lock_); |
| 1828 PendingSampleInfo& sample_info = pending_output_samples_.front(); |
| 1829 input_sample_for_conversion = sample_info.output_sample; |
| 1830 } |
| 1831 |
| 1832 decoder_thread_task_runner_->PostTask( |
| 1833 FROM_HERE, |
| 1834 base::Bind(&DXVAVideoDecodeAccelerator::CopyTexture, |
| 1835 base::Unretained(this), |
| 1836 src_texture, |
| 1837 dest_texture, |
| 1838 input_sample_for_conversion.Detach(), |
| 1839 picture_buffer_id, |
| 1840 input_buffer_id)); |
| 1841 return; |
| 1842 } |
| 1843 |
| 1844 DCHECK(video_frame); |
| 1845 |
| 1846 base::win::ScopedComPtr<IMFSample> input_sample; |
| 1847 input_sample.Attach(video_frame); |
| 1848 |
| 1849 DCHECK(video_format_converter_mft_.get()); |
| 1850 |
| 1851 // d3d11_device_context_->Begin(d3d11_query_.get()); |
| 1852 |
| 1853 hr = video_format_converter_mft_->ProcessInput(0, video_frame, 0); |
| 1854 if (FAILED(hr)) { |
| 1855 DCHECK(false); |
| 1856 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, |
| 1857 "Failed to convert output sample format.", PLATFORM_FAILURE,); |
| 1858 } |
| 1859 |
| 1860 // The video processor MFT requires output samples to be allocated by the |
| 1861 // caller. We create a sample with a buffer backed with the ID3D11Texture2D |
| 1862 // interface exposed by ANGLE. This works nicely as this ensures that the |
 | 1863 // video processor converts the color space of the output frame and copies |
| 1864 // the result into the ANGLE texture. |
| 1865 base::win::ScopedComPtr<IMFSample> output_sample; |
| 1866 hr = MFCreateSample(output_sample.Receive()); |
| 1867 if (FAILED(hr)) { |
| 1868 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, |
| 1869 "Failed to create output sample.", PLATFORM_FAILURE,); |
| 1870 } |
| 1871 |
| 1872 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; |
| 1873 hr = MFCreateDXGISurfaceBuffer( |
| 1874 __uuidof(ID3D11Texture2D), dest_texture, 0, FALSE, |
| 1875 output_buffer.Receive()); |
| 1876 if (FAILED(hr)) { |
| 1877 base::debug::Alias(&hr); |
| 1878 // TODO(ananta) |
| 1879 // Remove this CHECK when the change to use DX11 for H/W decoding |
 | 1880 // stabilizes. |
| 1881 CHECK(false); |
| 1882 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, |
| 1883 "Failed to create output sample.", PLATFORM_FAILURE,); |
| 1884 } |
| 1885 |
| 1886 output_sample->AddBuffer(output_buffer.get()); |
| 1887 |
| 1888 DWORD status = 0; |
| 1889 MFT_OUTPUT_DATA_BUFFER format_converter_output = {}; |
| 1890 format_converter_output.pSample = output_sample.get(); |
| 1891 hr = video_format_converter_mft_->ProcessOutput( |
| 1892 0, // No flags |
| 1893 1, // # of out streams to pull from |
| 1894 &format_converter_output, |
| 1895 &status); |
| 1896 |
| 1897 d3d11_device_context_->Flush(); |
| 1898 d3d11_device_context_->End(d3d11_query_.get()); |
| 1899 |
| 1900 if (FAILED(hr)) { |
| 1901 base::debug::Alias(&hr); |
| 1902 // TODO(ananta) |
| 1903 // Remove this CHECK when the change to use DX11 for H/W decoding |
| 1904 // stablizes. |
| 1905 CHECK(false); |
| 1906 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, |
| 1907 "Failed to convert output sample format.", PLATFORM_FAILURE,); |
| 1908 } |
| 1909 |
| 1910 decoder_thread_task_runner_->PostDelayedTask( |
| 1911 FROM_HERE, |
| 1912 base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder, |
| 1913 base::Unretained(this), 0, |
| 1914 reinterpret_cast<IDirect3DSurface9*>(NULL), |
| 1915 reinterpret_cast<IDirect3DSurface9*>(NULL), |
| 1916 picture_buffer_id, input_buffer_id), |
| 1917 base::TimeDelta::FromMilliseconds( |
| 1918 kFlushDecoderSurfaceTimeoutMs)); |
| 1919 } |
| 1920 |
1593 void DXVAVideoDecodeAccelerator::FlushDecoder( | 1921 void DXVAVideoDecodeAccelerator::FlushDecoder( |
1594 int iterations, | 1922 int iterations, |
1595 IDirect3DSurface9* src_surface, | 1923 IDirect3DSurface9* src_surface, |
1596 IDirect3DSurface9* dest_surface, | 1924 IDirect3DSurface9* dest_surface, |
1597 int picture_buffer_id, | 1925 int picture_buffer_id, |
1598 int input_buffer_id) { | 1926 int input_buffer_id) { |
1599 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 1927 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
1600 | 1928 |
1601 // The DXVA decoder has its own device which it uses for decoding. ANGLE | 1929 // The DXVA decoder has its own device which it uses for decoding. ANGLE |
1602 // has its own device which we don't have access to. | 1930 // has its own device which we don't have access to. |
1603 // The above code attempts to copy the decoded picture into a surface | 1931 // The above code attempts to copy the decoded picture into a surface |
1604 // which is owned by ANGLE. As there are multiple devices involved in | 1932 // which is owned by ANGLE. As there are multiple devices involved in |
1605 // this, the StretchRect call above is not synchronous. | 1933 // this, the StretchRect call above is not synchronous. |
1606 // We attempt to flush the batched operations to ensure that the picture is | 1934 // We attempt to flush the batched operations to ensure that the picture is |
1607 // copied to the surface owned by ANGLE. | 1935 // copied to the surface owned by ANGLE. |
1608 // We need to do this in a loop and call flush multiple times. | 1936 // We need to do this in a loop and call flush multiple times. |
1609 // We have seen the GetData call for flushing the command buffer fail to | 1937 // We have seen the GetData call for flushing the command buffer fail to |
1610 // return success occasionally on multi core machines, leading to an | 1938 // return success occasionally on multi core machines, leading to an |
1611 // infinite loop. | 1939 // infinite loop. |
1612 // Workaround is to have an upper limit of 4 on the number of iterations to | 1940 // Workaround is to have an upper limit of 4 on the number of iterations to |
1613 // wait for the Flush to finish. | 1941 // wait for the Flush to finish. |
1614 HRESULT hr = query_->GetData(NULL, 0, D3DGETDATA_FLUSH); | 1942 HRESULT hr = E_FAIL; |
| 1943 |
| 1944 if (use_dx11_) { |
| 1945 BOOL query_data = 0; |
| 1946 hr = d3d11_device_context_->GetData(d3d11_query_.get(), &query_data, |
| 1947 sizeof(BOOL), 0); |
| 1948 if (FAILED(hr)) { |
| 1949 base::debug::Alias(&hr); |
| 1950 // TODO(ananta) |
| 1951 // Remove this CHECK when the change to use DX11 for H/W decoding |
| 1952 // stablizes. |
| 1953 CHECK(false); |
| 1954 } |
| 1955 } else { |
| 1956 hr = query_->GetData(NULL, 0, D3DGETDATA_FLUSH); |
| 1957 } |
1615 if ((hr == S_FALSE) && (++iterations < kMaxIterationsForD3DFlush)) { | 1958 if ((hr == S_FALSE) && (++iterations < kMaxIterationsForD3DFlush)) { |
1616 decoder_thread_task_runner_->PostDelayedTask( | 1959 decoder_thread_task_runner_->PostDelayedTask( |
1617 FROM_HERE, | 1960 FROM_HERE, |
1618 base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder, | 1961 base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder, |
1619 base::Unretained(this), iterations, src_surface, | 1962 base::Unretained(this), iterations, src_surface, |
1620 dest_surface, picture_buffer_id, input_buffer_id), | 1963 dest_surface, picture_buffer_id, input_buffer_id), |
1621 base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs)); | 1964 base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs)); |
1622 return; | 1965 return; |
1623 } | 1966 } |
| 1967 |
1624 main_thread_task_runner_->PostTask( | 1968 main_thread_task_runner_->PostTask( |
1625 FROM_HERE, | 1969 FROM_HERE, |
1626 base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, | 1970 base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, |
1627 weak_this_factory_.GetWeakPtr(), | 1971 weak_this_factory_.GetWeakPtr(), |
1628 src_surface, | 1972 src_surface, |
1629 dest_surface, | 1973 dest_surface, |
1630 picture_buffer_id, | 1974 picture_buffer_id, |
1631 input_buffer_id)); | 1975 input_buffer_id)); |
| 1976 } |
| 1977 |
| 1978 bool DXVAVideoDecodeAccelerator::InitializeDX11VideoFormatConverterMediaType( |
| 1979 int width, int height) { |
| 1980 if (!dx11_video_format_converter_media_type_needs_init_) |
| 1981 return true; |
| 1982 |
| 1983 CHECK(video_format_converter_mft_.get()); |
| 1984 |
| 1985 HRESULT hr = video_format_converter_mft_->ProcessMessage( |
| 1986 MFT_MESSAGE_SET_D3D_MANAGER, |
| 1987 reinterpret_cast<ULONG_PTR>( |
| 1988 d3d11_device_manager_.get())); |
| 1989 |
| 1990 if (FAILED(hr)) { |
| 1991 base::debug::Alias(&hr); |
| 1992 // TODO(ananta) |
| 1993 // Remove this CHECK when the change to use DX11 for H/W decoding |
| 1994 // stablizes. |
| 1995 CHECK(false); |
| 1996 } |
| 1997 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, |
| 1998 "Failed to initialize video format converter", PLATFORM_FAILURE, false); |
| 1999 |
| 2000 video_format_converter_mft_->ProcessMessage( |
| 2001 MFT_MESSAGE_NOTIFY_END_STREAMING, 0); |
| 2002 |
| 2003 base::win::ScopedComPtr<IMFMediaType> media_type; |
| 2004 hr = MFCreateMediaType(media_type.Receive()); |
| 2005 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFCreateMediaType failed", |
| 2006 PLATFORM_FAILURE, false); |
| 2007 |
| 2008 hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); |
| 2009 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set major input type", |
| 2010 PLATFORM_FAILURE, false); |
| 2011 |
| 2012 hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12); |
| 2013 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set input sub type", |
| 2014 PLATFORM_FAILURE, false); |
| 2015 |
| 2016 hr = media_type->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE); |
| 2017 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, |
| 2018 "Failed to set attributes on media type", PLATFORM_FAILURE, false); |
| 2019 |
| 2020 hr = media_type->SetUINT32(MF_MT_INTERLACE_MODE, |
| 2021 MFVideoInterlace_Progressive); |
| 2022 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, |
| 2023 "Failed to set attributes on media type", PLATFORM_FAILURE, false); |
| 2024 |
| 2025 base::win::ScopedComPtr<IMFAttributes> converter_attributes; |
| 2026 hr = video_format_converter_mft_->GetAttributes( |
| 2027 converter_attributes.Receive()); |
| 2028 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get converter attributes", |
| 2029 PLATFORM_FAILURE, false); |
| 2030 |
| 2031 hr = converter_attributes->SetUINT32(MF_XVP_PLAYBACK_MODE, TRUE); |
| 2032 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter attributes", |
| 2033 PLATFORM_FAILURE, false); |
| 2034 |
| 2035 hr = converter_attributes->SetUINT32(MF_LOW_LATENCY, FALSE); |
| 2036 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter attributes", |
| 2037 PLATFORM_FAILURE, false); |
| 2038 |
| 2039 hr = MFSetAttributeSize(media_type.get(), MF_MT_FRAME_SIZE, width, height); |
| 2040 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set media type attributes", |
| 2041 PLATFORM_FAILURE, false); |
| 2042 |
| 2043 hr = video_format_converter_mft_->SetInputType(0, media_type.get(), 0); |
| 2044 if (FAILED(hr)) { |
| 2045 base::debug::Alias(&hr); |
| 2046 // TODO(ananta) |
| 2047 // Remove this CHECK when the change to use DX11 for H/W decoding |
| 2048 // stablizes. |
| 2049 CHECK(false); |
| 2050 } |
| 2051 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter input type", |
| 2052 PLATFORM_FAILURE, false); |
| 2053 |
| 2054 base::win::ScopedComPtr<IMFMediaType> out_media_type; |
| 2055 |
| 2056 for (uint32 i = 0; |
| 2057 SUCCEEDED(video_format_converter_mft_->GetOutputAvailableType(0, i, |
| 2058 out_media_type.Receive())); |
| 2059 ++i) { |
| 2060 GUID out_subtype = {0}; |
| 2061 hr = out_media_type->GetGUID(MF_MT_SUBTYPE, &out_subtype); |
| 2062 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get output major type", |
| 2063 PLATFORM_FAILURE, false); |
| 2064 |
| 2065 if (out_subtype == MFVideoFormat_ARGB32) { |
| 2066 hr = out_media_type->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE); |
| 2067 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, |
| 2068 "Failed to set attributes on media type", PLATFORM_FAILURE, false); |
| 2069 |
| 2070 hr = out_media_type->SetUINT32(MF_MT_INTERLACE_MODE, |
| 2071 MFVideoInterlace_Progressive); |
| 2072 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, |
| 2073 "Failed to set attributes on media type", PLATFORM_FAILURE, false); |
| 2074 |
| 2075 hr = MFSetAttributeSize(out_media_type.get(), MF_MT_FRAME_SIZE, width, |
| 2076 height); |
| 2077 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, |
| 2078 "Failed to set media type attributes", PLATFORM_FAILURE, false); |
| 2079 |
| 2080 hr = video_format_converter_mft_->SetOutputType( |
| 2081 0, out_media_type.get(), 0); // No flags |
| 2082 if (FAILED(hr)) { |
| 2083 base::debug::Alias(&hr); |
| 2084 // TODO(ananta) |
| 2085 // Remove this CHECK when the change to use DX11 for H/W decoding |
| 2086 // stablizes. |
| 2087 CHECK(false); |
| 2088 } |
| 2089 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, |
| 2090 "Failed to set converter output type", PLATFORM_FAILURE, false); |
| 2091 |
| 2092 hr = video_format_converter_mft_->ProcessMessage( |
| 2093 MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0); |
| 2094 if (FAILED(hr)) { |
| 2095 // TODO(ananta) |
| 2096 // Remove this CHECK when the change to use DX11 for H/W decoding |
| 2097 // stablizes. |
| 2098 RETURN_AND_NOTIFY_ON_FAILURE( |
| 2099 false, "Failed to initialize video converter.", PLATFORM_FAILURE, |
| 2100 false); |
| 2101 } |
| 2102 dx11_video_format_converter_media_type_needs_init_ = false; |
| 2103 return true; |
| 2104 } |
| 2105 out_media_type.Release(); |
| 2106 } |
| 2107 return false; |
| 2108 } |
| 2109 |
| 2110 bool DXVAVideoDecodeAccelerator::GetVideoFrameDimensions( |
| 2111 IMFSample* sample, |
| 2112 int* width, |
| 2113 int* height) { |
| 2114 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; |
| 2115 HRESULT hr = sample->GetBufferByIndex(0, output_buffer.Receive()); |
| 2116 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", false); |
| 2117 |
| 2118 if (use_dx11_) { |
| 2119 base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer; |
| 2120 base::win::ScopedComPtr<ID3D11Texture2D> d3d11_texture; |
| 2121 hr = dxgi_buffer.QueryFrom(output_buffer.get()); |
| 2122 RETURN_ON_HR_FAILURE(hr, "Failed to get DXGIBuffer from output sample", |
| 2123 false); |
| 2124 hr = dxgi_buffer->GetResource( |
| 2125 __uuidof(ID3D11Texture2D), |
| 2126 reinterpret_cast<void**>(d3d11_texture.Receive())); |
| 2127 RETURN_ON_HR_FAILURE(hr, "Failed to get D3D11Texture from output buffer", |
| 2128 false); |
| 2129 D3D11_TEXTURE2D_DESC d3d11_texture_desc; |
| 2130 d3d11_texture->GetDesc(&d3d11_texture_desc); |
| 2131 *width = d3d11_texture_desc.Width; |
| 2132 *height = d3d11_texture_desc.Height; |
| 2133 } else { |
| 2134 base::win::ScopedComPtr<IDirect3DSurface9> surface; |
| 2135 hr = MFGetService(output_buffer.get(), MR_BUFFER_SERVICE, |
| 2136 IID_PPV_ARGS(surface.Receive())); |
| 2137 RETURN_ON_HR_FAILURE(hr, "Failed to get D3D surface from output sample", |
| 2138 false); |
| 2139 D3DSURFACE_DESC surface_desc; |
| 2140 hr = surface->GetDesc(&surface_desc); |
| 2141 RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false); |
| 2142 *width = surface_desc.Width; |
| 2143 *height = surface_desc.Height; |
| 2144 } |
| 2145 return true; |
1632 } | 2146 } |
1633 | 2147 |
1634 } // namespace content | 2148 } // namespace content |
OLD | NEW |