Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "content/common/gpu/media/dxva_video_decode_accelerator.h" | 5 #include "content/common/gpu/media/dxva_video_decode_accelerator.h" |
| 6 | 6 |
| 7 #if !defined(OS_WIN) | 7 #if !defined(OS_WIN) |
| 8 #error This file should only be built on Windows. | 8 #error This file should only be built on Windows. |
| 9 #endif // !defined(OS_WIN) | 9 #endif // !defined(OS_WIN) |
| 10 | 10 |
| 11 #include <ks.h> | 11 #include <ks.h> |
| 12 #include <codecapi.h> | 12 #include <codecapi.h> |
| 13 #include <dxgi1_2.h> | |
| 13 #include <mfapi.h> | 14 #include <mfapi.h> |
| 14 #include <mferror.h> | 15 #include <mferror.h> |
| 15 #include <wmcodecdsp.h> | 16 #include <wmcodecdsp.h> |
| 16 | 17 |
| 17 #include "base/base_paths_win.h" | 18 #include "base/base_paths_win.h" |
| 18 #include "base/bind.h" | 19 #include "base/bind.h" |
| 19 #include "base/callback.h" | 20 #include "base/callback.h" |
| 20 #include "base/command_line.h" | 21 #include "base/command_line.h" |
| 22 #include "base/debug/alias.h" | |
| 21 #include "base/file_version_info.h" | 23 #include "base/file_version_info.h" |
| 22 #include "base/files/file_path.h" | 24 #include "base/files/file_path.h" |
| 23 #include "base/logging.h" | 25 #include "base/logging.h" |
| 24 #include "base/memory/scoped_ptr.h" | 26 #include "base/memory/scoped_ptr.h" |
| 25 #include "base/memory/shared_memory.h" | 27 #include "base/memory/shared_memory.h" |
| 26 #include "base/message_loop/message_loop.h" | 28 #include "base/message_loop/message_loop.h" |
| 27 #include "base/path_service.h" | 29 #include "base/path_service.h" |
| 28 #include "base/trace_event/trace_event.h" | 30 #include "base/trace_event/trace_event.h" |
| 29 #include "base/win/windows_version.h" | 31 #include "base/win/windows_version.h" |
| 30 #include "media/video/video_decode_accelerator.h" | 32 #include "media/video/video_decode_accelerator.h" |
| 31 #include "ui/gl/gl_bindings.h" | 33 #include "ui/gl/gl_bindings.h" |
| 34 #include "ui/gl/gl_context.h" | |
| 32 #include "ui/gl/gl_surface_egl.h" | 35 #include "ui/gl/gl_surface_egl.h" |
| 33 #include "ui/gl/gl_switches.h" | 36 #include "ui/gl/gl_switches.h" |
| 34 | 37 |
| 35 namespace { | 38 namespace { |
| 36 | 39 |
| 37 // Path is appended on to the PROGRAM_FILES base path. | 40 // Path is appended on to the PROGRAM_FILES base path. |
| 38 const wchar_t kVPXDecoderDLLPath[] = L"Intel\\Media SDK\\"; | 41 const wchar_t kVPXDecoderDLLPath[] = L"Intel\\Media SDK\\"; |
| 39 | 42 |
| 40 const wchar_t kVP8DecoderDLLName[] = | 43 const wchar_t kVP8DecoderDLLName[] = |
| 41 #if defined(ARCH_CPU_X86) | 44 #if defined(ARCH_CPU_X86) |
| (...skipping 34 matching lines...) | |
| 76 { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 } | 79 { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 } |
| 77 }; | 80 }; |
| 78 | 81 |
| 79 const CLSID MEDIASUBTYPE_VP90 = { | 82 const CLSID MEDIASUBTYPE_VP90 = { |
| 80 0x30395056, | 83 0x30395056, |
| 81 0x0000, | 84 0x0000, |
| 82 0x0010, | 85 0x0010, |
| 83 { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 } | 86 { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 } |
| 84 }; | 87 }; |
| 85 | 88 |
| 89 // The CLSID of the video processor media foundation transform which we use for | |
| 90 // texture color conversion in DX11. | |
| 91 DEFINE_GUID(CLSID_VideoProcessorMFT, | |
| 92 0x88753b26, 0x5b24, 0x49bd, 0xb2, 0xe7, 0xc, 0x44, 0x5c, 0x78, | |
| 93 0xc9, 0x82); | |
| 94 | |
| 95 // MF_XVP_PLAYBACK_MODE | |
| 96 // Data type: UINT32 (treat as BOOL) | |
| 97 // If this attribute is TRUE, the video processor will run in playback mode | |
| 98 // where it allows callers to allocate output samples and allows last frame | |
| 99 // regeneration (repaint). | |
| 100 DEFINE_GUID(MF_XVP_PLAYBACK_MODE, 0x3c5d293f, 0xad67, 0x4e29, 0xaf, 0x12, | |
| 101 0xcf, 0x3e, 0x23, 0x8a, 0xcc, 0xe9); | |
| 86 } | 102 } |
| 87 | 103 |
| 88 namespace content { | 104 namespace content { |
| 89 | 105 |
| 106 CreateDXGIDeviceManager DXVAVideoDecodeAccelerator::create_dxgi_device_manager_ | |
| 107 = NULL; | |
| 108 | |
| 90 #define RETURN_ON_FAILURE(result, log, ret) \ | 109 #define RETURN_ON_FAILURE(result, log, ret) \ |
| 91 do { \ | 110 do { \ |
| 92 if (!(result)) { \ | 111 if (!(result)) { \ |
| 93 DLOG(ERROR) << log; \ | 112 DLOG(ERROR) << log; \ |
| 94 return ret; \ | 113 return ret; \ |
| 95 } \ | 114 } \ |
| 96 } while (0) | 115 } while (0) |
| 97 | 116 |
| 98 #define RETURN_ON_HR_FAILURE(result, log, ret) \ | 117 #define RETURN_ON_HR_FAILURE(result, log, ret) \ |
| 99 RETURN_ON_FAILURE(SUCCEEDED(result), \ | 118 RETURN_ON_FAILURE(SUCCEEDED(result), \ |
| (...skipping 51 matching lines...) | |
| 151 } else { | 170 } else { |
| 152 hr = MFCreateAlignedMemoryBuffer(buffer_length, | 171 hr = MFCreateAlignedMemoryBuffer(buffer_length, |
| 153 align - 1, | 172 align - 1, |
| 154 buffer.Receive()); | 173 buffer.Receive()); |
| 155 } | 174 } |
| 156 RETURN_ON_HR_FAILURE(hr, "Failed to create memory buffer for sample", NULL); | 175 RETURN_ON_HR_FAILURE(hr, "Failed to create memory buffer for sample", NULL); |
| 157 | 176 |
| 158 hr = sample->AddBuffer(buffer.get()); | 177 hr = sample->AddBuffer(buffer.get()); |
| 159 RETURN_ON_HR_FAILURE(hr, "Failed to add buffer to sample", NULL); | 178 RETURN_ON_HR_FAILURE(hr, "Failed to add buffer to sample", NULL); |
| 160 | 179 |
| 180 buffer->SetCurrentLength(0); | |
| 161 return sample.Detach(); | 181 return sample.Detach(); |
| 162 } | 182 } |
| 163 | 183 |
| 164 // Creates a Media Foundation sample with one buffer containing a copy of the | 184 // Creates a Media Foundation sample with one buffer containing a copy of the |
| 165 // given Annex B stream data. | 185 // given Annex B stream data. |
| 166 // If duration and sample time are not known, provide 0. | 186 // If duration and sample time are not known, provide 0. |
| 167 // |min_size| specifies the minimum size of the buffer (might be required by | 187 // |min_size| specifies the minimum size of the buffer (might be required by |
| 168 // the decoder for input). If no alignment is required, provide 0. | 188 // the decoder for input). If no alignment is required, provide 0. |
| 169 static IMFSample* CreateInputSample(const uint8* stream, int size, | 189 static IMFSample* CreateInputSample(const uint8* stream, int size, |
| 170 int min_size, int alignment) { | 190 int min_size, int alignment) { |
| (...skipping 51 matching lines...) | |
| 222 EGLConfig egl_config); | 242 EGLConfig egl_config); |
| 223 ~DXVAPictureBuffer(); | 243 ~DXVAPictureBuffer(); |
| 224 | 244 |
| 225 void ReusePictureBuffer(); | 245 void ReusePictureBuffer(); |
| 226 // Copies the output sample data to the picture buffer provided by the | 246 // Copies the output sample data to the picture buffer provided by the |
| 227 // client. | 247 // client. |
| 228 // The dest_surface parameter contains the decoded bits. | 248 // The dest_surface parameter contains the decoded bits. |
| 229 bool CopyOutputSampleDataToPictureBuffer( | 249 bool CopyOutputSampleDataToPictureBuffer( |
| 230 DXVAVideoDecodeAccelerator* decoder, | 250 DXVAVideoDecodeAccelerator* decoder, |
| 231 IDirect3DSurface9* dest_surface, | 251 IDirect3DSurface9* dest_surface, |
| 252 ID3D11Texture2D* dx11_texture, | |
| 232 int input_buffer_id); | 253 int input_buffer_id); |
| 233 | 254 |
| 234 bool available() const { | 255 bool available() const { |
| 235 return available_; | 256 return available_; |
| 236 } | 257 } |
| 237 | 258 |
| 238 void set_available(bool available) { | 259 void set_available(bool available) { |
| 239 available_ = available; | 260 available_ = available; |
| 240 } | 261 } |
| 241 | 262 |
| (...skipping 10 matching lines...) | |
| 252 void CopySurfaceComplete(IDirect3DSurface9* src_surface, | 273 void CopySurfaceComplete(IDirect3DSurface9* src_surface, |
| 253 IDirect3DSurface9* dest_surface); | 274 IDirect3DSurface9* dest_surface); |
| 254 | 275 |
| 255 private: | 276 private: |
| 256 explicit DXVAPictureBuffer(const media::PictureBuffer& buffer); | 277 explicit DXVAPictureBuffer(const media::PictureBuffer& buffer); |
| 257 | 278 |
| 258 bool available_; | 279 bool available_; |
| 259 media::PictureBuffer picture_buffer_; | 280 media::PictureBuffer picture_buffer_; |
| 260 EGLSurface decoding_surface_; | 281 EGLSurface decoding_surface_; |
| 261 base::win::ScopedComPtr<IDirect3DTexture9> decoding_texture_; | 282 base::win::ScopedComPtr<IDirect3DTexture9> decoding_texture_; |
| 283 base::win::ScopedComPtr<ID3D11Texture2D> dx11_decoding_texture_; | |
| 262 | 284 |
| 263 // The following |IDirect3DSurface9| interface pointers are used to hold | 285 // The following |IDirect3DSurface9| interface pointers are used to hold |
| 264 // references on the surfaces during the course of a StretchRect operation | 286 // references on the surfaces during the course of a StretchRect operation |
| 265 // to copy the source surface to the target. The references are released | 287 // to copy the source surface to the target. The references are released |
| 266 // when the StretchRect operation i.e. the copy completes. | 288 // when the StretchRect operation i.e. the copy completes. |
| 267 base::win::ScopedComPtr<IDirect3DSurface9> decoder_surface_; | 289 base::win::ScopedComPtr<IDirect3DSurface9> decoder_surface_; |
| 268 base::win::ScopedComPtr<IDirect3DSurface9> target_surface_; | 290 base::win::ScopedComPtr<IDirect3DSurface9> target_surface_; |
| 269 | 291 |
| 292 // This ID3D11Texture2D interface pointer is used to hold a reference to the | |
| 293 // decoder texture during the course of a copy operation. This reference is | |
| 294 // released when the copy completes. | |
| 295 base::win::ScopedComPtr<ID3D11Texture2D> decoder_dx11_texture_; | |
| 296 | |
| 270 // Set to true if RGB is supported by the texture. | 297 // Set to true if RGB is supported by the texture. |
| 271 // Defaults to true. | 298 // Defaults to true. |
| 272 bool use_rgb_; | 299 bool use_rgb_; |
| 273 | 300 |
| 274 DISALLOW_COPY_AND_ASSIGN(DXVAPictureBuffer); | 301 DISALLOW_COPY_AND_ASSIGN(DXVAPictureBuffer); |
| 275 }; | 302 }; |
| 276 | 303 |
| 277 // static | 304 // static |
| 278 linked_ptr<DXVAVideoDecodeAccelerator::DXVAPictureBuffer> | 305 linked_ptr<DXVAVideoDecodeAccelerator::DXVAPictureBuffer> |
| 279 DXVAVideoDecodeAccelerator::DXVAPictureBuffer::Create( | 306 DXVAVideoDecodeAccelerator::DXVAPictureBuffer::Create( |
| (...skipping 28 matching lines...) | |
| 308 EGLBoolean ret = eglQuerySurfacePointerANGLE( | 335 EGLBoolean ret = eglQuerySurfacePointerANGLE( |
| 309 egl_display, | 336 egl_display, |
| 310 picture_buffer->decoding_surface_, | 337 picture_buffer->decoding_surface_, |
| 311 EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE, | 338 EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE, |
| 312 &share_handle); | 339 &share_handle); |
| 313 | 340 |
| 314 RETURN_ON_FAILURE(share_handle && ret == EGL_TRUE, | 341 RETURN_ON_FAILURE(share_handle && ret == EGL_TRUE, |
| 315 "Failed to query ANGLE surface pointer", | 342 "Failed to query ANGLE surface pointer", |
| 316 linked_ptr<DXVAPictureBuffer>(NULL)); | 343 linked_ptr<DXVAPictureBuffer>(NULL)); |
| 317 | 344 |
| 318 // TODO(dshwang): after moving to D3D11, use RGBA surface. crbug.com/438691 | 345 HRESULT hr = E_FAIL; |
| 319 HRESULT hr = decoder.device_->CreateTexture( | 346 if (decoder.d3d11_device_) { |
| 320 buffer.size().width(), | 347 base::win::ScopedComPtr<ID3D11Resource> resource; |
| 321 buffer.size().height(), | 348 hr = decoder.d3d11_device_->OpenSharedResource( |
| 322 1, | 349 share_handle, |
| 323 D3DUSAGE_RENDERTARGET, | 350 __uuidof(ID3D11Resource), |
| 324 use_rgb ? D3DFMT_X8R8G8B8 : D3DFMT_A8R8G8B8, | 351 reinterpret_cast<void**>(resource.Receive())); |
| 325 D3DPOOL_DEFAULT, | 352 RETURN_ON_HR_FAILURE(hr, "Failed to open shared resource", |
| 326 picture_buffer->decoding_texture_.Receive(), | 353 linked_ptr<DXVAPictureBuffer>(NULL)); |
| 327 &share_handle); | 354 hr = picture_buffer->dx11_decoding_texture_.QueryFrom(resource.get()); |
| 328 | 355 } else { |
| 356 hr = decoder.d3d9_device_ex_->CreateTexture( | |
| 357 buffer.size().width(), | |
| 358 buffer.size().height(), | |
| 359 1, | |
| 360 D3DUSAGE_RENDERTARGET, | |
| 361 use_rgb ? D3DFMT_X8R8G8B8 : D3DFMT_A8R8G8B8, | |
| 362 D3DPOOL_DEFAULT, | |
| 363 picture_buffer->decoding_texture_.Receive(), | |
| 364 &share_handle); | |
| 365 } | |
| 329 RETURN_ON_HR_FAILURE(hr, "Failed to create texture", | 366 RETURN_ON_HR_FAILURE(hr, "Failed to create texture", |
| 330 linked_ptr<DXVAPictureBuffer>(NULL)); | 367 linked_ptr<DXVAPictureBuffer>(NULL)); |
| 331 picture_buffer->use_rgb_ = !!use_rgb; | 368 picture_buffer->use_rgb_ = !!use_rgb; |
| 332 return picture_buffer; | 369 return picture_buffer; |
| 333 } | 370 } |
| 334 | 371 |
| 335 DXVAVideoDecodeAccelerator::DXVAPictureBuffer::DXVAPictureBuffer( | 372 DXVAVideoDecodeAccelerator::DXVAPictureBuffer::DXVAPictureBuffer( |
| 336 const media::PictureBuffer& buffer) | 373 const media::PictureBuffer& buffer) |
| 337 : available_(true), | 374 : available_(true), |
| 338 picture_buffer_(buffer), | 375 picture_buffer_(buffer), |
| (...skipping 19 matching lines...) | |
| 358 | 395 |
| 359 void DXVAVideoDecodeAccelerator::DXVAPictureBuffer::ReusePictureBuffer() { | 396 void DXVAVideoDecodeAccelerator::DXVAPictureBuffer::ReusePictureBuffer() { |
| 360 DCHECK(decoding_surface_); | 397 DCHECK(decoding_surface_); |
| 361 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); | 398 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); |
| 362 eglReleaseTexImage( | 399 eglReleaseTexImage( |
| 363 egl_display, | 400 egl_display, |
| 364 decoding_surface_, | 401 decoding_surface_, |
| 365 EGL_BACK_BUFFER); | 402 EGL_BACK_BUFFER); |
| 366 decoder_surface_.Release(); | 403 decoder_surface_.Release(); |
| 367 target_surface_.Release(); | 404 target_surface_.Release(); |
| 405 decoder_dx11_texture_.Release(); | |
| 368 set_available(true); | 406 set_available(true); |
| 369 } | 407 } |
| 370 | 408 |
| 371 bool DXVAVideoDecodeAccelerator::DXVAPictureBuffer:: | 409 bool DXVAVideoDecodeAccelerator::DXVAPictureBuffer:: |
| 372 CopyOutputSampleDataToPictureBuffer( | 410 CopyOutputSampleDataToPictureBuffer( |
| 373 DXVAVideoDecodeAccelerator* decoder, | 411 DXVAVideoDecodeAccelerator* decoder, |
| 374 IDirect3DSurface9* dest_surface, | 412 IDirect3DSurface9* dest_surface, |
| 413 ID3D11Texture2D* dx11_texture, | |
| 375 int input_buffer_id) { | 414 int input_buffer_id) { |
| 376 DCHECK(dest_surface); | 415 DCHECK(dest_surface || dx11_texture); |
| 377 | 416 if (dx11_texture) { |
| 417 // Grab a reference on the decoder texture. This reference will be released | |
| 418 // when we receive a notification that the copy was completed or when the | |
| 419 // DXVAPictureBuffer instance is destroyed. | |
| 420 decoder_dx11_texture_ = dx11_texture; | |
| 421 decoder->CopyTexture(dx11_texture, dx11_decoding_texture_.get(), NULL, | |
| 422 id(), input_buffer_id); | |
| 423 return true; | |
| 424 } | |
| 378 D3DSURFACE_DESC surface_desc; | 425 D3DSURFACE_DESC surface_desc; |
| 379 HRESULT hr = dest_surface->GetDesc(&surface_desc); | 426 HRESULT hr = dest_surface->GetDesc(&surface_desc); |
| 380 RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false); | 427 RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false); |
| 381 | 428 |
| 382 D3DSURFACE_DESC texture_desc; | 429 D3DSURFACE_DESC texture_desc; |
| 383 decoding_texture_->GetLevelDesc(0, &texture_desc); | 430 decoding_texture_->GetLevelDesc(0, &texture_desc); |
| 384 | 431 |
| 385 if (texture_desc.Width != surface_desc.Width || | 432 if (texture_desc.Width != surface_desc.Width || |
| 386 texture_desc.Height != surface_desc.Height) { | 433 texture_desc.Height != surface_desc.Height) { |
| 387 NOTREACHED() << "Decode surface of different dimension than texture"; | 434 NOTREACHED() << "Decode surface of different dimension than texture"; |
| (...skipping 29 matching lines...) | |
| 417 IDirect3DSurface9* dest_surface) { | 464 IDirect3DSurface9* dest_surface) { |
| 418 DCHECK(!available()); | 465 DCHECK(!available()); |
| 419 | 466 |
| 420 GLint current_texture = 0; | 467 GLint current_texture = 0; |
| 421 glGetIntegerv(GL_TEXTURE_BINDING_2D, ¤t_texture); | 468 glGetIntegerv(GL_TEXTURE_BINDING_2D, ¤t_texture); |
| 422 | 469 |
| 423 glBindTexture(GL_TEXTURE_2D, picture_buffer_.texture_id()); | 470 glBindTexture(GL_TEXTURE_2D, picture_buffer_.texture_id()); |
| 424 | 471 |
| 425 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); | 472 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); |
| 426 | 473 |
| 427 DCHECK_EQ(src_surface, decoder_surface_.get()); | 474 if (src_surface && dest_surface) { |
| 428 DCHECK_EQ(dest_surface, target_surface_.get()); | 475 DCHECK_EQ(src_surface, decoder_surface_.get()); |
| 429 | 476 DCHECK_EQ(dest_surface, target_surface_.get()); |
| 430 decoder_surface_.Release(); | 477 decoder_surface_.Release(); |
| 431 target_surface_.Release(); | 478 target_surface_.Release(); |
| 479 } else { | |
| 480 DCHECK(decoder_dx11_texture_.get()); | |
| 481 decoder_dx11_texture_.Release(); | |
| 482 } | |
| 432 | 483 |
| 433 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); | 484 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); |
| 434 eglBindTexImage( | 485 eglBindTexImage( |
| 435 egl_display, | 486 egl_display, |
| 436 decoding_surface_, | 487 decoding_surface_, |
| 437 EGL_BACK_BUFFER); | 488 EGL_BACK_BUFFER); |
| 438 | 489 |
| 439 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); | 490 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); |
| 440 glBindTexture(GL_TEXTURE_2D, current_texture); | 491 glBindTexture(GL_TEXTURE_2D, current_texture); |
| 441 } | 492 } |
| 442 | 493 |
| 443 DXVAVideoDecodeAccelerator::PendingSampleInfo::PendingSampleInfo( | 494 DXVAVideoDecodeAccelerator::PendingSampleInfo::PendingSampleInfo( |
| 444 int32 buffer_id, IMFSample* sample) | 495 int32 buffer_id, IMFSample* sample) |
| 445 : input_buffer_id(buffer_id), | 496 : input_buffer_id(buffer_id), |
| 446 picture_buffer_id(-1) { | 497 picture_buffer_id(-1) { |
| 447 output_sample.Attach(sample); | 498 output_sample.Attach(sample); |
| 448 } | 499 } |
| 449 | 500 |
| 450 DXVAVideoDecodeAccelerator::PendingSampleInfo::~PendingSampleInfo() {} | 501 DXVAVideoDecodeAccelerator::PendingSampleInfo::~PendingSampleInfo() {} |
| 451 | 502 |
| 452 // static | |
| 453 bool DXVAVideoDecodeAccelerator::CreateD3DDevManager() { | |
| 454 TRACE_EVENT0("gpu", "DXVAVideoDecodeAccelerator_CreateD3DDevManager"); | |
| 455 | |
| 456 HRESULT hr = Direct3DCreate9Ex(D3D_SDK_VERSION, d3d9_.Receive()); | |
| 457 RETURN_ON_HR_FAILURE(hr, "Direct3DCreate9Ex failed", false); | |
| 458 | |
| 459 D3DPRESENT_PARAMETERS present_params = {0}; | |
| 460 present_params.BackBufferWidth = 1; | |
| 461 present_params.BackBufferHeight = 1; | |
| 462 present_params.BackBufferFormat = D3DFMT_UNKNOWN; | |
| 463 present_params.BackBufferCount = 1; | |
| 464 present_params.SwapEffect = D3DSWAPEFFECT_DISCARD; | |
| 465 present_params.hDeviceWindow = ::GetShellWindow(); | |
| 466 present_params.Windowed = TRUE; | |
| 467 present_params.Flags = D3DPRESENTFLAG_VIDEO; | |
| 468 present_params.FullScreen_RefreshRateInHz = 0; | |
| 469 present_params.PresentationInterval = 0; | |
| 470 | |
| 471 hr = d3d9_->CreateDeviceEx(D3DADAPTER_DEFAULT, | |
| 472 D3DDEVTYPE_HAL, | |
| 473 ::GetShellWindow(), | |
| 474 D3DCREATE_FPU_PRESERVE | | |
| 475 D3DCREATE_SOFTWARE_VERTEXPROCESSING | | |
| 476 D3DCREATE_DISABLE_PSGP_THREADING | | |
| 477 D3DCREATE_MULTITHREADED, | |
| 478 &present_params, | |
| 479 NULL, | |
| 480 device_.Receive()); | |
| 481 RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device", false); | |
| 482 | |
| 483 hr = DXVA2CreateDirect3DDeviceManager9(&dev_manager_reset_token_, | |
| 484 device_manager_.Receive()); | |
| 485 RETURN_ON_HR_FAILURE(hr, "DXVA2CreateDirect3DDeviceManager9 failed", false); | |
| 486 | |
| 487 hr = device_manager_->ResetDevice(device_.get(), dev_manager_reset_token_); | |
| 488 RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false); | |
| 489 | |
| 490 hr = device_->CreateQuery(D3DQUERYTYPE_EVENT, query_.Receive()); | |
| 491 RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device query", false); | |
| 492 // Ensure query_ API works (to avoid an infinite loop later in | |
| 493 // CopyOutputSampleDataToPictureBuffer). | |
| 494 hr = query_->Issue(D3DISSUE_END); | |
| 495 RETURN_ON_HR_FAILURE(hr, "Failed to issue END test query", false); | |
| 496 return true; | |
| 497 } | |
| 498 | |
| 499 DXVAVideoDecodeAccelerator::DXVAVideoDecodeAccelerator( | 503 DXVAVideoDecodeAccelerator::DXVAVideoDecodeAccelerator( |
| 500 const base::Callback<bool(void)>& make_context_current) | 504 const base::Callback<bool(void)>& make_context_current, |
| 505 gfx::GLContext* gl_context) | |
| 501 : client_(NULL), | 506 : client_(NULL), |
| 502 dev_manager_reset_token_(0), | 507 dev_manager_reset_token_(0), |
| 508 dx11_dev_manager_reset_token_(0), | |
| 503 egl_config_(NULL), | 509 egl_config_(NULL), |
| 504 state_(kUninitialized), | 510 state_(kUninitialized), |
| 505 pictures_requested_(false), | 511 pictures_requested_(false), |
| 506 inputs_before_decode_(0), | 512 inputs_before_decode_(0), |
| 507 sent_drain_message_(false), | 513 sent_drain_message_(false), |
| 508 make_context_current_(make_context_current), | 514 make_context_current_(make_context_current), |
| 509 codec_(media::kUnknownVideoCodec), | 515 codec_(media::kUnknownVideoCodec), |
| 510 decoder_thread_("DXVAVideoDecoderThread"), | 516 decoder_thread_("DXVAVideoDecoderThread"), |
| 511 weak_this_factory_(this), | 517 weak_this_factory_(this), |
| 512 weak_ptr_(weak_this_factory_.GetWeakPtr()), | 518 weak_ptr_(weak_this_factory_.GetWeakPtr()), |
| 513 pending_flush_(false) { | 519 pending_flush_(false), |
| 520 use_dx11_(false), | |
| 521 dx11_video_format_converter_media_type_needs_init_(true), | |
| 522 gl_context_(gl_context) { | |
| 514 memset(&input_stream_info_, 0, sizeof(input_stream_info_)); | 523 memset(&input_stream_info_, 0, sizeof(input_stream_info_)); |
| 515 memset(&output_stream_info_, 0, sizeof(output_stream_info_)); | 524 memset(&output_stream_info_, 0, sizeof(output_stream_info_)); |
| 516 } | 525 } |
| 517 | 526 |
| 518 DXVAVideoDecodeAccelerator::~DXVAVideoDecodeAccelerator() { | 527 DXVAVideoDecodeAccelerator::~DXVAVideoDecodeAccelerator() { |
| 519 client_ = NULL; | 528 client_ = NULL; |
| 520 } | 529 } |
| 521 | 530 |
| 522 bool DXVAVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile, | 531 bool DXVAVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile, |
| 523 Client* client) { | 532 Client* client) { |
| 524 client_ = client; | 533 client_ = client; |
| 525 | 534 |
| 526 main_thread_task_runner_ = base::MessageLoop::current()->task_runner(); | 535 main_thread_task_runner_ = base::MessageLoop::current()->task_runner(); |
| 527 | 536 |
| 528 // Not all versions of Windows 7 and later include Media Foundation DLLs. | |
| 529 // Instead of crashing while delay loading the DLL when calling MFStartup() | |
| 530 // below, probe whether we can successfully load the DLL now. | |
| 531 // | |
| 532 // See http://crbug.com/339678 for details. | |
| 533 HMODULE mfplat_dll = ::LoadLibrary(L"MFPlat.dll"); | |
| 534 RETURN_ON_FAILURE(mfplat_dll, "MFPlat.dll is required for decoding", false); | |
| 535 | |
| 536 if (profile != media::H264PROFILE_BASELINE && | 537 if (profile != media::H264PROFILE_BASELINE && |
| 537 profile != media::H264PROFILE_MAIN && | 538 profile != media::H264PROFILE_MAIN && |
| 538 profile != media::H264PROFILE_HIGH && | 539 profile != media::H264PROFILE_HIGH && |
| 539 profile != media::VP8PROFILE_ANY && | 540 profile != media::VP8PROFILE_ANY && |
| 540 profile != media::VP9PROFILE_ANY) { | 541 profile != media::VP9PROFILE_ANY) { |
| 541 RETURN_AND_NOTIFY_ON_FAILURE(false, | 542 RETURN_AND_NOTIFY_ON_FAILURE(false, |
| 542 "Unsupported h.264, vp8, or vp9 profile", PLATFORM_FAILURE, false); | 543 "Unsupported h.264, vp8, or vp9 profile", PLATFORM_FAILURE, false); |
| 543 } | 544 } |
| 544 | 545 |
| 546 // Not all versions of Windows 7 and later include Media Foundation DLLs. | |
| 547 // Instead of crashing while delay loading the DLL when calling MFStartup() | |
| 548 // below, probe whether we can successfully load the DLL now. | |
| 549 // See http://crbug.com/339678 for details. | |
| 550 HMODULE dxgi_manager_dll = NULL; | |
| 551 if ((dxgi_manager_dll = ::GetModuleHandle(L"MFPlat.dll")) == NULL) { | |
| 552 HMODULE mfplat_dll = ::LoadLibrary(L"MFPlat.dll"); | |
| 553 RETURN_ON_FAILURE(mfplat_dll, "MFPlat.dll is required for decoding", | |
| 554 false); | |
| 555 // On Windows 8+ mfplat.dll provides the MFCreateDXGIDeviceManager API. | |
| 556 // On Windows 7 mshtmlmedia.dll provides it. | |
| 557 dxgi_manager_dll = mfplat_dll; | |
| 558 } | |
| 559 | |
| 560 // TODO(ananta) | |
| 561 // The code below works, as in we can create the DX11 device manager for | |
| 562 // Windows 7. However the IMFTransform we use for texture conversion and | |
| 563 // copy does not exist on Windows 7. Look into an alternate approach | |
| 564 // and enable the code below. | |
| 565 #if defined ENABLE_DX11_FOR_WIN7 | |
| 566 if ((base::win::GetVersion() == base::win::VERSION_WIN7) && | |
| 567 ((dxgi_manager_dll = ::GetModuleHandle(L"mshtmlmedia.dll")) == NULL)) { | |
| 568 HMODULE mshtml_media_dll = ::LoadLibrary(L"mshtmlmedia.dll"); | |
| 569 if (mshtml_media_dll) | |
| 570 dxgi_manager_dll = mshtml_media_dll; | |
| 571 } | |
| 572 #endif | |
| 573 // If we don't find the MFCreateDXGIDeviceManager API we fallback to D3D9 | |
| 574 // decoding. | |
| 575 if (dxgi_manager_dll && !create_dxgi_device_manager_) { | |
| 576 create_dxgi_device_manager_ = reinterpret_cast<CreateDXGIDeviceManager>( | |
| 577 ::GetProcAddress(dxgi_manager_dll, "MFCreateDXGIDeviceManager")); | |
| 578 } | |
| 579 | |
| 545 RETURN_AND_NOTIFY_ON_FAILURE( | 580 RETURN_AND_NOTIFY_ON_FAILURE( |
| 546 gfx::g_driver_egl.ext.b_EGL_ANGLE_surface_d3d_texture_2d_share_handle, | 581 gfx::g_driver_egl.ext.b_EGL_ANGLE_surface_d3d_texture_2d_share_handle, |
| 547 "EGL_ANGLE_surface_d3d_texture_2d_share_handle unavailable", | 582 "EGL_ANGLE_surface_d3d_texture_2d_share_handle unavailable", |
| 548 PLATFORM_FAILURE, | 583 PLATFORM_FAILURE, |
| 549 false); | 584 false); |
| 550 | 585 |
| 551 State state = GetState(); | 586 State state = GetState(); |
| 552 RETURN_AND_NOTIFY_ON_FAILURE((state == kUninitialized), | 587 RETURN_AND_NOTIFY_ON_FAILURE((state == kUninitialized), |
| 553 "Initialize: invalid state: " << state, ILLEGAL_STATE, false); | 588 "Initialize: invalid state: " << state, ILLEGAL_STATE, false); |
| 554 | 589 |
| 555 HRESULT hr = MFStartup(MF_VERSION, MFSTARTUP_FULL); | 590 HRESULT hr = MFStartup(MF_VERSION, MFSTARTUP_FULL); |
| 556 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFStartup failed.", PLATFORM_FAILURE, | 591 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFStartup failed.", PLATFORM_FAILURE, |
| 557 false); | 592 false); |
| 558 | 593 |
| 559 RETURN_AND_NOTIFY_ON_FAILURE(CreateD3DDevManager(), | |
| 560 "Failed to initialize D3D device and manager", | |
| 561 PLATFORM_FAILURE, | |
| 562 false); | |
| 563 | |
| 564 RETURN_AND_NOTIFY_ON_FAILURE(InitDecoder(profile), | 594 RETURN_AND_NOTIFY_ON_FAILURE(InitDecoder(profile), |
| 565 "Failed to initialize decoder", PLATFORM_FAILURE, false); | 595 "Failed to initialize decoder", PLATFORM_FAILURE, false); |
| 566 | 596 |
| 567 RETURN_AND_NOTIFY_ON_FAILURE(GetStreamsInfoAndBufferReqs(), | 597 RETURN_AND_NOTIFY_ON_FAILURE(GetStreamsInfoAndBufferReqs(), |
| 568 "Failed to get input/output stream info.", PLATFORM_FAILURE, false); | 598 "Failed to get input/output stream info.", PLATFORM_FAILURE, false); |
| 569 | 599 |
| 570 RETURN_AND_NOTIFY_ON_FAILURE( | 600 RETURN_AND_NOTIFY_ON_FAILURE( |
| 571 SendMFTMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0), | 601 SendMFTMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0), |
| 572 "Send MFT_MESSAGE_NOTIFY_BEGIN_STREAMING notification failed", | 602 "Send MFT_MESSAGE_NOTIFY_BEGIN_STREAMING notification failed", |
| 573 PLATFORM_FAILURE, false); | 603 PLATFORM_FAILURE, false); |
| 574 | 604 |
| 575 RETURN_AND_NOTIFY_ON_FAILURE( | 605 RETURN_AND_NOTIFY_ON_FAILURE( |
| 576 SendMFTMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0), | 606 SendMFTMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0), |
| 577 "Send MFT_MESSAGE_NOTIFY_START_OF_STREAM notification failed", | 607 "Send MFT_MESSAGE_NOTIFY_START_OF_STREAM notification failed", |
| 578 PLATFORM_FAILURE, false); | 608 PLATFORM_FAILURE, false); |
| 579 | 609 |
| 580 SetState(kNormal); | 610 SetState(kNormal); |
| 581 | 611 |
| 582 StartDecoderThread(); | 612 StartDecoderThread(); |
| 583 return true; | 613 return true; |
| 584 } | 614 } |
| 585 | 615 |
| 616 bool DXVAVideoDecodeAccelerator::CreateD3DDevManager() { | |
| 617 TRACE_EVENT0("gpu", "DXVAVideoDecodeAccelerator_CreateD3DDevManager"); | |
| 618 | |
| 619 HRESULT hr = Direct3DCreate9Ex(D3D_SDK_VERSION, d3d9_.Receive()); | |
| 620 RETURN_ON_HR_FAILURE(hr, "Direct3DCreate9Ex failed", false); | |
| 621 | |
| 622 D3DPRESENT_PARAMETERS present_params = {0}; | |
| 623 present_params.BackBufferWidth = 1; | |
| 624 present_params.BackBufferHeight = 1; | |
| 625 present_params.BackBufferFormat = D3DFMT_UNKNOWN; | |
| 626 present_params.BackBufferCount = 1; | |
| 627 present_params.SwapEffect = D3DSWAPEFFECT_DISCARD; | |
| 628 present_params.hDeviceWindow = ::GetShellWindow(); | |
| 629 present_params.Windowed = TRUE; | |
| 630 present_params.Flags = D3DPRESENTFLAG_VIDEO; | |
| 631 present_params.FullScreen_RefreshRateInHz = 0; | |
| 632 present_params.PresentationInterval = 0; | |
| 633 | |
| 634 hr = d3d9_->CreateDeviceEx(D3DADAPTER_DEFAULT, | |
| 635 D3DDEVTYPE_HAL, | |
| 636 ::GetShellWindow(), | |
| 637 D3DCREATE_FPU_PRESERVE | | |
| 638 D3DCREATE_SOFTWARE_VERTEXPROCESSING | | |
| 639 D3DCREATE_DISABLE_PSGP_THREADING | | |
| 640 D3DCREATE_MULTITHREADED, | |
| 641 &present_params, | |
| 642 NULL, | |
| 643 d3d9_device_ex_.Receive()); | |
| 644 RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device", false); | |
| 645 | |
| 646 hr = DXVA2CreateDirect3DDeviceManager9(&dev_manager_reset_token_, | |
| 647 device_manager_.Receive()); | |
| 648 RETURN_ON_HR_FAILURE(hr, "DXVA2CreateDirect3DDeviceManager9 failed", false); | |
| 649 | |
| 650 hr = device_manager_->ResetDevice(d3d9_device_ex_.get(), | |
| 651 dev_manager_reset_token_); | |
| 652 RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false); | |
| 653 | |
| 654 hr = d3d9_device_ex_->CreateQuery(D3DQUERYTYPE_EVENT, query_.Receive()); | |
| 655 RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device query", false); | |
| 656 // Ensure query_ API works (to avoid an infinite loop later in | |
| 657 // CopyOutputSampleDataToPictureBuffer). | |
| 658 hr = query_->Issue(D3DISSUE_END); | |
| 659 RETURN_ON_HR_FAILURE(hr, "Failed to issue END test query", false); | |
| 660 return true; | |
| 661 } | |
| 662 | |
| 663 bool DXVAVideoDecodeAccelerator::CreateDX11DevManager() { | |
| 664 HRESULT hr = create_dxgi_device_manager_(&dx11_dev_manager_reset_token_, | |
| 665 d3d11_device_manager_.Receive()); | |
| 666 RETURN_ON_HR_FAILURE(hr, "MFCreateDXGIDeviceManager failed", false); | |
| 667 | |
| 668 // This array defines the set of DirectX hardware feature levels we support. | |
| 669 // The ordering MUST be preserved. All applications are assumed to support | |
| 670 // 9.1 unless otherwise stated by the application, which is not our case. | |
| 671 D3D_FEATURE_LEVEL feature_levels[] = { | |
| 672 D3D_FEATURE_LEVEL_11_1, | |
| 673 D3D_FEATURE_LEVEL_11_0, | |
| 674 D3D_FEATURE_LEVEL_10_1, | |
| 675 D3D_FEATURE_LEVEL_10_0, | |
| 676 D3D_FEATURE_LEVEL_9_3, | |
| 677 D3D_FEATURE_LEVEL_9_2, | |
| 678 D3D_FEATURE_LEVEL_9_1 }; | |
| 679 | |
| 680 D3D_FEATURE_LEVEL feature_level_out = D3D_FEATURE_LEVEL_11_0; | |
| 681 hr = D3D11CreateDevice(NULL, | |
| 682 D3D_DRIVER_TYPE_HARDWARE, | |
| 683 NULL, | |
| 684 D3D11_CREATE_DEVICE_VIDEO_SUPPORT, | |
| 685 feature_levels, | |
| 686 arraysize(feature_levels), | |
| 687 D3D11_SDK_VERSION, | |
| 688 d3d11_device_.Receive(), | |
| 689 &feature_level_out, | |
| 690 d3d11_device_context_.Receive()); | |
| 691 RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device", false); | |
| 692 | |
| 693 hr = d3d11_device_manager_->ResetDevice(d3d11_device_.get(), | |
| 694 dx11_dev_manager_reset_token_); | |
| 695 RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false); | |
| 696 | |
| 697 hr = ::CoCreateInstance( | |
| 698 CLSID_VideoProcessorMFT, | |
| 699 NULL, | |
| 700 CLSCTX_INPROC_SERVER, | |
| 701 IID_IMFTransform, | |
| 702 reinterpret_cast<void**>(video_format_converter_mft_.Receive())); | |
| 703 | |
| 704 if (FAILED(hr)) { | |
| 705 base::debug::Alias(&hr); | |
| 706 // TODO(ananta) | |
| 707 // Remove this CHECK when the change to use DX11 for H/W decoding | |
| 708 // stabilizes. | |
| 709 CHECK(false); | |
| 710 } | |
| 711 RETURN_ON_HR_FAILURE(hr, "Failed to create video format converter", false); | |
| 712 return true; | |
| 713 } | |
| 714 | |
| 586 void DXVAVideoDecodeAccelerator::Decode( | 715 void DXVAVideoDecodeAccelerator::Decode( |
| 587 const media::BitstreamBuffer& bitstream_buffer) { | 716 const media::BitstreamBuffer& bitstream_buffer) { |
| 588 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 717 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
| 589 | 718 |
| 590 State state = GetState(); | 719 State state = GetState(); |
| 591 RETURN_AND_NOTIFY_ON_FAILURE((state == kNormal || state == kStopped || | 720 RETURN_AND_NOTIFY_ON_FAILURE((state == kNormal || state == kStopped || |
| 592 state == kFlushing), | 721 state == kFlushing), |
| 593 "Invalid state: " << state, ILLEGAL_STATE,); | 722 "Invalid state: " << state, ILLEGAL_STATE,); |
| 594 | 723 |
| 595 base::win::ScopedComPtr<IMFSample> sample; | 724 base::win::ScopedComPtr<IMFSample> sample; |
| (...skipping 29 matching lines...) | |
| 625 ++buffer_index) { | 754 ++buffer_index) { |
| 626 linked_ptr<DXVAPictureBuffer> picture_buffer = | 755 linked_ptr<DXVAPictureBuffer> picture_buffer = |
| 627 DXVAPictureBuffer::Create(*this, buffers[buffer_index], egl_config_); | 756 DXVAPictureBuffer::Create(*this, buffers[buffer_index], egl_config_); |
| 628 RETURN_AND_NOTIFY_ON_FAILURE(picture_buffer.get(), | 757 RETURN_AND_NOTIFY_ON_FAILURE(picture_buffer.get(), |
| 629 "Failed to allocate picture buffer", PLATFORM_FAILURE,); | 758 "Failed to allocate picture buffer", PLATFORM_FAILURE,); |
| 630 | 759 |
| 631 bool inserted = output_picture_buffers_.insert(std::make_pair( | 760 bool inserted = output_picture_buffers_.insert(std::make_pair( |
| 632 buffers[buffer_index].id(), picture_buffer)).second; | 761 buffers[buffer_index].id(), picture_buffer)).second; |
| 633 DCHECK(inserted); | 762 DCHECK(inserted); |
| 634 } | 763 } |
| 764 | |
| 635 ProcessPendingSamples(); | 765 ProcessPendingSamples(); |
| 636 if (pending_flush_) { | 766 if (pending_flush_) { |
| 637 decoder_thread_task_runner_->PostTask( | 767 decoder_thread_task_runner_->PostTask( |
| 638 FROM_HERE, | 768 FROM_HERE, |
| 639 base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal, | 769 base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal, |
| 640 base::Unretained(this))); | 770 base::Unretained(this))); |
| 641 } | 771 } |
| 642 } | 772 } |
| 643 | 773 |
| 644 void DXVAVideoDecodeAccelerator::ReusePictureBuffer( | 774 void DXVAVideoDecodeAccelerator::ReusePictureBuffer( |
| (...skipping 188 matching lines...) | |
| 833 RETURN_ON_HR_FAILURE(hr, "DllGetClassObject for decoder failed", false); | 963 RETURN_ON_HR_FAILURE(hr, "DllGetClassObject for decoder failed", false); |
| 834 | 964 |
| 835 hr = factory->CreateInstance(NULL, | 965 hr = factory->CreateInstance(NULL, |
| 836 __uuidof(IMFTransform), | 966 __uuidof(IMFTransform), |
| 837 reinterpret_cast<void**>(decoder_.Receive())); | 967 reinterpret_cast<void**>(decoder_.Receive())); |
| 838 RETURN_ON_HR_FAILURE(hr, "Failed to create decoder instance", false); | 968 RETURN_ON_HR_FAILURE(hr, "Failed to create decoder instance", false); |
| 839 | 969 |
| 840 RETURN_ON_FAILURE(CheckDecoderDxvaSupport(), | 970 RETURN_ON_FAILURE(CheckDecoderDxvaSupport(), |
| 841 "Failed to check decoder DXVA support", false); | 971 "Failed to check decoder DXVA support", false); |
| 842 | 972 |
| 973 ULONG_PTR device_manager_to_use = NULL; | |
| 974 if (use_dx11_) { | |
| 975 CHECK(create_dxgi_device_manager_); | |
| 976 RETURN_AND_NOTIFY_ON_FAILURE(CreateDX11DevManager(), | |
| 977 "Failed to initialize DX11 device and manager", | |
| 978 PLATFORM_FAILURE, | |
| 979 false); | |
| 980 device_manager_to_use = reinterpret_cast<ULONG_PTR>( | |
| 981 d3d11_device_manager_.get()); | |
| 982 } else { | |
| 983 RETURN_AND_NOTIFY_ON_FAILURE(CreateD3DDevManager(), | |
| 984 "Failed to initialize D3D device and manager", | |
| 985 PLATFORM_FAILURE, | |
| 986 false); | |
| 987 device_manager_to_use = reinterpret_cast<ULONG_PTR>(device_manager_.get()); | |
| 988 } | |
| 989 | |
| 843 hr = decoder_->ProcessMessage( | 990 hr = decoder_->ProcessMessage( |
| 844 MFT_MESSAGE_SET_D3D_MANAGER, | 991 MFT_MESSAGE_SET_D3D_MANAGER, |
| 845 reinterpret_cast<ULONG_PTR>(device_manager_.get())); | 992 device_manager_to_use); |
| 846 RETURN_ON_HR_FAILURE(hr, "Failed to pass D3D manager to decoder", false); | 993 if (use_dx11_) { |
| 994 RETURN_ON_HR_FAILURE(hr, "Failed to pass DX11 manager to decoder", false); | |
| 995 } else { | |
| 996 RETURN_ON_HR_FAILURE(hr, "Failed to pass D3D manager to decoder", false); | |
| 997 } | |
| 847 | 998 |
| 848 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); | 999 EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay(); |
| 849 | 1000 |
| 850 EGLint config_attribs[] = { | 1001 EGLint config_attribs[] = { |
| 851 EGL_BUFFER_SIZE, 32, | 1002 EGL_BUFFER_SIZE, 32, |
| 852 EGL_RED_SIZE, 8, | 1003 EGL_RED_SIZE, 8, |
| 853 EGL_GREEN_SIZE, 8, | 1004 EGL_GREEN_SIZE, 8, |
| 854 EGL_BLUE_SIZE, 8, | 1005 EGL_BLUE_SIZE, 8, |
| 855 EGL_SURFACE_TYPE, EGL_PBUFFER_BIT, | 1006 EGL_SURFACE_TYPE, EGL_PBUFFER_BIT, |
| 856 EGL_ALPHA_SIZE, 0, | 1007 EGL_ALPHA_SIZE, 0, |
| (...skipping 26 matching lines...) Expand all Loading... | |
| 883 hr = attributes->SetUINT32(CODECAPI_AVDecVideoAcceleration_H264, TRUE); | 1034 hr = attributes->SetUINT32(CODECAPI_AVDecVideoAcceleration_H264, TRUE); |
| 884 RETURN_ON_HR_FAILURE(hr, "Failed to enable DXVA H/W decoding", false); | 1035 RETURN_ON_HR_FAILURE(hr, "Failed to enable DXVA H/W decoding", false); |
| 885 } | 1036 } |
| 886 | 1037 |
| 887 hr = attributes->SetUINT32(CODECAPI_AVLowLatencyMode, TRUE); | 1038 hr = attributes->SetUINT32(CODECAPI_AVLowLatencyMode, TRUE); |
| 888 if (SUCCEEDED(hr)) { | 1039 if (SUCCEEDED(hr)) { |
| 889 DVLOG(1) << "Successfully set Low latency mode on decoder."; | 1040 DVLOG(1) << "Successfully set Low latency mode on decoder."; |
| 890 } else { | 1041 } else { |
| 891 DVLOG(1) << "Failed to set Low latency mode on decoder. Error: " << hr; | 1042 DVLOG(1) << "Failed to set Low latency mode on decoder. Error: " << hr; |
| 892 } | 1043 } |
| 1044 | |
| 1045 // The decoder should use DX11 iff | |
| 1046 // 1. The underlying H/W decoder supports it. | |
| 1047 // 2. We have a pointer to the MFCreateDXGIDeviceManager function needed for | |
| 1048 // this. This should always be true for Windows 8+. | |
| 1049 // 3. ANGLE is using DX11. | |
| 1050 DCHECK(gl_context_); | |
| 1051 if (create_dxgi_device_manager_ && | |
| 1052 (gl_context_->GetGLRenderer().find("Direct3D11") != | |
| 1053 std::string::npos)) { | |
| 1054 UINT32 dx11_aware = 0; | |
| 1055 attributes->GetUINT32(MF_SA_D3D11_AWARE, &dx11_aware); | |
| 1056 use_dx11_ = !!dx11_aware; | |
| 1057 } | |
| 893 return true; | 1058 return true; |
| 894 } | 1059 } |
| 895 | 1060 |
| 896 bool DXVAVideoDecodeAccelerator::SetDecoderMediaTypes() { | 1061 bool DXVAVideoDecodeAccelerator::SetDecoderMediaTypes() { |
| 897 RETURN_ON_FAILURE(SetDecoderInputMediaType(), | 1062 RETURN_ON_FAILURE(SetDecoderInputMediaType(), |
| 898 "Failed to set decoder input media type", false); | 1063 "Failed to set decoder input media type", false); |
| 899 return SetDecoderOutputMediaType(MFVideoFormat_NV12); | 1064 return SetDecoderOutputMediaType(MFVideoFormat_NV12); |
| 900 } | 1065 } |
| 901 | 1066 |
| 902 bool DXVAVideoDecodeAccelerator::SetDecoderInputMediaType() { | 1067 bool DXVAVideoDecodeAccelerator::SetDecoderInputMediaType() { |
| (...skipping 144 matching lines...) | |
| 1047 | 1212 |
| 1048 inputs_before_decode_ = 0; | 1213 inputs_before_decode_ = 0; |
| 1049 | 1214 |
| 1050 RETURN_AND_NOTIFY_ON_FAILURE(ProcessOutputSample(output_data_buffer.pSample), | 1215 RETURN_AND_NOTIFY_ON_FAILURE(ProcessOutputSample(output_data_buffer.pSample), |
| 1051 "Failed to process output sample.", PLATFORM_FAILURE,); | 1216 "Failed to process output sample.", PLATFORM_FAILURE,); |
| 1052 } | 1217 } |
| 1053 | 1218 |
| 1054 bool DXVAVideoDecodeAccelerator::ProcessOutputSample(IMFSample* sample) { | 1219 bool DXVAVideoDecodeAccelerator::ProcessOutputSample(IMFSample* sample) { |
| 1055 RETURN_ON_FAILURE(sample, "Decode succeeded with NULL output sample", false); | 1220 RETURN_ON_FAILURE(sample, "Decode succeeded with NULL output sample", false); |
| 1056 | 1221 |
| 1057 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; | |
| 1058 HRESULT hr = sample->GetBufferByIndex(0, output_buffer.Receive()); | |
| 1059 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", false); | |
| 1060 | |
| 1061 base::win::ScopedComPtr<IDirect3DSurface9> surface; | |
| 1062 hr = MFGetService(output_buffer.get(), MR_BUFFER_SERVICE, | |
| 1063 IID_PPV_ARGS(surface.Receive())); | |
| 1064 RETURN_ON_HR_FAILURE(hr, "Failed to get D3D surface from output sample", | |
| 1065 false); | |
| 1066 | |
| 1067 LONGLONG input_buffer_id = 0; | 1222 LONGLONG input_buffer_id = 0; |
| 1068 RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id), | 1223 RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id), |
| 1069 "Failed to get input buffer id associated with sample", | 1224 "Failed to get input buffer id associated with sample", |
| 1070 false); | 1225 false); |
| 1071 | 1226 |
| 1072 { | 1227 { |
| 1073 base::AutoLock lock(decoder_lock_); | 1228 base::AutoLock lock(decoder_lock_); |
| 1074 DCHECK(pending_output_samples_.empty()); | 1229 DCHECK(pending_output_samples_.empty()); |
| 1075 pending_output_samples_.push_back( | 1230 pending_output_samples_.push_back( |
| 1076 PendingSampleInfo(input_buffer_id, sample)); | 1231 PendingSampleInfo(input_buffer_id, sample)); |
| 1077 } | 1232 } |
| 1078 | 1233 |
| 1079 if (pictures_requested_) { | 1234 if (pictures_requested_) { |
| 1080 DVLOG(1) << "Waiting for picture slots from the client."; | 1235 DVLOG(1) << "Waiting for picture slots from the client."; |
| 1081 main_thread_task_runner_->PostTask( | 1236 main_thread_task_runner_->PostTask( |
| 1082 FROM_HERE, | 1237 FROM_HERE, |
| 1083 base::Bind(&DXVAVideoDecodeAccelerator::ProcessPendingSamples, | 1238 base::Bind(&DXVAVideoDecodeAccelerator::ProcessPendingSamples, |
| 1084 weak_this_factory_.GetWeakPtr())); | 1239 weak_this_factory_.GetWeakPtr())); |
| 1085 return true; | 1240 return true; |
| 1086 } | 1241 } |
| 1087 | 1242 |
| 1088 // We only read the surface description, which contains its width/height when | 1243 int width = 0; |
| 1089 // we need the picture buffers from the client. Once we have those, then they | 1244 int height = 0; |
| 1090 // are reused. | 1245 if (!GetVideoFrameDimensions(sample, &width, &height)) { |
| 1091 D3DSURFACE_DESC surface_desc; | 1246 RETURN_ON_FAILURE(false, "Failed to get D3D surface from output sample", |
| 1092 hr = surface->GetDesc(&surface_desc); | 1247 false); |
| 1093 RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false); | 1248 } |
| 1094 | 1249 |
| 1095 // Go ahead and request picture buffers. | 1250 // Go ahead and request picture buffers. |
| 1096 main_thread_task_runner_->PostTask( | 1251 main_thread_task_runner_->PostTask( |
| 1097 FROM_HERE, | 1252 FROM_HERE, |
| 1098 base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers, | 1253 base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers, |
| 1099 weak_this_factory_.GetWeakPtr(), | 1254 weak_this_factory_.GetWeakPtr(), |
| 1100 surface_desc.Width, | 1255 width, |
| 1101 surface_desc.Height)); | 1256 height)); |
| 1102 | 1257 |
| 1103 pictures_requested_ = true; | 1258 pictures_requested_ = true; |
| 1104 return true; | 1259 return true; |
| 1105 } | 1260 } |
| 1106 | 1261 |
| 1107 void DXVAVideoDecodeAccelerator::ProcessPendingSamples() { | 1262 void DXVAVideoDecodeAccelerator::ProcessPendingSamples() { |
| 1108 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 1263 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
| 1109 | 1264 |
| 1110 if (!output_picture_buffers_.size()) | 1265 if (!output_picture_buffers_.size()) |
| 1111 return; | 1266 return; |
| (...skipping 11 matching lines...) | |
| 1123 PendingSampleInfo* pending_sample = NULL; | 1278 PendingSampleInfo* pending_sample = NULL; |
| 1124 { | 1279 { |
| 1125 base::AutoLock lock(decoder_lock_); | 1280 base::AutoLock lock(decoder_lock_); |
| 1126 | 1281 |
| 1127 PendingSampleInfo& sample_info = pending_output_samples_.front(); | 1282 PendingSampleInfo& sample_info = pending_output_samples_.front(); |
| 1128 if (sample_info.picture_buffer_id != -1) | 1283 if (sample_info.picture_buffer_id != -1) |
| 1129 continue; | 1284 continue; |
| 1130 pending_sample = &sample_info; | 1285 pending_sample = &sample_info; |
| 1131 } | 1286 } |
| 1132 | 1287 |
| 1288 int width = 0; | |
| 1289 int height = 0; | |
| 1290 if (!GetVideoFrameDimensions(pending_sample->output_sample.get(), | |
| 1291 &width, &height)) { | |
| 1292 RETURN_AND_NOTIFY_ON_FAILURE(false, | |
| 1293 "Failed to get D3D surface from output sample", PLATFORM_FAILURE,); | |
| 1294 } | |
| 1295 | |
| 1296 if (width != index->second->size().width() || | |
| 1297 height != index->second->size().height()) { | |
| 1298 HandleResolutionChanged(width, height); | |
| 1299 return; | |
| 1300 } | |
| 1301 | |
| 1133 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; | 1302 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; |
| 1134 HRESULT hr = pending_sample->output_sample->GetBufferByIndex( | 1303 HRESULT hr = pending_sample->output_sample->GetBufferByIndex( |
| 1135 0, output_buffer.Receive()); | 1304 0, output_buffer.Receive()); |
| 1136 RETURN_AND_NOTIFY_ON_HR_FAILURE( | 1305 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, |
| 1137 hr, "Failed to get buffer from output sample", PLATFORM_FAILURE,); | 1306 "Failed to get buffer from output sample", PLATFORM_FAILURE,); |
| 1138 | 1307 |
| 1139 base::win::ScopedComPtr<IDirect3DSurface9> surface; | 1308 base::win::ScopedComPtr<IDirect3DSurface9> surface; |
| 1140 hr = MFGetService(output_buffer.get(), MR_BUFFER_SERVICE, | 1309 base::win::ScopedComPtr<ID3D11Texture2D> d3d11_texture; |
| 1141 IID_PPV_ARGS(surface.Receive())); | |
| 1142 RETURN_AND_NOTIFY_ON_HR_FAILURE( | |
| 1143 hr, "Failed to get D3D surface from output sample", | |
| 1144 PLATFORM_FAILURE,); | |
| 1145 | 1310 |
| 1146 D3DSURFACE_DESC surface_desc; | 1311 if (use_dx11_) { |
| 1147 hr = surface->GetDesc(&surface_desc); | 1312 base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer; |
| 1148 RETURN_AND_NOTIFY_ON_HR_FAILURE( | 1313 hr = dxgi_buffer.QueryFrom(output_buffer.get()); |
| 1149 hr, "Failed to get surface description", PLATFORM_FAILURE,); | 1314 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, |
| 1150 | 1315 "Failed to get DXGIBuffer from output sample", PLATFORM_FAILURE,); |
| 1151 if (surface_desc.Width != | 1316 hr = dxgi_buffer->GetResource( |
| 1152 static_cast<uint32>(index->second->size().width()) || | 1317 __uuidof(ID3D11Texture2D), |
| 1153 surface_desc.Height != | 1318 reinterpret_cast<void**>(d3d11_texture.Receive())); |
| 1154 static_cast<uint32>(index->second->size().height())) { | 1319 } else { |
| 1155 HandleResolutionChanged(surface_desc.Width, surface_desc.Height); | 1320 hr = MFGetService(output_buffer.get(), MR_BUFFER_SERVICE, |
| 1156 return; | 1321 IID_PPV_ARGS(surface.Receive())); |
| 1157 } | 1322 } |
| 1323 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | |
| 1324 "Failed to get surface from output sample", PLATFORM_FAILURE,); | |
| 1158 | 1325 |
| 1159 pending_sample->picture_buffer_id = index->second->id(); | 1326 pending_sample->picture_buffer_id = index->second->id(); |
| 1160 | 1327 |
| 1161 RETURN_AND_NOTIFY_ON_FAILURE( | 1328 RETURN_AND_NOTIFY_ON_FAILURE( |
| 1162 index->second->CopyOutputSampleDataToPictureBuffer( | 1329 index->second->CopyOutputSampleDataToPictureBuffer( |
| 1163 this, | 1330 this, |
| 1164 surface.get(), | 1331 surface.get(), |
| 1332 d3d11_texture.get(), | |
| 1165 pending_sample->input_buffer_id), | 1333 pending_sample->input_buffer_id), |
| 1166 "Failed to copy output sample", PLATFORM_FAILURE, ); | 1334 "Failed to copy output sample", PLATFORM_FAILURE,); |
| 1167 | 1335 |
| 1168 index->second->set_available(false); | 1336 index->second->set_available(false); |
| 1169 } | 1337 } |
| 1170 } | 1338 } |
| 1171 } | 1339 } |
| 1172 | 1340 |
| 1173 void DXVAVideoDecodeAccelerator::StopOnError( | 1341 void DXVAVideoDecodeAccelerator::StopOnError( |
| 1174 media::VideoDecodeAccelerator::Error error) { | 1342 media::VideoDecodeAccelerator::Error error) { |
| 1175 if (!main_thread_task_runner_->BelongsToCurrentThread()) { | 1343 if (!main_thread_task_runner_->BelongsToCurrentThread()) { |
| 1176 main_thread_task_runner_->PostTask( | 1344 main_thread_task_runner_->PostTask( |
| (...skipping 16 matching lines...) | |
| 1193 void DXVAVideoDecodeAccelerator::Invalidate() { | 1361 void DXVAVideoDecodeAccelerator::Invalidate() { |
| 1194 if (GetState() == kUninitialized) | 1362 if (GetState() == kUninitialized) |
| 1195 return; | 1363 return; |
| 1196 decoder_thread_.Stop(); | 1364 decoder_thread_.Stop(); |
| 1197 weak_this_factory_.InvalidateWeakPtrs(); | 1365 weak_this_factory_.InvalidateWeakPtrs(); |
| 1198 output_picture_buffers_.clear(); | 1366 output_picture_buffers_.clear(); |
| 1199 stale_output_picture_buffers_.clear(); | 1367 stale_output_picture_buffers_.clear(); |
| 1200 pending_output_samples_.clear(); | 1368 pending_output_samples_.clear(); |
| 1201 pending_input_buffers_.clear(); | 1369 pending_input_buffers_.clear(); |
| 1202 decoder_.Release(); | 1370 decoder_.Release(); |
| 1371 if (video_format_converter_mft_.get()) { | |
| 1372 video_format_converter_mft_->ProcessMessage( | |
| 1373 MFT_MESSAGE_NOTIFY_END_STREAMING, 0); | |
| 1374 video_format_converter_mft_.Release(); | |
| 1375 } | |
| 1203 MFShutdown(); | 1376 MFShutdown(); |
| 1377 dx11_video_format_converter_media_type_needs_init_ = true; | |
| 1204 SetState(kUninitialized); | 1378 SetState(kUninitialized); |
| 1205 } | 1379 } |
| 1206 | 1380 |
| 1207 void DXVAVideoDecodeAccelerator::NotifyInputBufferRead(int input_buffer_id) { | 1381 void DXVAVideoDecodeAccelerator::NotifyInputBufferRead(int input_buffer_id) { |
| 1208 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 1382 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
| 1209 if (client_) | 1383 if (client_) |
| 1210 client_->NotifyEndOfBitstreamBuffer(input_buffer_id); | 1384 client_->NotifyEndOfBitstreamBuffer(input_buffer_id); |
| 1211 } | 1385 } |
| 1212 | 1386 |
| 1213 void DXVAVideoDecodeAccelerator::NotifyFlushDone() { | 1387 void DXVAVideoDecodeAccelerator::NotifyFlushDone() { |
| (...skipping 204 matching lines...) | |
| 1418 // http://code.google.com/p/chromium/issues/detail?id=150925 | 1592 // http://code.google.com/p/chromium/issues/detail?id=150925 |
| 1419 main_thread_task_runner_->PostTask( | 1593 main_thread_task_runner_->PostTask( |
| 1420 FROM_HERE, | 1594 FROM_HERE, |
| 1421 base::Bind(&DXVAVideoDecodeAccelerator::NotifyInputBufferRead, | 1595 base::Bind(&DXVAVideoDecodeAccelerator::NotifyInputBufferRead, |
| 1422 weak_this_factory_.GetWeakPtr(), | 1596 weak_this_factory_.GetWeakPtr(), |
| 1423 input_buffer_id)); | 1597 input_buffer_id)); |
| 1424 } | 1598 } |
| 1425 | 1599 |
| 1426 void DXVAVideoDecodeAccelerator::HandleResolutionChanged(int width, | 1600 void DXVAVideoDecodeAccelerator::HandleResolutionChanged(int width, |
| 1427 int height) { | 1601 int height) { |
| 1602 dx11_video_format_converter_media_type_needs_init_ = true; | |
| 1603 | |
| 1428 main_thread_task_runner_->PostTask( | 1604 main_thread_task_runner_->PostTask( |
| 1429 FROM_HERE, | 1605 FROM_HERE, |
| 1430 base::Bind(&DXVAVideoDecodeAccelerator::DismissStaleBuffers, | 1606 base::Bind(&DXVAVideoDecodeAccelerator::DismissStaleBuffers, |
| 1431 weak_this_factory_.GetWeakPtr())); | 1607 weak_this_factory_.GetWeakPtr())); |
| 1432 | 1608 |
| 1433 main_thread_task_runner_->PostTask( | 1609 main_thread_task_runner_->PostTask( |
| 1434 FROM_HERE, | 1610 FROM_HERE, |
| 1435 base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers, | 1611 base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers, |
| 1436 weak_this_factory_.GetWeakPtr(), | 1612 weak_this_factory_.GetWeakPtr(), |
| 1437 width, | 1613 width, |
| (...skipping 73 matching lines...) | |
| 1511 FROM_HERE, | 1687 FROM_HERE, |
| 1512 base::Bind(&DXVAVideoDecodeAccelerator::CopySurface, | 1688 base::Bind(&DXVAVideoDecodeAccelerator::CopySurface, |
| 1513 base::Unretained(this), | 1689 base::Unretained(this), |
| 1514 src_surface, | 1690 src_surface, |
| 1515 dest_surface, | 1691 dest_surface, |
| 1516 picture_buffer_id, | 1692 picture_buffer_id, |
| 1517 input_buffer_id)); | 1693 input_buffer_id)); |
| 1518 return; | 1694 return; |
| 1519 } | 1695 } |
| 1520 | 1696 |
| 1521 HRESULT hr = device_->StretchRect(src_surface, NULL, dest_surface, | 1697 HRESULT hr = d3d9_device_ex_->StretchRect(src_surface, NULL, dest_surface, |
| 1522 NULL, D3DTEXF_NONE); | 1698 NULL, D3DTEXF_NONE); |
| 1523 RETURN_ON_HR_FAILURE(hr, "Colorspace conversion via StretchRect failed",); | 1699 RETURN_ON_HR_FAILURE(hr, "Colorspace conversion via StretchRect failed",); |
| 1524 | 1700 |
| 1525 // Ideally, this should be done immediately before the draw call that uses | 1701 // Ideally, this should be done immediately before the draw call that uses |
| 1526 // the texture. Flush it once here though. | 1702 // the texture. Flush it once here though. |
| 1527 hr = query_->Issue(D3DISSUE_END); | 1703 hr = query_->Issue(D3DISSUE_END); |
| 1528 RETURN_ON_HR_FAILURE(hr, "Failed to issue END",); | 1704 RETURN_ON_HR_FAILURE(hr, "Failed to issue END",); |
| 1529 | 1705 |
| 1530 // Flush the decoder device to ensure that the decoded frame is copied to the | 1706 // Flush the decoder device to ensure that the decoded frame is copied to the |
| 1531 // target surface. | 1707 // target surface. |
| 1532 decoder_thread_task_runner_->PostDelayedTask( | 1708 decoder_thread_task_runner_->PostDelayedTask( |
| (...skipping 50 matching lines...) | |
| 1583 base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal, | 1759 base::Bind(&DXVAVideoDecodeAccelerator::FlushInternal, |
| 1584 base::Unretained(this))); | 1760 base::Unretained(this))); |
| 1585 return; | 1761 return; |
| 1586 } | 1762 } |
| 1587 decoder_thread_task_runner_->PostTask( | 1763 decoder_thread_task_runner_->PostTask( |
| 1588 FROM_HERE, | 1764 FROM_HERE, |
| 1589 base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers, | 1765 base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers, |
| 1590 base::Unretained(this))); | 1766 base::Unretained(this))); |
| 1591 } | 1767 } |
| 1592 | 1768 |
| 1769 void DXVAVideoDecodeAccelerator::CopyTexture(ID3D11Texture2D* src_texture, | |
| 1770 ID3D11Texture2D* dest_texture, | |
| 1771 IMFSample* video_frame, | |
| 1772 int picture_buffer_id, | |
| 1773 int input_buffer_id) { | |
| 1774 HRESULT hr = E_FAIL; | |
| 1775 | |
| 1776 DCHECK(use_dx11_); | |
| 1777 | |
| 1778 if (!decoder_thread_task_runner_->BelongsToCurrentThread()) { | |
| 1779 // The Media Foundation H.264 decoder outputs NV12 textures, which we | |
| 1780 // cannot copy into ANGLE directly because it expects ARGB textures. | |
| 1781 // With D3D9, the StretchRect API on the IDirect3DDevice9Ex interface | |
| 1782 // did the color space conversion for us. Sadly, DX11 does not provide | |
| 1783 // a straightforward way to do this. | |
| 1784 // We use the video processor MFT. | |
| 1785 // https://msdn.microsoft.com/en-us/library/hh162913(v=vs.85).aspx | |
| 1786 // This object implements a Media Foundation transform (IMFTransform) | |
| 1787 // which follows the same contract as the decoder. Per MSDN, the color | |
| 1788 // space conversion is done on the GPU. | |
| 1789 | |
| 1790 D3D11_TEXTURE2D_DESC source_desc; | |
| 1791 src_texture->GetDesc(&source_desc); | |
| 1792 | |
| 1793 // Set up the input and output types for the video processor MFT. | |
| 1794 if (!InitializeDX11VideoFormatConverterMediaType(source_desc.Width, | |
| 1795 source_desc.Height)) { | |
| 1796 RETURN_AND_NOTIFY_ON_FAILURE( | |
| 1797 false, "Failed to initialize media types for convesion.", | |
| 1798 PLATFORM_FAILURE,); | |
| 1799 } | |
| 1800 | |
| 1801 // The input to the video processor is the decoder's output sample. | |
| 1802 base::win::ScopedComPtr<IMFSample> input_sample_for_conversion; | |
| 1803 { | |
| 1804 base::AutoLock lock(decoder_lock_); | |
| 1805 PendingSampleInfo& sample_info = pending_output_samples_.front(); | |
| 1806 input_sample_for_conversion = sample_info.output_sample; | |
| 1807 } | |
| 1808 | |
| 1809 decoder_thread_task_runner_->PostTask( | |
| 1810 FROM_HERE, | |
| 1811 base::Bind(&DXVAVideoDecodeAccelerator::CopyTexture, | |
| 1812 base::Unretained(this), | |
| 1813 src_texture, | |
| 1814 dest_texture, | |
| 1815 input_sample_for_conversion.Detach(), | |
| 1816 picture_buffer_id, | |
| 1817 input_buffer_id)); | |
|
jbauman 2015/02/27 02:08:34: I think you need something like DXVAVideoDecodeAcc
ananta 2015/02/27 23:18:33: Done.
| |
| 1818 return; | |
| 1819 } | |
| 1820 | |
| 1821 DCHECK(video_frame); | |
| 1822 | |
| 1823 base::win::ScopedComPtr<IMFSample> input_sample; | |
| 1824 input_sample.Attach(video_frame); | |
| 1825 | |
| 1826 DCHECK(video_format_converter_mft_.get()); | |
| 1827 | |
| 1828 hr = video_format_converter_mft_->ProcessInput(0, video_frame, 0); | |
| 1829 if (FAILED(hr)) { | |
| 1830 DCHECK(false); | |
| 1831 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | |
| 1832 "Failed to convert output sample format.", PLATFORM_FAILURE,); | |
| 1833 } | |
| 1834 | |
| 1835 // The video processor MFT requires output samples to be allocated by the | |
| 1836 // caller. We create a sample with a buffer backed with the ID3D11Texture2D | |
| 1837 // interface exposed by ANGLE. This works nicely, as it ensures that the | |
| 1838 // video processor converts the color space of the output frame and copies | |
| 1839 // the result into the ANGLE texture. | |
| 1840 base::win::ScopedComPtr<IMFSample> output_sample; | |
| 1841 hr = MFCreateSample(output_sample.Receive()); | |
| 1842 if (FAILED(hr)) { | |
| 1843 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | |
| 1844 "Failed to create output sample.", PLATFORM_FAILURE,); | |
| 1845 } | |
| 1846 | |
| 1847 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; | |
| 1848 hr = MFCreateDXGISurfaceBuffer( | |
| 1849 __uuidof(ID3D11Texture2D), dest_texture, 0, FALSE, | |
| 1850 output_buffer.Receive()); | |
| 1851 if (FAILED(hr)) { | |
| 1852 base::debug::Alias(&hr); | |
| 1853 // TODO(ananta) | |
| 1854 // Remove this CHECK when the change to use DX11 for H/W decoding | |
| 1855 // stabilizes. | |
| 1856 CHECK(false); | |
| 1857 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | |
| 1858 "Failed to create output sample.", PLATFORM_FAILURE,); | |
| 1859 } | |
| 1860 | |
| 1861 output_sample->AddBuffer(output_buffer.get()); | |
| 1862 | |
| 1863 DWORD status = 0; | |
| 1864 MFT_OUTPUT_DATA_BUFFER format_converter_output = {}; | |
| 1865 format_converter_output.pSample = output_sample.get(); | |
| 1866 hr = video_format_converter_mft_->ProcessOutput( | |
| 1867 0, // No flags | |
| 1868 1, // # of out streams to pull from | |
| 1869 &format_converter_output, | |
| 1870 &status); | |
| 1871 if (FAILED(hr)) { | |
| 1872 base::debug::Alias(&hr); | |
| 1873 // TODO(ananta) | |
| 1874 // Remove this CHECK when the change to use DX11 for H/W decoding | |
| 1875 // stabilizes. | |
| 1876 CHECK(false); | |
| 1877 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | |
| 1878 "Failed to convert output sample format.", PLATFORM_FAILURE,); | |
| 1879 } | |
| 1880 | |
| 1881 main_thread_task_runner_->PostTask( | |
| 1882 FROM_HERE, | |
| 1883 base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, | |
| 1884 weak_this_factory_.GetWeakPtr(), | |
| 1885 reinterpret_cast<IDirect3DSurface9*>(NULL), | |
| 1886 reinterpret_cast<IDirect3DSurface9*>(NULL), | |
| 1887 picture_buffer_id, | |
| 1888 input_buffer_id)); | |
| 1889 } | |
| 1890 | |
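CopyTexture assumes that video_format_converter_mft_ already holds the DX11 Video Processor MFT; its creation is outside this hunk. A hedged sketch of how such a converter is typically obtained and bound to the shared DX11 device manager (the local name "converter" is illustrative and error handling is elided):

  // Create the Video Processor MFT and hand it the DX11 device manager so
  // that the NV12 -> ARGB conversion runs on the same device as the decoder.
  base::win::ScopedComPtr<IMFTransform> converter;
  HRESULT hr = CoCreateInstance(CLSID_VideoProcessorMFT, NULL,
                                CLSCTX_INPROC_SERVER,
                                IID_PPV_ARGS(converter.Receive()));
  if (SUCCEEDED(hr)) {
    hr = converter->ProcessMessage(
        MFT_MESSAGE_SET_D3D_MANAGER,
        reinterpret_cast<ULONG_PTR>(d3d11_device_manager_.get()));
  }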
| 1593 void DXVAVideoDecodeAccelerator::FlushDecoder( | 1891 void DXVAVideoDecodeAccelerator::FlushDecoder( |
| 1594 int iterations, | 1892 int iterations, |
| 1595 IDirect3DSurface9* src_surface, | 1893 IDirect3DSurface9* src_surface, |
| 1596 IDirect3DSurface9* dest_surface, | 1894 IDirect3DSurface9* dest_surface, |
| 1597 int picture_buffer_id, | 1895 int picture_buffer_id, |
| 1598 int input_buffer_id) { | 1896 int input_buffer_id) { |
| 1599 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 1897 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
| 1600 | 1898 |
| 1601 // The DXVA decoder has its own device which it uses for decoding. ANGLE | 1899 // The DXVA decoder has its own device which it uses for decoding. ANGLE |
| 1602 // has its own device which we don't have access to. | 1900 // has its own device which we don't have access to. |
| (...skipping 21 matching lines...) Expand all Loading... | |
| 1624 main_thread_task_runner_->PostTask( | 1922 main_thread_task_runner_->PostTask( |
| 1625 FROM_HERE, | 1923 FROM_HERE, |
| 1626 base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, | 1924 base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, |
| 1627 weak_this_factory_.GetWeakPtr(), | 1925 weak_this_factory_.GetWeakPtr(), |
| 1628 src_surface, | 1926 src_surface, |
| 1629 dest_surface, | 1927 dest_surface, |
| 1630 picture_buffer_id, | 1928 picture_buffer_id, |
| 1631 input_buffer_id)); | 1929 input_buffer_id)); |
| 1632 } | 1930 } |
| 1633 | 1931 |
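The body of FlushDecoder is mostly elided above; its overall shape is a bounded, non-blocking poll of the event query. A sketch of that shape, where FlushWithRetry, kMaxFlushIterations and kFlushDelayMs are illustrative placeholders rather than names taken from this CL:

  // Poll the event query; while the GPU is still busy, schedule another
  // attempt on the decoder thread instead of blocking it.
  void DXVAVideoDecodeAccelerator::FlushWithRetry(int iterations) {
    HRESULT hr = query_->GetData(NULL, 0, D3DGETDATA_FLUSH);
    if (hr == S_FALSE && ++iterations < kMaxFlushIterations) {
      decoder_thread_task_runner_->PostDelayedTask(
          FROM_HERE,
          base::Bind(&DXVAVideoDecodeAccelerator::FlushWithRetry,
                     base::Unretained(this), iterations),
          base::TimeDelta::FromMilliseconds(kFlushDelayMs));
      return;
    }
    // Completed (or gave up waiting): the copied surface can now be handed
    // to the client on the main thread.
  }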
| 1932 bool DXVAVideoDecodeAccelerator::InitializeDX11VideoFormatConverterMediaType( | |
| 1933 int width, int height) { | |
| 1934 if (!dx11_video_format_converter_media_type_needs_init_) | |
| 1935 return true; | |
| 1936 | |
| 1937 CHECK(video_format_converter_mft_.get()); | |
| 1938 | |
| 1939 HRESULT hr = video_format_converter_mft_->ProcessMessage( | |
| 1940 MFT_MESSAGE_SET_D3D_MANAGER, | |
| 1941 reinterpret_cast<ULONG_PTR>(d3d11_device_manager_.get())); | |
| 1942 | |
| 1943 if (FAILED(hr)) { | |
| 1944 base::debug::Alias(&hr); | |
| 1945 // TODO(ananta) | |
| 1946 // Remove this CHECK when the change to use DX11 for H/W decoding | |
| 1947 // stabilizes. | |
| 1948 CHECK(false); | |
| 1949 } | |
| 1950 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | |
| 1951 "Failed to initialize video format converter", PLATFORM_FAILURE, false); | |
| 1952 | |
| 1953 video_format_converter_mft_->ProcessMessage( | |
| 1954 MFT_MESSAGE_NOTIFY_END_STREAMING, 0); | |
| 1955 | |
| 1956 base::win::ScopedComPtr<IMFMediaType> media_type; | |
| 1957 hr = MFCreateMediaType(media_type.Receive()); | |
| 1958 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFCreateMediaType failed", | |
| 1959 PLATFORM_FAILURE, false); | |
| 1960 | |
| 1961 hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); | |
| 1962 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set major input type", | |
| 1963 PLATFORM_FAILURE, false); | |
| 1964 | |
| 1965 hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12); | |
| 1966 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set input sub type", | |
| 1967 PLATFORM_FAILURE, false); | |
| 1968 | |
| 1969 hr = media_type->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE); | |
| 1970 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | |
| 1971 "Failed to set attributes on media type", PLATFORM_FAILURE, false); | |
| 1972 | |
| 1973 hr = media_type->SetUINT32(MF_MT_INTERLACE_MODE, | |
| 1974 MFVideoInterlace_Progressive); | |
| 1975 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | |
| 1976 "Failed to set attributes on media type", PLATFORM_FAILURE, false); | |
| 1977 | |
| 1978 base::win::ScopedComPtr<IMFAttributes> converter_attributes; | |
| 1979 hr = video_format_converter_mft_->GetAttributes( | |
| 1980 converter_attributes.Receive()); | |
| 1981 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get converter attributes", | |
| 1982 PLATFORM_FAILURE, false); | |
| 1983 | |
| 1984 hr = converter_attributes->SetUINT32(MF_XVP_PLAYBACK_MODE, TRUE); | |
| 1985 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter attributes", | |
| 1986 PLATFORM_FAILURE, false); | |
| 1987 | |
| 1988 hr = converter_attributes->SetUINT32(MF_LOW_LATENCY, FALSE); | |
| 1989 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter attributes", | |
| 1990 PLATFORM_FAILURE, false); | |
| 1991 | |
| 1992 hr = MFSetAttributeSize(media_type.get(), MF_MT_FRAME_SIZE, width, height); | |
| 1993 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set media type attributes", | |
| 1994 PLATFORM_FAILURE, false); | |
| 1995 | |
| 1996 hr = video_format_converter_mft_->SetInputType(0, media_type.get(), 0); | |
| 1997 if (FAILED(hr)) { | |
| 1998 base::debug::Alias(&hr); | |
| 1999 // TODO(ananta) | |
| 2000 // Remove this CHECK when the change to use DX11 for H/W decoding | |
| 2001 // stabilizes. | |
| 2002 CHECK(false); | |
| 2003 } | |
| 2004 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter input type", | |
| 2005 PLATFORM_FAILURE, false); | |
| 2006 | |
| 2007 base::win::ScopedComPtr<IMFMediaType> out_media_type; | |
| 2008 | |
| 2009 for (uint32 i = 0; | |
| 2010 SUCCEEDED(video_format_converter_mft_->GetOutputAvailableType(0, i, | |
| 2011 out_media_type.Receive())); | |
| 2012 ++i) { | |
| 2013 GUID out_subtype = {0}; | |
| 2014 hr = out_media_type->GetGUID(MF_MT_SUBTYPE, &out_subtype); | |
| 2015 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get output major type", | |
| 2016 PLATFORM_FAILURE, false); | |
| 2017 | |
| 2018 if (out_subtype == MFVideoFormat_ARGB32) { | |
| 2019 hr = out_media_type->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE); | |
| 2020 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | |
| 2021 "Failed to set attributes on media type", PLATFORM_FAILURE, false); | |
| 2022 | |
| 2023 hr = out_media_type->SetUINT32(MF_MT_INTERLACE_MODE, | |
| 2024 MFVideoInterlace_Progressive); | |
| 2025 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | |
| 2026 "Failed to set attributes on media type", PLATFORM_FAILURE, false); | |
| 2027 | |
| 2028 hr = MFSetAttributeSize(out_media_type.get(), MF_MT_FRAME_SIZE, width, | |
| 2029 height); | |
| 2030 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | |
| 2031 "Failed to set media type attributes", PLATFORM_FAILURE, false); | |
| 2032 | |
| 2033 hr = video_format_converter_mft_->SetOutputType( | |
| 2034 0, out_media_type.get(), 0); // No flags | |
| 2035 if (FAILED(hr)) { | |
| 2036 base::debug::Alias(&hr); | |
| 2037 // TODO(ananta) | |
| 2038 // Remove this CHECK when the change to use DX11 for H/W decoding | |
| 2039 // stabilizes. | |
| 2040 CHECK(false); | |
| 2041 } | |
| 2042 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | |
| 2043 "Failed to set converter output type", PLATFORM_FAILURE, false); | |
| 2044 | |
| 2045 hr = video_format_converter_mft_->ProcessMessage( | |
| 2046 MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0); | |
| 2047 if (FAILED(hr)) { | |
| 2048 // TODO(ananta) | |
| 2049 // Remove this CHECK when the change to use DX11 for H/W decoding | |
| 2050 // stabilizes. | |
| 2051 RETURN_AND_NOTIFY_ON_FAILURE( | |
| 2052 false, "Failed to initialize video converter.", PLATFORM_FAILURE, | |
| 2053 false); | |
| 2054 } | |
| 2055 dx11_video_format_converter_media_type_needs_init_ = false; | |
| 2056 return true; | |
| 2057 } | |
| 2058 out_media_type.Release(); | |
| 2059 } | |
| 2060 return false; | |
| 2061 } | |
| 2062 | |
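A note on the attribute helper used twice above: MFSetAttributeSize stores the frame size as a single packed UINT64 attribute, width in the upper 32 bits and height in the lower 32 bits. A minimal equivalent, shown only to make the MF_MT_FRAME_SIZE encoding explicit:

  // Equivalent of MFSetAttributeSize(media_type, MF_MT_FRAME_SIZE, w, h).
  HRESULT SetFrameSize(IMFMediaType* media_type, UINT32 width, UINT32 height) {
    UINT64 packed = (static_cast<UINT64>(width) << 32) | height;
    return media_type->SetUINT64(MF_MT_FRAME_SIZE, packed);
  }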
| 2063 bool DXVAVideoDecodeAccelerator::GetVideoFrameDimensions( | |
| 2064 IMFSample* sample, | |
| 2065 int* width, | |
| 2066 int* height) { | |
| 2067 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; | |
| 2068 HRESULT hr = sample->GetBufferByIndex(0, output_buffer.Receive()); | |
| 2069 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", false); | |
| 2070 | |
| 2071 if (use_dx11_) { | |
| 2072 base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer; | |
| 2073 base::win::ScopedComPtr<ID3D11Texture2D> d3d11_texture; | |
| 2074 hr = dxgi_buffer.QueryFrom(output_buffer.get()); | |
| 2075 RETURN_ON_HR_FAILURE(hr, "Failed to get DXGIBuffer from output sample", | |
| 2076 false); | |
| 2077 hr = dxgi_buffer->GetResource( | |
| 2078 __uuidof(ID3D11Texture2D), | |
| 2079 reinterpret_cast<void**>(d3d11_texture.Receive())); | |
| 2080 RETURN_ON_HR_FAILURE(hr, "Failed to get D3D11Texture from output buffer", | |
| 2081 false); | |
| 2082 D3D11_TEXTURE2D_DESC d3d11_texture_desc; | |
| 2083 d3d11_texture->GetDesc(&d3d11_texture_desc); | |
| 2084 *width = d3d11_texture_desc.Width; | |
| 2085 *height = d3d11_texture_desc.Height; | |
| 2086 } else { | |
| 2087 base::win::ScopedComPtr<IDirect3DSurface9> surface; | |
| 2088 hr = MFGetService(output_buffer.get(), MR_BUFFER_SERVICE, | |
| 2089 IID_PPV_ARGS(surface.Receive())); | |
| 2090 RETURN_ON_HR_FAILURE(hr, "Failed to get D3D surface from output sample", | |
| 2091 false); | |
| 2092 D3DSURFACE_DESC surface_desc; | |
| 2093 hr = surface->GetDesc(&surface_desc); | |
| 2094 RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false); | |
| 2095 *width = surface_desc.Width; | |
| 2096 *height = surface_desc.Height; | |
| 2097 } | |
| 2098 return true; | |
| 2099 } | |
| 2100 | |
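GetVideoFrameDimensions lets the output path detect a mid-stream resolution change per decoded sample. A hedged usage sketch; current_width_ and current_height_ are illustrative members, not this CL's actual call site:

  int width = 0;
  int height = 0;
  // Compare the sample's dimensions against the allocated picture buffers
  // and reallocate them if the stream resolution has changed.
  if (GetVideoFrameDimensions(sample, &width, &height) &&
      (width != current_width_ || height != current_height_)) {
    HandleResolutionChanged(width, height);
  }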
| 1634 } // namespace content | 2101 } // namespace content |
| OLD | NEW |