OLD | NEW |
---|---|
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/gpu/dxva_video_decode_accelerator_win.h" | 5 #include "media/gpu/dxva_video_decode_accelerator_win.h" |
6 | 6 |
7 #include <memory> | 7 #include <memory> |
8 | 8 |
9 #if !defined(OS_WIN) | 9 #if !defined(OS_WIN) |
10 #error This file should only be built on Windows. | 10 #error This file should only be built on Windows. |
(...skipping 851 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
862 hr = d3d11_device_manager_->ResetDevice(d3d11_device_.get(), | 862 hr = d3d11_device_manager_->ResetDevice(d3d11_device_.get(), |
863 dx11_dev_manager_reset_token_); | 863 dx11_dev_manager_reset_token_); |
864 RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false); | 864 RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false); |
865 | 865 |
866 D3D11_QUERY_DESC query_desc; | 866 D3D11_QUERY_DESC query_desc; |
867 query_desc.Query = D3D11_QUERY_EVENT; | 867 query_desc.Query = D3D11_QUERY_EVENT; |
868 query_desc.MiscFlags = 0; | 868 query_desc.MiscFlags = 0; |
869 hr = d3d11_device_->CreateQuery(&query_desc, d3d11_query_.Receive()); | 869 hr = d3d11_device_->CreateQuery(&query_desc, d3d11_query_.Receive()); |
870 RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device query", false); | 870 RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device query", false); |
871 | 871 |
872 HMODULE video_processor_dll = ::GetModuleHandle(L"msvproc.dll"); | 872 base::win::ScopedComPtr<ID3D11Device1> device1; |
873 RETURN_ON_FAILURE(video_processor_dll, "Failed to load video processor", | 873 hr = d3d11_device_.QueryInterface(__uuidof(ID3D11Device1), |
jbauman
2017/02/14 23:43:34
device1 isn't actually used.
hubbe
2017/02/15 01:37:13
Well spotted, gone.
| |
874 false); | 874 device1.ReceiveVoid()); |
875 RETURN_ON_HR_FAILURE(hr, "Failed to get device1", false); | |
875 | 876 |
876 hr = CreateCOMObjectFromDll(video_processor_dll, CLSID_VideoProcessorMFT, | 877 base::win::ScopedComPtr<ID3D11DeviceContext1> device_context1; |
877 __uuidof(IMFTransform), | 878 hr = d3d11_device_context_.QueryInterface(__uuidof(ID3D11DeviceContext1), |
878 video_format_converter_mft_.ReceiveVoid()); | 879 device1.ReceiveVoid()); |
jbauman
2017/02/14 23:43:34
device_context1 here. Though through the transitiv
hubbe
2017/02/15 01:37:13
Fixed.
Though through the transitive property of
| |
879 RETURN_ON_HR_FAILURE(hr, "Failed to create video format converter", false); | 880 RETURN_ON_HR_FAILURE(hr, "Failed to get device context", false); |
881 hr = d3d11_device_.QueryInterface(__uuidof(ID3D11VideoDevice), | |
jbauman
2017/02/14 23:43:34
ScopedComPtr has a template specialization for Que
hubbe
2017/02/15 01:37:13
nice, done.
| |
882 video_device_.ReceiveVoid()); | |
883 RETURN_ON_HR_FAILURE(hr, "Failed to get video device", false); | |
880 | 884 |
881 base::win::ScopedComPtr<IMFAttributes> converter_attributes; | 885 hr = device1.QueryInterface(__uuidof(ID3D11VideoContext1), |
jbauman
2017/02/14 23:43:34
We need to be able to fall back to ID3D11VideoCont
hubbe
2017/02/15 01:37:13
What's the right way to do that?
| |
882 hr = video_format_converter_mft_->GetAttributes( | 886 video_context_.ReceiveVoid()); |
883 converter_attributes.Receive()); | 887 RETURN_ON_HR_FAILURE(hr, "Failed to get video context", false); |
884 RETURN_ON_HR_FAILURE(hr, "Failed to get converter attributes", false); | |
885 | 888 |
886 hr = converter_attributes->SetUINT32(MF_XVP_PLAYBACK_MODE, TRUE); | 889 // TODO(hubbe): Use actual video format. |
887 RETURN_ON_HR_FAILURE( | 890 D3D11_VIDEO_PROCESSOR_CONTENT_DESC desc; |
888 hr, "Failed to set MF_XVP_PLAYBACK_MODE attribute on converter", false); | 891 desc.InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE; |
jbauman
2017/02/14 23:43:34
I'm curious - do we ever support hardware decoding
hubbe
2017/02/15 01:37:13
We do, we just put our fingers in our ears and hum
| |
892 desc.InputFrameRate.Numerator = 60; | |
893 desc.InputFrameRate.Denominator = 1; | |
894 desc.InputWidth = 1920; | |
jbauman
2017/02/14 23:43:34
I think we need to use the actual video size when
hubbe
2017/02/15 01:37:12
Done.
| |
895 desc.InputHeight = 1080; | |
896 desc.OutputFrameRate.Numerator = 60; | |
897 desc.OutputFrameRate.Denominator = 1; | |
898 desc.OutputWidth = 1920; | |
899 desc.OutputHeight = 1080; | |
900 desc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL; | |
889 | 901 |
890 hr = converter_attributes->SetUINT32(MF_LOW_LATENCY, FALSE); | 902 hr = video_device_->CreateVideoProcessorEnumerator(&desc, |
891 RETURN_ON_HR_FAILURE( | 903 enumerator_.Receive()); |
892 hr, "Failed to set MF_LOW_LATENCY attribute on converter", false); | 904 RETURN_ON_HR_FAILURE(hr, "Failed to enumerate video processors", false); |
905 | |
906 // TODO(hubbe): Find the correct rate-conversion caps index. | |
907 hr = video_device_->CreateVideoProcessor(enumerator_.get(), 0, | |
908 d3d11_processor_.Receive()); | |
909 RETURN_ON_HR_FAILURE(hr, "Failed to create video processor.", false); | |
jbauman
2017/02/14 23:43:34
I've found that you need to set ID3D11VideoContext
hubbe
2017/02/15 01:37:13
Thanks for the tip!
| |
910 | |
893 return true; | 911 return true; |
894 } | 912 } |
895 | 913 |
896 void DXVAVideoDecodeAccelerator::Decode( | 914 void DXVAVideoDecodeAccelerator::Decode( |
897 const BitstreamBuffer& bitstream_buffer) { | 915 const BitstreamBuffer& bitstream_buffer) { |
898 TRACE_EVENT0("media", "DXVAVideoDecodeAccelerator::Decode"); | 916 TRACE_EVENT0("media", "DXVAVideoDecodeAccelerator::Decode"); |
899 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 917 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
900 | 918 |
901 // SharedMemory will take over the ownership of handle. | 919 // SharedMemory will take over the ownership of handle. |
902 base::SharedMemory shm(bitstream_buffer.handle(), true); | 920 base::SharedMemory shm(bitstream_buffer.handle(), true); |
(...skipping 1056 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1959 // resolution changes. We already handle that in the | 1977 // resolution changes. We already handle that in the |
1960 // HandleResolutionChanged() function. | 1978 // HandleResolutionChanged() function. |
1961 if (GetState() != kConfigChange) { | 1979 if (GetState() != kConfigChange) { |
1962 output_picture_buffers_.clear(); | 1980 output_picture_buffers_.clear(); |
1963 stale_output_picture_buffers_.clear(); | 1981 stale_output_picture_buffers_.clear(); |
1964 // We want to continue processing pending input after detecting a config | 1982 // We want to continue processing pending input after detecting a config |
1965 // change. | 1983 // change. |
1966 pending_input_buffers_.clear(); | 1984 pending_input_buffers_.clear(); |
1967 pictures_requested_ = false; | 1985 pictures_requested_ = false; |
1968 if (use_dx11_) { | 1986 if (use_dx11_) { |
1969 if (video_format_converter_mft_.get()) { | 1987 d3d11_processor_.Release(); |
1970 video_format_converter_mft_->ProcessMessage( | 1988 enumerator_.Release(); |
1971 MFT_MESSAGE_NOTIFY_END_STREAMING, 0); | 1989 video_context_.Release(); |
1972 video_format_converter_mft_.Release(); | 1990 video_device_.Release(); |
1973 } | |
1974 d3d11_device_context_.Release(); | 1991 d3d11_device_context_.Release(); |
1975 d3d11_device_.Release(); | 1992 d3d11_device_.Release(); |
1976 d3d11_device_manager_.Release(); | 1993 d3d11_device_manager_.Release(); |
1977 d3d11_query_.Release(); | 1994 d3d11_query_.Release(); |
1978 multi_threaded_.Release(); | 1995 multi_threaded_.Release(); |
1979 dx11_video_format_converter_media_type_needs_init_ = true; | 1996 dx11_video_format_converter_media_type_needs_init_ = true; |
1980 } else { | 1997 } else { |
1981 d3d9_.Release(); | 1998 d3d9_.Release(); |
1982 d3d9_device_ex_.Release(); | 1999 d3d9_device_ex_.Release(); |
1983 device_manager_.Release(); | 2000 device_manager_.Release(); |
(...skipping 572 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
2556 TRACE_EVENT0("media", "DXVAVideoDecodeAccelerator::CopyTexture"); | 2573 TRACE_EVENT0("media", "DXVAVideoDecodeAccelerator::CopyTexture"); |
2557 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 2574 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
2558 | 2575 |
2559 DCHECK(use_dx11_); | 2576 DCHECK(use_dx11_); |
2560 | 2577 |
2561 // The media foundation H.264 decoder outputs YUV12 textures which we | 2578 // The media foundation H.264 decoder outputs YUV12 textures which we |
2562 // cannot copy into ANGLE as they expect ARGB textures. In D3D land | 2579 // cannot copy into ANGLE as they expect ARGB textures. In D3D land |
2563 // the StretchRect API in the IDirect3DDevice9Ex interface did the color | 2580 // the StretchRect API in the IDirect3DDevice9Ex interface did the color |
2564 // space conversion for us. Sadly in DX11 land the API does not provide | 2581 // space conversion for us. Sadly in DX11 land the API does not provide |
2565 // a straightforward way to do this. | 2582 // a straightforward way to do this. |
2566 // We use the video processor MFT. | |
2567 // https://msdn.microsoft.com/en-us/library/hh162913(v=vs.85).aspx | |
2568 // This object implements a media foundation transform (IMFTransform) | |
2569 // which follows the same contract as the decoder. The color space | |
2570 // conversion as per msdn is done in the GPU. | |
2571 | |
2572 D3D11_TEXTURE2D_DESC source_desc; | |
2573 src_texture->GetDesc(&source_desc); | |
2574 | |
2575 // Set up the input and output types for the video processor MFT. | |
2576 if (!InitializeDX11VideoFormatConverterMediaType( | |
2577 source_desc.Width, source_desc.Height, color_space)) { | |
2578 RETURN_AND_NOTIFY_ON_FAILURE( | |
2579 false, "Failed to initialize media types for conversion.", | |
2580 PLATFORM_FAILURE, ); | |
2581 } | |
2582 | 2583 |
2583 // The input to the video processor is the output sample. | 2584 // The input to the video processor is the output sample. |
2584 base::win::ScopedComPtr<IMFSample> input_sample_for_conversion; | 2585 base::win::ScopedComPtr<IMFSample> input_sample_for_conversion; |
2585 { | 2586 { |
2586 base::AutoLock lock(decoder_lock_); | 2587 base::AutoLock lock(decoder_lock_); |
2587 PendingSampleInfo& sample_info = pending_output_samples_.front(); | 2588 PendingSampleInfo& sample_info = pending_output_samples_.front(); |
2588 input_sample_for_conversion = sample_info.output_sample; | 2589 input_sample_for_conversion = sample_info.output_sample; |
2589 } | 2590 } |
2590 | 2591 |
2591 decoder_thread_task_runner_->PostTask( | 2592 decoder_thread_task_runner_->PostTask( |
2592 FROM_HERE, | 2593 FROM_HERE, |
2593 base::Bind(&DXVAVideoDecodeAccelerator::CopyTextureOnDecoderThread, | 2594 base::Bind(&DXVAVideoDecodeAccelerator::CopyTextureOnDecoderThread, |
2594 base::Unretained(this), dest_texture, dest_keyed_mutex, | 2595 base::Unretained(this), dest_texture, dest_keyed_mutex, |
2595 keyed_mutex_value, input_sample_for_conversion, | 2596 keyed_mutex_value, input_sample_for_conversion, |
2596 picture_buffer_id, input_buffer_id)); | 2597 picture_buffer_id, input_buffer_id, color_space)); |
2597 } | 2598 } |
2598 | 2599 |
2599 void DXVAVideoDecodeAccelerator::CopyTextureOnDecoderThread( | 2600 void DXVAVideoDecodeAccelerator::CopyTextureOnDecoderThread( |
2600 ID3D11Texture2D* dest_texture, | 2601 ID3D11Texture2D* dest_texture, |
2601 base::win::ScopedComPtr<IDXGIKeyedMutex> dest_keyed_mutex, | 2602 base::win::ScopedComPtr<IDXGIKeyedMutex> dest_keyed_mutex, |
2602 uint64_t keyed_mutex_value, | 2603 uint64_t keyed_mutex_value, |
2603 base::win::ScopedComPtr<IMFSample> input_sample, | 2604 base::win::ScopedComPtr<IMFSample> input_sample, |
2604 int picture_buffer_id, | 2605 int picture_buffer_id, |
2605 int input_buffer_id) { | 2606 int input_buffer_id, |
2607 const gfx::ColorSpace& color_space) { | |
2606 TRACE_EVENT0("media", | 2608 TRACE_EVENT0("media", |
2607 "DXVAVideoDecodeAccelerator::CopyTextureOnDecoderThread"); | 2609 "DXVAVideoDecodeAccelerator::CopyTextureOnDecoderThread"); |
2608 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 2610 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
2609 HRESULT hr = E_FAIL; | 2611 HRESULT hr = E_FAIL; |
2610 | 2612 |
2611 DCHECK(use_dx11_); | 2613 DCHECK(use_dx11_); |
2612 DCHECK(!!input_sample); | 2614 DCHECK(!!input_sample); |
2613 DCHECK(video_format_converter_mft_.get()); | 2615 DCHECK(d3d11_processor_.get()); |
2614 | 2616 |
2615 if (dest_keyed_mutex) { | 2617 if (dest_keyed_mutex) { |
2616 HRESULT hr = | 2618 HRESULT hr = |
2617 dest_keyed_mutex->AcquireSync(keyed_mutex_value, kAcquireSyncWaitMs); | 2619 dest_keyed_mutex->AcquireSync(keyed_mutex_value, kAcquireSyncWaitMs); |
2618 RETURN_AND_NOTIFY_ON_FAILURE( | 2620 RETURN_AND_NOTIFY_ON_FAILURE( |
2619 hr == S_OK, "D3D11 failed to acquire keyed mutex for texture.", | 2621 hr == S_OK, "D3D11 failed to acquire keyed mutex for texture.", |
2620 PLATFORM_FAILURE, ); | 2622 PLATFORM_FAILURE, ); |
2621 } | 2623 } |
2622 // The video processor MFT requires output samples to be allocated by the | 2624 |
2623 // caller. We create a sample with a buffer backed with the ID3D11Texture2D | 2625 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; |
2624 // interface exposed by ANGLE. This works nicely as this ensures that the | 2626 hr = input_sample->GetBufferByIndex(0, output_buffer.Receive()); |
2625 // video processor coverts the color space of the output frame and copies | 2627 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", ); |
2626 // the result into the ANGLE texture. | 2628 |
2627 base::win::ScopedComPtr<IMFSample> output_sample; | 2629 base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer; |
2628 hr = MFCreateSample(output_sample.Receive()); | 2630 hr = dxgi_buffer.QueryFrom(output_buffer.get()); |
2629 if (FAILED(hr)) { | 2631 RETURN_ON_HR_FAILURE(hr, "Failed to get DXGIBuffer from output sample", ); |
2630 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to create output sample.", | 2632 UINT index = 0; |
2631 PLATFORM_FAILURE, ); | 2633 hr = dxgi_buffer->GetSubresourceIndex(&index); |
2634 RETURN_ON_HR_FAILURE(hr, "Failed to get resource index", ); | |
2635 | |
2636 base::win::ScopedComPtr<ID3D11Texture2D> dx11_decoding_texture; | |
2637 hr = dxgi_buffer->GetResource(IID_PPV_ARGS(dx11_decoding_texture.Receive())); | |
2638 RETURN_ON_HR_FAILURE(hr, "Failed to get resource from output sample", ); | |
2639 | |
2640 D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC outputViewDesc = { | |
2641 D3D11_VPOV_DIMENSION_TEXTURE2D}; | |
2642 outputViewDesc.Texture2D.MipSlice = 0; | |
jbauman
2017/02/14 23:43:34
Variable naming style (here and elsewhere).
hubbe
2017/02/15 01:37:12
Done.
| |
2643 base::win::ScopedComPtr<ID3D11VideoProcessorOutputView> videoProcOutputView; | |
2644 hr = video_device_->CreateVideoProcessorOutputView( | |
2645 dest_texture, enumerator_.get(), &outputViewDesc, | |
2646 videoProcOutputView.Receive()); | |
2647 RETURN_ON_HR_FAILURE(hr, "Failed to get output view", ); | |
jbauman
2017/02/14 23:43:34
RETURN_AND_NOTIFY_ON_HR_FAILURE here and elsewhere
hubbe
2017/02/15 01:37:13
Done.
| |
2648 | |
2649 D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC inputViewDesc = {0}; | |
2650 inputViewDesc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D; | |
2651 inputViewDesc.FourCC = 0; // MAKEFOURCC('P', '0', '1', '0'); | |
jbauman
2017/02/14 23:43:34
Remove
hubbe
2017/02/15 01:37:13
Done.
| |
2652 inputViewDesc.Texture2D.ArraySlice = index; // subresource index of the decoded frame in the texture array | |
2653 inputViewDesc.Texture2D.MipSlice = 0; | |
2654 base::win::ScopedComPtr<ID3D11VideoProcessorInputView> videoProcInputView; | |
2655 hr = video_device_->CreateVideoProcessorInputView( | |
2656 dx11_decoding_texture.get(), enumerator_.get(), &inputViewDesc, | |
2657 videoProcInputView.Receive()); | |
2658 RETURN_ON_HR_FAILURE(hr, "Failed to get input view", ); | |
2659 | |
2660 if (use_color_info_) { | |
2661 video_context_->VideoProcessorSetStreamColorSpace1( | |
2662 d3d11_processor_.get(), 0, | |
2663 gfx::ColorSpaceWin::GetDXGIColorSpace(color_space)); | |
2664 | |
2665 if (share_nv12_textures_) { | |
jbauman
2017/02/14 23:43:34
We should never hit this code with share_nv12_text
hubbe
2017/02/15 01:37:13
Done.
| |
2666 video_context_->VideoProcessorSetOutputColorSpace1( | |
2667 d3d11_processor_.get(), | |
2668 gfx::ColorSpaceWin::GetDXGIColorSpace(color_space)); | |
2669 } else { | |
2670 video_context_->VideoProcessorSetOutputColorSpace1( | |
2671 d3d11_processor_.get(), DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709); | |
2672 } | |
2632 } | 2673 } |
2633 | 2674 |
2634 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; | 2675 D3D11_VIDEO_PROCESSOR_STREAM streams = {0}; |
2635 hr = MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D), dest_texture, 0, | 2676 streams.Enable = TRUE; |
2636 FALSE, output_buffer.Receive()); | 2677 streams.pInputSurface = videoProcInputView.get(); |
2637 if (FAILED(hr)) { | |
2638 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to create output sample.", | |
2639 PLATFORM_FAILURE, ); | |
2640 } | |
2641 | 2678 |
2642 output_sample->AddBuffer(output_buffer.get()); | 2679 hr = video_context_->VideoProcessorBlt( |
2680 d3d11_processor_.get(), videoProcOutputView.get(), 0, 1, &streams); | |
2643 | 2681 |
2644 hr = video_format_converter_mft_->ProcessInput(0, input_sample.get(), 0); | 2682 RETURN_ON_HR_FAILURE(hr, "VideoProcessorBlt failed", ); |
2645 if (FAILED(hr)) { | |
2646 DCHECK(false); | |
2647 RETURN_AND_NOTIFY_ON_HR_FAILURE( | |
2648 hr, "Failed to convert output sample format.", PLATFORM_FAILURE, ); | |
2649 } | |
2650 | |
2651 input_sample.Release(); | |
2652 | |
2653 DWORD status = 0; | |
2654 MFT_OUTPUT_DATA_BUFFER format_converter_output = {}; | |
2655 format_converter_output.pSample = output_sample.get(); | |
2656 hr = video_format_converter_mft_->ProcessOutput( | |
2657 0, // No flags | |
2658 1, // # of out streams to pull from | |
2659 &format_converter_output, &status); | |
2660 | |
2661 if (FAILED(hr)) { | |
2662 DCHECK(false); | |
2663 RETURN_AND_NOTIFY_ON_HR_FAILURE( | |
2664 hr, "Failed to convert output sample format.", PLATFORM_FAILURE, ); | |
2665 } | |
2666 | 2683 |
2667 if (dest_keyed_mutex) { | 2684 if (dest_keyed_mutex) { |
2668 HRESULT hr = dest_keyed_mutex->ReleaseSync(keyed_mutex_value + 1); | 2685 HRESULT hr = dest_keyed_mutex->ReleaseSync(keyed_mutex_value + 1); |
2669 RETURN_AND_NOTIFY_ON_FAILURE(hr == S_OK, "Failed to release keyed mutex.", | 2686 RETURN_AND_NOTIFY_ON_FAILURE(hr == S_OK, "Failed to release keyed mutex.", |
2670 PLATFORM_FAILURE, ); | 2687 PLATFORM_FAILURE, ); |
2671 | 2688 |
2672 main_thread_task_runner_->PostTask( | 2689 main_thread_task_runner_->PostTask( |
2673 FROM_HERE, | 2690 FROM_HERE, |
2674 base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, weak_ptr_, | 2691 base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, weak_ptr_, |
2675 nullptr, nullptr, picture_buffer_id, input_buffer_id)); | 2692 nullptr, nullptr, picture_buffer_id, input_buffer_id)); |
(...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
2728 base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs)); | 2745 base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs)); |
2729 return; | 2746 return; |
2730 } | 2747 } |
2731 | 2748 |
2732 main_thread_task_runner_->PostTask( | 2749 main_thread_task_runner_->PostTask( |
2733 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, | 2750 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, |
2734 weak_ptr_, src_surface, dest_surface, | 2751 weak_ptr_, src_surface, dest_surface, |
2735 picture_buffer_id, input_buffer_id)); | 2752 picture_buffer_id, input_buffer_id)); |
2736 } | 2753 } |
2737 | 2754 |
2738 bool DXVAVideoDecodeAccelerator::InitializeDX11VideoFormatConverterMediaType( | |
2739 int width, | |
2740 int height, | |
2741 const gfx::ColorSpace& color_space) { | |
2742 if (!dx11_video_format_converter_media_type_needs_init_ && | |
jbauman
2017/02/14 23:43:34
dx11_video_format_converter_media_type_needs_init_
hubbe
2017/02/15 01:37:13
Done.
| |
2743 (!use_color_info_ || color_space == dx11_converter_color_space_)) { | |
2744 return true; | |
2745 } | |
2746 | |
2747 CHECK(video_format_converter_mft_.get()); | |
2748 | |
2749 HRESULT hr = video_format_converter_mft_->ProcessMessage( | |
2750 MFT_MESSAGE_SET_D3D_MANAGER, | |
2751 reinterpret_cast<ULONG_PTR>(d3d11_device_manager_.get())); | |
2752 | |
2753 if (FAILED(hr)) | |
2754 DCHECK(false); | |
2755 | |
2756 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, | |
2757 "Failed to initialize video format converter", | |
2758 PLATFORM_FAILURE, false); | |
2759 | |
2760 video_format_converter_mft_->ProcessMessage(MFT_MESSAGE_NOTIFY_END_STREAMING, | |
2761 0); | |
2762 | |
2763 base::win::ScopedComPtr<IMFMediaType> media_type; | |
2764 hr = MFCreateMediaType(media_type.Receive()); | |
2765 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFCreateMediaType failed", | |
2766 PLATFORM_FAILURE, false); | |
2767 | |
2768 hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); | |
2769 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set major input type", | |
2770 PLATFORM_FAILURE, false); | |
2771 | |
2772 hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12); | |
2773 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set input sub type", | |
2774 PLATFORM_FAILURE, false); | |
2775 | |
2776 hr = MFSetAttributeSize(media_type.get(), MF_MT_FRAME_SIZE, width, height); | |
2777 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set media type attributes", | |
2778 PLATFORM_FAILURE, false); | |
2779 | |
2780 if (use_color_info_) { | |
2781 DXVA2_ExtendedFormat format = | |
2782 gfx::ColorSpaceWin::GetExtendedFormat(color_space); | |
2783 media_type->SetUINT32(MF_MT_YUV_MATRIX, format.VideoTransferMatrix); | |
2784 media_type->SetUINT32(MF_MT_VIDEO_NOMINAL_RANGE, format.NominalRange); | |
2785 media_type->SetUINT32(MF_MT_VIDEO_PRIMARIES, format.VideoPrimaries); | |
2786 media_type->SetUINT32(MF_MT_TRANSFER_FUNCTION, | |
2787 format.VideoTransferFunction); | |
2788 dx11_converter_color_space_ = color_space; | |
2789 } | |
2790 | |
2791 hr = video_format_converter_mft_->SetInputType(0, media_type.get(), 0); | |
2792 if (FAILED(hr)) | |
2793 DCHECK(false); | |
2794 | |
2795 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter input type", | |
2796 PLATFORM_FAILURE, false); | |
2797 | |
2798 // It appears that we fail to set MFVideoFormat_ARGB32 as the output media | |
2799 // type in certain configurations. Try to fallback to MFVideoFormat_RGB32 | |
2800 // in such cases. If both fail, then bail. | |
2801 | |
2802 bool media_type_set = false; | |
2803 if (copy_nv12_textures_) { | |
2804 media_type_set = SetTransformOutputType(video_format_converter_mft_.get(), | |
2805 MFVideoFormat_NV12, width, height); | |
2806 RETURN_AND_NOTIFY_ON_FAILURE(media_type_set, | |
2807 "Failed to set NV12 converter output type", | |
2808 PLATFORM_FAILURE, false); | |
2809 } | |
2810 | |
2811 if (!media_type_set) { | |
2812 media_type_set = SetTransformOutputType( | |
2813 video_format_converter_mft_.get(), MFVideoFormat_ARGB32, width, height); | |
2814 } | |
2815 if (!media_type_set) { | |
2816 media_type_set = SetTransformOutputType(video_format_converter_mft_.get(), | |
2817 MFVideoFormat_RGB32, width, height); | |
2818 } | |
2819 | |
2820 if (!media_type_set) { | |
2821 LOG(ERROR) << "Failed to find a matching RGB output type in the converter"; | |
2822 return false; | |
2823 } | |
2824 | |
2825 dx11_video_format_converter_media_type_needs_init_ = false; | |
2826 return true; | |
2827 } | |
2828 | |
2829 bool DXVAVideoDecodeAccelerator::GetVideoFrameDimensions(IMFSample* sample, | 2755 bool DXVAVideoDecodeAccelerator::GetVideoFrameDimensions(IMFSample* sample, |
2830 int* width, | 2756 int* width, |
2831 int* height) { | 2757 int* height) { |
2832 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; | 2758 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; |
2833 HRESULT hr = sample->GetBufferByIndex(0, output_buffer.Receive()); | 2759 HRESULT hr = sample->GetBufferByIndex(0, output_buffer.Receive()); |
2834 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", false); | 2760 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", false); |
2835 | 2761 |
2836 if (use_dx11_) { | 2762 if (use_dx11_) { |
2837 base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer; | 2763 base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer; |
2838 base::win::ScopedComPtr<ID3D11Texture2D> d3d11_texture; | 2764 base::win::ScopedComPtr<ID3D11Texture2D> d3d11_texture; |
(...skipping 85 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
2924 base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers, | 2850 base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers, |
2925 base::Unretained(this))); | 2851 base::Unretained(this))); |
2926 } | 2852 } |
2927 | 2853 |
2928 uint32_t DXVAVideoDecodeAccelerator::GetTextureTarget() const { | 2854 uint32_t DXVAVideoDecodeAccelerator::GetTextureTarget() const { |
2929 bool provide_nv12_textures = share_nv12_textures_ || copy_nv12_textures_; | 2855 bool provide_nv12_textures = share_nv12_textures_ || copy_nv12_textures_; |
2930 return provide_nv12_textures ? GL_TEXTURE_EXTERNAL_OES : GL_TEXTURE_2D; | 2856 return provide_nv12_textures ? GL_TEXTURE_EXTERNAL_OES : GL_TEXTURE_2D; |
2931 } | 2857 } |
2932 | 2858 |
2933 } // namespace media | 2859 } // namespace media |
OLD | NEW |