Chromium Code Reviews

| Index: media/gpu/dxva_video_decode_accelerator_win.cc |
| diff --git a/media/gpu/dxva_video_decode_accelerator_win.cc b/media/gpu/dxva_video_decode_accelerator_win.cc |
| index e154b7e2b75fc94759fbbfd61dbf84f87a50b942..02d3a6fb537f7f6008fc42941eb62a6304a41c83 100644 |
| --- a/media/gpu/dxva_video_decode_accelerator_win.cc |
| +++ b/media/gpu/dxva_video_decode_accelerator_win.cc |
| @@ -500,7 +500,6 @@ DXVAVideoDecodeAccelerator::DXVAVideoDecodeAccelerator( |
| !workarounds.disable_nv12_dxgi_video), |
| use_dx11_(false), |
| use_keyed_mutex_(false), |
| - dx11_video_format_converter_media_type_needs_init_(true), |
| using_angle_device_(false), |
| enable_accelerated_vpx_decode_( |
| gpu_preferences.enable_accelerated_vpx_decode), |
| @@ -869,27 +868,18 @@ bool DXVAVideoDecodeAccelerator::CreateDX11DevManager() { |
| hr = d3d11_device_->CreateQuery(&query_desc, d3d11_query_.Receive()); |
| RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device query", false); |
| - HMODULE video_processor_dll = ::GetModuleHandle(L"msvproc.dll"); |
| - RETURN_ON_FAILURE(video_processor_dll, "Failed to load video processor", |
| - false); |
| - |
| - hr = CreateCOMObjectFromDll(video_processor_dll, CLSID_VideoProcessorMFT, |
| - __uuidof(IMFTransform), |
| - video_format_converter_mft_.ReceiveVoid()); |
| - RETURN_ON_HR_FAILURE(hr, "Failed to create video format converter", false); |
| + base::win::ScopedComPtr<ID3D11DeviceContext1> device_context1; |
| + hr = d3d11_device_context_.QueryInterface(__uuidof(ID3D11DeviceContext1), |
| + device_context1.ReceiveVoid()); |
| + RETURN_ON_HR_FAILURE(hr, "Failed to get device context", false); |
| - base::win::ScopedComPtr<IMFAttributes> converter_attributes; |
| - hr = video_format_converter_mft_->GetAttributes( |
| - converter_attributes.Receive()); |
| - RETURN_ON_HR_FAILURE(hr, "Failed to get converter attributes", false); |
| + hr = d3d11_device_.QueryInterface(video_device_.Receive()); |
| + RETURN_ON_HR_FAILURE(hr, "Failed to get video device", false); |
| - hr = converter_attributes->SetUINT32(MF_XVP_PLAYBACK_MODE, TRUE); |
| - RETURN_ON_HR_FAILURE( |
| - hr, "Failed to set MF_XVP_PLAYBACK_MODE attribute on converter", false); |
| + hr = device_context1.QueryInterface(__uuidof(ID3D11VideoContext1), |
|
jbauman
2017/02/15 02:01:39
You could just do "hr = d3d11_device_context_.Quer
hubbe
2017/02/16 01:08:20
Done.
|
| + video_context_.ReceiveVoid()); |
| + RETURN_ON_HR_FAILURE(hr, "Failed to get video context", false); |
| - hr = converter_attributes->SetUINT32(MF_LOW_LATENCY, FALSE); |
| - RETURN_ON_HR_FAILURE( |
| - hr, "Failed to set MF_LOW_LATENCY attribute on converter", false); |
| return true; |
| } |
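For reference, the interface plumbing that replaces the msvproc.dll video processor MFT boils down to two QueryInterface calls. The following is only a sketch with assumed names (QueryVideoInterfaces, Microsoft::WRL::ComPtr in place of base::win::ScopedComPtr), not the patch itself:

#include <d3d11_1.h>
#include <wrl/client.h>

using Microsoft::WRL::ComPtr;

// Query the D3D11 video interfaces off an existing device / immediate
// context; returns false if the driver does not expose them.
bool QueryVideoInterfaces(ID3D11Device* device,
                          ID3D11DeviceContext* context,
                          ComPtr<ID3D11VideoDevice>& video_device,
                          ComPtr<ID3D11VideoContext1>& video_context) {
  // ID3D11VideoDevice creates the processor, the enumerator and the views.
  if (FAILED(device->QueryInterface(IID_PPV_ARGS(&video_device))))
    return false;
  // ID3D11VideoContext1 provides the *ColorSpace1 setters used later. Per
  // jbauman's comment above, it can be queried directly from the immediate
  // context without the intermediate ID3D11DeviceContext1 hop.
  if (FAILED(context->QueryInterface(IID_PPV_ARGS(&video_context))))
    return false;
  return true;
}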
| @@ -1966,17 +1956,16 @@ void DXVAVideoDecodeAccelerator::Invalidate() { |
| pending_input_buffers_.clear(); |
| pictures_requested_ = false; |
| if (use_dx11_) { |
| - if (video_format_converter_mft_.get()) { |
| - video_format_converter_mft_->ProcessMessage( |
| - MFT_MESSAGE_NOTIFY_END_STREAMING, 0); |
| - video_format_converter_mft_.Release(); |
| - } |
| + d3d11_processor_.Release(); |
| + enumerator_.Release(); |
| + video_context_.Release(); |
| + video_device_.Release(); |
| d3d11_device_context_.Release(); |
| d3d11_device_.Release(); |
| d3d11_device_manager_.Release(); |
| d3d11_query_.Release(); |
| multi_threaded_.Release(); |
| - dx11_video_format_converter_media_type_needs_init_ = true; |
| + processor_width_ = processor_height_ = 0; |
| } else { |
| d3d9_.Release(); |
| d3d9_device_ex_.Release(); |
| @@ -2277,8 +2266,6 @@ void DXVAVideoDecodeAccelerator::DecodeInternal( |
| void DXVAVideoDecodeAccelerator::HandleResolutionChanged(int width, |
| int height) { |
| - dx11_video_format_converter_media_type_needs_init_ = true; |
| - |
| main_thread_task_runner_->PostTask( |
| FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::DismissStaleBuffers, |
| weak_ptr_, false)); |
| @@ -2563,22 +2550,11 @@ void DXVAVideoDecodeAccelerator::CopyTexture( |
| // the StretchRect API in the IDirect3DDevice9Ex interface did the color |
| // space conversion for us. Sadly in DX11 land the API does not provide |
| // a straightforward way to do this. |
| - // We use the video processor MFT. |
| - // https://msdn.microsoft.com/en-us/library/hh162913(v=vs.85).aspx |
| - // This object implements a media foundation transform (IMFTransform) |
| - // which follows the same contract as the decoder. The color space |
| - // conversion as per msdn is done in the GPU. |
| D3D11_TEXTURE2D_DESC source_desc; |
| src_texture->GetDesc(&source_desc); |
| - |
| - // Set up the input and output types for the video processor MFT. |
| - if (!InitializeDX11VideoFormatConverterMediaType( |
| - source_desc.Width, source_desc.Height, color_space)) { |
| - RETURN_AND_NOTIFY_ON_FAILURE( |
| - false, "Failed to initialize media types for convesion.", |
| - PLATFORM_FAILURE, ); |
| - } |
| + InitializeI3D11VideoProcessor(source_desc.Width, source_desc.Height, |
|
jbauman
2017/02/15 02:01:39
Check the return value and do RETURN_AND_NOTIFY_ON
hubbe
2017/02/16 01:08:20
Done.
|
| + color_space); |
| // The input to the video processor is the output sample. |
| base::win::ScopedComPtr<IMFSample> input_sample_for_conversion; |
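For context, the StretchRect behaviour the comment refers to on the D3D9 path is roughly the call below; a sketch only, with a hypothetical helper name, since the YUV-to-RGB conversion there is an implicit side effect of the copy:

#include <d3d9.h>

// On D3D9, copying a YUV decode surface to an RGB target via StretchRect
// performs the color space conversion as part of the copy.
HRESULT CopyWithImplicitConversion(IDirect3DDevice9Ex* device,
                                   IDirect3DSurface9* src_yuv_surface,
                                   IDirect3DSurface9* dest_rgb_surface) {
  return device->StretchRect(src_yuv_surface, nullptr, dest_rgb_surface,
                             nullptr, D3DTEXF_NONE);
}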
| @@ -2610,7 +2586,7 @@ void DXVAVideoDecodeAccelerator::CopyTextureOnDecoderThread( |
| DCHECK(use_dx11_); |
| DCHECK(!!input_sample); |
| - DCHECK(video_format_converter_mft_.get()); |
| + DCHECK(d3d11_processor_.get()); |
| if (dest_keyed_mutex) { |
| HRESULT hr = |
| @@ -2619,50 +2595,56 @@ void DXVAVideoDecodeAccelerator::CopyTextureOnDecoderThread( |
| hr == S_OK, "D3D11 failed to acquire keyed mutex for texture.", |
| PLATFORM_FAILURE, ); |
| } |
| - // The video processor MFT requires output samples to be allocated by the |
| - // caller. We create a sample with a buffer backed with the ID3D11Texture2D |
| - // interface exposed by ANGLE. This works nicely as this ensures that the |
| - // video processor coverts the color space of the output frame and copies |
| - // the result into the ANGLE texture. |
| - base::win::ScopedComPtr<IMFSample> output_sample; |
| - hr = MFCreateSample(output_sample.Receive()); |
| - if (FAILED(hr)) { |
| - RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to create output sample.", |
| - PLATFORM_FAILURE, ); |
| - } |
| base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; |
| - hr = MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D), dest_texture, 0, |
| - FALSE, output_buffer.Receive()); |
| - if (FAILED(hr)) { |
| - RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to create output sample.", |
| - PLATFORM_FAILURE, ); |
| - } |
| + hr = input_sample->GetBufferByIndex(0, output_buffer.Receive()); |
| + RETURN_AND_NOTIFY_ON_FAILURE(hr, "Failed to get buffer from output sample", |
| + PLATFORM_FAILURE, ); |
| - output_sample->AddBuffer(output_buffer.get()); |
| + base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer; |
| + hr = dxgi_buffer.QueryFrom(output_buffer.get()); |
| + RETURN_AND_NOTIFY_ON_FAILURE( |
| + hr, "Failed to get DXGIBuffer from output sample", PLATFORM_FAILURE, ); |
| + UINT index = 0; |
| + hr = dxgi_buffer->GetSubresourceIndex(&index); |
| + RETURN_AND_NOTIFY_ON_FAILURE(hr, "Failed to get resource index", |
|
jbauman
2017/02/15 02:01:39
These should be RETURN_AND_NOTIFY_ON_HR_FAILURE.
hubbe
2017/02/16 01:08:20
Done.
|
| + PLATFORM_FAILURE, ); |
| - hr = video_format_converter_mft_->ProcessInput(0, input_sample.get(), 0); |
| - if (FAILED(hr)) { |
| - DCHECK(false); |
| - RETURN_AND_NOTIFY_ON_HR_FAILURE( |
| - hr, "Failed to convert output sample format.", PLATFORM_FAILURE, ); |
| - } |
| + base::win::ScopedComPtr<ID3D11Texture2D> dx11_decoding_texture; |
| + hr = dxgi_buffer->GetResource(IID_PPV_ARGS(dx11_decoding_texture.Receive())); |
| + RETURN_AND_NOTIFY_ON_FAILURE(hr, "Failed to get resource from output sample", |
| + PLATFORM_FAILURE, ); |
| - input_sample.Release(); |
| + D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC output_view_desc = { |
| + D3D11_VPOV_DIMENSION_TEXTURE2D}; |
| + output_view_desc.Texture2D.MipSlice = 0; |
| + base::win::ScopedComPtr<ID3D11VideoProcessorOutputView> output_view; |
| + hr = video_device_->CreateVideoProcessorOutputView( |
| + dest_texture, enumerator_.get(), &output_view_desc, |
| + output_view.Receive()); |
| + RETURN_AND_NOTIFY_ON_FAILURE(hr, "Failed to get output view", |
| + PLATFORM_FAILURE, ); |
| - DWORD status = 0; |
| - MFT_OUTPUT_DATA_BUFFER format_converter_output = {}; |
| - format_converter_output.pSample = output_sample.get(); |
| - hr = video_format_converter_mft_->ProcessOutput( |
| - 0, // No flags |
| - 1, // # of out streams to pull from |
| - &format_converter_output, &status); |
| + D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC input_view_desc = {0}; |
| + input_view_desc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D; |
| + input_view_desc.Texture2D.ArraySlice = index; |
| + input_view_desc.Texture2D.MipSlice = 0; |
| + base::win::ScopedComPtr<ID3D11VideoProcessorInputView> input_view; |
| + hr = video_device_->CreateVideoProcessorInputView( |
| + dx11_decoding_texture.get(), enumerator_.get(), &input_view_desc, |
| + input_view.Receive()); |
| + RETURN_AND_NOTIFY_ON_FAILURE(hr, "Failed to get input view", |
| + PLATFORM_FAILURE, ); |
| - if (FAILED(hr)) { |
| - DCHECK(false); |
| - RETURN_AND_NOTIFY_ON_HR_FAILURE( |
| - hr, "Failed to convert output sample format.", PLATFORM_FAILURE, ); |
| - } |
| + D3D11_VIDEO_PROCESSOR_STREAM streams = {0}; |
| + streams.Enable = TRUE; |
| + streams.pInputSurface = input_view.get(); |
| + |
| + hr = video_context_->VideoProcessorBlt(d3d11_processor_.get(), |
| + output_view.get(), 0, 1, &streams); |
| + |
| + RETURN_AND_NOTIFY_ON_FAILURE(hr, "VideoProcessBlit failed", |
| + PLATFORM_FAILURE, ); |
| if (dest_keyed_mutex) { |
| HRESULT hr = dest_keyed_mutex->ReleaseSync(keyed_mutex_value + 1); |
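Taken together, the new decoder-thread copy is a standard D3D11 video processor blit. The sketch below shows the same sequence end to end with assumed free-standing names and Microsoft::WRL::ComPtr in place of base::win::ScopedComPtr; error handling is collapsed to early returns:

#include <d3d11_1.h>
#include <mfobjects.h>
#include <wrl/client.h>

using Microsoft::WRL::ComPtr;

HRESULT BlitSampleToTexture(IMFSample* decoded_sample,
                            ID3D11Texture2D* dest_texture,
                            ID3D11VideoDevice* video_device,
                            ID3D11VideoContext* video_context,
                            ID3D11VideoProcessor* processor,
                            ID3D11VideoProcessorEnumerator* enumerator) {
  // The decoder writes into a texture array; the sample's DXGI buffer tells
  // us which texture and which array slice hold the decoded frame.
  ComPtr<IMFMediaBuffer> buffer;
  HRESULT hr = decoded_sample->GetBufferByIndex(0, &buffer);
  if (FAILED(hr)) return hr;

  ComPtr<IMFDXGIBuffer> dxgi_buffer;
  hr = buffer.As(&dxgi_buffer);
  if (FAILED(hr)) return hr;

  ComPtr<ID3D11Texture2D> src_texture;
  hr = dxgi_buffer->GetResource(IID_PPV_ARGS(&src_texture));
  if (FAILED(hr)) return hr;

  UINT array_slice = 0;
  hr = dxgi_buffer->GetSubresourceIndex(&array_slice);
  if (FAILED(hr)) return hr;

  // Destination view: mip 0 of the target texture (provided by ANGLE in the
  // real code).
  D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC output_desc = {};
  output_desc.ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2D;
  output_desc.Texture2D.MipSlice = 0;
  ComPtr<ID3D11VideoProcessorOutputView> output_view;
  hr = video_device->CreateVideoProcessorOutputView(
      dest_texture, enumerator, &output_desc, &output_view);
  if (FAILED(hr)) return hr;

  // Source view: the decoded slice within the decoder's texture array.
  D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC input_desc = {};
  input_desc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D;
  input_desc.Texture2D.MipSlice = 0;
  input_desc.Texture2D.ArraySlice = array_slice;
  ComPtr<ID3D11VideoProcessorInputView> input_view;
  hr = video_device->CreateVideoProcessorInputView(
      src_texture.Get(), enumerator, &input_desc, &input_view);
  if (FAILED(hr)) return hr;

  // One enabled stream; the processor does the format/color space conversion
  // on the GPU as part of the blit.
  D3D11_VIDEO_PROCESSOR_STREAM stream = {};
  stream.Enable = TRUE;
  stream.pInputSurface = input_view.Get();
  return video_context->VideoProcessorBlt(processor, output_view.Get(),
                                          /*OutputFrame=*/0,
                                          /*StreamCount=*/1, &stream);
}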
| @@ -2735,94 +2717,60 @@ void DXVAVideoDecodeAccelerator::FlushDecoder(int iterations, |
| picture_buffer_id, input_buffer_id)); |
| } |
| -bool DXVAVideoDecodeAccelerator::InitializeDX11VideoFormatConverterMediaType( |
| +bool DXVAVideoDecodeAccelerator::InitializeI3D11VideoProcessor( |
| int width, |
| int height, |
| const gfx::ColorSpace& color_space) { |
| - if (!dx11_video_format_converter_media_type_needs_init_ && |
| - (!use_color_info_ || color_space == dx11_converter_color_space_)) { |
| - return true; |
| + if (width < processor_width_ || height != processor_height_) { |
| + d3d11_processor_.Release(); |
| + enumerator_.Release(); |
| + processor_width_ = 0; |
| + processor_height_ = 0; |
| + |
| + D3D11_VIDEO_PROCESSOR_CONTENT_DESC desc; |
| + desc.InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE; |
| + desc.InputFrameRate.Numerator = 60; |
| + desc.InputFrameRate.Denominator = 1; |
| + desc.InputWidth = width; |
| + desc.InputHeight = height; |
| + desc.OutputFrameRate.Numerator = 60; |
| + desc.OutputFrameRate.Denominator = 1; |
| + desc.OutputWidth = width; |
| + desc.OutputHeight = height; |
| + desc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL; |
| + |
| + HRESULT hr = video_device_->CreateVideoProcessorEnumerator( |
| + &desc, enumerator_.Receive()); |
| + RETURN_ON_HR_FAILURE(hr, "Failed to enumerate video processors", false); |
| + |
| + // TODO(Hubbe): Find correct index |
| + hr = video_device_->CreateVideoProcessor(enumerator_.get(), 0, |
| + d3d11_processor_.Receive()); |
| + RETURN_ON_HR_FAILURE(hr, "Failed to create video processor.", false); |
| + processor_width_ = width; |
| + processor_height_ = height; |
| + |
| + video_context_->VideoProcessorSetStreamAutoProcessingMode( |
| + d3d11_processor_.get(), 0, false); |
| + |
| + if (copy_nv12_textures_) { |
| + video_context_->VideoProcessorSetOutputColorSpace1( |
| + d3d11_processor_.get(), |
| + gfx::ColorSpaceWin::GetDXGIColorSpace(color_space)); |
| + } else { |
| + video_context_->VideoProcessorSetOutputColorSpace1( |
| + d3d11_processor_.get(), DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709); |
| + } |
| } |
| - CHECK(video_format_converter_mft_.get()); |
| - |
| - HRESULT hr = video_format_converter_mft_->ProcessMessage( |
| - MFT_MESSAGE_SET_D3D_MANAGER, |
| - reinterpret_cast<ULONG_PTR>(d3d11_device_manager_.get())); |
| - |
| - if (FAILED(hr)) |
| - DCHECK(false); |
| - |
| - RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, |
| - "Failed to initialize video format converter", |
| - PLATFORM_FAILURE, false); |
| - |
| - video_format_converter_mft_->ProcessMessage(MFT_MESSAGE_NOTIFY_END_STREAMING, |
| - 0); |
| - |
| - base::win::ScopedComPtr<IMFMediaType> media_type; |
| - hr = MFCreateMediaType(media_type.Receive()); |
| - RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFCreateMediaType failed", |
| - PLATFORM_FAILURE, false); |
| - |
| - hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); |
| - RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set major input type", |
| - PLATFORM_FAILURE, false); |
| - |
| - hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12); |
| - RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set input sub type", |
| - PLATFORM_FAILURE, false); |
| - |
| - hr = MFSetAttributeSize(media_type.get(), MF_MT_FRAME_SIZE, width, height); |
| - RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set media type attributes", |
| - PLATFORM_FAILURE, false); |
| - |
| - if (use_color_info_) { |
| - DXVA2_ExtendedFormat format = |
| - gfx::ColorSpaceWin::GetExtendedFormat(color_space); |
| - media_type->SetUINT32(MF_MT_YUV_MATRIX, format.VideoTransferMatrix); |
| - media_type->SetUINT32(MF_MT_VIDEO_NOMINAL_RANGE, format.NominalRange); |
| - media_type->SetUINT32(MF_MT_VIDEO_PRIMARIES, format.VideoPrimaries); |
| - media_type->SetUINT32(MF_MT_TRANSFER_FUNCTION, |
| - format.VideoTransferFunction); |
| + // Not sure if this call is expensive, let's only do it if the color |
| + // space changes. |
| + if (use_color_info_ && dx11_converter_color_space_ != color_space) { |
| + video_context_->VideoProcessorSetStreamColorSpace1( |
|
jbauman
2017/02/15 02:01:39
Use QueryInterface to get an ID3D11VideoContext1 h
hubbe
2017/02/16 01:08:20
Done.
|
| + d3d11_processor_.get(), 0, |
| + gfx::ColorSpaceWin::GetDXGIColorSpace(color_space)); |
| dx11_converter_color_space_ = color_space; |
| } |
| - |
| - hr = video_format_converter_mft_->SetInputType(0, media_type.get(), 0); |
| - if (FAILED(hr)) |
| - DCHECK(false); |
| - |
| - RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter input type", |
| - PLATFORM_FAILURE, false); |
| - |
| - // It appears that we fail to set MFVideoFormat_ARGB32 as the output media |
| - // type in certain configurations. Try to fallback to MFVideoFormat_RGB32 |
| - // in such cases. If both fail, then bail. |
| - |
| - bool media_type_set = false; |
| - if (copy_nv12_textures_) { |
| - media_type_set = SetTransformOutputType(video_format_converter_mft_.get(), |
| - MFVideoFormat_NV12, width, height); |
| - RETURN_AND_NOTIFY_ON_FAILURE(media_type_set, |
| - "Failed to set NV12 converter output type", |
| - PLATFORM_FAILURE, false); |
| - } |
| - |
| - if (!media_type_set) { |
| - media_type_set = SetTransformOutputType( |
| - video_format_converter_mft_.get(), MFVideoFormat_ARGB32, width, height); |
| - } |
| - if (!media_type_set) { |
| - media_type_set = SetTransformOutputType(video_format_converter_mft_.get(), |
| - MFVideoFormat_RGB32, width, height); |
| - } |
| - |
| - if (!media_type_set) { |
| - LOG(ERROR) << "Failed to find a matching RGB output type in the converter"; |
| - return false; |
| - } |
| - |
| - dx11_video_format_converter_media_type_needs_init_ = false; |
| return true; |
| } |
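The processor setup itself follows the usual enumerator-then-processor pattern, sized to the decoded frame and recreated only when the size changes. Below is a sketch with assumed names and hard-coded BT.709 color spaces; the actual patch derives the DXGI color spaces from gfx::ColorSpaceWin::GetDXGIColorSpace and keeps the objects in member variables:

#include <d3d11_1.h>
#include <wrl/client.h>

using Microsoft::WRL::ComPtr;

HRESULT CreateVideoProcessorForSize(
    ID3D11VideoDevice* video_device,
    ID3D11VideoContext1* video_context,
    UINT width,
    UINT height,
    ComPtr<ID3D11VideoProcessorEnumerator>& enumerator,
    ComPtr<ID3D11VideoProcessor>& processor) {
  // The content description mainly fixes the frame size; the frame rate
  // fields are required but irrelevant for a one-off blit.
  D3D11_VIDEO_PROCESSOR_CONTENT_DESC desc = {};
  desc.InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE;
  desc.InputFrameRate = {60, 1};
  desc.InputWidth = width;
  desc.InputHeight = height;
  desc.OutputFrameRate = {60, 1};
  desc.OutputWidth = width;
  desc.OutputHeight = height;
  desc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL;

  HRESULT hr = video_device->CreateVideoProcessorEnumerator(
      &desc, enumerator.ReleaseAndGetAddressOf());
  if (FAILED(hr)) return hr;

  // Rate-conversion index 0, matching the TODO(Hubbe) in the patch.
  hr = video_device->CreateVideoProcessor(enumerator.Get(), 0,
                                          processor.ReleaseAndGetAddressOf());
  if (FAILED(hr)) return hr;

  // No automatic enhancement; this processor only performs the conversion.
  video_context->VideoProcessorSetStreamAutoProcessingMode(processor.Get(), 0,
                                                           FALSE);

  // The *ColorSpace1 setters are the reason ID3D11VideoContext1 is queried in
  // CreateDX11DevManager: e.g. studio-range BT.709 YUV in, full-range sRGB
  // out.
  video_context->VideoProcessorSetStreamColorSpace1(
      processor.Get(), 0, DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709);
  video_context->VideoProcessorSetOutputColorSpace1(
      processor.Get(), DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709);
  return S_OK;
}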