Index: media/gpu/dxva_video_decode_accelerator_win.cc |
diff --git a/media/gpu/dxva_video_decode_accelerator_win.cc b/media/gpu/dxva_video_decode_accelerator_win.cc |
index e154b7e2b75fc94759fbbfd61dbf84f87a50b942..aa0947d9788f9820c084d046063477860ef3a725 100644 |
--- a/media/gpu/dxva_video_decode_accelerator_win.cc |
+++ b/media/gpu/dxva_video_decode_accelerator_win.cc |
@@ -869,27 +869,45 @@ bool DXVAVideoDecodeAccelerator::CreateDX11DevManager() { |
hr = d3d11_device_->CreateQuery(&query_desc, d3d11_query_.Receive()); |
RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device query", false); |
- HMODULE video_processor_dll = ::GetModuleHandle(L"msvproc.dll"); |
- RETURN_ON_FAILURE(video_processor_dll, "Failed to load video processor", |
- false); |
- |
- hr = CreateCOMObjectFromDll(video_processor_dll, CLSID_VideoProcessorMFT, |
- __uuidof(IMFTransform), |
- video_format_converter_mft_.ReceiveVoid()); |
- RETURN_ON_HR_FAILURE(hr, "Failed to create video format converter", false); |
- |
- base::win::ScopedComPtr<IMFAttributes> converter_attributes; |
- hr = video_format_converter_mft_->GetAttributes( |
- converter_attributes.Receive()); |
- RETURN_ON_HR_FAILURE(hr, "Failed to get converter attributes", false); |
- |
- hr = converter_attributes->SetUINT32(MF_XVP_PLAYBACK_MODE, TRUE); |
- RETURN_ON_HR_FAILURE( |
- hr, "Failed to set MF_XVP_PLAYBACK_MODE attribute on converter", false); |
+ base::win::ScopedComPtr<ID3D11Device1> device1; |
+ hr = d3d11_device_.QueryInterface(__uuidof(ID3D11Device1), |
jbauman
2017/02/14 23:43:34
device1 isn't actually used.
hubbe
2017/02/15 01:37:13
Well spotted, gone.
|
+ device1.ReceiveVoid()); |
+ RETURN_ON_HR_FAILURE(hr, "Failed to get device1", false); |
+ |
+ base::win::ScopedComPtr<ID3D11DeviceContext1> device_context1; |
+ hr = d3d11_device_context_.QueryInterface(__uuidof(ID3D11DeviceContext1), |
+ device1.ReceiveVoid()); |
jbauman
2017/02/14 23:43:34
device_context1 here. Though through the transitiv
hubbe
2017/02/15 01:37:13
Fixed.
Though through the transitive property of
|
+ RETURN_ON_HR_FAILURE(hr, "Failed to get device context", false); |
+ hr = d3d11_device_.QueryInterface(__uuidof(ID3D11VideoDevice), |
jbauman
2017/02/14 23:43:34
ScopedComPtr has a template specialization for QueryInterface.
hubbe
2017/02/15 01:37:13
nice, done.
|
+ video_device_.ReceiveVoid()); |
+ RETURN_ON_HR_FAILURE(hr, "Failed to get video device", false); |
+ |
+ hr = device1.QueryInterface(__uuidof(ID3D11VideoContext1), |
jbauman
2017/02/14 23:43:34
We need to be able to fall back to ID3D11VideoContext.
hubbe
2017/02/15 01:37:13
What's the right way to do that?
|
+ video_context_.ReceiveVoid()); |
+ RETURN_ON_HR_FAILURE(hr, "Failed to get video context", false); |
+ |
+ // TODO(hubbe): Use actual video format. |
+ D3D11_VIDEO_PROCESSOR_CONTENT_DESC desc; |
+ desc.InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE; |
jbauman
2017/02/14 23:43:34
I'm curious - do we ever support hardware decoding
hubbe
2017/02/15 01:37:13
We do, we just put our fingers in our ears and hum
|
+ desc.InputFrameRate.Numerator = 60; |
+ desc.InputFrameRate.Denominator = 1; |
+ desc.InputWidth = 1920; |
jbauman
2017/02/14 23:43:34
I think we need to use the actual video size when
hubbe
2017/02/15 01:37:12
Done.
|
+ desc.InputHeight = 1080; |
+ desc.OutputFrameRate.Numerator = 60; |
+ desc.OutputFrameRate.Denominator = 1; |
+ desc.OutputWidth = 1920; |
+ desc.OutputHeight = 1080; |
+ desc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL; |
+ |
+ hr = video_device_->CreateVideoProcessorEnumerator(&desc, |
+ enumerator_.Receive()); |
+ RETURN_ON_HR_FAILURE(hr, "Failed to enumerate video processors", false); |
+ |
+ // TODO(Hubbe): Find correct index |
+ hr = video_device_->CreateVideoProcessor(enumerator_.get(), 0, |
+ d3d11_processor_.Receive()); |
+ RETURN_ON_HR_FAILURE(hr, "Failed to create video processor.", false); |
jbauman
2017/02/14 23:43:34
I've found that you need to set ID3D11VideoContext
hubbe
2017/02/15 01:37:13
Thanks for the tip!
|
- hr = converter_attributes->SetUINT32(MF_LOW_LATENCY, FALSE); |
- RETURN_ON_HR_FAILURE( |
- hr, "Failed to set MF_LOW_LATENCY attribute on converter", false); |
return true; |
} |
@@ -1966,11 +1984,10 @@ void DXVAVideoDecodeAccelerator::Invalidate() { |
pending_input_buffers_.clear(); |
pictures_requested_ = false; |
if (use_dx11_) { |
- if (video_format_converter_mft_.get()) { |
- video_format_converter_mft_->ProcessMessage( |
- MFT_MESSAGE_NOTIFY_END_STREAMING, 0); |
- video_format_converter_mft_.Release(); |
- } |
+ d3d11_processor_.Release(); |
+ enumerator_.Release(); |
+ video_context_.Release(); |
+ video_device_.Release(); |
d3d11_device_context_.Release(); |
d3d11_device_.Release(); |
d3d11_device_manager_.Release(); |
@@ -2563,22 +2580,6 @@ void DXVAVideoDecodeAccelerator::CopyTexture( |
// the StretchRect API in the IDirect3DDevice9Ex interface did the color |
// space conversion for us. Sadly in DX11 land the API does not provide |
// a straightforward way to do this. |
- // We use the video processor MFT. |
- // https://msdn.microsoft.com/en-us/library/hh162913(v=vs.85).aspx |
- // This object implements a media foundation transform (IMFTransform) |
- // which follows the same contract as the decoder. The color space |
- // conversion as per msdn is done in the GPU. |
- |
- D3D11_TEXTURE2D_DESC source_desc; |
- src_texture->GetDesc(&source_desc); |
- |
- // Set up the input and output types for the video processor MFT. |
- if (!InitializeDX11VideoFormatConverterMediaType( |
- source_desc.Width, source_desc.Height, color_space)) { |
- RETURN_AND_NOTIFY_ON_FAILURE( |
- false, "Failed to initialize media types for convesion.", |
- PLATFORM_FAILURE, ); |
- } |
// The input to the video processor is the output sample. |
base::win::ScopedComPtr<IMFSample> input_sample_for_conversion; |
@@ -2593,7 +2594,7 @@ void DXVAVideoDecodeAccelerator::CopyTexture( |
base::Bind(&DXVAVideoDecodeAccelerator::CopyTextureOnDecoderThread, |
base::Unretained(this), dest_texture, dest_keyed_mutex, |
keyed_mutex_value, input_sample_for_conversion, |
- picture_buffer_id, input_buffer_id)); |
+ picture_buffer_id, input_buffer_id, color_space)); |
} |
void DXVAVideoDecodeAccelerator::CopyTextureOnDecoderThread( |
@@ -2602,7 +2603,8 @@ void DXVAVideoDecodeAccelerator::CopyTextureOnDecoderThread( |
uint64_t keyed_mutex_value, |
base::win::ScopedComPtr<IMFSample> input_sample, |
int picture_buffer_id, |
- int input_buffer_id) { |
+ int input_buffer_id, |
+ const gfx::ColorSpace& color_space) { |
TRACE_EVENT0("media", |
"DXVAVideoDecodeAccelerator::CopyTextureOnDecoderThread"); |
DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
@@ -2610,7 +2612,7 @@ void DXVAVideoDecodeAccelerator::CopyTextureOnDecoderThread( |
DCHECK(use_dx11_); |
DCHECK(!!input_sample); |
- DCHECK(video_format_converter_mft_.get()); |
+ DCHECK(d3d11_processor_.get()); |
if (dest_keyed_mutex) { |
HRESULT hr = |
@@ -2619,50 +2621,65 @@ void DXVAVideoDecodeAccelerator::CopyTextureOnDecoderThread( |
hr == S_OK, "D3D11 failed to acquire keyed mutex for texture.", |
PLATFORM_FAILURE, ); |
} |
- // The video processor MFT requires output samples to be allocated by the |
- // caller. We create a sample with a buffer backed with the ID3D11Texture2D |
- // interface exposed by ANGLE. This works nicely as this ensures that the |
- // video processor coverts the color space of the output frame and copies |
- // the result into the ANGLE texture. |
- base::win::ScopedComPtr<IMFSample> output_sample; |
- hr = MFCreateSample(output_sample.Receive()); |
- if (FAILED(hr)) { |
- RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to create output sample.", |
- PLATFORM_FAILURE, ); |
- } |
base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; |
- hr = MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D), dest_texture, 0, |
- FALSE, output_buffer.Receive()); |
- if (FAILED(hr)) { |
- RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to create output sample.", |
- PLATFORM_FAILURE, ); |
- } |
+ hr = input_sample->GetBufferByIndex(0, output_buffer.Receive()); |
+ RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", ); |
+ |
+ base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer; |
+ hr = dxgi_buffer.QueryFrom(output_buffer.get()); |
+ RETURN_ON_HR_FAILURE(hr, "Failed to get DXGIBuffer from output sample", ); |
+ UINT index = 0; |
+ hr = dxgi_buffer->GetSubresourceIndex(&index); |
+ RETURN_ON_HR_FAILURE(hr, "Failed to get resource index", ); |
+ |
+ base::win::ScopedComPtr<ID3D11Texture2D> dx11_decoding_texture; |
+ hr = dxgi_buffer->GetResource(IID_PPV_ARGS(dx11_decoding_texture.Receive())); |
+ RETURN_ON_HR_FAILURE(hr, "Failed to get resource from output sample", ); |
+ |
+ D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC outputViewDesc = { |
+ D3D11_VPOV_DIMENSION_TEXTURE2D}; |
+ outputViewDesc.Texture2D.MipSlice = 0; |
jbauman
2017/02/14 23:43:34
Variable naming style (here and elsewhere).
hubbe
2017/02/15 01:37:12
Done.
|
+ base::win::ScopedComPtr<ID3D11VideoProcessorOutputView> videoProcOutputView; |
+ hr = video_device_->CreateVideoProcessorOutputView( |
+ dest_texture, enumerator_.get(), &outputViewDesc, |
+ videoProcOutputView.Receive()); |
+ RETURN_ON_HR_FAILURE(hr, "Failed to get output view", ); |
jbauman
2017/02/14 23:43:34
RETURN_AND_NOTIFY_ON_HR_FAILURE here and elsewhere
hubbe
2017/02/15 01:37:13
Done.
|
+ |
+ D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC inputViewDesc = {0}; |
+ inputViewDesc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D; |
+ inputViewDesc.FourCC = 0; // MAKEFOURCC('P', '0', '1', '0'); |
jbauman
2017/02/14 23:43:34
Remove
hubbe
2017/02/15 01:37:13
Done.
|
+ inputViewDesc.Texture2D.ArraySlice = index; // ?? |
+ inputViewDesc.Texture2D.MipSlice = 0; |
+ base::win::ScopedComPtr<ID3D11VideoProcessorInputView> videoProcInputView; |
+ hr = video_device_->CreateVideoProcessorInputView( |
+ dx11_decoding_texture.get(), enumerator_.get(), &inputViewDesc, |
+ videoProcInputView.Receive()); |
+ RETURN_ON_HR_FAILURE(hr, "Failed to get input view", ); |
- output_sample->AddBuffer(output_buffer.get()); |
- |
- hr = video_format_converter_mft_->ProcessInput(0, input_sample.get(), 0); |
- if (FAILED(hr)) { |
- DCHECK(false); |
- RETURN_AND_NOTIFY_ON_HR_FAILURE( |
- hr, "Failed to convert output sample format.", PLATFORM_FAILURE, ); |
+ if (use_color_info_) { |
+ video_context_->VideoProcessorSetStreamColorSpace1( |
+ d3d11_processor_.get(), 0, |
+ gfx::ColorSpaceWin::GetDXGIColorSpace(color_space)); |
+ |
+ if (share_nv12_textures_) { |
jbauman
2017/02/14 23:43:34
We should never hit this code with share_nv12_textures_.
hubbe
2017/02/15 01:37:13
Done.
|
+ video_context_->VideoProcessorSetOutputColorSpace1( |
+ d3d11_processor_.get(), |
+ gfx::ColorSpaceWin::GetDXGIColorSpace(color_space)); |
+ } else { |
+ video_context_->VideoProcessorSetOutputColorSpace1( |
+ d3d11_processor_.get(), DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709); |
+ } |
} |
- input_sample.Release(); |
+ D3D11_VIDEO_PROCESSOR_STREAM streams = {0}; |
+ streams.Enable = TRUE; |
+ streams.pInputSurface = videoProcInputView.get(); |
- DWORD status = 0; |
- MFT_OUTPUT_DATA_BUFFER format_converter_output = {}; |
- format_converter_output.pSample = output_sample.get(); |
- hr = video_format_converter_mft_->ProcessOutput( |
- 0, // No flags |
- 1, // # of out streams to pull from |
- &format_converter_output, &status); |
+ hr = video_context_->VideoProcessorBlt( |
+ d3d11_processor_.get(), videoProcOutputView.get(), 0, 1, &streams); |
- if (FAILED(hr)) { |
- DCHECK(false); |
- RETURN_AND_NOTIFY_ON_HR_FAILURE( |
- hr, "Failed to convert output sample format.", PLATFORM_FAILURE, ); |
- } |
+ RETURN_ON_HR_FAILURE(hr, "VideoProcessBlit failed", ); |
if (dest_keyed_mutex) { |
HRESULT hr = dest_keyed_mutex->ReleaseSync(keyed_mutex_value + 1); |
@@ -2735,97 +2752,6 @@ void DXVAVideoDecodeAccelerator::FlushDecoder(int iterations, |
picture_buffer_id, input_buffer_id)); |
} |
-bool DXVAVideoDecodeAccelerator::InitializeDX11VideoFormatConverterMediaType( |
- int width, |
- int height, |
- const gfx::ColorSpace& color_space) { |
- if (!dx11_video_format_converter_media_type_needs_init_ && |
jbauman
2017/02/14 23:43:34
dx11_video_format_converter_media_type_needs_init_
hubbe
2017/02/15 01:37:13
Done.
|
- (!use_color_info_ || color_space == dx11_converter_color_space_)) { |
- return true; |
- } |
- |
- CHECK(video_format_converter_mft_.get()); |
- |
- HRESULT hr = video_format_converter_mft_->ProcessMessage( |
- MFT_MESSAGE_SET_D3D_MANAGER, |
- reinterpret_cast<ULONG_PTR>(d3d11_device_manager_.get())); |
- |
- if (FAILED(hr)) |
- DCHECK(false); |
- |
- RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, |
- "Failed to initialize video format converter", |
- PLATFORM_FAILURE, false); |
- |
- video_format_converter_mft_->ProcessMessage(MFT_MESSAGE_NOTIFY_END_STREAMING, |
- 0); |
- |
- base::win::ScopedComPtr<IMFMediaType> media_type; |
- hr = MFCreateMediaType(media_type.Receive()); |
- RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFCreateMediaType failed", |
- PLATFORM_FAILURE, false); |
- |
- hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); |
- RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set major input type", |
- PLATFORM_FAILURE, false); |
- |
- hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12); |
- RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set input sub type", |
- PLATFORM_FAILURE, false); |
- |
- hr = MFSetAttributeSize(media_type.get(), MF_MT_FRAME_SIZE, width, height); |
- RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set media type attributes", |
- PLATFORM_FAILURE, false); |
- |
- if (use_color_info_) { |
- DXVA2_ExtendedFormat format = |
- gfx::ColorSpaceWin::GetExtendedFormat(color_space); |
- media_type->SetUINT32(MF_MT_YUV_MATRIX, format.VideoTransferMatrix); |
- media_type->SetUINT32(MF_MT_VIDEO_NOMINAL_RANGE, format.NominalRange); |
- media_type->SetUINT32(MF_MT_VIDEO_PRIMARIES, format.VideoPrimaries); |
- media_type->SetUINT32(MF_MT_TRANSFER_FUNCTION, |
- format.VideoTransferFunction); |
- dx11_converter_color_space_ = color_space; |
- } |
- |
- hr = video_format_converter_mft_->SetInputType(0, media_type.get(), 0); |
- if (FAILED(hr)) |
- DCHECK(false); |
- |
- RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter input type", |
- PLATFORM_FAILURE, false); |
- |
- // It appears that we fail to set MFVideoFormat_ARGB32 as the output media |
- // type in certain configurations. Try to fallback to MFVideoFormat_RGB32 |
- // in such cases. If both fail, then bail. |
- |
- bool media_type_set = false; |
- if (copy_nv12_textures_) { |
- media_type_set = SetTransformOutputType(video_format_converter_mft_.get(), |
- MFVideoFormat_NV12, width, height); |
- RETURN_AND_NOTIFY_ON_FAILURE(media_type_set, |
- "Failed to set NV12 converter output type", |
- PLATFORM_FAILURE, false); |
- } |
- |
- if (!media_type_set) { |
- media_type_set = SetTransformOutputType( |
- video_format_converter_mft_.get(), MFVideoFormat_ARGB32, width, height); |
- } |
- if (!media_type_set) { |
- media_type_set = SetTransformOutputType(video_format_converter_mft_.get(), |
- MFVideoFormat_RGB32, width, height); |
- } |
- |
- if (!media_type_set) { |
- LOG(ERROR) << "Failed to find a matching RGB output type in the converter"; |
- return false; |
- } |
- |
- dx11_video_format_converter_media_type_needs_init_ = false; |
- return true; |
-} |
- |
bool DXVAVideoDecodeAccelerator::GetVideoFrameDimensions(IMFSample* sample, |
int* width, |
int* height) { |