Index: media/gpu/dxva_video_decode_accelerator_win.cc
diff --git a/media/gpu/dxva_video_decode_accelerator_win.cc b/media/gpu/dxva_video_decode_accelerator_win.cc
index e046b3582d47c2f95c2255c8a41fafbef21d9b40..a3ebb2d14593aeff7f5d82a6133ac04c39d8664f 100644
--- a/media/gpu/dxva_video_decode_accelerator_win.cc
+++ b/media/gpu/dxva_video_decode_accelerator_win.cc
@@ -500,7 +500,6 @@ DXVAVideoDecodeAccelerator::DXVAVideoDecodeAccelerator(
                           !workarounds.disable_nv12_dxgi_video),
       use_dx11_(false),
       use_keyed_mutex_(false),
-      dx11_video_format_converter_media_type_needs_init_(true),
       using_angle_device_(false),
       enable_accelerated_vpx_decode_(
           gpu_preferences.enable_accelerated_vpx_decode),
@@ -830,6 +829,12 @@ bool DXVAVideoDecodeAccelerator::CreateDX11DevManager() {
           &feature_level_out, d3d11_device_context_.Receive());
       RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device", false);
     }
+
+    hr = d3d11_device_.QueryInterface(video_device_.Receive());
+    RETURN_ON_HR_FAILURE(hr, "Failed to get video device", false);
+
+    hr = d3d11_device_context_.QueryInterface(video_context_.Receive());
+    RETURN_ON_HR_FAILURE(hr, "Failed to get video context", false);
   }
 
   D3D11_FEATURE_DATA_D3D11_OPTIONS options;
@@ -868,27 +873,6 @@ bool DXVAVideoDecodeAccelerator::CreateDX11DevManager() {
   hr = d3d11_device_->CreateQuery(&query_desc, d3d11_query_.Receive());
   RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device query", false);
 
-  HMODULE video_processor_dll = ::GetModuleHandle(L"msvproc.dll");
-  RETURN_ON_FAILURE(video_processor_dll, "Failed to load video processor",
-                    false);
-
-  hr = CreateCOMObjectFromDll(video_processor_dll, CLSID_VideoProcessorMFT,
-                              __uuidof(IMFTransform),
-                              video_format_converter_mft_.ReceiveVoid());
-  RETURN_ON_HR_FAILURE(hr, "Failed to create video format converter", false);
-
-  base::win::ScopedComPtr<IMFAttributes> converter_attributes;
-  hr = video_format_converter_mft_->GetAttributes(
-      converter_attributes.Receive());
-  RETURN_ON_HR_FAILURE(hr, "Failed to get converter attributes", false);
-
-  hr = converter_attributes->SetUINT32(MF_XVP_PLAYBACK_MODE, TRUE);
-  RETURN_ON_HR_FAILURE(
-      hr, "Failed to set MF_XVP_PLAYBACK_MODE attribute on converter", false);
-
-  hr = converter_attributes->SetUINT32(MF_LOW_LATENCY, FALSE);
-  RETURN_ON_HR_FAILURE(
-      hr, "Failed to set MF_LOW_LATENCY attribute on converter", false);
   return true;
 }
 
@@ -1967,17 +1951,16 @@ void DXVAVideoDecodeAccelerator::Invalidate() {
     pending_input_buffers_.clear();
     pictures_requested_ = false;
     if (use_dx11_) {
-      if (video_format_converter_mft_.get()) {
-        video_format_converter_mft_->ProcessMessage(
-            MFT_MESSAGE_NOTIFY_END_STREAMING, 0);
-        video_format_converter_mft_.Release();
-      }
+      d3d11_processor_.Release();
+      enumerator_.Release();
+      video_context_.Release();
+      video_device_.Release();
       d3d11_device_context_.Release();
       d3d11_device_.Release();
       d3d11_device_manager_.Release();
       d3d11_query_.Release();
       multi_threaded_.Release();
-      dx11_video_format_converter_media_type_needs_init_ = true;
+      processor_width_ = processor_height_ = 0;
     } else {
       d3d9_.Release();
       d3d9_device_ex_.Release();
@@ -2278,8 +2261,6 @@ void DXVAVideoDecodeAccelerator::DecodeInternal(
 
 void DXVAVideoDecodeAccelerator::HandleResolutionChanged(int width,
                                                          int height) {
-  dx11_video_format_converter_media_type_needs_init_ = true;
-
   main_thread_task_runner_->PostTask(
       FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::DismissStaleBuffers,
                             weak_ptr_, false));
@@ -2569,21 +2550,14 @@ void DXVAVideoDecodeAccelerator::CopyTexture(
   // the StretchRect API in the IDirect3DDevice9Ex interface did the color
   // space conversion for us. Sadly in DX11 land the API does not provide
   // a straightforward way to do this.
-  // We use the video processor MFT.
-  // https://msdn.microsoft.com/en-us/library/hh162913(v=vs.85).aspx
-  // This object implements a media foundation transform (IMFTransform)
-  // which follows the same contract as the decoder. The color space
-  // conversion as per msdn is done in the GPU.
 
   D3D11_TEXTURE2D_DESC source_desc;
   src_texture->GetDesc(&source_desc);
-
-  // Set up the input and output types for the video processor MFT.
-  if (!InitializeDX11VideoFormatConverterMediaType(
-          source_desc.Width, source_desc.Height, color_space)) {
-    RETURN_AND_NOTIFY_ON_FAILURE(
-        false, "Failed to initialize media types for convesion.",
-        PLATFORM_FAILURE, );
+  if (!InitializeID3D11VideoProcessor(source_desc.Width, source_desc.Height,
+                                      color_space)) {
+    RETURN_AND_NOTIFY_ON_FAILURE(false,
                                 "Failed to initialize D3D11 video processor.",
+                                 PLATFORM_FAILURE, );
   }
 
   // The input to the video processor is the output sample.
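
As the comment above notes, DX11 has no StretchRect-style call that converts color spaces during a copy, so the patch builds an ID3D11VideoProcessor instead. The following is a minimal sketch of the setup that InitializeID3D11VideoProcessor (added later in this patch) performs, assuming Microsoft::WRL::ComPtr in place of base::win::ScopedComPtr; the free-function name and its parameters are illustrative, not Chromium APIs, and error handling is reduced to bools.

#include <d3d11.h>
#include <wrl/client.h>

using Microsoft::WRL::ComPtr;

// Creates the D3D11 video processor objects from an existing device/context,
// mirroring the QueryInterface + CreateVideoProcessorEnumerator +
// CreateVideoProcessor sequence this patch adds.
bool CreateVideoProcessor(ID3D11Device* device,
                          ID3D11DeviceContext* device_context,
                          UINT width,
                          UINT height,
                          ComPtr<ID3D11VideoDevice>& video_device,
                          ComPtr<ID3D11VideoContext>& video_context,
                          ComPtr<ID3D11VideoProcessorEnumerator>& enumerator,
                          ComPtr<ID3D11VideoProcessor>& processor) {
  // The video-specific interfaces live on the same device/context objects.
  if (FAILED(device->QueryInterface(video_device.GetAddressOf())) ||
      FAILED(device_context->QueryInterface(video_context.GetAddressOf())))
    return false;

  // Describe the content being processed; the frame rates are only hints.
  D3D11_VIDEO_PROCESSOR_CONTENT_DESC desc = {};
  desc.InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE;
  desc.InputFrameRate.Numerator = 60;
  desc.InputFrameRate.Denominator = 1;
  desc.InputWidth = width;
  desc.InputHeight = height;
  desc.OutputFrameRate.Numerator = 60;
  desc.OutputFrameRate.Denominator = 1;
  desc.OutputWidth = width;
  desc.OutputHeight = height;
  desc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL;

  if (FAILED(video_device->CreateVideoProcessorEnumerator(
          &desc, enumerator.GetAddressOf())))
    return false;

  // Index 0 selects the first rate-conversion caps entry, as the patch does.
  return SUCCEEDED(video_device->CreateVideoProcessor(
      enumerator.Get(), 0, processor.GetAddressOf()));
}
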
@@ -2616,7 +2590,7 @@ void DXVAVideoDecodeAccelerator::CopyTextureOnDecoderThread(
 
   DCHECK(use_dx11_);
   DCHECK(!!input_sample);
-  DCHECK(video_format_converter_mft_.get());
+  DCHECK(d3d11_processor_.get());
 
   if (dest_keyed_mutex) {
     HRESULT hr =
@@ -2625,50 +2599,56 @@ void DXVAVideoDecodeAccelerator::CopyTextureOnDecoderThread(
         hr == S_OK, "D3D11 failed to acquire keyed mutex for texture.",
         PLATFORM_FAILURE, );
   }
-  // The video processor MFT requires output samples to be allocated by the
-  // caller. We create a sample with a buffer backed with the ID3D11Texture2D
-  // interface exposed by ANGLE. This works nicely as this ensures that the
-  // video processor coverts the color space of the output frame and copies
-  // the result into the ANGLE texture.
-  base::win::ScopedComPtr<IMFSample> output_sample;
-  hr = MFCreateSample(output_sample.Receive());
-  if (FAILED(hr)) {
-    RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to create output sample.",
-                                    PLATFORM_FAILURE, );
-  }
 
   base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
-  hr = MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D), dest_texture, 0,
-                                 FALSE, output_buffer.Receive());
-  if (FAILED(hr)) {
-    RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to create output sample.",
-                                    PLATFORM_FAILURE, );
-  }
+  hr = input_sample->GetBufferByIndex(0, output_buffer.Receive());
+  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get buffer from output sample",
+                                  PLATFORM_FAILURE, );
 
-  output_sample->AddBuffer(output_buffer.get());
+  base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer;
+  hr = dxgi_buffer.QueryFrom(output_buffer.get());
+  RETURN_AND_NOTIFY_ON_HR_FAILURE(
+      hr, "Failed to get DXGIBuffer from output sample", PLATFORM_FAILURE, );
+  UINT index = 0;
+  hr = dxgi_buffer->GetSubresourceIndex(&index);
+  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get resource index",
+                                  PLATFORM_FAILURE, );
 
-  hr = video_format_converter_mft_->ProcessInput(0, input_sample.get(), 0);
-  if (FAILED(hr)) {
-    DCHECK(false);
-    RETURN_AND_NOTIFY_ON_HR_FAILURE(
-        hr, "Failed to convert output sample format.", PLATFORM_FAILURE, );
-  }
+  base::win::ScopedComPtr<ID3D11Texture2D> dx11_decoding_texture;
+  hr = dxgi_buffer->GetResource(IID_PPV_ARGS(dx11_decoding_texture.Receive()));
+  RETURN_AND_NOTIFY_ON_HR_FAILURE(
+      hr, "Failed to get resource from output sample", PLATFORM_FAILURE, );
+
+  D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC output_view_desc = {
+      D3D11_VPOV_DIMENSION_TEXTURE2D};
+  output_view_desc.Texture2D.MipSlice = 0;
+  base::win::ScopedComPtr<ID3D11VideoProcessorOutputView> output_view;
+  hr = video_device_->CreateVideoProcessorOutputView(
+      dest_texture, enumerator_.get(), &output_view_desc,
+      output_view.Receive());
+  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get output view",
+                                  PLATFORM_FAILURE, );
 
-  input_sample.Release();
+  D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC input_view_desc = {0};
+  input_view_desc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D;
+  input_view_desc.Texture2D.ArraySlice = index;
+  input_view_desc.Texture2D.MipSlice = 0;
+  base::win::ScopedComPtr<ID3D11VideoProcessorInputView> input_view;
+  hr = video_device_->CreateVideoProcessorInputView(
+      dx11_decoding_texture.get(), enumerator_.get(), &input_view_desc,
+      input_view.Receive());
+  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get input view",
+                                  PLATFORM_FAILURE, );
 
-  DWORD status = 0;
-  MFT_OUTPUT_DATA_BUFFER format_converter_output = {};
-  format_converter_output.pSample = output_sample.get();
-  hr = video_format_converter_mft_->ProcessOutput(
-      0,  // No flags
-      1,  // # of out streams to pull from
-      &format_converter_output, &status);
+  D3D11_VIDEO_PROCESSOR_STREAM streams = {0};
+  streams.Enable = TRUE;
+  streams.pInputSurface = input_view.get();
 
-  if (FAILED(hr)) {
-    DCHECK(false);
-    RETURN_AND_NOTIFY_ON_HR_FAILURE(
-        hr, "Failed to convert output sample format.", PLATFORM_FAILURE, );
-  }
+  hr = video_context_->VideoProcessorBlt(d3d11_processor_.get(),
+                                         output_view.get(), 0, 1, &streams);
+
+  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "VideoProcessorBlt failed",
+                                  PLATFORM_FAILURE, );
 
   if (dest_keyed_mutex) {
     HRESULT hr = dest_keyed_mutex->ReleaseSync(keyed_mutex_value + 1);
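
The hunk above replaces the MFT ProcessInput/ProcessOutput pair with a view-based blit. A minimal sketch of that per-frame path follows, under the same assumptions as the earlier sketch (WRL::ComPtr, illustrative names): the decoded NV12 array slice becomes a video processor input view, the ANGLE-provided texture becomes the output view, and VideoProcessorBlt performs the conversion on the GPU.

#include <d3d11.h>
#include <wrl/client.h>

using Microsoft::WRL::ComPtr;

// Converts one decoded frame into |dest_texture| with the video processor.
// |decoder_texture| is the decoder's NV12 texture array and |subresource| is
// the array slice reported by IMFDXGIBuffer::GetSubresourceIndex().
bool BlitDecodedFrame(ID3D11VideoDevice* video_device,
                      ID3D11VideoContext* video_context,
                      ID3D11VideoProcessor* processor,
                      ID3D11VideoProcessorEnumerator* enumerator,
                      ID3D11Texture2D* decoder_texture,
                      UINT subresource,
                      ID3D11Texture2D* dest_texture) {
  // The destination (ANGLE) texture is wrapped in an output view.
  D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC output_desc = {
      D3D11_VPOV_DIMENSION_TEXTURE2D};
  output_desc.Texture2D.MipSlice = 0;
  ComPtr<ID3D11VideoProcessorOutputView> output_view;
  if (FAILED(video_device->CreateVideoProcessorOutputView(
          dest_texture, enumerator, &output_desc, output_view.GetAddressOf())))
    return false;

  // The decoded frame is wrapped in an input view pointing at its array slice.
  D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC input_desc = {};
  input_desc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D;
  input_desc.Texture2D.ArraySlice = subresource;
  input_desc.Texture2D.MipSlice = 0;
  ComPtr<ID3D11VideoProcessorInputView> input_view;
  if (FAILED(video_device->CreateVideoProcessorInputView(
          decoder_texture, enumerator, &input_desc, input_view.GetAddressOf())))
    return false;

  // One enabled stream; the blit does the format/color conversion on the GPU.
  D3D11_VIDEO_PROCESSOR_STREAM stream = {};
  stream.Enable = TRUE;
  stream.pInputSurface = input_view.Get();
  return SUCCEEDED(video_context->VideoProcessorBlt(
      processor, output_view.Get(), 0, 1, &stream));
}
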
@@ -2741,94 +2721,82 @@ void DXVAVideoDecodeAccelerator::FlushDecoder(int iterations,
                             picture_buffer_id, input_buffer_id));
 }
 
-bool DXVAVideoDecodeAccelerator::InitializeDX11VideoFormatConverterMediaType(
+bool DXVAVideoDecodeAccelerator::InitializeID3D11VideoProcessor(
     int width,
     int height,
     const gfx::ColorSpace& color_space) {
-  if (!dx11_video_format_converter_media_type_needs_init_ &&
-      (!use_color_info_ || color_space == dx11_converter_color_space_)) {
-    return true;
+  if (width < processor_width_ || height != processor_height_) {
+    d3d11_processor_.Release();
+    enumerator_.Release();
+    processor_width_ = 0;
+    processor_height_ = 0;
+
+    D3D11_VIDEO_PROCESSOR_CONTENT_DESC desc;
+    desc.InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE;
+    desc.InputFrameRate.Numerator = 60;
+    desc.InputFrameRate.Denominator = 1;
+    desc.InputWidth = width;
+    desc.InputHeight = height;
+    desc.OutputFrameRate.Numerator = 60;
+    desc.OutputFrameRate.Denominator = 1;
+    desc.OutputWidth = width;
+    desc.OutputHeight = height;
+    desc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL;
+
+    HRESULT hr = video_device_->CreateVideoProcessorEnumerator(
+        &desc, enumerator_.Receive());
+    RETURN_ON_HR_FAILURE(hr, "Failed to enumerate video processors", false);
+
+    // TODO(Hubbe): Find correct index
+    hr = video_device_->CreateVideoProcessor(enumerator_.get(), 0,
+                                             d3d11_processor_.Receive());
+    RETURN_ON_HR_FAILURE(hr, "Failed to create video processor.", false);
+    processor_width_ = width;
+    processor_height_ = height;
+
+    video_context_->VideoProcessorSetStreamAutoProcessingMode(
+        d3d11_processor_.get(), 0, false);
   }
 
-  CHECK(video_format_converter_mft_.get());
-
-  HRESULT hr = video_format_converter_mft_->ProcessMessage(
-      MFT_MESSAGE_SET_D3D_MANAGER,
-      reinterpret_cast<ULONG_PTR>(d3d11_device_manager_.get()));
-
-  if (FAILED(hr))
-    DCHECK(false);
-
-  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
-                                  "Failed to initialize video format converter",
-                                  PLATFORM_FAILURE, false);
-
-  video_format_converter_mft_->ProcessMessage(MFT_MESSAGE_NOTIFY_END_STREAMING,
-                                              0);
-
-  base::win::ScopedComPtr<IMFMediaType> media_type;
-  hr = MFCreateMediaType(media_type.Receive());
-  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFCreateMediaType failed",
-                                  PLATFORM_FAILURE, false);
-
-  hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
-  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set major input type",
-                                  PLATFORM_FAILURE, false);
-
-  hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12);
-  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set input sub type",
-                                  PLATFORM_FAILURE, false);
-
-  hr = MFSetAttributeSize(media_type.get(), MF_MT_FRAME_SIZE, width, height);
-  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set media type attributes",
-                                  PLATFORM_FAILURE, false);
-
-  if (use_color_info_) {
-    DXVA2_ExtendedFormat format =
-        gfx::ColorSpaceWin::GetExtendedFormat(color_space);
-    media_type->SetUINT32(MF_MT_YUV_MATRIX, format.VideoTransferMatrix);
-    media_type->SetUINT32(MF_MT_VIDEO_NOMINAL_RANGE, format.NominalRange);
-    media_type->SetUINT32(MF_MT_VIDEO_PRIMARIES, format.VideoPrimaries);
-    media_type->SetUINT32(MF_MT_TRANSFER_FUNCTION,
-                          format.VideoTransferFunction);
-    dx11_converter_color_space_ = color_space;
-  }
-
-  hr = video_format_converter_mft_->SetInputType(0, media_type.get(), 0);
-  if (FAILED(hr))
-    DCHECK(false);
-
-  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter input type",
-                                  PLATFORM_FAILURE, false);
-
-  // It appears that we fail to set MFVideoFormat_ARGB32 as the output media
-  // type in certain configurations. Try to fallback to MFVideoFormat_RGB32
-  // in such cases. If both fail, then bail.
-
-  bool media_type_set = false;
   if (copy_nv12_textures_) {
-    media_type_set = SetTransformOutputType(video_format_converter_mft_.get(),
-                                            MFVideoFormat_NV12, width, height);
-    RETURN_AND_NOTIFY_ON_FAILURE(media_type_set,
-                                 "Failed to set NV12 converter output type",
-                                 PLATFORM_FAILURE, false);
-  }
+    // If we're copying NV12 textures, make sure we set the same
+    // color space on input and output.
+    D3D11_VIDEO_PROCESSOR_COLOR_SPACE d3d11_color_space = {0};
+    d3d11_color_space.RGB_Range = 1;
+    d3d11_color_space.Nominal_Range = D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_0_255;
 
-  if (!media_type_set) {
-    media_type_set = SetTransformOutputType(
-        video_format_converter_mft_.get(), MFVideoFormat_ARGB32, width, height);
-  }
-  if (!media_type_set) {
-    media_type_set = SetTransformOutputType(video_format_converter_mft_.get(),
-                                            MFVideoFormat_RGB32, width, height);
-  }
+    video_context_->VideoProcessorSetOutputColorSpace(d3d11_processor_.get(),
+                                                      &d3d11_color_space);
 
-  if (!media_type_set) {
-    LOG(ERROR) << "Failed to find a matching RGB output type in the converter";
-    return false;
+    video_context_->VideoProcessorSetStreamColorSpace(d3d11_processor_.get(), 0,
+                                                      &d3d11_color_space);
+  } else {
+    // Not sure if this call is expensive, let's only do it if the color
+    // space changes.
+    gfx::ColorSpace output_color_space = gfx::ColorSpace::CreateSRGB();
+    if (use_color_info_ && dx11_converter_color_space_ != color_space) {
+      base::win::ScopedComPtr<ID3D11VideoContext1> video_context1;
+      HRESULT hr = video_context_.QueryInterface(video_context1.Receive());
+      if (SUCCEEDED(hr)) {
+        video_context1->VideoProcessorSetStreamColorSpace1(
+            d3d11_processor_.get(), 0,
+            gfx::ColorSpaceWin::GetDXGIColorSpace(color_space));
+        video_context1->VideoProcessorSetOutputColorSpace1(
+            d3d11_processor_.get(),
+            gfx::ColorSpaceWin::GetDXGIColorSpace(output_color_space));
+      } else {
+        D3D11_VIDEO_PROCESSOR_COLOR_SPACE d3d11_color_space =
            gfx::ColorSpaceWin::GetD3D11ColorSpace(color_space);
+        video_context_->VideoProcessorSetStreamColorSpace(
+            d3d11_processor_.get(), 0, &d3d11_color_space);
+        d3d11_color_space =
+            gfx::ColorSpaceWin::GetD3D11ColorSpace(output_color_space);
+        video_context_->VideoProcessorSetOutputColorSpace(
+            d3d11_processor_.get(), &d3d11_color_space);
+      }
+      dx11_converter_color_space_ = color_space;
+    }
   }
-
-  dx11_video_format_converter_media_type_needs_init_ = false;
   return true;
 }
 
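
The final hunk also sets input and output color spaces on the processor. A minimal sketch of that plumbing follows, with the same assumptions as the earlier sketches; the concrete DXGI values (BT.709 studio-range video in, full-range sRGB out) are illustrative defaults only, whereas the patch derives them from the stream's gfx::ColorSpace via gfx::ColorSpaceWin.

#include <d3d11.h>
#include <d3d11_1.h>
#include <wrl/client.h>

using Microsoft::WRL::ComPtr;

// Configures stream and output color spaces for the video processor. Prefers
// the ID3D11VideoContext1 DXGI color-space enums when available; otherwise
// falls back to the legacy D3D11_VIDEO_PROCESSOR_COLOR_SPACE struct.
void SetProcessorColorSpaces(ID3D11VideoContext* video_context,
                             ID3D11VideoProcessor* processor) {
  ComPtr<ID3D11VideoContext1> video_context1;
  if (SUCCEEDED(video_context->QueryInterface(video_context1.GetAddressOf()))) {
    // Stream 0 carries the decoded YUV frame; the output is an RGB texture.
    video_context1->VideoProcessorSetStreamColorSpace1(
        processor, 0, DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709);
    video_context1->VideoProcessorSetOutputColorSpace1(
        processor, DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709);
    return;
  }

  // Older runtimes only expose the coarse-grained color-space description.
  D3D11_VIDEO_PROCESSOR_COLOR_SPACE stream_color_space = {};
  stream_color_space.YCbCr_Matrix = 1;  // BT.709 rather than BT.601.
  stream_color_space.Nominal_Range = D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_16_235;
  video_context->VideoProcessorSetStreamColorSpace(processor, 0,
                                                   &stream_color_space);

  D3D11_VIDEO_PROCESSOR_COLOR_SPACE output_color_space = {};
  output_color_space.RGB_Range = 0;  // Full-range RGB output.
  video_context->VideoProcessorSetOutputColorSpace(processor,
                                                   &output_color_space);
}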