Index: media/gpu/dxva_video_decode_accelerator_win.cc |
diff --git a/media/gpu/dxva_video_decode_accelerator_win.cc b/media/gpu/dxva_video_decode_accelerator_win.cc |
index e1a43a5d1671b10bf67df3a4473d74f967d7d626..af8711e41aa419e19965e5fa9d143b335ed73e5b 100644 |
--- a/media/gpu/dxva_video_decode_accelerator_win.cc |
+++ b/media/gpu/dxva_video_decode_accelerator_win.cc |
@@ -35,6 +35,7 @@ |
#include "base/stl_util.h" |
#include "base/threading/thread_task_runner_handle.h" |
#include "base/trace_event/trace_event.h" |
+#include "base/win/scoped_co_mem.h" |
#include "base/win/windows_version.h" |
#include "build/build_config.h" |
#include "gpu/command_buffer/service/gpu_preferences.h" |
@@ -581,6 +582,7 @@ bool DXVAVideoDecodeAccelerator::Initialize(const Config& config, |
// copy does not exist on Windows 7. Look into an alternate approach |
// and enable the code below. |
#if defined(ENABLE_DX11_FOR_WIN7) |
+    DVLOG(1) << "Considering DX11 decode support on Windows 7.";  |
if (base::win::GetVersion() == base::win::VERSION_WIN7) { |
dxgi_manager_dll = ::GetModuleHandle(L"mshtmlmedia.dll"); |
RETURN_ON_FAILURE(dxgi_manager_dll, |
@@ -590,6 +592,7 @@ bool DXVAVideoDecodeAccelerator::Initialize(const Config& config, |
// If we don't find the MFCreateDXGIDeviceManager API we fallback to D3D9 |
// decoding. |
if (dxgi_manager_dll && !create_dxgi_device_manager_) { |
+      DVLOG(1) << "Resolving MFCreateDXGIDeviceManager for DX11 decode.";  |
create_dxgi_device_manager_ = reinterpret_cast<CreateDXGIDeviceManager>( |
::GetProcAddress(dxgi_manager_dll, "MFCreateDXGIDeviceManager")); |
} |
@@ -700,9 +703,95 @@ bool DXVAVideoDecodeAccelerator::CreateD3DDevManager() { |
// CopyOutputSampleDataToPictureBuffer). |
hr = query_->Issue(D3DISSUE_END); |
RETURN_ON_HR_FAILURE(hr, "Failed to issue END test query", false); |
+ |
+  CreateVideoProcessor();  // Best-effort; falls back to StretchRect().  |
return true; |
} |
+bool DXVAVideoDecodeAccelerator::CreateVideoProcessor() { |
+  // TODO(Hubbe): Don't try again if we already tried and failed.  |
+ if (video_processor_service_.get()) |
+ return true; |
+ HRESULT hr = DXVA2CreateVideoService(d3d9_device_ex_.get(), |
+ IID_IDirectXVideoProcessorService, |
+ video_processor_service_.ReceiveVoid()); |
+ RETURN_ON_HR_FAILURE(hr, "DXVA2CreateVideoService failed", false); |
+ |
+ // TODO(Hubbe): Use actual video settings. |
+  DXVA2_VideoDesc inputDesc = {};  |
+ inputDesc.SampleWidth = 1920; |
+ inputDesc.SampleHeight = 1080; |
+ inputDesc.SampleFormat.VideoChromaSubsampling = |
+ DXVA2_VideoChromaSubsampling_MPEG2; |
+ inputDesc.SampleFormat.NominalRange = DXVA2_NominalRange_16_235; |
+ inputDesc.SampleFormat.VideoTransferMatrix = DXVA2_VideoTransferMatrix_BT709; |
+ inputDesc.SampleFormat.VideoLighting = DXVA2_VideoLighting_dim; |
+ inputDesc.SampleFormat.VideoPrimaries = DXVA2_VideoPrimaries_BT709; |
+ inputDesc.SampleFormat.VideoTransferFunction = DXVA2_VideoTransFunc_709; |
+ inputDesc.SampleFormat.SampleFormat = DXVA2_SampleProgressiveFrame; |
+  inputDesc.Format = static_cast<D3DFORMAT>(MAKEFOURCC('N', 'V', '1', '2'));  |
+ inputDesc.InputSampleFreq.Numerator = 30; |
+ inputDesc.InputSampleFreq.Denominator = 1; |
+ inputDesc.OutputFrameFreq.Numerator = 30; |
+ inputDesc.OutputFrameFreq.Denominator = 1; |
+ |
+ UINT guid_count = 0; |
+ base::win::ScopedCoMem<GUID> guids; |
+ hr = video_processor_service_->GetVideoProcessorDeviceGuids( |
+ &inputDesc, &guid_count, &guids); |
+ RETURN_ON_HR_FAILURE(hr, "GetVideoProcessorDeviceGuids failed", false); |
+ |
+ for (UINT g = 0; g < guid_count; g++) { |
+ DXVA2_VideoProcessorCaps caps; |
+ hr = video_processor_service_->GetVideoProcessorCaps( |
+ guids[g], &inputDesc, D3DFMT_X8R8G8B8, &caps); |
+    if (FAILED(hr))  |
+ continue; |
+ |
+ if (!(caps.VideoProcessorOperations & DXVA2_VideoProcess_YUV2RGB)) |
+ continue; |
+ |
+ // if (!(caps.VideoProcessorOperations & |
+ // DXVA2_VideoProcess_YUV2RGBExtended)) |
+ // continue; |
+ |
+ base::win::ScopedCoMem<D3DFORMAT> formats; |
+ UINT format_count = 0; |
+ hr = video_processor_service_->GetVideoProcessorRenderTargets( |
+ guids[g], &inputDesc, &format_count, &formats); |
+    if (FAILED(hr))  |
+ continue; |
+ |
+ UINT f; |
+ for (f = 0; f < format_count; f++) { |
+ if (formats[f] == D3DFMT_X8R8G8B8) { |
+ break; |
+ } |
+ } |
+ if (f == format_count) |
+ continue; |
+ |
+ // Create video processor |
+ hr = video_processor_service_->CreateVideoProcessor( |
+ guids[g], &inputDesc, D3DFMT_X8R8G8B8, 0, processor_.Receive()); |
+    if (FAILED(hr))  |
+ continue; |
+ |
+ DXVA2_ValueRange range; |
+ processor_->GetProcAmpRange(DXVA2_ProcAmp_Brightness, &range); |
+ default_procamp_values_.Brightness = range.DefaultValue; |
+ processor_->GetProcAmpRange(DXVA2_ProcAmp_Contrast, &range); |
+ default_procamp_values_.Contrast = range.DefaultValue; |
+ processor_->GetProcAmpRange(DXVA2_ProcAmp_Hue, &range); |
+ default_procamp_values_.Hue = range.DefaultValue; |
+ processor_->GetProcAmpRange(DXVA2_ProcAmp_Saturation, &range); |
+ default_procamp_values_.Saturation = range.DefaultValue; |
+ |
+ return true; |
+ } |
+ return false; |
+} |
+ |
bool DXVAVideoDecodeAccelerator::CreateDX11DevManager() { |
// The device may exist if the last state was a config change. |
if (d3d11_device_.get()) |
@@ -1765,12 +1854,13 @@ void DXVAVideoDecodeAccelerator::ProcessPendingSamples() { |
// We only propagate the input color space if we can give the raw YUV data |
// back to the browser process. When we cannot return the YUV data, we |
// have to do a copy to an RGBA texture, which makes proper color |
- // management difficult as some fidelity is lost. Also, we currently let |
- // the drivers decide how to actually do the YUV to RGB conversion, which |
- // means that even if we wanted to try to color-adjust the RGB output, we |
- // don't actually know exactly what color space it is in anymore. |
+ // management difficult as some fidelity is lost. Also, for dx11 we |
+ // currently let the drivers decide how to actually do the YUV to RGB |
+ // conversion, which means that even if we wanted to try to color-adjust |
+ // the RGB output, we don't actually know exactly what color space it is |
+ // in anymore. |
// TODO(hubbe): Figure out a way to always return the raw YUV data. |
- if (share_nv12_textures_ || copy_nv12_textures_) { |
+ if (share_nv12_textures_ || copy_nv12_textures_ || !use_dx11_) { |
index->second->set_color_space(pending_sample->color_space); |
} |
@@ -2223,23 +2313,124 @@ bool DXVAVideoDecodeAccelerator::OutputSamplesPresent() { |
return !pending_output_samples_.empty(); |
} |
-void DXVAVideoDecodeAccelerator::CopySurface(IDirect3DSurface9* src_surface, |
- IDirect3DSurface9* dest_surface, |
- int picture_buffer_id, |
- int input_buffer_id) { |
+void DXVAVideoDecodeAccelerator::CopySurface( |
+ IDirect3DSurface9* src_surface, |
+ IDirect3DSurface9* dest_surface, |
+ int picture_buffer_id, |
+ int input_buffer_id, |
+ const gfx::ColorSpace& color_space) { |
TRACE_EVENT0("media", "DXVAVideoDecodeAccelerator::CopySurface"); |
if (!decoder_thread_task_runner_->BelongsToCurrentThread()) { |
decoder_thread_task_runner_->PostTask( |
FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::CopySurface, |
base::Unretained(this), src_surface, dest_surface, |
- picture_buffer_id, input_buffer_id)); |
+ picture_buffer_id, input_buffer_id, color_space)); |
return; |
} |
- HRESULT hr = d3d9_device_ex_->StretchRect(src_surface, NULL, dest_surface, |
- NULL, D3DTEXF_NONE); |
- RETURN_ON_HR_FAILURE(hr, "Colorspace conversion via StretchRect failed", ); |
+ HRESULT hr; |
+ if (processor_) { |
+ D3DSURFACE_DESC src_desc; |
+ src_surface->GetDesc(&src_desc); |
+ int width = src_desc.Width; |
+ int height = src_desc.Height; |
+ RECT rect = {0, 0, width, height}; |
+ DXVA2_VideoSample sample = {0}; |
+ sample.End = 1000; |
+ sample.SampleFormat.SampleFormat = DXVA2_SampleProgressiveFrame; |
+ switch (color_space.range_) { |
+ case gfx::ColorSpace::RangeID::LIMITED: |
+ sample.SampleFormat.NominalRange = DXVA2_NominalRange_16_235; |
+ break; |
+ case gfx::ColorSpace::RangeID::FULL: |
+ sample.SampleFormat.NominalRange = DXVA2_NominalRange_0_255; |
+ break; |
+ } |
+ |
+    switch (color_space.matrix_) {  |
+ case gfx::ColorSpace::MatrixID::BT709: |
+ sample.SampleFormat.VideoTransferMatrix = |
+ DXVA2_VideoTransferMatrix_BT709; |
+ break; |
+ case gfx::ColorSpace::MatrixID::BT470BG: |
+ case gfx::ColorSpace::MatrixID::SMPTE170M: |
+ sample.SampleFormat.VideoTransferMatrix = |
+ DXVA2_VideoTransferMatrix_BT601; |
+ break; |
+ case gfx::ColorSpace::MatrixID::SMPTE240M: |
+ sample.SampleFormat.VideoTransferMatrix = |
+ DXVA2_VideoTransferMatrix_SMPTE240M; |
+ break; |
+ } |
+ switch (color_space.primaries_) { |
+ case gfx::ColorSpace::PrimaryID::BT709: |
+ sample.SampleFormat.VideoPrimaries = DXVA2_VideoPrimaries_BT709; |
+ break; |
+ case gfx::ColorSpace::PrimaryID::BT470M: |
+ sample.SampleFormat.VideoPrimaries = DXVA2_VideoPrimaries_BT470_2_SysM; |
+ break; |
+ case gfx::ColorSpace::PrimaryID::BT470BG: |
+ sample.SampleFormat.VideoPrimaries = DXVA2_VideoPrimaries_BT470_2_SysBG; |
+ break; |
+ case gfx::ColorSpace::PrimaryID::SMPTE170M: |
+ sample.SampleFormat.VideoPrimaries = DXVA2_VideoPrimaries_SMPTE170M; |
+ break; |
+ case gfx::ColorSpace::PrimaryID::SMPTE240M: |
+ sample.SampleFormat.VideoPrimaries = DXVA2_VideoPrimaries_SMPTE240M; |
+ break; |
+ } |
+ |
+ switch (color_space.transfer_) { |
+ case gfx::ColorSpace::TransferID::BT709: |
+ case gfx::ColorSpace::TransferID::SMPTE170M: |
+ sample.SampleFormat.VideoTransferFunction = DXVA2_VideoTransFunc_709; |
+ break; |
+ case gfx::ColorSpace::TransferID::SMPTE240M: |
+ sample.SampleFormat.VideoTransferFunction = DXVA2_VideoTransFunc_240M; |
+ break; |
+ case gfx::ColorSpace::TransferID::GAMMA22: |
+ sample.SampleFormat.VideoTransferFunction = DXVA2_VideoTransFunc_22; |
+ break; |
+ case gfx::ColorSpace::TransferID::GAMMA28: |
+ sample.SampleFormat.VideoTransferFunction = DXVA2_VideoTransFunc_28; |
+ break; |
+ case gfx::ColorSpace::TransferID::LINEAR: |
+ sample.SampleFormat.VideoTransferFunction = DXVA2_VideoTransFunc_10; |
+ break; |
+ case gfx::ColorSpace::TransferID::IEC61966_2_1: |
+ sample.SampleFormat.VideoTransferFunction = DXVA2_VideoTransFunc_sRGB; |
+ break; |
+ } |
+ sample.SrcSurface = src_surface; |
+ sample.SrcRect = rect; |
+ sample.DstRect = rect; |
+ sample.PlanarAlpha = DXVA2_Fixed32OpaqueAlpha(); |
+ |
+ DXVA2_VideoProcessBltParams params = {0}; |
+ params.TargetFrame = 0; |
+ params.TargetRect = rect; |
+ params.ConstrictionSize = {width, height}; |
+ params.BackgroundColor = {0, 0, 0, 0xFFFF}; |
+ params.ProcAmpValues = default_procamp_values_; |
+ |
+ params.Alpha = DXVA2_Fixed32OpaqueAlpha(); |
+ |
+ hr = processor_->VideoProcessBlt(dest_surface, ¶ms, &sample, 1, NULL); |
+    if (FAILED(hr)) {  |
+      LOG(ERROR) << "VideoProcessBlt failed with error " << hr  |
+                 << "; falling back to StretchRect().";  |
+ |
+ // Release the processor and fall back to StretchRect() |
+ processor_ = NULL; |
+ } |
+ } |
+ |
+ if (!processor_) { |
+ hr = d3d9_device_ex_->StretchRect(src_surface, NULL, dest_surface, NULL, |
+ D3DTEXF_NONE); |
+ RETURN_ON_HR_FAILURE(hr, "Colorspace conversion via StretchRect failed", ); |
+ } |
// Ideally, this should be done immediately before the draw call that uses |
// the texture. Flush it once here though. |
hr = query_->Issue(D3DISSUE_END); |