Chromium Code Reviews
Unified Diff: media/gpu/dxva_video_decode_accelerator_win.cc

Issue 2503063002: media: Inform VideoBlit/MFTransform of video color information (Closed)
Patch Set: moved enum translation to color_space_win.cc (created 4 years, 1 month ago)
Index: media/gpu/dxva_video_decode_accelerator_win.cc
diff --git a/media/gpu/dxva_video_decode_accelerator_win.cc b/media/gpu/dxva_video_decode_accelerator_win.cc
index e1a43a5d1671b10bf67df3a4473d74f967d7d626..d54c4f8e1d210215a70b81db53ebb00aa16d6949 100644
--- a/media/gpu/dxva_video_decode_accelerator_win.cc
+++ b/media/gpu/dxva_video_decode_accelerator_win.cc
@@ -35,6 +35,7 @@
#include "base/stl_util.h"
#include "base/threading/thread_task_runner_handle.h"
#include "base/trace_event/trace_event.h"
+#include "base/win/scoped_co_mem.h"
#include "base/win/windows_version.h"
#include "build/build_config.h"
#include "gpu/command_buffer/service/gpu_preferences.h"
@@ -45,6 +46,7 @@
#include "media/video/video_decode_accelerator.h"
#include "third_party/angle/include/EGL/egl.h"
#include "third_party/angle/include/EGL/eglext.h"
+#include "ui/gfx/color_space_win.h"
#include "ui/gl/gl_bindings.h"
#include "ui/gl/gl_context.h"
#include "ui/gl/gl_fence.h"
@@ -581,6 +583,7 @@ bool DXVAVideoDecodeAccelerator::Initialize(const Config& config,
// copy does not exist on Windows 7. Look into an alternate approach
// and enable the code below.
#if defined(ENABLE_DX11_FOR_WIN7)
+ LOG(ERROR) << "DO THIS!";
if (base::win::GetVersion() == base::win::VERSION_WIN7) {
dxgi_manager_dll = ::GetModuleHandle(L"mshtmlmedia.dll");
RETURN_ON_FAILURE(dxgi_manager_dll,
@@ -590,6 +593,7 @@ bool DXVAVideoDecodeAccelerator::Initialize(const Config& config,
// If we don't find the MFCreateDXGIDeviceManager API we fallback to D3D9
// decoding.
if (dxgi_manager_dll && !create_dxgi_device_manager_) {
+ LOG(ERROR) << "DX9 DECODING";
jbauman 2016/11/17 01:02:22 Remove these.
hubbe 2016/11/17 18:46:59 Oops, done.
create_dxgi_device_manager_ = reinterpret_cast<CreateDXGIDeviceManager>(
::GetProcAddress(dxgi_manager_dll, "MFCreateDXGIDeviceManager"));
}
@@ -700,9 +704,95 @@ bool DXVAVideoDecodeAccelerator::CreateD3DDevManager() {
// CopyOutputSampleDataToPictureBuffer).
hr = query_->Issue(D3DISSUE_END);
RETURN_ON_HR_FAILURE(hr, "Failed to issue END test query", false);
+
+ CreateVideoProcessor();
return true;
}
+bool DXVAVideoDecodeAccelerator::CreateVideoProcessor() {
+ // TODO(Hubbe): Don't try again if we already tried and failed.
+ if (video_processor_service_.get())
+ return true;
+ HRESULT hr = DXVA2CreateVideoService(d3d9_device_ex_.get(),
jbauman 2016/11/17 01:02:22 Might be a good idea to add a finch trial for this
hubbe 2016/11/17 18:46:59 Done.
+ IID_IDirectXVideoProcessorService,
+ video_processor_service_.ReceiveVoid());
+ RETURN_ON_HR_FAILURE(hr, "DXVA2CreateVideoService failed", false);
+
+ // TODO(Hubbe): Use actual video settings.
+ DXVA2_VideoDesc inputDesc;
+ inputDesc.SampleWidth = 1920;
+ inputDesc.SampleHeight = 1080;
+ inputDesc.SampleFormat.VideoChromaSubsampling =
+ DXVA2_VideoChromaSubsampling_MPEG2;
+ inputDesc.SampleFormat.NominalRange = DXVA2_NominalRange_16_235;
+ inputDesc.SampleFormat.VideoTransferMatrix = DXVA2_VideoTransferMatrix_BT709;
+ inputDesc.SampleFormat.VideoLighting = DXVA2_VideoLighting_dim;
+ inputDesc.SampleFormat.VideoPrimaries = DXVA2_VideoPrimaries_BT709;
+ inputDesc.SampleFormat.VideoTransferFunction = DXVA2_VideoTransFunc_709;
+ inputDesc.SampleFormat.SampleFormat = DXVA2_SampleProgressiveFrame;
+ inputDesc.Format = (D3DFORMAT)MAKEFOURCC('N', 'V', '1', '2');
+ inputDesc.InputSampleFreq.Numerator = 30;
+ inputDesc.InputSampleFreq.Denominator = 1;
+ inputDesc.OutputFrameFreq.Numerator = 30;
+ inputDesc.OutputFrameFreq.Denominator = 1;
+
+ UINT guid_count = 0;
+ base::win::ScopedCoMem<GUID> guids;
+ hr = video_processor_service_->GetVideoProcessorDeviceGuids(
+ &inputDesc, &guid_count, &guids);
+ RETURN_ON_HR_FAILURE(hr, "GetVideoProcessorDeviceGuids failed", false);
+
+ for (UINT g = 0; g < guid_count; g++) {
+ DXVA2_VideoProcessorCaps caps;
+ hr = video_processor_service_->GetVideoProcessorCaps(
+ guids[g], &inputDesc, D3DFMT_X8R8G8B8, &caps);
+ if (hr)
+ continue;
+
+ if (!(caps.VideoProcessorOperations & DXVA2_VideoProcess_YUV2RGB))
+ continue;
+
+ // if (!(caps.VideoProcessorOperations &
+ // DXVA2_VideoProcess_YUV2RGBExtended))
+ // continue;
+
+ base::win::ScopedCoMem<D3DFORMAT> formats;
+ UINT format_count = 0;
+ hr = video_processor_service_->GetVideoProcessorRenderTargets(
+ guids[g], &inputDesc, &format_count, &formats);
+ if (hr)
+ continue;
+
+ UINT f;
+ for (f = 0; f < format_count; f++) {
+ if (formats[f] == D3DFMT_X8R8G8B8) {
+ break;
+ }
+ }
+ if (f == format_count)
+ continue;
+
+ // Create video processor
+ hr = video_processor_service_->CreateVideoProcessor(
+ guids[g], &inputDesc, D3DFMT_X8R8G8B8, 0, processor_.Receive());
+ if (hr)
+ continue;
+
+ DXVA2_ValueRange range;
+ processor_->GetProcAmpRange(DXVA2_ProcAmp_Brightness, &range);
+ default_procamp_values_.Brightness = range.DefaultValue;
+ processor_->GetProcAmpRange(DXVA2_ProcAmp_Contrast, &range);
+ default_procamp_values_.Contrast = range.DefaultValue;
+ processor_->GetProcAmpRange(DXVA2_ProcAmp_Hue, &range);
+ default_procamp_values_.Hue = range.DefaultValue;
+ processor_->GetProcAmpRange(DXVA2_ProcAmp_Saturation, &range);
+ default_procamp_values_.Saturation = range.DefaultValue;
+
+ return true;
+ }
+ return false;
+}
+
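[Editor's note on jbauman's suggestion above to add a Finch trial for the DXVA2 video processor path: a minimal sketch of what such gating could look like with base::FeatureList is shown below. The feature name kD3D9VideoProcessor and the ShouldUseD3D9VideoProcessor() helper are hypothetical illustrations and are not part of this patch.]

#include "base/feature_list.h"

namespace media {
namespace {

// Hypothetical feature flag, not part of this change. A Finch field trial
// could turn it off if VideoProcessBlt misbehaves on some drivers.
const base::Feature kD3D9VideoProcessor{"D3D9VideoProcessor",
                                        base::FEATURE_ENABLED_BY_DEFAULT};

}  // namespace

bool ShouldUseD3D9VideoProcessor() {
  // When the field trial (or command line) disables the feature, the caller
  // would skip CreateVideoProcessor(), leaving processor_ null so that
  // CopySurface() keeps using StretchRect() as before.
  return base::FeatureList::IsEnabled(kD3D9VideoProcessor);
}

}  // namespace media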
bool DXVAVideoDecodeAccelerator::CreateDX11DevManager() {
// The device may exist if the last state was a config change.
if (d3d11_device_.get())
@@ -1765,12 +1855,13 @@ void DXVAVideoDecodeAccelerator::ProcessPendingSamples() {
// We only propagate the input color space if we can give the raw YUV data
// back to the browser process. When we cannot return the YUV data, we
// have to do a copy to an RGBA texture, which makes proper color
- // management difficult as some fidelity is lost. Also, we currently let
- // the drivers decide how to actually do the YUV to RGB conversion, which
- // means that even if we wanted to try to color-adjust the RGB output, we
- // don't actually know exactly what color space it is in anymore.
+ // management difficult as some fidelity is lost. Also, for dx11 we
+ // currently let the drivers decide how to actually do the YUV to RGB
+ // conversion, which means that even if we wanted to try to color-adjust
+ // the RGB output, we don't actually know exactly what color space it is
+ // in anymore.
// TODO(hubbe): Figure out a way to always return the raw YUV data.
- if (share_nv12_textures_ || copy_nv12_textures_) {
+ if (share_nv12_textures_ || copy_nv12_textures_ || !use_dx11_) {
index->second->set_color_space(pending_sample->color_space);
}
@@ -2223,23 +2314,60 @@ bool DXVAVideoDecodeAccelerator::OutputSamplesPresent() {
return !pending_output_samples_.empty();
}
-void DXVAVideoDecodeAccelerator::CopySurface(IDirect3DSurface9* src_surface,
- IDirect3DSurface9* dest_surface,
- int picture_buffer_id,
- int input_buffer_id) {
+void DXVAVideoDecodeAccelerator::CopySurface(
+ IDirect3DSurface9* src_surface,
+ IDirect3DSurface9* dest_surface,
+ int picture_buffer_id,
+ int input_buffer_id,
+ const gfx::ColorSpace& color_space) {
TRACE_EVENT0("media", "DXVAVideoDecodeAccelerator::CopySurface");
if (!decoder_thread_task_runner_->BelongsToCurrentThread()) {
decoder_thread_task_runner_->PostTask(
FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::CopySurface,
base::Unretained(this), src_surface, dest_surface,
- picture_buffer_id, input_buffer_id));
+ picture_buffer_id, input_buffer_id, color_space));
return;
}
- HRESULT hr = d3d9_device_ex_->StretchRect(src_surface, NULL, dest_surface,
- NULL, D3DTEXF_NONE);
- RETURN_ON_HR_FAILURE(hr, "Colorspace conversion via StretchRect failed", );
+ HRESULT hr;
+ if (processor_) {
+ D3DSURFACE_DESC src_desc;
+ src_surface->GetDesc(&src_desc);
+ int width = src_desc.Width;
+ int height = src_desc.Height;
+ RECT rect = {0, 0, width, height};
+ DXVA2_VideoSample sample = {0};
+ sample.End = 1000;
+ sample.SampleFormat = gfx::ColorSpaceWin::GetExtendedFormat(color_space);
+ sample.SrcSurface = src_surface;
+ sample.SrcRect = rect;
+ sample.DstRect = rect;
+ sample.PlanarAlpha = DXVA2_Fixed32OpaqueAlpha();
+
+ DXVA2_VideoProcessBltParams params = {0};
+ params.TargetFrame = 0;
+ params.TargetRect = rect;
+ params.ConstrictionSize = {width, height};
+ params.BackgroundColor = {0, 0, 0, 0xFFFF};
+ params.ProcAmpValues = default_procamp_values_;
+
+ params.Alpha = DXVA2_Fixed32OpaqueAlpha();
+
+ hr = processor_->VideoProcessBlt(dest_surface, &params, &sample, 1, NULL);
+ if (hr != S_OK) {
+ LOG(ERROR) << "VideoProcessBlt failed with code " << hr
+ << " E_INVALIDARG= " << E_INVALIDARG;
+
+ // Release the processor and fall back to StretchRect()
+ processor_ = NULL;
+ }
+ }
+ if (!processor_) {
+ hr = d3d9_device_ex_->StretchRect(src_surface, NULL, dest_surface, NULL,
+ D3DTEXF_NONE);
+ RETURN_ON_HR_FAILURE(hr, "Colorspace conversion via StretchRect failed", );
+ }
// Ideally, this should be done immediately before the draw call that uses
// the texture. Flush it once here though.
hr = query_->Issue(D3DISSUE_END);
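[Editor's note: the sample.SampleFormat passed to VideoProcessBlt above comes from gfx::ColorSpaceWin::GetExtendedFormat(), which this change adds in ui/gfx/color_space_win.cc. As an illustration of the kind of DXVA2_ExtendedFormat that call produces, here is a hand-written example for a BT.601, limited-range, progressive source; the helper name is hypothetical and the real mapping covers many more color spaces.]

#include <dxva2api.h>

// Illustrative only: describe a BT.601, limited-range, progressive source
// to VideoProcessBlt. The actual translation from gfx::ColorSpace lives in
// gfx::ColorSpaceWin::GetExtendedFormat().
DXVA2_ExtendedFormat MakeBT601LimitedRangeFormat() {
  DXVA2_ExtendedFormat format = {};
  format.SampleFormat = DXVA2_SampleProgressiveFrame;
  format.VideoChromaSubsampling = DXVA2_VideoChromaSubsampling_MPEG2;
  format.NominalRange = DXVA2_NominalRange_16_235;  // limited (video) range
  format.VideoTransferMatrix = DXVA2_VideoTransferMatrix_BT601;
  format.VideoPrimaries = DXVA2_VideoPrimaries_SMPTE170M;
  // BT.601 shares its transfer curve with BT.709 in the DXVA2 enums.
  format.VideoTransferFunction = DXVA2_VideoTransFunc_709;
  return format;
}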