Chromium Code Reviews

Diff: media/gpu/dxva_video_decode_accelerator_win.cc

Issue 2696963002: Replace IMFTransform with ID3D11VideoProcessor (Closed)
Patch Set: fixed all comments except one... Created 3 years, 10 months ago
 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #include "media/gpu/dxva_video_decode_accelerator_win.h"
 
 #include <memory>
 
 #if !defined(OS_WIN)
 #error This file should only be built on Windows.
(...skipping 482 matching lines...)
       codec_(kUnknownVideoCodec),
       decoder_thread_("DXVAVideoDecoderThread"),
       pending_flush_(false),
       enable_low_latency_(gpu_preferences.enable_low_latency_dxva),
       share_nv12_textures_(gpu_preferences.enable_zero_copy_dxgi_video &&
                            !workarounds.disable_dxgi_zero_copy_video),
       copy_nv12_textures_(gpu_preferences.enable_nv12_dxgi_video &&
                           !workarounds.disable_nv12_dxgi_video),
       use_dx11_(false),
       use_keyed_mutex_(false),
-      dx11_video_format_converter_media_type_needs_init_(true),
       using_angle_device_(false),
       enable_accelerated_vpx_decode_(
           gpu_preferences.enable_accelerated_vpx_decode),
       processing_config_changed_(false),
       weak_this_factory_(this) {
   weak_ptr_ = weak_this_factory_.GetWeakPtr();
   memset(&input_stream_info_, 0, sizeof(input_stream_info_));
   memset(&output_stream_info_, 0, sizeof(output_stream_info_));
   use_color_info_ = base::FeatureList::IsEnabled(kVideoBlitColorAccuracy);
 }
(...skipping 348 matching lines...)
   hr = d3d11_device_manager_->ResetDevice(d3d11_device_.get(),
                                           dx11_dev_manager_reset_token_);
   RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false);
 
   D3D11_QUERY_DESC query_desc;
   query_desc.Query = D3D11_QUERY_EVENT;
   query_desc.MiscFlags = 0;
   hr = d3d11_device_->CreateQuery(&query_desc, d3d11_query_.Receive());
   RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device query", false);
 
-  HMODULE video_processor_dll = ::GetModuleHandle(L"msvproc.dll");
-  RETURN_ON_FAILURE(video_processor_dll, "Failed to load video processor",
-                    false);
-
-  hr = CreateCOMObjectFromDll(video_processor_dll, CLSID_VideoProcessorMFT,
-                              __uuidof(IMFTransform),
-                              video_format_converter_mft_.ReceiveVoid());
-  RETURN_ON_HR_FAILURE(hr, "Failed to create video format converter", false);
-
-  base::win::ScopedComPtr<IMFAttributes> converter_attributes;
-  hr = video_format_converter_mft_->GetAttributes(
-      converter_attributes.Receive());
-  RETURN_ON_HR_FAILURE(hr, "Failed to get converter attributes", false);
-
-  hr = converter_attributes->SetUINT32(MF_XVP_PLAYBACK_MODE, TRUE);
-  RETURN_ON_HR_FAILURE(
-      hr, "Failed to set MF_XVP_PLAYBACK_MODE attribute on converter", false);
-
-  hr = converter_attributes->SetUINT32(MF_LOW_LATENCY, FALSE);
-  RETURN_ON_HR_FAILURE(
-      hr, "Failed to set MF_LOW_LATENCY attribute on converter", false);
+  base::win::ScopedComPtr<ID3D11DeviceContext1> device_context1;
+  hr = d3d11_device_context_.QueryInterface(__uuidof(ID3D11DeviceContext1),
+                                            device_context1.ReceiveVoid());
+  RETURN_ON_HR_FAILURE(hr, "Failed to get device context", false);
+
+  hr = d3d11_device_.QueryInterface(video_device_.Receive());
+  RETURN_ON_HR_FAILURE(hr, "Failed to get video device", false);
+
+  hr = device_context1.QueryInterface(__uuidof(ID3D11VideoContext1),
+                                      video_context_.ReceiveVoid());
+  RETURN_ON_HR_FAILURE(hr, "Failed to get video context", false);

jbauman 2017/02/15 02:01:39: You could just do "hr = d3d11_device_context_.Quer
hubbe 2017/02/16 01:08:20: Done.
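For readers following the thread above: a minimal sketch of the simplification the (truncated) comment appears to suggest, using the same base::win::ScopedComPtr helpers already used in this file. Because every interface of a COM object is reachable from any other via QueryInterface, the ID3D11VideoContext1 interface can be queried directly from the immediate device context, making the intermediate ID3D11DeviceContext1 hop unnecessary. This is an illustration, not the exact code landed in a later patch set.

  // Sketch only; assumes d3d11_device_context_ and video_context_ are the
  // ScopedComPtr members used elsewhere in this file.
  hr = d3d11_device_context_.QueryInterface(__uuidof(ID3D11VideoContext1),
                                            video_context_.ReceiveVoid());
  RETURN_ON_HR_FAILURE(hr, "Failed to get video context", false);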
 
   return true;
 }
 
 void DXVAVideoDecodeAccelerator::Decode(
     const BitstreamBuffer& bitstream_buffer) {
   TRACE_EVENT0("media", "DXVAVideoDecodeAccelerator::Decode");
   DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
 
   // SharedMemory will take over the ownership of handle.
   base::SharedMemory shm(bitstream_buffer.handle(), true);
(...skipping 1056 matching lines...)
   // resolution changes. We already handle that in the
   // HandleResolutionChanged() function.
   if (GetState() != kConfigChange) {
     output_picture_buffers_.clear();
     stale_output_picture_buffers_.clear();
     // We want to continue processing pending input after detecting a config
     // change.
     pending_input_buffers_.clear();
     pictures_requested_ = false;
     if (use_dx11_) {
-      if (video_format_converter_mft_.get()) {
-        video_format_converter_mft_->ProcessMessage(
-            MFT_MESSAGE_NOTIFY_END_STREAMING, 0);
-        video_format_converter_mft_.Release();
-      }
+      d3d11_processor_.Release();
+      enumerator_.Release();
+      video_context_.Release();
+      video_device_.Release();
       d3d11_device_context_.Release();
       d3d11_device_.Release();
       d3d11_device_manager_.Release();
       d3d11_query_.Release();
       multi_threaded_.Release();
-      dx11_video_format_converter_media_type_needs_init_ = true;
+      processor_width_ = processor_height_ = 0;
     } else {
       d3d9_.Release();
       d3d9_device_ex_.Release();
       device_manager_.Release();
       query_.Release();
     }
   }
   sent_drain_message_ = false;
   SetState(kUninitialized);
 }
(...skipping 280 matching lines...)
   // decoder to emit an output packet for every input packet.
   // http://code.google.com/p/chromium/issues/detail?id=108121
   // http://code.google.com/p/chromium/issues/detail?id=150925
   main_thread_task_runner_->PostTask(
       FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::NotifyInputBufferRead,
                             weak_ptr_, input_buffer_id));
 }
 
 void DXVAVideoDecodeAccelerator::HandleResolutionChanged(int width,
                                                          int height) {
-  dx11_video_format_converter_media_type_needs_init_ = true;
-
   main_thread_task_runner_->PostTask(
       FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::DismissStaleBuffers,
                             weak_ptr_, false));
 
   main_thread_task_runner_->PostTask(
       FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers,
                             weak_ptr_, width, height));
 }
 
 void DXVAVideoDecodeAccelerator::DismissStaleBuffers(bool force) {
(...skipping 264 matching lines...)
   TRACE_EVENT0("media", "DXVAVideoDecodeAccelerator::CopyTexture");
   DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
 
   DCHECK(use_dx11_);
 
   // The media foundation H.264 decoder outputs YUV12 textures which we
   // cannot copy into ANGLE as they expect ARGB textures. In D3D land
   // the StretchRect API in the IDirect3DDevice9Ex interface did the color
   // space conversion for us. Sadly in DX11 land the API does not provide
   // a straightforward way to do this.
-  // We use the video processor MFT.
-  // https://msdn.microsoft.com/en-us/library/hh162913(v=vs.85).aspx
-  // This object implements a media foundation transform (IMFTransform)
-  // which follows the same contract as the decoder. The color space
-  // conversion as per msdn is done in the GPU.
 
   D3D11_TEXTURE2D_DESC source_desc;
   src_texture->GetDesc(&source_desc);
-
-  // Set up the input and output types for the video processor MFT.
-  if (!InitializeDX11VideoFormatConverterMediaType(
-          source_desc.Width, source_desc.Height, color_space)) {
-    RETURN_AND_NOTIFY_ON_FAILURE(
-        false, "Failed to initialize media types for convesion.",
-        PLATFORM_FAILURE, );
-  }
+  InitializeI3D11VideoProcessor(source_desc.Width, source_desc.Height,
+                                color_space);

jbauman 2017/02/15 02:01:39: Check the return value and do RETURN_AND_NOTIFY_ON
hubbe 2017/02/16 01:08:20: Done.
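A minimal sketch of the check requested in the thread above, written with the RETURN_AND_NOTIFY_ON_FAILURE macro this file already uses for non-HRESULT conditions; the exact log message is illustrative and the reviewer's full wording is truncated above.

  // Sketch only; InitializeI3D11VideoProcessor() returns bool, so the plain
  // (non-HR) failure macro is the appropriate one here.
  RETURN_AND_NOTIFY_ON_FAILURE(
      InitializeI3D11VideoProcessor(source_desc.Width, source_desc.Height,
                                    color_space),
      "Failed to initialize D3D11 video processor.", PLATFORM_FAILURE, );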
 
   // The input to the video processor is the output sample.
   base::win::ScopedComPtr<IMFSample> input_sample_for_conversion;
   {
     base::AutoLock lock(decoder_lock_);
     PendingSampleInfo& sample_info = pending_output_samples_.front();
     input_sample_for_conversion = sample_info.output_sample;
   }
 
   decoder_thread_task_runner_->PostTask(
(...skipping 11 matching lines...)
     base::win::ScopedComPtr<IMFSample> input_sample,
     int picture_buffer_id,
     int input_buffer_id) {
   TRACE_EVENT0("media",
                "DXVAVideoDecodeAccelerator::CopyTextureOnDecoderThread");
   DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
   HRESULT hr = E_FAIL;
 
   DCHECK(use_dx11_);
   DCHECK(!!input_sample);
-  DCHECK(video_format_converter_mft_.get());
+  DCHECK(d3d11_processor_.get());
 
   if (dest_keyed_mutex) {
     HRESULT hr =
         dest_keyed_mutex->AcquireSync(keyed_mutex_value, kAcquireSyncWaitMs);
     RETURN_AND_NOTIFY_ON_FAILURE(
         hr == S_OK, "D3D11 failed to acquire keyed mutex for texture.",
         PLATFORM_FAILURE, );
   }
-  // The video processor MFT requires output samples to be allocated by the
-  // caller. We create a sample with a buffer backed with the ID3D11Texture2D
-  // interface exposed by ANGLE. This works nicely as this ensures that the
-  // video processor coverts the color space of the output frame and copies
-  // the result into the ANGLE texture.
-  base::win::ScopedComPtr<IMFSample> output_sample;
-  hr = MFCreateSample(output_sample.Receive());
-  if (FAILED(hr)) {
-    RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to create output sample.",
-                                    PLATFORM_FAILURE, );
-  }
 
   base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
-  hr = MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D), dest_texture, 0,
-                                 FALSE, output_buffer.Receive());
-  if (FAILED(hr)) {
-    RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to create output sample.",
-                                    PLATFORM_FAILURE, );
-  }
+  hr = input_sample->GetBufferByIndex(0, output_buffer.Receive());
+  RETURN_AND_NOTIFY_ON_FAILURE(hr, "Failed to get buffer from output sample",
+                               PLATFORM_FAILURE, );
 
-  output_sample->AddBuffer(output_buffer.get());
+  base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer;
+  hr = dxgi_buffer.QueryFrom(output_buffer.get());
+  RETURN_AND_NOTIFY_ON_FAILURE(
+      hr, "Failed to get DXGIBuffer from output sample", PLATFORM_FAILURE, );
+  UINT index = 0;
+  hr = dxgi_buffer->GetSubresourceIndex(&index);
+  RETURN_AND_NOTIFY_ON_FAILURE(hr, "Failed to get resource index",
+                               PLATFORM_FAILURE, );

jbauman 2017/02/15 02:01:39: These should be RETURN_AND_NOTIFY_ON_HR_FAILURE.
hubbe 2017/02/16 01:08:20: Done.
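Background for the thread above: RETURN_AND_NOTIFY_ON_FAILURE evaluates its first argument as a plain boolean, while the _HR_ variant checks it with SUCCEEDED(). Since a successful HRESULT is S_OK, which is zero and therefore falsy, passing an HRESULT to the non-HR macro inverts the test. The shapes below are an approximation of the macros defined earlier in this file, not a verbatim copy.

  // Approximate shape, for illustration only.
  #define RETURN_AND_NOTIFY_ON_FAILURE(result, log, error_code, ret_val) \
    do {                                                                 \
      if (!(result)) {                                                   \
        DLOG(ERROR) << log;                                              \
        StopOnError(error_code);                                         \
        return ret_val;                                                  \
      }                                                                  \
    } while (0)

  #define RETURN_AND_NOTIFY_ON_HR_FAILURE(result, log, error_code, ret_val) \
    RETURN_AND_NOTIFY_ON_FAILURE(SUCCEEDED(result), log, error_code, ret_val)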
 
-  hr = video_format_converter_mft_->ProcessInput(0, input_sample.get(), 0);
-  if (FAILED(hr)) {
-    DCHECK(false);
-    RETURN_AND_NOTIFY_ON_HR_FAILURE(
-        hr, "Failed to convert output sample format.", PLATFORM_FAILURE, );
-  }
+  base::win::ScopedComPtr<ID3D11Texture2D> dx11_decoding_texture;
+  hr = dxgi_buffer->GetResource(IID_PPV_ARGS(dx11_decoding_texture.Receive()));
+  RETURN_AND_NOTIFY_ON_FAILURE(hr, "Failed to get resource from output sample",
+                               PLATFORM_FAILURE, );
 
-  input_sample.Release();
+  D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC output_view_desc = {
+      D3D11_VPOV_DIMENSION_TEXTURE2D};
+  output_view_desc.Texture2D.MipSlice = 0;
+  base::win::ScopedComPtr<ID3D11VideoProcessorOutputView> output_view;
+  hr = video_device_->CreateVideoProcessorOutputView(
+      dest_texture, enumerator_.get(), &output_view_desc,
+      output_view.Receive());
+  RETURN_AND_NOTIFY_ON_FAILURE(hr, "Failed to get output view",
+                               PLATFORM_FAILURE, );
 
-  DWORD status = 0;
-  MFT_OUTPUT_DATA_BUFFER format_converter_output = {};
-  format_converter_output.pSample = output_sample.get();
-  hr = video_format_converter_mft_->ProcessOutput(
-      0,  // No flags
-      1,  // # of out streams to pull from
-      &format_converter_output, &status);
+  D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC input_view_desc = {0};
+  input_view_desc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D;
+  input_view_desc.Texture2D.ArraySlice = index;
+  input_view_desc.Texture2D.MipSlice = 0;
+  base::win::ScopedComPtr<ID3D11VideoProcessorInputView> input_view;
+  hr = video_device_->CreateVideoProcessorInputView(
+      dx11_decoding_texture.get(), enumerator_.get(), &input_view_desc,
+      input_view.Receive());
+  RETURN_AND_NOTIFY_ON_FAILURE(hr, "Failed to get input view",
+                               PLATFORM_FAILURE, );
 
-  if (FAILED(hr)) {
-    DCHECK(false);
-    RETURN_AND_NOTIFY_ON_HR_FAILURE(
-        hr, "Failed to convert output sample format.", PLATFORM_FAILURE, );
-  }
+  D3D11_VIDEO_PROCESSOR_STREAM streams = {0};
+  streams.Enable = TRUE;
+  streams.pInputSurface = input_view.get();
+
+  hr = video_context_->VideoProcessorBlt(d3d11_processor_.get(),
+                                         output_view.get(), 0, 1, &streams);
+
+  RETURN_AND_NOTIFY_ON_FAILURE(hr, "VideoProcessBlit failed",
+                               PLATFORM_FAILURE, );
 
   if (dest_keyed_mutex) {
     HRESULT hr = dest_keyed_mutex->ReleaseSync(keyed_mutex_value + 1);
     RETURN_AND_NOTIFY_ON_FAILURE(hr == S_OK, "Failed to release keyed mutex.",
                                  PLATFORM_FAILURE, );
 
     main_thread_task_runner_->PostTask(
         FROM_HERE,
         base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, weak_ptr_,
                    nullptr, nullptr, picture_buffer_id, input_buffer_id));
(...skipping 52 matching lines...)
         base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs));
     return;
   }
 
   main_thread_task_runner_->PostTask(
       FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete,
                             weak_ptr_, src_surface, dest_surface,
                             picture_buffer_id, input_buffer_id));
 }
 
-bool DXVAVideoDecodeAccelerator::InitializeDX11VideoFormatConverterMediaType(
+bool DXVAVideoDecodeAccelerator::InitializeI3D11VideoProcessor(
     int width,
     int height,
     const gfx::ColorSpace& color_space) {
-  if (!dx11_video_format_converter_media_type_needs_init_ &&
-      (!use_color_info_ || color_space == dx11_converter_color_space_)) {
-    return true;
+  if (width < processor_width_ || height != processor_height_) {
+    d3d11_processor_.Release();
+    enumerator_.Release();
+    processor_width_ = 0;
+    processor_height_ = 0;
+
+    D3D11_VIDEO_PROCESSOR_CONTENT_DESC desc;
+    desc.InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE;
+    desc.InputFrameRate.Numerator = 60;
+    desc.InputFrameRate.Denominator = 1;
+    desc.InputWidth = width;
+    desc.InputHeight = height;
+    desc.OutputFrameRate.Numerator = 60;
+    desc.OutputFrameRate.Denominator = 1;
+    desc.OutputWidth = width;
+    desc.OutputHeight = height;
+    desc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL;
+
+    HRESULT hr = video_device_->CreateVideoProcessorEnumerator(
+        &desc, enumerator_.Receive());
+    RETURN_ON_HR_FAILURE(hr, "Failed to enumerate video processors", false);
+
+    // TODO(Hubbe): Find correct index
+    hr = video_device_->CreateVideoProcessor(enumerator_.get(), 0,
+                                             d3d11_processor_.Receive());
+    RETURN_ON_HR_FAILURE(hr, "Failed to create video processor.", false);
+    processor_width_ = width;
+    processor_height_ = height;
+
+    video_context_->VideoProcessorSetStreamAutoProcessingMode(
+        d3d11_processor_.get(), 0, false);
+
+    if (copy_nv12_textures_) {
+      video_context_->VideoProcessorSetOutputColorSpace1(
+          d3d11_processor_.get(),
+          gfx::ColorSpaceWin::GetDXGIColorSpace(color_space));
+    } else {
+      video_context_->VideoProcessorSetOutputColorSpace1(
+          d3d11_processor_.get(), DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709);
+    }
   }
 
-  CHECK(video_format_converter_mft_.get());
-
-  HRESULT hr = video_format_converter_mft_->ProcessMessage(
-      MFT_MESSAGE_SET_D3D_MANAGER,
-      reinterpret_cast<ULONG_PTR>(d3d11_device_manager_.get()));
-
-  if (FAILED(hr))
-    DCHECK(false);
-
-  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr,
-                                  "Failed to initialize video format converter",
-                                  PLATFORM_FAILURE, false);
-
-  video_format_converter_mft_->ProcessMessage(MFT_MESSAGE_NOTIFY_END_STREAMING,
-                                              0);
-
-  base::win::ScopedComPtr<IMFMediaType> media_type;
-  hr = MFCreateMediaType(media_type.Receive());
-  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFCreateMediaType failed",
-                                  PLATFORM_FAILURE, false);
-
-  hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
-  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set major input type",
-                                  PLATFORM_FAILURE, false);
-
-  hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12);
-  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set input sub type",
-                                  PLATFORM_FAILURE, false);
-
-  hr = MFSetAttributeSize(media_type.get(), MF_MT_FRAME_SIZE, width, height);
-  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set media type attributes",
-                                  PLATFORM_FAILURE, false);
-
-  if (use_color_info_) {
-    DXVA2_ExtendedFormat format =
-        gfx::ColorSpaceWin::GetExtendedFormat(color_space);
-    media_type->SetUINT32(MF_MT_YUV_MATRIX, format.VideoTransferMatrix);
-    media_type->SetUINT32(MF_MT_VIDEO_NOMINAL_RANGE, format.NominalRange);
-    media_type->SetUINT32(MF_MT_VIDEO_PRIMARIES, format.VideoPrimaries);
-    media_type->SetUINT32(MF_MT_TRANSFER_FUNCTION,
-                          format.VideoTransferFunction);
+  // Not sure if this call is expensive, let's only do it if the color
+  // space changes.
+  if (use_color_info_ && dx11_converter_color_space_ != color_space) {
+    video_context_->VideoProcessorSetStreamColorSpace1(
+        d3d11_processor_.get(), 0,
+        gfx::ColorSpaceWin::GetDXGIColorSpace(color_space));

jbauman 2017/02/15 02:01:39: Use QueryInterface to get an ID3D11VideoContext1 h
hubbe 2017/02/16 01:08:20: Done.

     dx11_converter_color_space_ = color_space;
   }
-
-  hr = video_format_converter_mft_->SetInputType(0, media_type.get(), 0);
-  if (FAILED(hr))
-    DCHECK(false);
-
-  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter input type",
-                                  PLATFORM_FAILURE, false);
-
-  // It appears that we fail to set MFVideoFormat_ARGB32 as the output media
-  // type in certain configurations. Try to fallback to MFVideoFormat_RGB32
-  // in such cases. If both fail, then bail.
-
-  bool media_type_set = false;
-  if (copy_nv12_textures_) {
-    media_type_set = SetTransformOutputType(video_format_converter_mft_.get(),
-                                            MFVideoFormat_NV12, width, height);
-    RETURN_AND_NOTIFY_ON_FAILURE(media_type_set,
-                                 "Failed to set NV12 converter output type",
-                                 PLATFORM_FAILURE, false);
-  }
-
-  if (!media_type_set) {
-    media_type_set = SetTransformOutputType(
-        video_format_converter_mft_.get(), MFVideoFormat_ARGB32, width, height);
-  }
-  if (!media_type_set) {
-    media_type_set = SetTransformOutputType(video_format_converter_mft_.get(),
-                                            MFVideoFormat_RGB32, width, height);
-  }
-
-  if (!media_type_set) {
-    LOG(ERROR) << "Failed to find a matching RGB output type in the converter";
-    return false;
-  }
-
-  dx11_video_format_converter_media_type_needs_init_ = false;
   return true;
 }
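The TODO(Hubbe) above refers to the second argument of CreateVideoProcessor, which selects one of the enumerator's rate-conversion capability groups; the patch currently hard-codes index 0. A hedged sketch of how an index could be chosen follows; the D3D11 calls are standard, but the selection criterion shown is purely illustrative and not part of this change.

  // Illustrative only: query the enumerator's capabilities and pick an index.
  D3D11_VIDEO_PROCESSOR_CAPS caps = {};
  HRESULT hr = enumerator_->GetVideoProcessorCaps(&caps);
  RETURN_ON_HR_FAILURE(hr, "Failed to get video processor caps", false);

  UINT chosen_index = 0;
  for (UINT i = 0; i < caps.RateConversionCapsCount; ++i) {
    D3D11_VIDEO_PROCESSOR_RATE_CONVERSION_CAPS rc_caps = {};
    if (SUCCEEDED(
            enumerator_->GetVideoProcessorRateConversionCaps(i, &rc_caps))) {
      chosen_index = i;  // A real implementation would match content needs.
      break;
    }
  }
  hr = video_device_->CreateVideoProcessor(enumerator_.get(), chosen_index,
                                           d3d11_processor_.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to create video processor.", false);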
 
 bool DXVAVideoDecodeAccelerator::GetVideoFrameDimensions(IMFSample* sample,
                                                          int* width,
                                                          int* height) {
   base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
   HRESULT hr = sample->GetBufferByIndex(0, output_buffer.Receive());
   RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", false);
 
(...skipping 88 matching lines...)
           base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers,
                      base::Unretained(this)));
 }
 
 uint32_t DXVAVideoDecodeAccelerator::GetTextureTarget() const {
   bool provide_nv12_textures = share_nv12_textures_ || copy_nv12_textures_;
   return provide_nv12_textures ? GL_TEXTURE_EXTERNAL_OES : GL_TEXTURE_2D;
 }
 
 }  // namespace media
