OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/gpu/dxva_video_decode_accelerator_win.h" | 5 #include "media/gpu/dxva_video_decode_accelerator_win.h" |
6 | 6 |
7 #include <memory> | 7 #include <memory> |
8 | 8 |
9 #if !defined(OS_WIN) | 9 #if !defined(OS_WIN) |
10 #error This file should only be built on Windows. | 10 #error This file should only be built on Windows. |
(...skipping 482 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
493 codec_(kUnknownVideoCodec), | 493 codec_(kUnknownVideoCodec), |
494 decoder_thread_("DXVAVideoDecoderThread"), | 494 decoder_thread_("DXVAVideoDecoderThread"), |
495 pending_flush_(false), | 495 pending_flush_(false), |
496 enable_low_latency_(gpu_preferences.enable_low_latency_dxva), | 496 enable_low_latency_(gpu_preferences.enable_low_latency_dxva), |
497 share_nv12_textures_(gpu_preferences.enable_zero_copy_dxgi_video && | 497 share_nv12_textures_(gpu_preferences.enable_zero_copy_dxgi_video && |
498 !workarounds.disable_dxgi_zero_copy_video), | 498 !workarounds.disable_dxgi_zero_copy_video), |
499 copy_nv12_textures_(gpu_preferences.enable_nv12_dxgi_video && | 499 copy_nv12_textures_(gpu_preferences.enable_nv12_dxgi_video && |
500 !workarounds.disable_nv12_dxgi_video), | 500 !workarounds.disable_nv12_dxgi_video), |
501 use_dx11_(false), | 501 use_dx11_(false), |
502 use_keyed_mutex_(false), | 502 use_keyed_mutex_(false), |
| 503 dx11_video_format_converter_media_type_needs_init_(true), |
503 using_angle_device_(false), | 504 using_angle_device_(false), |
504 enable_accelerated_vpx_decode_( | 505 enable_accelerated_vpx_decode_( |
505 gpu_preferences.enable_accelerated_vpx_decode), | 506 gpu_preferences.enable_accelerated_vpx_decode), |
506 processing_config_changed_(false), | 507 processing_config_changed_(false), |
507 weak_this_factory_(this) { | 508 weak_this_factory_(this) { |
508 weak_ptr_ = weak_this_factory_.GetWeakPtr(); | 509 weak_ptr_ = weak_this_factory_.GetWeakPtr(); |
509 memset(&input_stream_info_, 0, sizeof(input_stream_info_)); | 510 memset(&input_stream_info_, 0, sizeof(input_stream_info_)); |
510 memset(&output_stream_info_, 0, sizeof(output_stream_info_)); | 511 memset(&output_stream_info_, 0, sizeof(output_stream_info_)); |
511 use_color_info_ = base::FeatureList::IsEnabled(kVideoBlitColorAccuracy); | 512 use_color_info_ = base::FeatureList::IsEnabled(kVideoBlitColorAccuracy); |
512 } | 513 } |
(...skipping 348 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
861 hr = d3d11_device_manager_->ResetDevice(d3d11_device_.get(), | 862 hr = d3d11_device_manager_->ResetDevice(d3d11_device_.get(), |
862 dx11_dev_manager_reset_token_); | 863 dx11_dev_manager_reset_token_); |
863 RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false); | 864 RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false); |
864 | 865 |
865 D3D11_QUERY_DESC query_desc; | 866 D3D11_QUERY_DESC query_desc; |
866 query_desc.Query = D3D11_QUERY_EVENT; | 867 query_desc.Query = D3D11_QUERY_EVENT; |
867 query_desc.MiscFlags = 0; | 868 query_desc.MiscFlags = 0; |
868 hr = d3d11_device_->CreateQuery(&query_desc, d3d11_query_.Receive()); | 869 hr = d3d11_device_->CreateQuery(&query_desc, d3d11_query_.Receive()); |
869 RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device query", false); | 870 RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device query", false); |
870 | 871 |
871 hr = d3d11_device_.QueryInterface(video_device_.Receive()); | 872 HMODULE video_processor_dll = ::GetModuleHandle(L"msvproc.dll"); |
872 RETURN_ON_HR_FAILURE(hr, "Failed to get video device", false); | 873 RETURN_ON_FAILURE(video_processor_dll, "Failed to load video processor", |
| 874 false); |
873 | 875 |
874 hr = d3d11_device_context_.QueryInterface(video_context_.Receive()); | 876 hr = CreateCOMObjectFromDll(video_processor_dll, CLSID_VideoProcessorMFT, |
875 RETURN_ON_HR_FAILURE(hr, "Failed to get video context", false); | 877 __uuidof(IMFTransform), |
| 878 video_format_converter_mft_.ReceiveVoid()); |
| 879 RETURN_ON_HR_FAILURE(hr, "Failed to create video format converter", false); |
876 | 880 |
| 881 base::win::ScopedComPtr<IMFAttributes> converter_attributes; |
| 882 hr = video_format_converter_mft_->GetAttributes( |
| 883 converter_attributes.Receive()); |
| 884 RETURN_ON_HR_FAILURE(hr, "Failed to get converter attributes", false); |
| 885 |
| 886 hr = converter_attributes->SetUINT32(MF_XVP_PLAYBACK_MODE, TRUE); |
| 887 RETURN_ON_HR_FAILURE( |
| 888 hr, "Failed to set MF_XVP_PLAYBACK_MODE attribute on converter", false); |
| 889 |
| 890 hr = converter_attributes->SetUINT32(MF_LOW_LATENCY, FALSE); |
| 891 RETURN_ON_HR_FAILURE( |
| 892 hr, "Failed to set MF_LOW_LATENCY attribute on converter", false); |
877 return true; | 893 return true; |
878 } | 894 } |
879 | 895 |
880 void DXVAVideoDecodeAccelerator::Decode( | 896 void DXVAVideoDecodeAccelerator::Decode( |
881 const BitstreamBuffer& bitstream_buffer) { | 897 const BitstreamBuffer& bitstream_buffer) { |
882 TRACE_EVENT0("media", "DXVAVideoDecodeAccelerator::Decode"); | 898 TRACE_EVENT0("media", "DXVAVideoDecodeAccelerator::Decode"); |
883 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 899 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
884 | 900 |
885 // SharedMemory will take over the ownership of handle. | 901 // SharedMemory will take over the ownership of handle. |
886 base::SharedMemory shm(bitstream_buffer.handle(), true); | 902 base::SharedMemory shm(bitstream_buffer.handle(), true); |
(...skipping 1056 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1943 // resolution changes. We already handle that in the | 1959 // resolution changes. We already handle that in the |
1944 // HandleResolutionChanged() function. | 1960 // HandleResolutionChanged() function. |
1945 if (GetState() != kConfigChange) { | 1961 if (GetState() != kConfigChange) { |
1946 output_picture_buffers_.clear(); | 1962 output_picture_buffers_.clear(); |
1947 stale_output_picture_buffers_.clear(); | 1963 stale_output_picture_buffers_.clear(); |
1948 // We want to continue processing pending input after detecting a config | 1964 // We want to continue processing pending input after detecting a config |
1949 // change. | 1965 // change. |
1950 pending_input_buffers_.clear(); | 1966 pending_input_buffers_.clear(); |
1951 pictures_requested_ = false; | 1967 pictures_requested_ = false; |
1952 if (use_dx11_) { | 1968 if (use_dx11_) { |
1953 d3d11_processor_.Release(); | 1969 if (video_format_converter_mft_.get()) { |
1954 enumerator_.Release(); | 1970 video_format_converter_mft_->ProcessMessage( |
1955 video_context_.Release(); | 1971 MFT_MESSAGE_NOTIFY_END_STREAMING, 0); |
1956 video_device_.Release(); | 1972 video_format_converter_mft_.Release(); |
| 1973 } |
1957 d3d11_device_context_.Release(); | 1974 d3d11_device_context_.Release(); |
1958 d3d11_device_.Release(); | 1975 d3d11_device_.Release(); |
1959 d3d11_device_manager_.Release(); | 1976 d3d11_device_manager_.Release(); |
1960 d3d11_query_.Release(); | 1977 d3d11_query_.Release(); |
1961 multi_threaded_.Release(); | 1978 multi_threaded_.Release(); |
1962 processor_width_ = processor_height_ = 0; | 1979 dx11_video_format_converter_media_type_needs_init_ = true; |
1963 } else { | 1980 } else { |
1964 d3d9_.Release(); | 1981 d3d9_.Release(); |
1965 d3d9_device_ex_.Release(); | 1982 d3d9_device_ex_.Release(); |
1966 device_manager_.Release(); | 1983 device_manager_.Release(); |
1967 query_.Release(); | 1984 query_.Release(); |
1968 } | 1985 } |
1969 } | 1986 } |
1970 sent_drain_message_ = false; | 1987 sent_drain_message_ = false; |
1971 SetState(kUninitialized); | 1988 SetState(kUninitialized); |
1972 } | 1989 } |
(...skipping 280 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2253 // decoder to emit an output packet for every input packet. | 2270 // decoder to emit an output packet for every input packet. |
2254 // http://code.google.com/p/chromium/issues/detail?id=108121 | 2271 // http://code.google.com/p/chromium/issues/detail?id=108121 |
2255 // http://code.google.com/p/chromium/issues/detail?id=150925 | 2272 // http://code.google.com/p/chromium/issues/detail?id=150925 |
2256 main_thread_task_runner_->PostTask( | 2273 main_thread_task_runner_->PostTask( |
2257 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::NotifyInputBufferRead, | 2274 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::NotifyInputBufferRead, |
2258 weak_ptr_, input_buffer_id)); | 2275 weak_ptr_, input_buffer_id)); |
2259 } | 2276 } |
2260 | 2277 |
2261 void DXVAVideoDecodeAccelerator::HandleResolutionChanged(int width, | 2278 void DXVAVideoDecodeAccelerator::HandleResolutionChanged(int width, |
2262 int height) { | 2279 int height) { |
| 2280 dx11_video_format_converter_media_type_needs_init_ = true; |
| 2281 |
2263 main_thread_task_runner_->PostTask( | 2282 main_thread_task_runner_->PostTask( |
2264 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::DismissStaleBuffers, | 2283 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::DismissStaleBuffers, |
2265 weak_ptr_, false)); | 2284 weak_ptr_, false)); |
2266 | 2285 |
2267 main_thread_task_runner_->PostTask( | 2286 main_thread_task_runner_->PostTask( |
2268 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers, | 2287 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers, |
2269 weak_ptr_, width, height)); | 2288 weak_ptr_, width, height)); |
2270 } | 2289 } |
2271 | 2290 |
2272 void DXVAVideoDecodeAccelerator::DismissStaleBuffers(bool force) { | 2291 void DXVAVideoDecodeAccelerator::DismissStaleBuffers(bool force) { |
(...skipping 264 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2537 TRACE_EVENT0("media", "DXVAVideoDecodeAccelerator::CopyTexture"); | 2556 TRACE_EVENT0("media", "DXVAVideoDecodeAccelerator::CopyTexture"); |
2538 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); | 2557 DCHECK(main_thread_task_runner_->BelongsToCurrentThread()); |
2539 | 2558 |
2540 DCHECK(use_dx11_); | 2559 DCHECK(use_dx11_); |
2541 | 2560 |
2542 // The media foundation H.264 decoder outputs YUV12 textures which we | 2561 // The media foundation H.264 decoder outputs YUV12 textures which we |
2543 // cannot copy into ANGLE as they expect ARGB textures. In D3D land | 2562 // cannot copy into ANGLE as they expect ARGB textures. In D3D land |
2544 // the StretchRect API in the IDirect3DDevice9Ex interface did the color | 2563 // the StretchRect API in the IDirect3DDevice9Ex interface did the color |
2545 // space conversion for us. Sadly in DX11 land the API does not provide | 2564 // space conversion for us. Sadly in DX11 land the API does not provide |
2546 // a straightforward way to do this. | 2565 // a straightforward way to do this. |
| 2566 // We use the video processor MFT. |
| 2567 // https://msdn.microsoft.com/en-us/library/hh162913(v=vs.85).aspx |
| 2568 // This object implements a media foundation transform (IMFTransform) |
| 2569 // which follows the same contract as the decoder. The color space |
| 2570 // conversion as per msdn is done in the GPU. |
2547 | 2571 |
2548 D3D11_TEXTURE2D_DESC source_desc; | 2572 D3D11_TEXTURE2D_DESC source_desc; |
2549 src_texture->GetDesc(&source_desc); | 2573 src_texture->GetDesc(&source_desc); |
2550 if (!InitializeID3D11VideoProcessor(source_desc.Width, source_desc.Height, | 2574 |
2551 color_space)) { | 2575 // Set up the input and output types for the video processor MFT. |
2552 RETURN_AND_NOTIFY_ON_FAILURE(false, | 2576 if (!InitializeDX11VideoFormatConverterMediaType( |
2553 "Failed to initialize D3D11 video processor.", | 2577 source_desc.Width, source_desc.Height, color_space)) { |
2554 PLATFORM_FAILURE, ); | 2578 RETURN_AND_NOTIFY_ON_FAILURE( |
| 2579 false, "Failed to initialize media types for conversion.", |
| 2580 PLATFORM_FAILURE, ); |
2555 } | 2581 } |
2556 | 2582 |
2557 // The input to the video processor is the output sample. | 2583 // The input to the video processor is the output sample. |
2558 base::win::ScopedComPtr<IMFSample> input_sample_for_conversion; | 2584 base::win::ScopedComPtr<IMFSample> input_sample_for_conversion; |
2559 { | 2585 { |
2560 base::AutoLock lock(decoder_lock_); | 2586 base::AutoLock lock(decoder_lock_); |
2561 PendingSampleInfo& sample_info = pending_output_samples_.front(); | 2587 PendingSampleInfo& sample_info = pending_output_samples_.front(); |
2562 input_sample_for_conversion = sample_info.output_sample; | 2588 input_sample_for_conversion = sample_info.output_sample; |
2563 } | 2589 } |
2564 | 2590 |
(...skipping 12 matching lines...) Expand all Loading... |
2577 base::win::ScopedComPtr<IMFSample> input_sample, | 2603 base::win::ScopedComPtr<IMFSample> input_sample, |
2578 int picture_buffer_id, | 2604 int picture_buffer_id, |
2579 int input_buffer_id) { | 2605 int input_buffer_id) { |
2580 TRACE_EVENT0("media", | 2606 TRACE_EVENT0("media", |
2581 "DXVAVideoDecodeAccelerator::CopyTextureOnDecoderThread"); | 2607 "DXVAVideoDecodeAccelerator::CopyTextureOnDecoderThread"); |
2582 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 2608 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
2583 HRESULT hr = E_FAIL; | 2609 HRESULT hr = E_FAIL; |
2584 | 2610 |
2585 DCHECK(use_dx11_); | 2611 DCHECK(use_dx11_); |
2586 DCHECK(!!input_sample); | 2612 DCHECK(!!input_sample); |
2587 DCHECK(d3d11_processor_.get()); | 2613 DCHECK(video_format_converter_mft_.get()); |
2588 | 2614 |
2589 if (dest_keyed_mutex) { | 2615 if (dest_keyed_mutex) { |
2590 HRESULT hr = | 2616 HRESULT hr = |
2591 dest_keyed_mutex->AcquireSync(keyed_mutex_value, kAcquireSyncWaitMs); | 2617 dest_keyed_mutex->AcquireSync(keyed_mutex_value, kAcquireSyncWaitMs); |
2592 RETURN_AND_NOTIFY_ON_FAILURE( | 2618 RETURN_AND_NOTIFY_ON_FAILURE( |
2593 hr == S_OK, "D3D11 failed to acquire keyed mutex for texture.", | 2619 hr == S_OK, "D3D11 failed to acquire keyed mutex for texture.", |
2594 PLATFORM_FAILURE, ); | 2620 PLATFORM_FAILURE, ); |
2595 } | 2621 } |
| 2622 // The video processor MFT requires output samples to be allocated by the |
| 2623 // caller. We create a sample with a buffer backed with the ID3D11Texture2D |
| 2624 // interface exposed by ANGLE. This works nicely as this ensures that the |
| 2625 // video processor converts the color space of the output frame and copies |
| 2626 // the result into the ANGLE texture. |
| 2627 base::win::ScopedComPtr<IMFSample> output_sample; |
| 2628 hr = MFCreateSample(output_sample.Receive()); |
| 2629 if (FAILED(hr)) { |
| 2630 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to create output sample.", |
| 2631 PLATFORM_FAILURE, ); |
| 2632 } |
2596 | 2633 |
2597 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; | 2634 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; |
2598 hr = input_sample->GetBufferByIndex(0, output_buffer.Receive()); | 2635 hr = MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D), dest_texture, 0, |
2599 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", | 2636 FALSE, output_buffer.Receive()); |
2600 PLATFORM_FAILURE, ); | 2637 if (FAILED(hr)) { |
| 2638 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to create output sample.", |
| 2639 PLATFORM_FAILURE, ); |
| 2640 } |
2601 | 2641 |
2602 base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer; | 2642 output_sample->AddBuffer(output_buffer.get()); |
2603 hr = dxgi_buffer.QueryFrom(output_buffer.get()); | |
2604 RETURN_AND_NOTIFY_ON_HR_FAILURE( | |
2605 hr, "Failed to get DXGIBuffer from output sample", PLATFORM_FAILURE, ); | |
2606 UINT index = 0; | |
2607 hr = dxgi_buffer->GetSubresourceIndex(&index); | |
2608 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get resource index", | |
2609 PLATFORM_FAILURE, ); | |
2610 | 2643 |
2611 base::win::ScopedComPtr<ID3D11Texture2D> dx11_decoding_texture; | 2644 hr = video_format_converter_mft_->ProcessInput(0, input_sample.get(), 0); |
2612 hr = dxgi_buffer->GetResource(IID_PPV_ARGS(dx11_decoding_texture.Receive())); | 2645 if (FAILED(hr)) { |
2613 RETURN_AND_NOTIFY_ON_HR_FAILURE( | 2646 DCHECK(false); |
2614 hr, "Failed to get resource from output sample", PLATFORM_FAILURE, ); | 2647 RETURN_AND_NOTIFY_ON_HR_FAILURE( |
| 2648 hr, "Failed to convert output sample format.", PLATFORM_FAILURE, ); |
| 2649 } |
2615 | 2650 |
2616 D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC output_view_desc = { | 2651 input_sample.Release(); |
2617 D3D11_VPOV_DIMENSION_TEXTURE2D}; | |
2618 output_view_desc.Texture2D.MipSlice = 0; | |
2619 base::win::ScopedComPtr<ID3D11VideoProcessorOutputView> output_view; | |
2620 hr = video_device_->CreateVideoProcessorOutputView( | |
2621 dest_texture, enumerator_.get(), &output_view_desc, | |
2622 output_view.Receive()); | |
2623 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get output view", | |
2624 PLATFORM_FAILURE, ); | |
2625 | 2652 |
2626 D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC input_view_desc = {0}; | 2653 DWORD status = 0; |
2627 input_view_desc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D; | 2654 MFT_OUTPUT_DATA_BUFFER format_converter_output = {}; |
2628 input_view_desc.Texture2D.ArraySlice = index; | 2655 format_converter_output.pSample = output_sample.get(); |
2629 input_view_desc.Texture2D.MipSlice = 0; | 2656 hr = video_format_converter_mft_->ProcessOutput( |
2630 base::win::ScopedComPtr<ID3D11VideoProcessorInputView> input_view; | 2657 0, // No flags |
2631 hr = video_device_->CreateVideoProcessorInputView( | 2658 1, // # of out streams to pull from |
2632 dx11_decoding_texture.get(), enumerator_.get(), &input_view_desc, | 2659 &format_converter_output, &status); |
2633 input_view.Receive()); | |
2634 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get input view", | |
2635 PLATFORM_FAILURE, ); | |
2636 | 2660 |
2637 D3D11_VIDEO_PROCESSOR_STREAM streams = {0}; | 2661 if (FAILED(hr)) { |
2638 streams.Enable = TRUE; | 2662 DCHECK(false); |
2639 streams.pInputSurface = input_view.get(); | 2663 RETURN_AND_NOTIFY_ON_HR_FAILURE( |
2640 | 2664 hr, "Failed to convert output sample format.", PLATFORM_FAILURE, ); |
2641 hr = video_context_->VideoProcessorBlt(d3d11_processor_.get(), | 2665 } |
2642 output_view.get(), 0, 1, &streams); | |
2643 | |
2644 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "VideoProcessBlit failed", | |
2645 PLATFORM_FAILURE, ); | |
2646 | 2666 |
2647 if (dest_keyed_mutex) { | 2667 if (dest_keyed_mutex) { |
2648 HRESULT hr = dest_keyed_mutex->ReleaseSync(keyed_mutex_value + 1); | 2668 HRESULT hr = dest_keyed_mutex->ReleaseSync(keyed_mutex_value + 1); |
2649 RETURN_AND_NOTIFY_ON_FAILURE(hr == S_OK, "Failed to release keyed mutex.", | 2669 RETURN_AND_NOTIFY_ON_FAILURE(hr == S_OK, "Failed to release keyed mutex.", |
2650 PLATFORM_FAILURE, ); | 2670 PLATFORM_FAILURE, ); |
2651 | 2671 |
2652 main_thread_task_runner_->PostTask( | 2672 main_thread_task_runner_->PostTask( |
2653 FROM_HERE, | 2673 FROM_HERE, |
2654 base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, weak_ptr_, | 2674 base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, weak_ptr_, |
2655 nullptr, nullptr, picture_buffer_id, input_buffer_id)); | 2675 nullptr, nullptr, picture_buffer_id, input_buffer_id)); |
(...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2708 base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs)); | 2728 base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs)); |
2709 return; | 2729 return; |
2710 } | 2730 } |
2711 | 2731 |
2712 main_thread_task_runner_->PostTask( | 2732 main_thread_task_runner_->PostTask( |
2713 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, | 2733 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, |
2714 weak_ptr_, src_surface, dest_surface, | 2734 weak_ptr_, src_surface, dest_surface, |
2715 picture_buffer_id, input_buffer_id)); | 2735 picture_buffer_id, input_buffer_id)); |
2716 } | 2736 } |
2717 | 2737 |
2718 bool DXVAVideoDecodeAccelerator::InitializeID3D11VideoProcessor( | 2738 bool DXVAVideoDecodeAccelerator::InitializeDX11VideoFormatConverterMediaType( |
2719 int width, | 2739 int width, |
2720 int height, | 2740 int height, |
2721 const gfx::ColorSpace& color_space) { | 2741 const gfx::ColorSpace& color_space) { |
2722 if (width < processor_width_ || height != processor_height_) { | 2742 if (!dx11_video_format_converter_media_type_needs_init_ && |
2723 d3d11_processor_.Release(); | 2743 (!use_color_info_ || color_space == dx11_converter_color_space_)) { |
2724 enumerator_.Release(); | 2744 return true; |
2725 processor_width_ = 0; | |
2726 processor_height_ = 0; | |
2727 | |
2728 D3D11_VIDEO_PROCESSOR_CONTENT_DESC desc; | |
2729 desc.InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE; | |
2730 desc.InputFrameRate.Numerator = 60; | |
2731 desc.InputFrameRate.Denominator = 1; | |
2732 desc.InputWidth = width; | |
2733 desc.InputHeight = height; | |
2734 desc.OutputFrameRate.Numerator = 60; | |
2735 desc.OutputFrameRate.Denominator = 1; | |
2736 desc.OutputWidth = width; | |
2737 desc.OutputHeight = height; | |
2738 desc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL; | |
2739 | |
2740 HRESULT hr = video_device_->CreateVideoProcessorEnumerator( | |
2741 &desc, enumerator_.Receive()); | |
2742 RETURN_ON_HR_FAILURE(hr, "Failed to enumerate video processors", false); | |
2743 | |
2744 // TODO(Hubbe): Find correct index | |
2745 hr = video_device_->CreateVideoProcessor(enumerator_.get(), 0, | |
2746 d3d11_processor_.Receive()); | |
2747 RETURN_ON_HR_FAILURE(hr, "Failed to create video processor.", false); | |
2748 processor_width_ = width; | |
2749 processor_height_ = height; | |
2750 | |
2751 video_context_->VideoProcessorSetStreamAutoProcessingMode( | |
2752 d3d11_processor_.get(), 0, false); | |
2753 } | 2745 } |
2754 | 2746 |
| 2747 CHECK(video_format_converter_mft_.get()); |
| 2748 |
| 2749 HRESULT hr = video_format_converter_mft_->ProcessMessage( |
| 2750 MFT_MESSAGE_SET_D3D_MANAGER, |
| 2751 reinterpret_cast<ULONG_PTR>(d3d11_device_manager_.get())); |
| 2752 |
| 2753 if (FAILED(hr)) |
| 2754 DCHECK(false); |
| 2755 |
| 2756 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, |
| 2757 "Failed to initialize video format converter", |
| 2758 PLATFORM_FAILURE, false); |
| 2759 |
| 2760 video_format_converter_mft_->ProcessMessage(MFT_MESSAGE_NOTIFY_END_STREAMING, |
| 2761 0); |
| 2762 |
| 2763 base::win::ScopedComPtr<IMFMediaType> media_type; |
| 2764 hr = MFCreateMediaType(media_type.Receive()); |
| 2765 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFCreateMediaType failed", |
| 2766 PLATFORM_FAILURE, false); |
| 2767 |
| 2768 hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); |
| 2769 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set major input type", |
| 2770 PLATFORM_FAILURE, false); |
| 2771 |
| 2772 hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12); |
| 2773 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set input sub type", |
| 2774 PLATFORM_FAILURE, false); |
| 2775 |
| 2776 hr = MFSetAttributeSize(media_type.get(), MF_MT_FRAME_SIZE, width, height); |
| 2777 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set media type attributes", |
| 2778 PLATFORM_FAILURE, false); |
| 2779 |
| 2780 if (use_color_info_) { |
| 2781 DXVA2_ExtendedFormat format = |
| 2782 gfx::ColorSpaceWin::GetExtendedFormat(color_space); |
| 2783 media_type->SetUINT32(MF_MT_YUV_MATRIX, format.VideoTransferMatrix); |
| 2784 media_type->SetUINT32(MF_MT_VIDEO_NOMINAL_RANGE, format.NominalRange); |
| 2785 media_type->SetUINT32(MF_MT_VIDEO_PRIMARIES, format.VideoPrimaries); |
| 2786 media_type->SetUINT32(MF_MT_TRANSFER_FUNCTION, |
| 2787 format.VideoTransferFunction); |
| 2788 dx11_converter_color_space_ = color_space; |
| 2789 } |
| 2790 |
| 2791 hr = video_format_converter_mft_->SetInputType(0, media_type.get(), 0); |
| 2792 if (FAILED(hr)) |
| 2793 DCHECK(false); |
| 2794 |
| 2795 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to set converter input type", |
| 2796 PLATFORM_FAILURE, false); |
| 2797 |
| 2798 // It appears that we fail to set MFVideoFormat_ARGB32 as the output media |
| 2799 // type in certain configurations. Try to fallback to MFVideoFormat_RGB32 |
| 2800 // in such cases. If both fail, then bail. |
| 2801 |
| 2802 bool media_type_set = false; |
2755 if (copy_nv12_textures_) { | 2803 if (copy_nv12_textures_) { |
2756 // If we're copying NV12 textures, make sure we set the same | 2804 media_type_set = SetTransformOutputType(video_format_converter_mft_.get(), |
2757 // color space on input and output. | 2805 MFVideoFormat_NV12, width, height); |
2758 D3D11_VIDEO_PROCESSOR_COLOR_SPACE d3d11_color_space = {0}; | 2806 RETURN_AND_NOTIFY_ON_FAILURE(media_type_set, |
2759 d3d11_color_space.RGB_Range = 1; | 2807 "Failed to set NV12 converter output type", |
2760 d3d11_color_space.Nominal_Range = D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_0_255; | 2808 PLATFORM_FAILURE, false); |
| 2809 } |
2761 | 2810 |
2762 video_context_->VideoProcessorSetOutputColorSpace(d3d11_processor_.get(), | 2811 if (!media_type_set) { |
2763 &d3d11_color_space); | 2812 media_type_set = SetTransformOutputType( |
| 2813 video_format_converter_mft_.get(), MFVideoFormat_ARGB32, width, height); |
| 2814 } |
| 2815 if (!media_type_set) { |
| 2816 media_type_set = SetTransformOutputType(video_format_converter_mft_.get(), |
| 2817 MFVideoFormat_RGB32, width, height); |
| 2818 } |
2764 | 2819 |
2765 video_context_->VideoProcessorSetStreamColorSpace(d3d11_processor_.get(), 0, | 2820 if (!media_type_set) { |
2766 &d3d11_color_space); | 2821 LOG(ERROR) << "Failed to find a matching RGB output type in the converter"; |
2767 } else { | 2822 return false; |
2768 // Not sure if this call is expensive, let's only do it if the color | |
2769 // space changes. | |
2770 gfx::ColorSpace output_color_space = gfx::ColorSpace::CreateSRGB(); | |
2771 if (use_color_info_ && dx11_converter_color_space_ != color_space) { | |
2772 base::win::ScopedComPtr<ID3D11VideoContext1> video_context1; | |
2773 HRESULT hr = video_context_.QueryInterface(video_context1.Receive()); | |
2774 if (SUCCEEDED(hr)) { | |
2775 video_context1->VideoProcessorSetStreamColorSpace1( | |
2776 d3d11_processor_.get(), 0, | |
2777 gfx::ColorSpaceWin::GetDXGIColorSpace(color_space)); | |
2778 video_context1->VideoProcessorSetOutputColorSpace1( | |
2779 d3d11_processor_.get(), | |
2780 gfx::ColorSpaceWin::GetDXGIColorSpace(output_color_space)); | |
2781 } else { | |
2782 D3D11_VIDEO_PROCESSOR_COLOR_SPACE d3d11_color_space = | |
2783 gfx::ColorSpaceWin::GetD3D11ColorSpace(color_space); | |
2784 video_context_->VideoProcessorSetStreamColorSpace( | |
2785 d3d11_processor_.get(), 0, &d3d11_color_space); | |
2786 d3d11_color_space = | |
2787 gfx::ColorSpaceWin::GetD3D11ColorSpace(output_color_space); | |
2788 video_context_->VideoProcessorSetOutputColorSpace( | |
2789 d3d11_processor_.get(), &d3d11_color_space); | |
2790 } | |
2791 dx11_converter_color_space_ = color_space; | |
2792 } | |
2793 } | 2823 } |
| 2824 |
| 2825 dx11_video_format_converter_media_type_needs_init_ = false; |
2794 return true; | 2826 return true; |
2795 } | 2827 } |
2796 | 2828 |
2797 bool DXVAVideoDecodeAccelerator::GetVideoFrameDimensions(IMFSample* sample, | 2829 bool DXVAVideoDecodeAccelerator::GetVideoFrameDimensions(IMFSample* sample, |
2798 int* width, | 2830 int* width, |
2799 int* height) { | 2831 int* height) { |
2800 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; | 2832 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; |
2801 HRESULT hr = sample->GetBufferByIndex(0, output_buffer.Receive()); | 2833 HRESULT hr = sample->GetBufferByIndex(0, output_buffer.Receive()); |
2802 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", false); | 2834 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", false); |
2803 | 2835 |
(...skipping 88 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2892 base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers, | 2924 base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers, |
2893 base::Unretained(this))); | 2925 base::Unretained(this))); |
2894 } | 2926 } |
2895 | 2927 |
2896 uint32_t DXVAVideoDecodeAccelerator::GetTextureTarget() const { | 2928 uint32_t DXVAVideoDecodeAccelerator::GetTextureTarget() const { |
2897 bool provide_nv12_textures = share_nv12_textures_ || copy_nv12_textures_; | 2929 bool provide_nv12_textures = share_nv12_textures_ || copy_nv12_textures_; |
2898 return provide_nv12_textures ? GL_TEXTURE_EXTERNAL_OES : GL_TEXTURE_2D; | 2930 return provide_nv12_textures ? GL_TEXTURE_EXTERNAL_OES : GL_TEXTURE_2D; |
2899 } | 2931 } |
2900 | 2932 |
2901 } // namespace media | 2933 } // namespace media |
OLD | NEW |