OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/gpu/dxva_video_decode_accelerator_win.h" | 5 #include "media/gpu/dxva_video_decode_accelerator_win.h" |
6 | 6 |
7 #include <memory> | 7 #include <memory> |
8 | 8 |
9 #if !defined(OS_WIN) | 9 #if !defined(OS_WIN) |
10 #error This file should only be built on Windows. | 10 #error This file should only be built on Windows. |
(...skipping 268 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
279 // the decoder for input). If no alignment is required, provide 0. | 279 // the decoder for input). If no alignment is required, provide 0. |
280 static base::win::ScopedComPtr<IMFSample> CreateInputSample( | 280 static base::win::ScopedComPtr<IMFSample> CreateInputSample( |
281 const uint8_t* stream, | 281 const uint8_t* stream, |
282 uint32_t size, | 282 uint32_t size, |
283 uint32_t min_size, | 283 uint32_t min_size, |
284 int alignment) { | 284 int alignment) { |
285 CHECK(stream); | 285 CHECK(stream); |
286 CHECK_GT(size, 0U); | 286 CHECK_GT(size, 0U); |
287 base::win::ScopedComPtr<IMFSample> sample; | 287 base::win::ScopedComPtr<IMFSample> sample; |
288 sample = mf::CreateEmptySampleWithBuffer(std::max(min_size, size), alignment); | 288 sample = mf::CreateEmptySampleWithBuffer(std::max(min_size, size), alignment); |
289 RETURN_ON_FAILURE(sample.get(), "Failed to create empty sample", | 289 RETURN_ON_FAILURE(sample.Get(), "Failed to create empty sample", |
290 base::win::ScopedComPtr<IMFSample>()); | 290 base::win::ScopedComPtr<IMFSample>()); |
291 | 291 |
292 base::win::ScopedComPtr<IMFMediaBuffer> buffer; | 292 base::win::ScopedComPtr<IMFMediaBuffer> buffer; |
293 HRESULT hr = sample->GetBufferByIndex(0, buffer.Receive()); | 293 HRESULT hr = sample->GetBufferByIndex(0, buffer.Receive()); |
294 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from sample", | 294 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from sample", |
295 base::win::ScopedComPtr<IMFSample>()); | 295 base::win::ScopedComPtr<IMFSample>()); |
296 | 296 |
297 DWORD max_length = 0; | 297 DWORD max_length = 0; |
298 DWORD current_length = 0; | 298 DWORD current_length = 0; |
299 uint8_t* destination = NULL; | 299 uint8_t* destination = NULL; |
(...skipping 342 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
642 config_change_detector_.reset(new H264ConfigChangeDetector); | 642 config_change_detector_.reset(new H264ConfigChangeDetector); |
643 | 643 |
644 SetState(kNormal); | 644 SetState(kNormal); |
645 | 645 |
646 return StartDecoderThread(); | 646 return StartDecoderThread(); |
647 } | 647 } |
648 | 648 |
649 bool DXVAVideoDecodeAccelerator::CreateD3DDevManager() { | 649 bool DXVAVideoDecodeAccelerator::CreateD3DDevManager() { |
650 TRACE_EVENT0("gpu", "DXVAVideoDecodeAccelerator_CreateD3DDevManager"); | 650 TRACE_EVENT0("gpu", "DXVAVideoDecodeAccelerator_CreateD3DDevManager"); |
651 // The device may exist if the last state was a config change. | 651 // The device may exist if the last state was a config change. |
652 if (d3d9_.get()) | 652 if (d3d9_.Get()) |
653 return true; | 653 return true; |
654 | 654 |
655 HRESULT hr = E_FAIL; | 655 HRESULT hr = E_FAIL; |
656 | 656 |
657 hr = Direct3DCreate9Ex(D3D_SDK_VERSION, d3d9_.Receive()); | 657 hr = Direct3DCreate9Ex(D3D_SDK_VERSION, d3d9_.Receive()); |
658 RETURN_ON_HR_FAILURE(hr, "Direct3DCreate9Ex failed", false); | 658 RETURN_ON_HR_FAILURE(hr, "Direct3DCreate9Ex failed", false); |
659 | 659 |
660 hr = d3d9_->CheckDeviceFormatConversion( | 660 hr = d3d9_->CheckDeviceFormatConversion( |
661 D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, | 661 D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, |
662 static_cast<D3DFORMAT>(MAKEFOURCC('N', 'V', '1', '2')), D3DFMT_X8R8G8B8); | 662 static_cast<D3DFORMAT>(MAKEFOURCC('N', 'V', '1', '2')), D3DFMT_X8R8G8B8); |
663 RETURN_ON_HR_FAILURE(hr, "D3D9 driver does not support H/W format conversion", | 663 RETURN_ON_HR_FAILURE(hr, "D3D9 driver does not support H/W format conversion", |
664 false); | 664 false); |
665 | 665 |
666 base::win::ScopedComPtr<IDirect3DDevice9> angle_device = | 666 base::win::ScopedComPtr<IDirect3DDevice9> angle_device = |
667 gl::QueryD3D9DeviceObjectFromANGLE(); | 667 gl::QueryD3D9DeviceObjectFromANGLE(); |
668 if (angle_device.get()) | 668 if (angle_device.Get()) |
669 using_angle_device_ = true; | 669 using_angle_device_ = true; |
670 | 670 |
671 if (using_angle_device_) { | 671 if (using_angle_device_) { |
672 hr = d3d9_device_ex_.QueryFrom(angle_device.get()); | 672 hr = d3d9_device_ex_.QueryFrom(angle_device.Get()); |
673 RETURN_ON_HR_FAILURE( | 673 RETURN_ON_HR_FAILURE( |
674 hr, "QueryInterface for IDirect3DDevice9Ex from angle device failed", | 674 hr, "QueryInterface for IDirect3DDevice9Ex from angle device failed", |
675 false); | 675 false); |
676 } else { | 676 } else { |
677 D3DPRESENT_PARAMETERS present_params = {0}; | 677 D3DPRESENT_PARAMETERS present_params = {0}; |
678 present_params.BackBufferWidth = 1; | 678 present_params.BackBufferWidth = 1; |
679 present_params.BackBufferHeight = 1; | 679 present_params.BackBufferHeight = 1; |
680 present_params.BackBufferFormat = D3DFMT_UNKNOWN; | 680 present_params.BackBufferFormat = D3DFMT_UNKNOWN; |
681 present_params.BackBufferCount = 1; | 681 present_params.BackBufferCount = 1; |
682 present_params.SwapEffect = D3DSWAPEFFECT_DISCARD; | 682 present_params.SwapEffect = D3DSWAPEFFECT_DISCARD; |
683 present_params.hDeviceWindow = NULL; | 683 present_params.hDeviceWindow = NULL; |
684 present_params.Windowed = TRUE; | 684 present_params.Windowed = TRUE; |
685 present_params.Flags = D3DPRESENTFLAG_VIDEO; | 685 present_params.Flags = D3DPRESENTFLAG_VIDEO; |
686 present_params.FullScreen_RefreshRateInHz = 0; | 686 present_params.FullScreen_RefreshRateInHz = 0; |
687 present_params.PresentationInterval = 0; | 687 present_params.PresentationInterval = 0; |
688 | 688 |
689 hr = d3d9_->CreateDeviceEx( | 689 hr = d3d9_->CreateDeviceEx( |
690 D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, NULL, | 690 D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, NULL, |
691 D3DCREATE_FPU_PRESERVE | D3DCREATE_MIXED_VERTEXPROCESSING | | 691 D3DCREATE_FPU_PRESERVE | D3DCREATE_MIXED_VERTEXPROCESSING | |
692 D3DCREATE_MULTITHREADED, | 692 D3DCREATE_MULTITHREADED, |
693 &present_params, NULL, d3d9_device_ex_.Receive()); | 693 &present_params, NULL, d3d9_device_ex_.Receive()); |
694 RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device", false); | 694 RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device", false); |
695 } | 695 } |
696 | 696 |
697 hr = DXVA2CreateDirect3DDeviceManager9(&dev_manager_reset_token_, | 697 hr = DXVA2CreateDirect3DDeviceManager9(&dev_manager_reset_token_, |
698 device_manager_.Receive()); | 698 device_manager_.Receive()); |
699 RETURN_ON_HR_FAILURE(hr, "DXVA2CreateDirect3DDeviceManager9 failed", false); | 699 RETURN_ON_HR_FAILURE(hr, "DXVA2CreateDirect3DDeviceManager9 failed", false); |
700 | 700 |
701 hr = device_manager_->ResetDevice(d3d9_device_ex_.get(), | 701 hr = device_manager_->ResetDevice(d3d9_device_ex_.Get(), |
702 dev_manager_reset_token_); | 702 dev_manager_reset_token_); |
703 RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false); | 703 RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false); |
704 | 704 |
705 hr = d3d9_device_ex_->CreateQuery(D3DQUERYTYPE_EVENT, query_.Receive()); | 705 hr = d3d9_device_ex_->CreateQuery(D3DQUERYTYPE_EVENT, query_.Receive()); |
706 RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device query", false); | 706 RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device query", false); |
707 // Ensure query_ API works (to avoid an infinite loop later in | 707 // Ensure query_ API works (to avoid an infinite loop later in |
708 // CopyOutputSampleDataToPictureBuffer). | 708 // CopyOutputSampleDataToPictureBuffer). |
709 hr = query_->Issue(D3DISSUE_END); | 709 hr = query_->Issue(D3DISSUE_END); |
710 RETURN_ON_HR_FAILURE(hr, "Failed to issue END test query", false); | 710 RETURN_ON_HR_FAILURE(hr, "Failed to issue END test query", false); |
711 | 711 |
712 CreateVideoProcessor(); | 712 CreateVideoProcessor(); |
713 return true; | 713 return true; |
714 } | 714 } |
715 | 715 |
716 bool DXVAVideoDecodeAccelerator::CreateVideoProcessor() { | 716 bool DXVAVideoDecodeAccelerator::CreateVideoProcessor() { |
717 if (!use_color_info_) | 717 if (!use_color_info_) |
718 return false; | 718 return false; |
719 | 719 |
720 // TODO(Hubbe): Don't try again if we tried and failed already. | 720 // TODO(Hubbe): Don't try again if we tried and failed already. |
721 if (video_processor_service_.get()) | 721 if (video_processor_service_.Get()) |
722 return true; | 722 return true; |
723 HRESULT hr = DXVA2CreateVideoService(d3d9_device_ex_.get(), | 723 HRESULT hr = DXVA2CreateVideoService(d3d9_device_ex_.Get(), |
724 IID_IDirectXVideoProcessorService, | 724 IID_IDirectXVideoProcessorService, |
725 video_processor_service_.ReceiveVoid()); | 725 video_processor_service_.ReceiveVoid()); |
726 RETURN_ON_HR_FAILURE(hr, "DXVA2CreateVideoService failed", false); | 726 RETURN_ON_HR_FAILURE(hr, "DXVA2CreateVideoService failed", false); |
727 | 727 |
728 // TODO(Hubbe): Use actual video settings. | 728 // TODO(Hubbe): Use actual video settings. |
729 DXVA2_VideoDesc inputDesc; | 729 DXVA2_VideoDesc inputDesc; |
730 inputDesc.SampleWidth = 1920; | 730 inputDesc.SampleWidth = 1920; |
731 inputDesc.SampleHeight = 1080; | 731 inputDesc.SampleHeight = 1080; |
732 inputDesc.SampleFormat.VideoChromaSubsampling = | 732 inputDesc.SampleFormat.VideoChromaSubsampling = |
733 DXVA2_VideoChromaSubsampling_MPEG2; | 733 DXVA2_VideoChromaSubsampling_MPEG2; |
(...skipping 57 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
791 processor_->GetProcAmpRange(DXVA2_ProcAmp_Saturation, &range); | 791 processor_->GetProcAmpRange(DXVA2_ProcAmp_Saturation, &range); |
792 default_procamp_values_.Saturation = range.DefaultValue; | 792 default_procamp_values_.Saturation = range.DefaultValue; |
793 | 793 |
794 return true; | 794 return true; |
795 } | 795 } |
796 return false; | 796 return false; |
797 } | 797 } |
798 | 798 |
799 bool DXVAVideoDecodeAccelerator::CreateDX11DevManager() { | 799 bool DXVAVideoDecodeAccelerator::CreateDX11DevManager() { |
800 // The device may exist if the last state was a config change. | 800 // The device may exist if the last state was a config change. |
801 if (d3d11_device_.get()) | 801 if (d3d11_device_.Get()) |
802 return true; | 802 return true; |
803 HRESULT hr = create_dxgi_device_manager_(&dx11_dev_manager_reset_token_, | 803 HRESULT hr = create_dxgi_device_manager_(&dx11_dev_manager_reset_token_, |
804 d3d11_device_manager_.Receive()); | 804 d3d11_device_manager_.Receive()); |
805 RETURN_ON_HR_FAILURE(hr, "MFCreateDXGIDeviceManager failed", false); | 805 RETURN_ON_HR_FAILURE(hr, "MFCreateDXGIDeviceManager failed", false); |
806 | 806 |
807 angle_device_ = gl::QueryD3D11DeviceObjectFromANGLE(); | 807 angle_device_ = gl::QueryD3D11DeviceObjectFromANGLE(); |
808 if (!angle_device_) | 808 if (!angle_device_) |
809 copy_nv12_textures_ = false; | 809 copy_nv12_textures_ = false; |
810 if (share_nv12_textures_) { | 810 if (share_nv12_textures_) { |
811 RETURN_ON_FAILURE(angle_device_.get(), "Failed to get d3d11 device", false); | 811 RETURN_ON_FAILURE(angle_device_.Get(), "Failed to get d3d11 device", false); |
812 | 812 |
813 using_angle_device_ = true; | 813 using_angle_device_ = true; |
814 d3d11_device_ = angle_device_; | 814 d3d11_device_ = angle_device_; |
815 } else { | 815 } else { |
816 // This array defines the set of DirectX hardware feature levels we support. | 816 // This array defines the set of DirectX hardware feature levels we support. |
817 // The ordering MUST be preserved. All applications are assumed to support | 817 // The ordering MUST be preserved. All applications are assumed to support |
818 // 9.1 unless otherwise stated by the application. | 818 // 9.1 unless otherwise stated by the application. |
819 D3D_FEATURE_LEVEL feature_levels[] = { | 819 D3D_FEATURE_LEVEL feature_levels[] = { |
820 D3D_FEATURE_LEVEL_11_1, D3D_FEATURE_LEVEL_11_0, D3D_FEATURE_LEVEL_10_1, | 820 D3D_FEATURE_LEVEL_11_1, D3D_FEATURE_LEVEL_11_0, D3D_FEATURE_LEVEL_10_1, |
821 D3D_FEATURE_LEVEL_10_0, D3D_FEATURE_LEVEL_9_3, D3D_FEATURE_LEVEL_9_2, | 821 D3D_FEATURE_LEVEL_10_0, D3D_FEATURE_LEVEL_9_3, D3D_FEATURE_LEVEL_9_2, |
(...skipping 54 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
876 hr = d3d11_device_->CheckFormatSupport(DXGI_FORMAT_R16G16B16A16_FLOAT, | 876 hr = d3d11_device_->CheckFormatSupport(DXGI_FORMAT_R16G16B16A16_FLOAT, |
877 &fp16_format_support); | 877 &fp16_format_support); |
878 if (FAILED(hr) || | 878 if (FAILED(hr) || |
879 !(fp16_format_support & D3D11_FORMAT_SUPPORT_VIDEO_PROCESSOR_OUTPUT)) | 879 !(fp16_format_support & D3D11_FORMAT_SUPPORT_VIDEO_PROCESSOR_OUTPUT)) |
880 use_fp16_ = false; | 880 use_fp16_ = false; |
881 | 881 |
882 // Enable multithreaded mode on the device. This ensures that accesses to | 882 // Enable multithreaded mode on the device. This ensures that accesses to |
883 // context are synchronized across threads. We have multiple threads | 883 // context are synchronized across threads. We have multiple threads |
884 // accessing the context, the media foundation decoder threads and the | 884 // accessing the context, the media foundation decoder threads and the |
885 // decoder thread via the video format conversion transform. | 885 // decoder thread via the video format conversion transform. |
886 hr = multi_threaded_.QueryFrom(d3d11_device_.get()); | 886 hr = multi_threaded_.QueryFrom(d3d11_device_.Get()); |
887 RETURN_ON_HR_FAILURE(hr, "Failed to query ID3D10Multithread", false); | 887 RETURN_ON_HR_FAILURE(hr, "Failed to query ID3D10Multithread", false); |
888 multi_threaded_->SetMultithreadProtected(TRUE); | 888 multi_threaded_->SetMultithreadProtected(TRUE); |
889 | 889 |
890 hr = d3d11_device_manager_->ResetDevice(d3d11_device_.get(), | 890 hr = d3d11_device_manager_->ResetDevice(d3d11_device_.Get(), |
891 dx11_dev_manager_reset_token_); | 891 dx11_dev_manager_reset_token_); |
892 RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false); | 892 RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false); |
893 | 893 |
894 D3D11_QUERY_DESC query_desc; | 894 D3D11_QUERY_DESC query_desc; |
895 query_desc.Query = D3D11_QUERY_EVENT; | 895 query_desc.Query = D3D11_QUERY_EVENT; |
896 query_desc.MiscFlags = 0; | 896 query_desc.MiscFlags = 0; |
897 hr = d3d11_device_->CreateQuery(&query_desc, d3d11_query_.Receive()); | 897 hr = d3d11_device_->CreateQuery(&query_desc, d3d11_query_.Receive()); |
898 RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device query", false); | 898 RETURN_ON_HR_FAILURE(hr, "Failed to create DX11 device query", false); |
899 | 899 |
900 return true; | 900 return true; |
(...skipping 25 matching lines...) Expand all Loading... |
926 | 926 |
927 base::win::ScopedComPtr<IMFSample> sample; | 927 base::win::ScopedComPtr<IMFSample> sample; |
928 RETURN_AND_NOTIFY_ON_FAILURE(shm.Map(bitstream_buffer.size()), | 928 RETURN_AND_NOTIFY_ON_FAILURE(shm.Map(bitstream_buffer.size()), |
929 "Failed in base::SharedMemory::Map", | 929 "Failed in base::SharedMemory::Map", |
930 PLATFORM_FAILURE, ); | 930 PLATFORM_FAILURE, ); |
931 | 931 |
932 sample = CreateInputSample( | 932 sample = CreateInputSample( |
933 reinterpret_cast<const uint8_t*>(shm.memory()), bitstream_buffer.size(), | 933 reinterpret_cast<const uint8_t*>(shm.memory()), bitstream_buffer.size(), |
934 std::min<uint32_t>(bitstream_buffer.size(), input_stream_info_.cbSize), | 934 std::min<uint32_t>(bitstream_buffer.size(), input_stream_info_.cbSize), |
935 input_stream_info_.cbAlignment); | 935 input_stream_info_.cbAlignment); |
936 RETURN_AND_NOTIFY_ON_FAILURE(sample.get(), "Failed to create input sample", | 936 RETURN_AND_NOTIFY_ON_FAILURE(sample.Get(), "Failed to create input sample", |
937 PLATFORM_FAILURE, ); | 937 PLATFORM_FAILURE, ); |
938 | 938 |
939 RETURN_AND_NOTIFY_ON_HR_FAILURE( | 939 RETURN_AND_NOTIFY_ON_HR_FAILURE( |
940 sample->SetSampleTime(bitstream_buffer.id()), | 940 sample->SetSampleTime(bitstream_buffer.id()), |
941 "Failed to associate input buffer id with sample", PLATFORM_FAILURE, ); | 941 "Failed to associate input buffer id with sample", PLATFORM_FAILURE, ); |
942 | 942 |
943 decoder_thread_task_runner_->PostTask( | 943 decoder_thread_task_runner_->PostTask( |
944 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::DecodeInternal, | 944 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::DecodeInternal, |
945 base::Unretained(this), sample)); | 945 base::Unretained(this), sample)); |
946 } | 946 } |
(...skipping 350 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1297 // a DXVA decoder instance for that resolution and profile. If that succeeds | 1297 // a DXVA decoder instance for that resolution and profile. If that succeeds |
1298 // we assume that the driver supports H/W H.264 decoding for that resolution. | 1298 // we assume that the driver supports H/W H.264 decoding for that resolution. |
1299 HRESULT hr = E_FAIL; | 1299 HRESULT hr = E_FAIL; |
1300 base::win::ScopedComPtr<ID3D11Device> device; | 1300 base::win::ScopedComPtr<ID3D11Device> device; |
1301 | 1301 |
1302 { | 1302 { |
1303 TRACE_EVENT0("gpu,startup", | 1303 TRACE_EVENT0("gpu,startup", |
1304 "GetMaxH264Resolution. QueryDeviceObjectFromANGLE"); | 1304 "GetMaxH264Resolution. QueryDeviceObjectFromANGLE"); |
1305 | 1305 |
1306 device = gl::QueryD3D11DeviceObjectFromANGLE(); | 1306 device = gl::QueryD3D11DeviceObjectFromANGLE(); |
1307 if (!device.get()) | 1307 if (!device.Get()) |
1308 return max_resolution; | 1308 return max_resolution; |
1309 } | 1309 } |
1310 | 1310 |
1311 base::win::ScopedComPtr<ID3D11VideoDevice> video_device; | 1311 base::win::ScopedComPtr<ID3D11VideoDevice> video_device; |
1312 hr = device.QueryInterface(__uuidof(ID3D11VideoDevice), | 1312 hr = device.QueryInterface(__uuidof(ID3D11VideoDevice), |
1313 video_device.ReceiveVoid()); | 1313 video_device.ReceiveVoid()); |
1314 if (FAILED(hr)) | 1314 if (FAILED(hr)) |
1315 return max_resolution; | 1315 return max_resolution; |
1316 | 1316 |
1317 GUID decoder_guid = {}; | 1317 GUID decoder_guid = {}; |
(...skipping 13 matching lines...) Expand all Loading... |
1331 found = true; | 1331 found = true; |
1332 break; | 1332 break; |
1333 } | 1333 } |
1334 } | 1334 } |
1335 if (!found) | 1335 if (!found) |
1336 return max_resolution; | 1336 return max_resolution; |
1337 } | 1337 } |
1338 | 1338 |
1339 // Legacy AMD drivers with UVD3 or earlier and some Intel GPU's crash while | 1339 // Legacy AMD drivers with UVD3 or earlier and some Intel GPU's crash while |
1340 // creating surfaces larger than 1920 x 1088. | 1340 // creating surfaces larger than 1920 x 1088. |
1341 if (IsLegacyGPU(device.get())) | 1341 if (IsLegacyGPU(device.Get())) |
1342 return max_resolution; | 1342 return max_resolution; |
1343 | 1343 |
1344 // We look for the following resolutions in the driver. | 1344 // We look for the following resolutions in the driver. |
1345 // TODO(ananta) | 1345 // TODO(ananta) |
1346 // Look into whether this list needs to be expanded. | 1346 // Look into whether this list needs to be expanded. |
1347 static std::pair<int, int> resolution_array[] = { | 1347 static std::pair<int, int> resolution_array[] = { |
1348 // Use 1088 to account for 16x16 macroblocks. | 1348 // Use 1088 to account for 16x16 macroblocks. |
1349 std::make_pair(1920, 1088), std::make_pair(2560, 1440), | 1349 std::make_pair(1920, 1088), std::make_pair(2560, 1440), |
1350 std::make_pair(3840, 2160), std::make_pair(4096, 2160), | 1350 std::make_pair(3840, 2160), std::make_pair(4096, 2160), |
1351 std::make_pair(4096, 2304), | 1351 std::make_pair(4096, 2304), |
(...skipping 15 matching lines...) Expand all Loading... |
1367 return max_resolution; | 1367 return max_resolution; |
1368 | 1368 |
1369 D3D11_VIDEO_DECODER_CONFIG config = {}; | 1369 D3D11_VIDEO_DECODER_CONFIG config = {}; |
1370 hr = video_device->GetVideoDecoderConfig(&desc, 0, &config); | 1370 hr = video_device->GetVideoDecoderConfig(&desc, 0, &config); |
1371 if (FAILED(hr)) | 1371 if (FAILED(hr)) |
1372 return max_resolution; | 1372 return max_resolution; |
1373 | 1373 |
1374 base::win::ScopedComPtr<ID3D11VideoDecoder> video_decoder; | 1374 base::win::ScopedComPtr<ID3D11VideoDecoder> video_decoder; |
1375 hr = video_device->CreateVideoDecoder(&desc, &config, | 1375 hr = video_device->CreateVideoDecoder(&desc, &config, |
1376 video_decoder.Receive()); | 1376 video_decoder.Receive()); |
1377 if (!video_decoder.get()) | 1377 if (!video_decoder.Get()) |
1378 return max_resolution; | 1378 return max_resolution; |
1379 | 1379 |
1380 max_resolution = resolution_array[res_idx]; | 1380 max_resolution = resolution_array[res_idx]; |
1381 } | 1381 } |
1382 } | 1382 } |
1383 return max_resolution; | 1383 return max_resolution; |
1384 } | 1384 } |
1385 | 1385 |
1386 // static | 1386 // static |
1387 bool DXVAVideoDecodeAccelerator::IsLegacyGPU(ID3D11Device* device) { | 1387 bool DXVAVideoDecodeAccelerator::IsLegacyGPU(ID3D11Device* device) { |
(...skipping 128 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1516 RETURN_ON_FAILURE(CheckDecoderDxvaSupport(), | 1516 RETURN_ON_FAILURE(CheckDecoderDxvaSupport(), |
1517 "Failed to check decoder DXVA support", false); | 1517 "Failed to check decoder DXVA support", false); |
1518 | 1518 |
1519 ULONG_PTR device_manager_to_use = NULL; | 1519 ULONG_PTR device_manager_to_use = NULL; |
1520 if (use_dx11_) { | 1520 if (use_dx11_) { |
1521 CHECK(create_dxgi_device_manager_); | 1521 CHECK(create_dxgi_device_manager_); |
1522 RETURN_AND_NOTIFY_ON_FAILURE(CreateDX11DevManager(), | 1522 RETURN_AND_NOTIFY_ON_FAILURE(CreateDX11DevManager(), |
1523 "Failed to initialize DX11 device and manager", | 1523 "Failed to initialize DX11 device and manager", |
1524 PLATFORM_FAILURE, false); | 1524 PLATFORM_FAILURE, false); |
1525 device_manager_to_use = | 1525 device_manager_to_use = |
1526 reinterpret_cast<ULONG_PTR>(d3d11_device_manager_.get()); | 1526 reinterpret_cast<ULONG_PTR>(d3d11_device_manager_.Get()); |
1527 } else { | 1527 } else { |
1528 RETURN_AND_NOTIFY_ON_FAILURE(CreateD3DDevManager(), | 1528 RETURN_AND_NOTIFY_ON_FAILURE(CreateD3DDevManager(), |
1529 "Failed to initialize D3D device and manager", | 1529 "Failed to initialize D3D device and manager", |
1530 PLATFORM_FAILURE, false); | 1530 PLATFORM_FAILURE, false); |
1531 device_manager_to_use = reinterpret_cast<ULONG_PTR>(device_manager_.get()); | 1531 device_manager_to_use = reinterpret_cast<ULONG_PTR>(device_manager_.Get()); |
1532 } | 1532 } |
1533 | 1533 |
1534 hr = decoder_->ProcessMessage(MFT_MESSAGE_SET_D3D_MANAGER, | 1534 hr = decoder_->ProcessMessage(MFT_MESSAGE_SET_D3D_MANAGER, |
1535 device_manager_to_use); | 1535 device_manager_to_use); |
1536 if (use_dx11_) { | 1536 if (use_dx11_) { |
1537 RETURN_ON_HR_FAILURE(hr, "Failed to pass DX11 manager to decoder", false); | 1537 RETURN_ON_HR_FAILURE(hr, "Failed to pass DX11 manager to decoder", false); |
1538 } else { | 1538 } else { |
1539 RETURN_ON_HR_FAILURE(hr, "Failed to pass D3D manager to decoder", false); | 1539 RETURN_ON_HR_FAILURE(hr, "Failed to pass D3D manager to decoder", false); |
1540 } | 1540 } |
1541 | 1541 |
(...skipping 114 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1656 hr = media_type->SetGUID(MF_MT_SUBTYPE, MEDIASUBTYPE_VP80); | 1656 hr = media_type->SetGUID(MF_MT_SUBTYPE, MEDIASUBTYPE_VP80); |
1657 } else if (codec_ == kCodecVP9) { | 1657 } else if (codec_ == kCodecVP9) { |
1658 hr = media_type->SetGUID(MF_MT_SUBTYPE, MEDIASUBTYPE_VP90); | 1658 hr = media_type->SetGUID(MF_MT_SUBTYPE, MEDIASUBTYPE_VP90); |
1659 } else { | 1659 } else { |
1660 NOTREACHED(); | 1660 NOTREACHED(); |
1661 RETURN_ON_FAILURE(false, "Unsupported codec on input media type.", false); | 1661 RETURN_ON_FAILURE(false, "Unsupported codec on input media type.", false); |
1662 } | 1662 } |
1663 RETURN_ON_HR_FAILURE(hr, "Failed to set subtype", false); | 1663 RETURN_ON_HR_FAILURE(hr, "Failed to set subtype", false); |
1664 | 1664 |
1665 if (using_ms_vp9_mft_) { | 1665 if (using_ms_vp9_mft_) { |
1666 hr = MFSetAttributeSize(media_type.get(), MF_MT_FRAME_SIZE, | 1666 hr = MFSetAttributeSize(media_type.Get(), MF_MT_FRAME_SIZE, |
1667 config_.initial_expected_coded_size.width(), | 1667 config_.initial_expected_coded_size.width(), |
1668 config_.initial_expected_coded_size.height()); | 1668 config_.initial_expected_coded_size.height()); |
1669 RETURN_ON_HR_FAILURE(hr, "Failed to set attribute size", false); | 1669 RETURN_ON_HR_FAILURE(hr, "Failed to set attribute size", false); |
1670 | 1670 |
1671 hr = media_type->SetUINT32(MF_MT_INTERLACE_MODE, | 1671 hr = media_type->SetUINT32(MF_MT_INTERLACE_MODE, |
1672 MFVideoInterlace_Progressive); | 1672 MFVideoInterlace_Progressive); |
1673 RETURN_ON_HR_FAILURE(hr, "Failed to set interlace mode", false); | 1673 RETURN_ON_HR_FAILURE(hr, "Failed to set interlace mode", false); |
1674 } else { | 1674 } else { |
1675 // Not sure about this. msdn recommends setting this value on the input | 1675 // Not sure about this. msdn recommends setting this value on the input |
1676 // media type. | 1676 // media type. |
1677 hr = media_type->SetUINT32(MF_MT_INTERLACE_MODE, | 1677 hr = media_type->SetUINT32(MF_MT_INTERLACE_MODE, |
1678 MFVideoInterlace_MixedInterlaceOrProgressive); | 1678 MFVideoInterlace_MixedInterlaceOrProgressive); |
1679 RETURN_ON_HR_FAILURE(hr, "Failed to set interlace mode", false); | 1679 RETURN_ON_HR_FAILURE(hr, "Failed to set interlace mode", false); |
1680 } | 1680 } |
1681 | 1681 |
1682 hr = decoder_->SetInputType(0, media_type.get(), 0); // No flags | 1682 hr = decoder_->SetInputType(0, media_type.Get(), 0); // No flags |
1683 RETURN_ON_HR_FAILURE(hr, "Failed to set decoder input type", false); | 1683 RETURN_ON_HR_FAILURE(hr, "Failed to set decoder input type", false); |
1684 return true; | 1684 return true; |
1685 } | 1685 } |
1686 | 1686 |
1687 bool DXVAVideoDecodeAccelerator::SetDecoderOutputMediaType( | 1687 bool DXVAVideoDecodeAccelerator::SetDecoderOutputMediaType( |
1688 const GUID& subtype) { | 1688 const GUID& subtype) { |
1689 bool result = SetTransformOutputType(decoder_.get(), subtype, 0, 0); | 1689 bool result = SetTransformOutputType(decoder_.Get(), subtype, 0, 0); |
1690 | 1690 |
1691 if (share_nv12_textures_) { | 1691 if (share_nv12_textures_) { |
1692 base::win::ScopedComPtr<IMFAttributes> out_attributes; | 1692 base::win::ScopedComPtr<IMFAttributes> out_attributes; |
1693 HRESULT hr = | 1693 HRESULT hr = |
1694 decoder_->GetOutputStreamAttributes(0, out_attributes.Receive()); | 1694 decoder_->GetOutputStreamAttributes(0, out_attributes.Receive()); |
1695 RETURN_ON_HR_FAILURE(hr, "Failed to get stream attributes", false); | 1695 RETURN_ON_HR_FAILURE(hr, "Failed to get stream attributes", false); |
1696 out_attributes->SetUINT32(MF_SA_D3D11_BINDFLAGS, | 1696 out_attributes->SetUINT32(MF_SA_D3D11_BINDFLAGS, |
1697 D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_DECODER); | 1697 D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_DECODER); |
1698 // For some reason newer Intel drivers need D3D11_BIND_DECODER textures to | 1698 // For some reason newer Intel drivers need D3D11_BIND_DECODER textures to |
1699 // be created with a share handle or they'll crash in | 1699 // be created with a share handle or they'll crash in |
(...skipping 139 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1839 DVLOG(1) << "Waiting for picture slots from the client."; | 1839 DVLOG(1) << "Waiting for picture slots from the client."; |
1840 main_thread_task_runner_->PostTask( | 1840 main_thread_task_runner_->PostTask( |
1841 FROM_HERE, | 1841 FROM_HERE, |
1842 base::Bind(&DXVAVideoDecodeAccelerator::ProcessPendingSamples, | 1842 base::Bind(&DXVAVideoDecodeAccelerator::ProcessPendingSamples, |
1843 weak_ptr_)); | 1843 weak_ptr_)); |
1844 return true; | 1844 return true; |
1845 } | 1845 } |
1846 | 1846 |
1847 int width = 0; | 1847 int width = 0; |
1848 int height = 0; | 1848 int height = 0; |
1849 if (!GetVideoFrameDimensions(sample.get(), &width, &height)) { | 1849 if (!GetVideoFrameDimensions(sample.Get(), &width, &height)) { |
1850 RETURN_ON_FAILURE(false, "Failed to get D3D surface from output sample", | 1850 RETURN_ON_FAILURE(false, "Failed to get D3D surface from output sample", |
1851 false); | 1851 false); |
1852 } | 1852 } |
1853 | 1853 |
1854 // Go ahead and request picture buffers. | 1854 // Go ahead and request picture buffers. |
1855 main_thread_task_runner_->PostTask( | 1855 main_thread_task_runner_->PostTask( |
1856 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers, | 1856 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers, |
1857 weak_ptr_, width, height)); | 1857 weak_ptr_, width, height)); |
1858 | 1858 |
1859 pictures_requested_ = true; | 1859 pictures_requested_ = true; |
(...skipping 21 matching lines...) Expand all Loading... |
1881 { | 1881 { |
1882 base::AutoLock lock(decoder_lock_); | 1882 base::AutoLock lock(decoder_lock_); |
1883 PendingSampleInfo& sample_info = pending_output_samples_.front(); | 1883 PendingSampleInfo& sample_info = pending_output_samples_.front(); |
1884 if (sample_info.picture_buffer_id != -1) | 1884 if (sample_info.picture_buffer_id != -1) |
1885 continue; | 1885 continue; |
1886 pending_sample = &sample_info; | 1886 pending_sample = &sample_info; |
1887 } | 1887 } |
1888 | 1888 |
1889 int width = 0; | 1889 int width = 0; |
1890 int height = 0; | 1890 int height = 0; |
1891 if (!GetVideoFrameDimensions(pending_sample->output_sample.get(), &width, | 1891 if (!GetVideoFrameDimensions(pending_sample->output_sample.Get(), &width, |
1892 &height)) { | 1892 &height)) { |
1893 RETURN_AND_NOTIFY_ON_FAILURE( | 1893 RETURN_AND_NOTIFY_ON_FAILURE( |
1894 false, "Failed to get D3D surface from output sample", | 1894 false, "Failed to get D3D surface from output sample", |
1895 PLATFORM_FAILURE, ); | 1895 PLATFORM_FAILURE, ); |
1896 } | 1896 } |
1897 | 1897 |
1898 if (width != index->second->size().width() || | 1898 if (width != index->second->size().width() || |
1899 height != index->second->size().height()) { | 1899 height != index->second->size().height()) { |
1900 HandleResolutionChanged(width, height); | 1900 HandleResolutionChanged(width, height); |
1901 return; | 1901 return; |
(...skipping 17 matching lines...) Expand all Loading... |
1919 HRESULT hr = pending_sample->output_sample->GetBufferByIndex( | 1919 HRESULT hr = pending_sample->output_sample->GetBufferByIndex( |
1920 0, output_buffer.Receive()); | 1920 0, output_buffer.Receive()); |
1921 RETURN_AND_NOTIFY_ON_HR_FAILURE( | 1921 RETURN_AND_NOTIFY_ON_HR_FAILURE( |
1922 hr, "Failed to get buffer from output sample", PLATFORM_FAILURE, ); | 1922 hr, "Failed to get buffer from output sample", PLATFORM_FAILURE, ); |
1923 | 1923 |
1924 base::win::ScopedComPtr<IDirect3DSurface9> surface; | 1924 base::win::ScopedComPtr<IDirect3DSurface9> surface; |
1925 base::win::ScopedComPtr<ID3D11Texture2D> d3d11_texture; | 1925 base::win::ScopedComPtr<ID3D11Texture2D> d3d11_texture; |
1926 | 1926 |
1927 if (use_dx11_) { | 1927 if (use_dx11_) { |
1928 base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer; | 1928 base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer; |
1929 hr = dxgi_buffer.QueryFrom(output_buffer.get()); | 1929 hr = dxgi_buffer.QueryFrom(output_buffer.Get()); |
1930 RETURN_AND_NOTIFY_ON_HR_FAILURE( | 1930 RETURN_AND_NOTIFY_ON_HR_FAILURE( |
1931 hr, "Failed to get DXGIBuffer from output sample", | 1931 hr, "Failed to get DXGIBuffer from output sample", |
1932 PLATFORM_FAILURE, ); | 1932 PLATFORM_FAILURE, ); |
1933 hr = dxgi_buffer->GetResource( | 1933 hr = dxgi_buffer->GetResource( |
1934 __uuidof(ID3D11Texture2D), | 1934 __uuidof(ID3D11Texture2D), |
1935 reinterpret_cast<void**>(d3d11_texture.Receive())); | 1935 reinterpret_cast<void**>(d3d11_texture.Receive())); |
1936 } else { | 1936 } else { |
1937 hr = MFGetService(output_buffer.get(), MR_BUFFER_SERVICE, | 1937 hr = MFGetService(output_buffer.Get(), MR_BUFFER_SERVICE, |
1938 IID_PPV_ARGS(surface.Receive())); | 1938 IID_PPV_ARGS(surface.Receive())); |
1939 } | 1939 } |
1940 RETURN_AND_NOTIFY_ON_HR_FAILURE( | 1940 RETURN_AND_NOTIFY_ON_HR_FAILURE( |
1941 hr, "Failed to get surface from output sample", PLATFORM_FAILURE, ); | 1941 hr, "Failed to get surface from output sample", PLATFORM_FAILURE, ); |
1942 | 1942 |
1943 RETURN_AND_NOTIFY_ON_FAILURE( | 1943 RETURN_AND_NOTIFY_ON_FAILURE( |
1944 index->second->CopyOutputSampleDataToPictureBuffer( | 1944 index->second->CopyOutputSampleDataToPictureBuffer( |
1945 this, surface.get(), d3d11_texture.get(), | 1945 this, surface.Get(), d3d11_texture.Get(), |
1946 pending_sample->input_buffer_id), | 1946 pending_sample->input_buffer_id), |
1947 "Failed to copy output sample", PLATFORM_FAILURE, ); | 1947 "Failed to copy output sample", PLATFORM_FAILURE, ); |
1948 } | 1948 } |
1949 } | 1949 } |
1950 } | 1950 } |
1951 | 1951 |
1952 void DXVAVideoDecodeAccelerator::StopOnError( | 1952 void DXVAVideoDecodeAccelerator::StopOnError( |
1953 VideoDecodeAccelerator::Error error) { | 1953 VideoDecodeAccelerator::Error error) { |
1954 if (!main_thread_task_runner_->BelongsToCurrentThread()) { | 1954 if (!main_thread_task_runner_->BelongsToCurrentThread()) { |
1955 main_thread_task_runner_->PostTask( | 1955 main_thread_task_runner_->PostTask( |
(...skipping 251 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2207 | 2207 |
2208 if (OutputSamplesPresent() || !pending_input_buffers_.empty()) { | 2208 if (OutputSamplesPresent() || !pending_input_buffers_.empty()) { |
2209 pending_input_buffers_.push_back(sample); | 2209 pending_input_buffers_.push_back(sample); |
2210 return; | 2210 return; |
2211 } | 2211 } |
2212 | 2212 |
2213 // Check if the resolution, bit rate, etc changed in the stream. If yes we | 2213 // Check if the resolution, bit rate, etc changed in the stream. If yes we |
2214 // reinitialize the decoder to ensure that the stream decodes correctly. | 2214 // reinitialize the decoder to ensure that the stream decodes correctly. |
2215 bool config_changed = false; | 2215 bool config_changed = false; |
2216 | 2216 |
2217 HRESULT hr = CheckConfigChanged(sample.get(), &config_changed); | 2217 HRESULT hr = CheckConfigChanged(sample.Get(), &config_changed); |
2218 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to check video stream config", | 2218 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to check video stream config", |
2219 PLATFORM_FAILURE, ); | 2219 PLATFORM_FAILURE, ); |
2220 | 2220 |
2221 processing_config_changed_ = config_changed; | 2221 processing_config_changed_ = config_changed; |
2222 | 2222 |
2223 if (config_changed) { | 2223 if (config_changed) { |
2224 pending_input_buffers_.push_back(sample); | 2224 pending_input_buffers_.push_back(sample); |
2225 FlushInternal(); | 2225 FlushInternal(); |
2226 return; | 2226 return; |
2227 } | 2227 } |
2228 | 2228 |
2229 VideoColorSpace color_space = config_change_detector_->current_color_space(); | 2229 VideoColorSpace color_space = config_change_detector_->current_color_space(); |
2230 if (color_space == VideoColorSpace()) | 2230 if (color_space == VideoColorSpace()) |
2231 color_space = config_.color_space; | 2231 color_space = config_.color_space; |
2232 | 2232 |
2233 if (!inputs_before_decode_) { | 2233 if (!inputs_before_decode_) { |
2234 TRACE_EVENT_ASYNC_BEGIN0("gpu", "DXVAVideoDecodeAccelerator.Decoding", | 2234 TRACE_EVENT_ASYNC_BEGIN0("gpu", "DXVAVideoDecodeAccelerator.Decoding", |
2235 this); | 2235 this); |
2236 } | 2236 } |
2237 inputs_before_decode_++; | 2237 inputs_before_decode_++; |
2238 { | 2238 { |
2239 ScopedExceptionCatcher catcher(using_ms_vp9_mft_); | 2239 ScopedExceptionCatcher catcher(using_ms_vp9_mft_); |
2240 hr = decoder_->ProcessInput(0, sample.get(), 0); | 2240 hr = decoder_->ProcessInput(0, sample.Get(), 0); |
2241 } | 2241 } |
2242 // As per msdn if the decoder returns MF_E_NOTACCEPTING then it means that it | 2242 // As per msdn if the decoder returns MF_E_NOTACCEPTING then it means that it |
2243 // has enough data to produce one or more output samples. In this case the | 2243 // has enough data to produce one or more output samples. In this case the |
2244 // recommended options are to | 2244 // recommended options are to |
2245 // 1. Generate new output by calling IMFTransform::ProcessOutput until it | 2245 // 1. Generate new output by calling IMFTransform::ProcessOutput until it |
2246 // returns MF_E_TRANSFORM_NEED_MORE_INPUT. | 2246 // returns MF_E_TRANSFORM_NEED_MORE_INPUT. |
2247 // 2. Flush the input data | 2247 // 2. Flush the input data |
2248 // We implement the first option, i.e to retrieve the output sample and then | 2248 // We implement the first option, i.e to retrieve the output sample and then |
2249 // process the input again. Failure in either of these steps is treated as a | 2249 // process the input again. Failure in either of these steps is treated as a |
2250 // decoder failure. | 2250 // decoder failure. |
2251 if (hr == MF_E_NOTACCEPTING) { | 2251 if (hr == MF_E_NOTACCEPTING) { |
2252 DoDecode(color_space.ToGfxColorSpace()); | 2252 DoDecode(color_space.ToGfxColorSpace()); |
2253 // If the DoDecode call resulted in an output frame then we should not | 2253 // If the DoDecode call resulted in an output frame then we should not |
2254 // process any more input until that frame is copied to the target surface. | 2254 // process any more input until that frame is copied to the target surface. |
2255 if (!OutputSamplesPresent()) { | 2255 if (!OutputSamplesPresent()) { |
2256 State state = GetState(); | 2256 State state = GetState(); |
2257 RETURN_AND_NOTIFY_ON_FAILURE( | 2257 RETURN_AND_NOTIFY_ON_FAILURE( |
2258 (state == kStopped || state == kNormal || state == kFlushing), | 2258 (state == kStopped || state == kNormal || state == kFlushing), |
2259 "Failed to process output. Unexpected decoder state: " << state, | 2259 "Failed to process output. Unexpected decoder state: " << state, |
2260 PLATFORM_FAILURE, ); | 2260 PLATFORM_FAILURE, ); |
2261 hr = decoder_->ProcessInput(0, sample.get(), 0); | 2261 hr = decoder_->ProcessInput(0, sample.Get(), 0); |
2262 } | 2262 } |
2263 // If we continue to get the MF_E_NOTACCEPTING error we do the following:- | 2263 // If we continue to get the MF_E_NOTACCEPTING error we do the following:- |
2264 // 1. Add the input sample to the pending queue. | 2264 // 1. Add the input sample to the pending queue. |
2265 // 2. If we don't have any output samples we post the | 2265 // 2. If we don't have any output samples we post the |
2266 // DecodePendingInputBuffers task to process the pending input samples. | 2266 // DecodePendingInputBuffers task to process the pending input samples. |
2267 // If we have an output sample then the above task is posted when the | 2267 // If we have an output sample then the above task is posted when the |
2268 // output samples are sent to the client. | 2268 // output samples are sent to the client. |
2269 // This is because we only support 1 pending output sample at any | 2269 // This is because we only support 1 pending output sample at any |
2270 // given time due to the limitation with the Microsoft media foundation | 2270 // given time due to the limitation with the Microsoft media foundation |
2271 // decoder where it recycles the output Decoder surfaces. | 2271 // decoder where it recycles the output Decoder surfaces. |
(...skipping 366 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2638 base::win::ScopedComPtr<IMFSample> input_sample, | 2638 base::win::ScopedComPtr<IMFSample> input_sample, |
2639 int picture_buffer_id, | 2639 int picture_buffer_id, |
2640 int input_buffer_id) { | 2640 int input_buffer_id) { |
2641 TRACE_EVENT0("media", | 2641 TRACE_EVENT0("media", |
2642 "DXVAVideoDecodeAccelerator::CopyTextureOnDecoderThread"); | 2642 "DXVAVideoDecodeAccelerator::CopyTextureOnDecoderThread"); |
2643 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); | 2643 DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread()); |
2644 HRESULT hr = E_FAIL; | 2644 HRESULT hr = E_FAIL; |
2645 | 2645 |
2646 DCHECK(use_dx11_); | 2646 DCHECK(use_dx11_); |
2647 DCHECK(!!input_sample); | 2647 DCHECK(!!input_sample); |
2648 DCHECK(d3d11_processor_.get()); | 2648 DCHECK(d3d11_processor_.Get()); |
2649 | 2649 |
2650 if (dest_keyed_mutex) { | 2650 if (dest_keyed_mutex) { |
2651 HRESULT hr = | 2651 HRESULT hr = |
2652 dest_keyed_mutex->AcquireSync(keyed_mutex_value, kAcquireSyncWaitMs); | 2652 dest_keyed_mutex->AcquireSync(keyed_mutex_value, kAcquireSyncWaitMs); |
2653 RETURN_AND_NOTIFY_ON_FAILURE( | 2653 RETURN_AND_NOTIFY_ON_FAILURE( |
2654 hr == S_OK, "D3D11 failed to acquire keyed mutex for texture.", | 2654 hr == S_OK, "D3D11 failed to acquire keyed mutex for texture.", |
2655 PLATFORM_FAILURE, ); | 2655 PLATFORM_FAILURE, ); |
2656 } | 2656 } |
2657 | 2657 |
2658 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; | 2658 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; |
2659 hr = input_sample->GetBufferByIndex(0, output_buffer.Receive()); | 2659 hr = input_sample->GetBufferByIndex(0, output_buffer.Receive()); |
2660 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", | 2660 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", |
2661 PLATFORM_FAILURE, ); | 2661 PLATFORM_FAILURE, ); |
2662 | 2662 |
2663 base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer; | 2663 base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer; |
2664 hr = dxgi_buffer.QueryFrom(output_buffer.get()); | 2664 hr = dxgi_buffer.QueryFrom(output_buffer.Get()); |
2665 RETURN_AND_NOTIFY_ON_HR_FAILURE( | 2665 RETURN_AND_NOTIFY_ON_HR_FAILURE( |
2666 hr, "Failed to get DXGIBuffer from output sample", PLATFORM_FAILURE, ); | 2666 hr, "Failed to get DXGIBuffer from output sample", PLATFORM_FAILURE, ); |
2667 UINT index = 0; | 2667 UINT index = 0; |
2668 hr = dxgi_buffer->GetSubresourceIndex(&index); | 2668 hr = dxgi_buffer->GetSubresourceIndex(&index); |
2669 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get resource index", | 2669 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get resource index", |
2670 PLATFORM_FAILURE, ); | 2670 PLATFORM_FAILURE, ); |
2671 | 2671 |
2672 base::win::ScopedComPtr<ID3D11Texture2D> dx11_decoding_texture; | 2672 base::win::ScopedComPtr<ID3D11Texture2D> dx11_decoding_texture; |
2673 hr = dxgi_buffer->GetResource(IID_PPV_ARGS(dx11_decoding_texture.Receive())); | 2673 hr = dxgi_buffer->GetResource(IID_PPV_ARGS(dx11_decoding_texture.Receive())); |
2674 RETURN_AND_NOTIFY_ON_HR_FAILURE( | 2674 RETURN_AND_NOTIFY_ON_HR_FAILURE( |
2675 hr, "Failed to get resource from output sample", PLATFORM_FAILURE, ); | 2675 hr, "Failed to get resource from output sample", PLATFORM_FAILURE, ); |
2676 | 2676 |
2677 D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC output_view_desc = { | 2677 D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC output_view_desc = { |
2678 D3D11_VPOV_DIMENSION_TEXTURE2D}; | 2678 D3D11_VPOV_DIMENSION_TEXTURE2D}; |
2679 output_view_desc.Texture2D.MipSlice = 0; | 2679 output_view_desc.Texture2D.MipSlice = 0; |
2680 base::win::ScopedComPtr<ID3D11VideoProcessorOutputView> output_view; | 2680 base::win::ScopedComPtr<ID3D11VideoProcessorOutputView> output_view; |
2681 hr = video_device_->CreateVideoProcessorOutputView( | 2681 hr = video_device_->CreateVideoProcessorOutputView( |
2682 dest_texture, enumerator_.get(), &output_view_desc, | 2682 dest_texture, enumerator_.Get(), &output_view_desc, |
2683 output_view.Receive()); | 2683 output_view.Receive()); |
2684 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get output view", | 2684 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get output view", |
2685 PLATFORM_FAILURE, ); | 2685 PLATFORM_FAILURE, ); |
2686 | 2686 |
2687 D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC input_view_desc = {0}; | 2687 D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC input_view_desc = {0}; |
2688 input_view_desc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D; | 2688 input_view_desc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D; |
2689 input_view_desc.Texture2D.ArraySlice = index; | 2689 input_view_desc.Texture2D.ArraySlice = index; |
2690 input_view_desc.Texture2D.MipSlice = 0; | 2690 input_view_desc.Texture2D.MipSlice = 0; |
2691 base::win::ScopedComPtr<ID3D11VideoProcessorInputView> input_view; | 2691 base::win::ScopedComPtr<ID3D11VideoProcessorInputView> input_view; |
2692 hr = video_device_->CreateVideoProcessorInputView( | 2692 hr = video_device_->CreateVideoProcessorInputView( |
2693 dx11_decoding_texture.get(), enumerator_.get(), &input_view_desc, | 2693 dx11_decoding_texture.Get(), enumerator_.Get(), &input_view_desc, |
2694 input_view.Receive()); | 2694 input_view.Receive()); |
2695 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get input view", | 2695 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to get input view", |
2696 PLATFORM_FAILURE, ); | 2696 PLATFORM_FAILURE, ); |
2697 | 2697 |
2698 D3D11_VIDEO_PROCESSOR_STREAM streams = {0}; | 2698 D3D11_VIDEO_PROCESSOR_STREAM streams = {0}; |
2699 streams.Enable = TRUE; | 2699 streams.Enable = TRUE; |
2700 streams.pInputSurface = input_view.get(); | 2700 streams.pInputSurface = input_view.Get(); |
2701 | 2701 |
2702 hr = video_context_->VideoProcessorBlt(d3d11_processor_.get(), | 2702 hr = video_context_->VideoProcessorBlt(d3d11_processor_.Get(), |
2703 output_view.get(), 0, 1, &streams); | 2703 output_view.Get(), 0, 1, &streams); |
2704 | 2704 |
2705 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "VideoProcessBlit failed", | 2705 RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "VideoProcessBlit failed", |
2706 PLATFORM_FAILURE, ); | 2706 PLATFORM_FAILURE, ); |
2707 | 2707 |
2708 if (dest_keyed_mutex) { | 2708 if (dest_keyed_mutex) { |
2709 HRESULT hr = dest_keyed_mutex->ReleaseSync(keyed_mutex_value + 1); | 2709 HRESULT hr = dest_keyed_mutex->ReleaseSync(keyed_mutex_value + 1); |
2710 RETURN_AND_NOTIFY_ON_FAILURE(hr == S_OK, "Failed to release keyed mutex.", | 2710 RETURN_AND_NOTIFY_ON_FAILURE(hr == S_OK, "Failed to release keyed mutex.", |
2711 PLATFORM_FAILURE, ); | 2711 PLATFORM_FAILURE, ); |
2712 | 2712 |
2713 main_thread_task_runner_->PostTask( | 2713 main_thread_task_runner_->PostTask( |
2714 FROM_HERE, | 2714 FROM_HERE, |
2715 base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, weak_ptr_, | 2715 base::Bind(&DXVAVideoDecodeAccelerator::CopySurfaceComplete, weak_ptr_, |
2716 nullptr, nullptr, picture_buffer_id, input_buffer_id)); | 2716 nullptr, nullptr, picture_buffer_id, input_buffer_id)); |
2717 } else { | 2717 } else { |
2718 d3d11_device_context_->Flush(); | 2718 d3d11_device_context_->Flush(); |
2719 d3d11_device_context_->End(d3d11_query_.get()); | 2719 d3d11_device_context_->End(d3d11_query_.Get()); |
2720 | 2720 |
2721 decoder_thread_task_runner_->PostDelayedTask( | 2721 decoder_thread_task_runner_->PostDelayedTask( |
2722 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder, | 2722 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder, |
2723 base::Unretained(this), 0, | 2723 base::Unretained(this), 0, |
2724 reinterpret_cast<IDirect3DSurface9*>(NULL), | 2724 reinterpret_cast<IDirect3DSurface9*>(NULL), |
2725 reinterpret_cast<IDirect3DSurface9*>(NULL), | 2725 reinterpret_cast<IDirect3DSurface9*>(NULL), |
2726 picture_buffer_id, input_buffer_id), | 2726 picture_buffer_id, input_buffer_id), |
2727 base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs)); | 2727 base::TimeDelta::FromMilliseconds(kFlushDecoderSurfaceTimeoutMs)); |
2728 } | 2728 } |
2729 } | 2729 } |
(...skipping 16 matching lines...) Expand all Loading... |
2746 // We need to do this in a loop and call flush multiple times. | 2746 // We need to do this in a loop and call flush multiple times. |
2747 // We have seen the GetData call for flushing the command buffer fail to | 2747 // We have seen the GetData call for flushing the command buffer fail to |
2748 // return success occassionally on multi core machines, leading to an | 2748 // return success occassionally on multi core machines, leading to an |
2749 // infinite loop. | 2749 // infinite loop. |
2750 // Workaround is to have an upper limit of 4 on the number of iterations to | 2750 // Workaround is to have an upper limit of 4 on the number of iterations to |
2751 // wait for the Flush to finish. | 2751 // wait for the Flush to finish. |
2752 | 2752 |
2753 HRESULT hr = E_FAIL; | 2753 HRESULT hr = E_FAIL; |
2754 if (use_dx11_) { | 2754 if (use_dx11_) { |
2755 BOOL query_data = 0; | 2755 BOOL query_data = 0; |
2756 hr = d3d11_device_context_->GetData(d3d11_query_.get(), &query_data, | 2756 hr = d3d11_device_context_->GetData(d3d11_query_.Get(), &query_data, |
2757 sizeof(BOOL), 0); | 2757 sizeof(BOOL), 0); |
2758 if (FAILED(hr)) | 2758 if (FAILED(hr)) |
2759 DCHECK(false); | 2759 DCHECK(false); |
2760 } else { | 2760 } else { |
2761 hr = query_->GetData(NULL, 0, D3DGETDATA_FLUSH); | 2761 hr = query_->GetData(NULL, 0, D3DGETDATA_FLUSH); |
2762 } | 2762 } |
2763 | 2763 |
2764 if ((hr == S_FALSE) && (++iterations < kMaxIterationsForD3DFlush)) { | 2764 if ((hr == S_FALSE) && (++iterations < kMaxIterationsForD3DFlush)) { |
2765 decoder_thread_task_runner_->PostDelayedTask( | 2765 decoder_thread_task_runner_->PostDelayedTask( |
2766 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder, | 2766 FROM_HERE, base::Bind(&DXVAVideoDecodeAccelerator::FlushDecoder, |
(...skipping 29 matching lines...) Expand all Loading... |
2796 desc.OutputFrameRate.Denominator = 1; | 2796 desc.OutputFrameRate.Denominator = 1; |
2797 desc.OutputWidth = width; | 2797 desc.OutputWidth = width; |
2798 desc.OutputHeight = height; | 2798 desc.OutputHeight = height; |
2799 desc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL; | 2799 desc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL; |
2800 | 2800 |
2801 HRESULT hr = video_device_->CreateVideoProcessorEnumerator( | 2801 HRESULT hr = video_device_->CreateVideoProcessorEnumerator( |
2802 &desc, enumerator_.Receive()); | 2802 &desc, enumerator_.Receive()); |
2803 RETURN_ON_HR_FAILURE(hr, "Failed to enumerate video processors", false); | 2803 RETURN_ON_HR_FAILURE(hr, "Failed to enumerate video processors", false); |
2804 | 2804 |
2805 // TODO(Hubbe): Find correct index | 2805 // TODO(Hubbe): Find correct index |
2806 hr = video_device_->CreateVideoProcessor(enumerator_.get(), 0, | 2806 hr = video_device_->CreateVideoProcessor(enumerator_.Get(), 0, |
2807 d3d11_processor_.Receive()); | 2807 d3d11_processor_.Receive()); |
2808 RETURN_ON_HR_FAILURE(hr, "Failed to create video processor.", false); | 2808 RETURN_ON_HR_FAILURE(hr, "Failed to create video processor.", false); |
2809 processor_width_ = width; | 2809 processor_width_ = width; |
2810 processor_height_ = height; | 2810 processor_height_ = height; |
2811 | 2811 |
2812 video_context_->VideoProcessorSetStreamAutoProcessingMode( | 2812 video_context_->VideoProcessorSetStreamAutoProcessingMode( |
2813 d3d11_processor_.get(), 0, false); | 2813 d3d11_processor_.Get(), 0, false); |
2814 } | 2814 } |
2815 | 2815 |
2816 if (copy_nv12_textures_) { | 2816 if (copy_nv12_textures_) { |
2817 // If we're copying NV12 textures, make sure we set the same | 2817 // If we're copying NV12 textures, make sure we set the same |
2818 // color space on input and output. | 2818 // color space on input and output. |
2819 D3D11_VIDEO_PROCESSOR_COLOR_SPACE d3d11_color_space = {0}; | 2819 D3D11_VIDEO_PROCESSOR_COLOR_SPACE d3d11_color_space = {0}; |
2820 d3d11_color_space.RGB_Range = 1; | 2820 d3d11_color_space.RGB_Range = 1; |
2821 d3d11_color_space.Nominal_Range = D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_0_255; | 2821 d3d11_color_space.Nominal_Range = D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_0_255; |
2822 | 2822 |
2823 video_context_->VideoProcessorSetOutputColorSpace(d3d11_processor_.get(), | 2823 video_context_->VideoProcessorSetOutputColorSpace(d3d11_processor_.Get(), |
2824 &d3d11_color_space); | 2824 &d3d11_color_space); |
2825 | 2825 |
2826 video_context_->VideoProcessorSetStreamColorSpace(d3d11_processor_.get(), 0, | 2826 video_context_->VideoProcessorSetStreamColorSpace(d3d11_processor_.Get(), 0, |
2827 &d3d11_color_space); | 2827 &d3d11_color_space); |
2828 dx11_converter_output_color_space_ = color_space; | 2828 dx11_converter_output_color_space_ = color_space; |
2829 } else { | 2829 } else { |
2830 dx11_converter_output_color_space_ = gfx::ColorSpace::CreateSRGB(); | 2830 dx11_converter_output_color_space_ = gfx::ColorSpace::CreateSRGB(); |
2831 if (use_color_info_ || use_fp16_) { | 2831 if (use_color_info_ || use_fp16_) { |
2832 base::win::ScopedComPtr<ID3D11VideoContext1> video_context1; | 2832 base::win::ScopedComPtr<ID3D11VideoContext1> video_context1; |
2833 HRESULT hr = video_context_.QueryInterface(video_context1.Receive()); | 2833 HRESULT hr = video_context_.QueryInterface(video_context1.Receive()); |
2834 if (SUCCEEDED(hr)) { | 2834 if (SUCCEEDED(hr)) { |
2835 if (use_fp16_ && | 2835 if (use_fp16_ && |
2836 base::CommandLine::ForCurrentProcess()->HasSwitch( | 2836 base::CommandLine::ForCurrentProcess()->HasSwitch( |
2837 switches::kEnableHDR) && | 2837 switches::kEnableHDR) && |
2838 color_space.IsHDR()) { | 2838 color_space.IsHDR()) { |
2839 // Note, we only use the SCRGBLinear output color space when | 2839 // Note, we only use the SCRGBLinear output color space when |
2840 // the input is PQ, because nvidia drivers will not convert | 2840 // the input is PQ, because nvidia drivers will not convert |
2841 // G22 to G10 for some reason. | 2841 // G22 to G10 for some reason. |
2842 dx11_converter_output_color_space_ = | 2842 dx11_converter_output_color_space_ = |
2843 gfx::ColorSpace::CreateSCRGBLinear(); | 2843 gfx::ColorSpace::CreateSCRGBLinear(); |
2844 } | 2844 } |
2845 // Since the video processor doesn't support HLG, let's just do the | 2845 // Since the video processor doesn't support HLG, let's just do the |
2846 // YUV->RGB conversion and let the output color space be HLG. | 2846 // YUV->RGB conversion and let the output color space be HLG. |
2847 // This won't work well unless color management is on, but if color | 2847 // This won't work well unless color management is on, but if color |
2848 // management is off we don't support HLG anyways. | 2848 // management is off we don't support HLG anyways. |
2849 if (color_space == | 2849 if (color_space == |
2850 gfx::ColorSpace(gfx::ColorSpace::PrimaryID::BT2020, | 2850 gfx::ColorSpace(gfx::ColorSpace::PrimaryID::BT2020, |
2851 gfx::ColorSpace::TransferID::ARIB_STD_B67, | 2851 gfx::ColorSpace::TransferID::ARIB_STD_B67, |
2852 gfx::ColorSpace::MatrixID::BT709, | 2852 gfx::ColorSpace::MatrixID::BT709, |
2853 gfx::ColorSpace::RangeID::LIMITED)) { | 2853 gfx::ColorSpace::RangeID::LIMITED)) { |
2854 video_context1->VideoProcessorSetStreamColorSpace1( | 2854 video_context1->VideoProcessorSetStreamColorSpace1( |
2855 d3d11_processor_.get(), 0, | 2855 d3d11_processor_.Get(), 0, |
2856 DXGI_COLOR_SPACE_YCBCR_STUDIO_G2084_LEFT_P2020); | 2856 DXGI_COLOR_SPACE_YCBCR_STUDIO_G2084_LEFT_P2020); |
2857 video_context1->VideoProcessorSetOutputColorSpace1( | 2857 video_context1->VideoProcessorSetOutputColorSpace1( |
2858 d3d11_processor_.get(), | 2858 d3d11_processor_.Get(), |
2859 DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020); | 2859 DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020); |
2860 dx11_converter_output_color_space_ = color_space.GetAsFullRangeRGB(); | 2860 dx11_converter_output_color_space_ = color_space.GetAsFullRangeRGB(); |
2861 } else { | 2861 } else { |
2862 DVLOG(2) << "input color space: " << color_space | 2862 DVLOG(2) << "input color space: " << color_space |
2863 << " DXGIColorSpace: " | 2863 << " DXGIColorSpace: " |
2864 << gfx::ColorSpaceWin::GetDXGIColorSpace(color_space); | 2864 << gfx::ColorSpaceWin::GetDXGIColorSpace(color_space); |
2865 DVLOG(2) << "output color space:" | 2865 DVLOG(2) << "output color space:" |
2866 << dx11_converter_output_color_space_ << " DXGIColorSpace: " | 2866 << dx11_converter_output_color_space_ << " DXGIColorSpace: " |
2867 << gfx::ColorSpaceWin::GetDXGIColorSpace( | 2867 << gfx::ColorSpaceWin::GetDXGIColorSpace( |
2868 dx11_converter_output_color_space_); | 2868 dx11_converter_output_color_space_); |
2869 video_context1->VideoProcessorSetStreamColorSpace1( | 2869 video_context1->VideoProcessorSetStreamColorSpace1( |
2870 d3d11_processor_.get(), 0, | 2870 d3d11_processor_.Get(), 0, |
2871 gfx::ColorSpaceWin::GetDXGIColorSpace(color_space)); | 2871 gfx::ColorSpaceWin::GetDXGIColorSpace(color_space)); |
2872 video_context1->VideoProcessorSetOutputColorSpace1( | 2872 video_context1->VideoProcessorSetOutputColorSpace1( |
2873 d3d11_processor_.get(), gfx::ColorSpaceWin::GetDXGIColorSpace( | 2873 d3d11_processor_.Get(), gfx::ColorSpaceWin::GetDXGIColorSpace( |
2874 dx11_converter_output_color_space_)); | 2874 dx11_converter_output_color_space_)); |
2875 } | 2875 } |
2876 } else { | 2876 } else { |
2877 D3D11_VIDEO_PROCESSOR_COLOR_SPACE d3d11_color_space = | 2877 D3D11_VIDEO_PROCESSOR_COLOR_SPACE d3d11_color_space = |
2878 gfx::ColorSpaceWin::GetD3D11ColorSpace(color_space); | 2878 gfx::ColorSpaceWin::GetD3D11ColorSpace(color_space); |
2879 video_context_->VideoProcessorSetStreamColorSpace( | 2879 video_context_->VideoProcessorSetStreamColorSpace( |
2880 d3d11_processor_.get(), 0, &d3d11_color_space); | 2880 d3d11_processor_.Get(), 0, &d3d11_color_space); |
2881 d3d11_color_space = gfx::ColorSpaceWin::GetD3D11ColorSpace( | 2881 d3d11_color_space = gfx::ColorSpaceWin::GetD3D11ColorSpace( |
2882 dx11_converter_output_color_space_); | 2882 dx11_converter_output_color_space_); |
2883 video_context_->VideoProcessorSetOutputColorSpace( | 2883 video_context_->VideoProcessorSetOutputColorSpace( |
2884 d3d11_processor_.get(), &d3d11_color_space); | 2884 d3d11_processor_.Get(), &d3d11_color_space); |
2885 } | 2885 } |
2886 } | 2886 } |
2887 } | 2887 } |
2888 return true; | 2888 return true; |
2889 } | 2889 } |
2890 | 2890 |
2891 bool DXVAVideoDecodeAccelerator::GetVideoFrameDimensions(IMFSample* sample, | 2891 bool DXVAVideoDecodeAccelerator::GetVideoFrameDimensions(IMFSample* sample, |
2892 int* width, | 2892 int* width, |
2893 int* height) { | 2893 int* height) { |
2894 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; | 2894 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; |
2895 HRESULT hr = sample->GetBufferByIndex(0, output_buffer.Receive()); | 2895 HRESULT hr = sample->GetBufferByIndex(0, output_buffer.Receive()); |
2896 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", false); | 2896 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", false); |
2897 | 2897 |
2898 if (use_dx11_) { | 2898 if (use_dx11_) { |
2899 base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer; | 2899 base::win::ScopedComPtr<IMFDXGIBuffer> dxgi_buffer; |
2900 base::win::ScopedComPtr<ID3D11Texture2D> d3d11_texture; | 2900 base::win::ScopedComPtr<ID3D11Texture2D> d3d11_texture; |
2901 hr = dxgi_buffer.QueryFrom(output_buffer.get()); | 2901 hr = dxgi_buffer.QueryFrom(output_buffer.Get()); |
2902 RETURN_ON_HR_FAILURE(hr, "Failed to get DXGIBuffer from output sample", | 2902 RETURN_ON_HR_FAILURE(hr, "Failed to get DXGIBuffer from output sample", |
2903 false); | 2903 false); |
2904 hr = dxgi_buffer->GetResource( | 2904 hr = dxgi_buffer->GetResource( |
2905 __uuidof(ID3D11Texture2D), | 2905 __uuidof(ID3D11Texture2D), |
2906 reinterpret_cast<void**>(d3d11_texture.Receive())); | 2906 reinterpret_cast<void**>(d3d11_texture.Receive())); |
2907 RETURN_ON_HR_FAILURE(hr, "Failed to get D3D11Texture from output buffer", | 2907 RETURN_ON_HR_FAILURE(hr, "Failed to get D3D11Texture from output buffer", |
2908 false); | 2908 false); |
2909 D3D11_TEXTURE2D_DESC d3d11_texture_desc; | 2909 D3D11_TEXTURE2D_DESC d3d11_texture_desc; |
2910 d3d11_texture->GetDesc(&d3d11_texture_desc); | 2910 d3d11_texture->GetDesc(&d3d11_texture_desc); |
2911 *width = d3d11_texture_desc.Width; | 2911 *width = d3d11_texture_desc.Width; |
2912 *height = d3d11_texture_desc.Height; | 2912 *height = d3d11_texture_desc.Height; |
2913 output_array_size_ = d3d11_texture_desc.ArraySize; | 2913 output_array_size_ = d3d11_texture_desc.ArraySize; |
2914 } else { | 2914 } else { |
2915 base::win::ScopedComPtr<IDirect3DSurface9> surface; | 2915 base::win::ScopedComPtr<IDirect3DSurface9> surface; |
2916 hr = MFGetService(output_buffer.get(), MR_BUFFER_SERVICE, | 2916 hr = MFGetService(output_buffer.Get(), MR_BUFFER_SERVICE, |
2917 IID_PPV_ARGS(surface.Receive())); | 2917 IID_PPV_ARGS(surface.Receive())); |
2918 RETURN_ON_HR_FAILURE(hr, "Failed to get D3D surface from output sample", | 2918 RETURN_ON_HR_FAILURE(hr, "Failed to get D3D surface from output sample", |
2919 false); | 2919 false); |
2920 D3DSURFACE_DESC surface_desc; | 2920 D3DSURFACE_DESC surface_desc; |
2921 hr = surface->GetDesc(&surface_desc); | 2921 hr = surface->GetDesc(&surface_desc); |
2922 RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false); | 2922 RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false); |
2923 *width = surface_desc.Width; | 2923 *width = surface_desc.Width; |
2924 *height = surface_desc.Height; | 2924 *height = surface_desc.Height; |
2925 } | 2925 } |
2926 return true; | 2926 return true; |
2927 } | 2927 } |
2928 | 2928 |
2929 bool DXVAVideoDecodeAccelerator::SetTransformOutputType(IMFTransform* transform, | 2929 bool DXVAVideoDecodeAccelerator::SetTransformOutputType(IMFTransform* transform, |
2930 const GUID& output_type, | 2930 const GUID& output_type, |
2931 int width, | 2931 int width, |
2932 int height) { | 2932 int height) { |
2933 HRESULT hr = E_FAIL; | 2933 HRESULT hr = E_FAIL; |
2934 base::win::ScopedComPtr<IMFMediaType> media_type; | 2934 base::win::ScopedComPtr<IMFMediaType> media_type; |
2935 | 2935 |
2936 for (uint32_t i = 0; | 2936 for (uint32_t i = 0; |
2937 SUCCEEDED(transform->GetOutputAvailableType(0, i, media_type.Receive())); | 2937 SUCCEEDED(transform->GetOutputAvailableType(0, i, media_type.Receive())); |
2938 ++i) { | 2938 ++i) { |
2939 GUID out_subtype = {0}; | 2939 GUID out_subtype = {0}; |
2940 hr = media_type->GetGUID(MF_MT_SUBTYPE, &out_subtype); | 2940 hr = media_type->GetGUID(MF_MT_SUBTYPE, &out_subtype); |
2941 RETURN_ON_HR_FAILURE(hr, "Failed to get output major type", false); | 2941 RETURN_ON_HR_FAILURE(hr, "Failed to get output major type", false); |
2942 | 2942 |
2943 if (out_subtype == output_type) { | 2943 if (out_subtype == output_type) { |
2944 if (width && height) { | 2944 if (width && height) { |
2945 hr = MFSetAttributeSize(media_type.get(), MF_MT_FRAME_SIZE, width, | 2945 hr = MFSetAttributeSize(media_type.Get(), MF_MT_FRAME_SIZE, width, |
2946 height); | 2946 height); |
2947 RETURN_ON_HR_FAILURE(hr, "Failed to set media type attributes", false); | 2947 RETURN_ON_HR_FAILURE(hr, "Failed to set media type attributes", false); |
2948 } | 2948 } |
2949 hr = transform->SetOutputType(0, media_type.get(), 0); // No flags | 2949 hr = transform->SetOutputType(0, media_type.Get(), 0); // No flags |
2950 RETURN_ON_HR_FAILURE(hr, "Failed to set output type", false); | 2950 RETURN_ON_HR_FAILURE(hr, "Failed to set output type", false); |
2951 return true; | 2951 return true; |
2952 } | 2952 } |
2953 media_type.Reset(); | 2953 media_type.Reset(); |
2954 } | 2954 } |
2955 return false; | 2955 return false; |
2956 } | 2956 } |
2957 | 2957 |
2958 HRESULT DXVAVideoDecodeAccelerator::CheckConfigChanged(IMFSample* sample, | 2958 HRESULT DXVAVideoDecodeAccelerator::CheckConfigChanged(IMFSample* sample, |
2959 bool* config_changed) { | 2959 bool* config_changed) { |
2960 if (codec_ != kCodecH264) | 2960 if (codec_ != kCodecH264) |
2961 return S_FALSE; | 2961 return S_FALSE; |
2962 | 2962 |
2963 base::win::ScopedComPtr<IMFMediaBuffer> buffer; | 2963 base::win::ScopedComPtr<IMFMediaBuffer> buffer; |
2964 HRESULT hr = sample->GetBufferByIndex(0, buffer.Receive()); | 2964 HRESULT hr = sample->GetBufferByIndex(0, buffer.Receive()); |
2965 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from input sample", hr); | 2965 RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from input sample", hr); |
2966 | 2966 |
2967 mf::MediaBufferScopedPointer scoped_media_buffer(buffer.get()); | 2967 mf::MediaBufferScopedPointer scoped_media_buffer(buffer.Get()); |
2968 | 2968 |
2969 if (!config_change_detector_->DetectConfig( | 2969 if (!config_change_detector_->DetectConfig( |
2970 scoped_media_buffer.get(), scoped_media_buffer.current_length())) { | 2970 scoped_media_buffer.get(), scoped_media_buffer.current_length())) { |
2971 RETURN_ON_HR_FAILURE(E_FAIL, "Failed to detect H.264 stream config", | 2971 RETURN_ON_HR_FAILURE(E_FAIL, "Failed to detect H.264 stream config", |
2972 E_FAIL); | 2972 E_FAIL); |
2973 } | 2973 } |
2974 *config_changed = config_change_detector_->config_changed(); | 2974 *config_changed = config_change_detector_->config_changed(); |
2975 return S_OK; | 2975 return S_OK; |
2976 } | 2976 } |
2977 | 2977 |
// Handles a detected stream configuration change by resetting the decoder
// and then re-initializing it, after which any queued input is replayed on
// the decoder thread.
void DXVAVideoDecodeAccelerator::ConfigChanged(const Config& config) {
  DCHECK(main_thread_task_runner_->BelongsToCurrentThread());

  // NOTE(review): the |config| parameter is unused here; Initialize() below
  // is called with the member |config_| instead -- presumably intentional,
  // but confirm against the caller.
  SetState(kConfigChange);
  Invalidate();
  Initialize(config_, client_);
  // Resume processing of input buffers that accumulated while the decoder
  // was being re-initialized. base::Unretained is used for |this|; lifetime
  // of the task relative to |this| is managed outside this function.
  decoder_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers,
                 base::Unretained(this)));
}
2989 | 2989 |
2990 uint32_t DXVAVideoDecodeAccelerator::GetTextureTarget() const { | 2990 uint32_t DXVAVideoDecodeAccelerator::GetTextureTarget() const { |
2991 bool provide_nv12_textures = share_nv12_textures_ || copy_nv12_textures_; | 2991 bool provide_nv12_textures = share_nv12_textures_ || copy_nv12_textures_; |
2992 return provide_nv12_textures ? GL_TEXTURE_EXTERNAL_OES : GL_TEXTURE_2D; | 2992 return provide_nv12_textures ? GL_TEXTURE_EXTERNAL_OES : GL_TEXTURE_2D; |
2993 } | 2993 } |
2994 | 2994 |
2995 } // namespace media | 2995 } // namespace media |
OLD | NEW |