| Index: media/gpu/media_foundation_video_encode_accelerator_win.cc
|
| diff --git a/media/gpu/media_foundation_video_encode_accelerator_win.cc b/media/gpu/media_foundation_video_encode_accelerator_win.cc
|
| index 2de7b014993b6b47972346bac32f3bfd53a05729..80e094378e9a626a598dc98dc1ff17b44b785369 100644
|
| --- a/media/gpu/media_foundation_video_encode_accelerator_win.cc
|
| +++ b/media/gpu/media_foundation_video_encode_accelerator_win.cc
|
| @@ -15,15 +15,22 @@
|
| #include <utility>
|
| #include <vector>
|
|
|
| +#include "base/feature_list.h"
|
| #include "base/threading/thread_task_runner_handle.h"
|
| #include "base/trace_event/trace_event.h"
|
| #include "base/win/scoped_co_mem.h"
|
| #include "base/win/scoped_variant.h"
|
| #include "base/win/windows_version.h"
|
| +#include "media/base/media_switches.h"
|
| #include "media/base/win/mf_helpers.h"
|
| #include "media/base/win/mf_initializer.h"
|
| #include "third_party/libyuv/include/libyuv.h"
|
|
|
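| +// Temporary debugging aid: route all DLOG/DVLOG output to LOG(ERROR).
|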
| +#undef DLOG
|
| +#define DLOG(err) LOG(ERROR)
|
| +#undef DVLOG
|
| +#define DVLOG(err) LOG(ERROR)
|
| +
|
| using base::win::ScopedComPtr;
|
| using media::mf::MediaBufferScopedPointer;
|
|
|
| @@ -43,12 +50,31 @@ const size_t kOneMicrosecondInMFSampleTimeUnits = 10;
|
| const size_t kOutputSampleBufferSizeRatio = 4;
|
|
|
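| +// nvEncMFTH264.dll hosts NVIDIA's hardware H.264 encoder MFT.
|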
| constexpr const wchar_t* const kMediaFoundationVideoEncoderDLLs[] = {
|
| - L"mf.dll", L"mfplat.dll",
|
| + L"mf.dll", L"mfplat.dll", L"nvEncMFTH264.dll",
|
| };
|
|
|
| // Resolutions that some platforms support, should be listed in ascending order.
|
| constexpr const gfx::Size kOptionalMaxResolutions[] = {gfx::Size(3840, 2176)};
|
|
|
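| +// Codec profiles that may be offered, gated on the corresponding features.
|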
| +constexpr const VideoCodecProfile kSupportedH264Profiles[] = {
|
| + H264PROFILE_BASELINE};
|
| +constexpr const VideoCodecProfile kSupportedVPXProfiles[] = {VP8PROFILE_MIN,
|
| + VP9PROFILE_MIN};
|
| +
|
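| +// Maps a VideoCodecProfile to the matching Media Foundation subtype GUID.
|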
| +GUID VideoCodecProfileToGUID(VideoCodecProfile profile) {
|
| + switch (profile) {
|
| + case VP8PROFILE_MIN:
|
| + return MFVideoFormat_VP80;
|
| + case VP9PROFILE_MIN:
|
| + return MFVideoFormat_VP90;
|
| + case H264PROFILE_BASELINE:
|
| + return MFVideoFormat_H264;
|
| + default:
|
| + NOTREACHED();
|
| + return MFVideoFormat_VP80;
|
| + }
|
| +}
|
| +
|
| } // namespace
|
|
|
| class MediaFoundationVideoEncodeAccelerator::EncodeOutput {
|
| @@ -83,7 +109,9 @@ struct MediaFoundationVideoEncodeAccelerator::BitstreamBufferRef {
|
| };
|
|
|
| MediaFoundationVideoEncodeAccelerator::MediaFoundationVideoEncodeAccelerator()
|
| - : main_client_task_runner_(base::ThreadTaskRunnerHandle::Get()),
|
| + : h264_enabled_(base::FeatureList::IsEnabled(kMediaFoundationH264Encoding)),
|
| + vpx_enabled_(base::FeatureList::IsEnabled(kMediaFoundationVPXEncoding)),
|
| + main_client_task_runner_(base::ThreadTaskRunnerHandle::Get()),
|
| encoder_thread_("MFEncoderThread"),
|
| encoder_task_weak_factory_(this) {}
|
|
|
| @@ -104,34 +132,50 @@ MediaFoundationVideoEncodeAccelerator::GetSupportedProfiles() {
|
| DCHECK(main_client_task_runner_->BelongsToCurrentThread());
|
|
|
| SupportedProfiles profiles;
|
| - target_bitrate_ = kDefaultTargetBitrate;
|
| - frame_rate_ = kMaxFrameRateNumerator / kMaxFrameRateDenominator;
|
| - input_visible_size_ = gfx::Size(kMaxResolutionWidth, kMaxResolutionHeight);
|
| - if (!CreateHardwareEncoderMFT() || !SetEncoderModes() ||
|
| - !InitializeInputOutputSamples()) {
|
| - ReleaseEncoderResources();
|
| - DVLOG(1)
|
| - << "Hardware encode acceleration is not available on this platform.";
|
| - return profiles;
|
| + std::vector<VideoCodecProfile> candidate_codecs;
|
| + if (h264_enabled_) {
|
| + for (const auto codec : kSupportedH264Profiles)
|
| + candidate_codecs.push_back(codec);
|
| }
|
| -
|
| - gfx::Size highest_supported_resolution = input_visible_size_;
|
| - for (const auto& resolution : kOptionalMaxResolutions) {
|
| - DCHECK_GT(resolution.GetArea(), highest_supported_resolution.GetArea());
|
| - if (!IsResolutionSupported(resolution))
|
| - break;
|
| - highest_supported_resolution = resolution;
|
| + if (vpx_enabled_) {
|
| + for (const auto codec : kSupportedVPXProfiles)
|
| + candidate_codecs.push_back(codec);
|
| }
|
| - ReleaseEncoderResources();
|
|
|
| - SupportedProfile profile;
|
| - // More profiles can be supported here, but they should be available in SW
|
| - // fallback as well.
|
| - profile.profile = H264PROFILE_BASELINE;
|
| - profile.max_framerate_numerator = kMaxFrameRateNumerator;
|
| - profile.max_framerate_denominator = kMaxFrameRateDenominator;
|
| - profile.max_resolution = highest_supported_resolution;
|
| - profiles.push_back(profile);
|
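| +  // Probe each candidate profile by actually bringing up an encoder MFT;
|
| +  // only profiles that initialize successfully are reported.
|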
| + for (const auto codec : candidate_codecs) {
|
| + output_profile_ = codec;
|
| + target_bitrate_ = kDefaultTargetBitrate;
|
| + frame_rate_ = kMaxFrameRateNumerator / kMaxFrameRateDenominator;
|
| + input_visible_size_ = gfx::Size(kMaxResolutionWidth, kMaxResolutionHeight);
|
| + if (!CreateHardwareEncoderMFT() || !SetEncoderModes() ||
|
| + !InitializeInputOutputSamples()) {
|
| + ReleaseEncoderResources();
|
| + DVLOG(1) << "Hardware encode is not available on this platform for "
|
| + << GetProfileName(codec);
|
| + continue;
|
| + }
|
| +
|
| + gfx::Size highest_supported_resolution = input_visible_size_;
|
| + for (const auto& resolution : kOptionalMaxResolutions) {
|
| + DCHECK_GT(resolution.GetArea(), highest_supported_resolution.GetArea());
|
| + if (!IsResolutionSupported(resolution))
|
| + break;
|
| + highest_supported_resolution = resolution;
|
| + }
|
| + ReleaseEncoderResources();
|
| +
|
| + SupportedProfile profile;
|
| + // More profiles can be supported here, but they should be available in SW
|
| + // fallback as well.
|
| + profile.profile = codec;
|
| +    DVLOG(1) << "Hardware encode is available on this platform for "
|
| +             << GetProfileName(codec) << ", max resolution "
|
| +             << highest_supported_resolution.ToString();
|
| + profile.max_framerate_numerator = kMaxFrameRateNumerator;
|
| + profile.max_framerate_denominator = kMaxFrameRateDenominator;
|
| + profile.max_resolution = highest_supported_resolution;
|
| + profiles.push_back(profile);
|
| + }
|
| return profiles;
|
| }
|
|
|
| @@ -153,10 +197,14 @@ bool MediaFoundationVideoEncodeAccelerator::Initialize(
|
| return false;
|
| }
|
|
|
| - if (H264PROFILE_BASELINE != output_profile) {
|
| +  if (!((H264PROFILE_BASELINE == output_profile && h264_enabled_) ||
|
| +        ((VP8PROFILE_MIN == output_profile ||
|
| +          VP9PROFILE_MIN == output_profile) &&
|
| +         vpx_enabled_))) {
|
| DLOG(ERROR) << "Output profile not supported= " << output_profile;
|
| return false;
|
| }
|
| + output_profile_ = output_profile;
|
|
|
| encoder_thread_.init_com_with_mta(false);
|
| if (!encoder_thread_.Start()) {
|
| @@ -339,34 +387,56 @@ bool MediaFoundationVideoEncodeAccelerator::CreateHardwareEncoderMFT() {
|
|
|
| InitializeMediaFoundation();
|
|
|
| - uint32_t flags = MFT_ENUM_FLAG_HARDWARE | MFT_ENUM_FLAG_SORTANDFILTER;
|
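| +  // Always enumerate synchronous MFTs; request hardware MFTs only for H.264.
|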
| + uint32_t flags = MFT_ENUM_FLAG_SORTANDFILTER | MFT_ENUM_FLAG_SYNCMFT;
|
| + if (output_profile_ == H264PROFILE_BASELINE)
|
| + flags |= MFT_ENUM_FLAG_HARDWARE;
|
| MFT_REGISTER_TYPE_INFO input_info;
|
| input_info.guidMajorType = MFMediaType_Video;
|
| input_info.guidSubtype = MFVideoFormat_NV12;
|
| MFT_REGISTER_TYPE_INFO output_info;
|
| output_info.guidMajorType = MFMediaType_Video;
|
| - output_info.guidSubtype = MFVideoFormat_H264;
|
| + output_info.guidSubtype = VideoCodecProfileToGUID(output_profile_);
|
|
|
| - base::win::ScopedCoMem<CLSID> CLSIDs;
|
| + base::win::ScopedCoMem<IMFActivate*> devices;
|
| uint32_t count = 0;
|
| - HRESULT hr = MFTEnum(MFT_CATEGORY_VIDEO_ENCODER, flags, &input_info,
|
| - &output_info, NULL, &CLSIDs, &count);
|
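| +  // Unlike MFTEnum, MFTEnumEx honors these enumeration flags and returns
|
| +  // IMFActivate objects rather than bare CLSIDs.
|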
| + HRESULT hr = MFTEnumEx(MFT_CATEGORY_VIDEO_ENCODER, flags, &input_info,
|
| + &output_info, &devices, &count);
|
| RETURN_ON_HR_FAILURE(hr, "Couldn't enumerate hardware encoder", false);
|
| RETURN_ON_FAILURE((count > 0), "No HW encoder found", false);
|
| - DVLOG(3) << "HW encoder(s) found: " << count;
|
| - hr = encoder_.CreateInstance(CLSIDs[0]);
|
| - RETURN_ON_HR_FAILURE(hr, "Couldn't activate hardware encoder", false);
|
| - RETURN_ON_FAILURE((encoder_.get() != nullptr),
|
| - "No HW encoder instance created", false);
|
| - return true;
|
| +
|
| +  bool succeeded = true;
|
| +  if (count < 2) {
|
| +    DLOG(ERROR) << "Expected at least two encoder MFTs, found " << count;
|
| +    for (uint32_t i = 0; i < count; ++i)
|
| +      devices[i]->Release();
|
| +    return false;
|
| +  }
|
| +  // Note: devices[0] is deliberately skipped; the encoder is created from
|
| +  // devices[1]'s CLSID rather than activated via IMFActivate::ActivateObject.
|
| +  GUID guid = {0};
|
| +  hr = devices[1]->GetGUID(MFT_TRANSFORM_CLSID_Attribute, &guid);
|
| +  if (SUCCEEDED(hr)) {
|
| +    base::win::ScopedCoMem<OLECHAR> name;
|
| +    if (SUCCEEDED(StringFromCLSID(guid, &name)))
|
| +      DVLOG(3) << __func__ << ": activating encoder CLSID " << name.get();
|
| +    hr = encoder_.CreateInstance(guid);
|
| +  }
|
| +  if (FAILED(hr) || encoder_.get() == nullptr) {
|
| +    DLOG(ERROR) << "Couldn't activate hardware encoder, HRESULT: 0x"
|
| +                << std::hex << hr;
|
| +    succeeded = false;
|
| +  }
|
| +  // Release the activation objects regardless of whether activation succeeded.
|
| +  for (uint32_t i = 0; i < count; ++i)
|
| +    devices[i]->Release();
|
| +  return succeeded;
|
| }
|
|
|
| bool MediaFoundationVideoEncodeAccelerator::InitializeInputOutputSamples() {
|
| DCHECK(main_client_task_runner_->BelongsToCurrentThread());
|
|
|
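| +  // Asynchronous MFTs (hardware encoders in particular) must be unlocked
|
| +  // before they can be configured.
|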
| + base::win::ScopedComPtr<IMFAttributes> attributes;
|
| + HRESULT hr = encoder_->GetAttributes(attributes.Receive());
|
| + RETURN_ON_HR_FAILURE(hr, "Couldn't get attributes", false);
|
| + hr = attributes->SetUINT32(MF_TRANSFORM_ASYNC_UNLOCK, TRUE);
|
| + RETURN_ON_HR_FAILURE(hr, "Couldn't unlock", false);
|
| +
|
| DWORD input_count = 0;
|
| DWORD output_count = 0;
|
| - HRESULT hr = encoder_->GetStreamCount(&input_count, &output_count);
|
| + hr = encoder_->GetStreamCount(&input_count, &output_count);
|
| RETURN_ON_HR_FAILURE(hr, "Couldn't get stream count", false);
|
| if (input_count < 1 || output_count < 1) {
|
| LOG(ERROR) << "Stream count too few: input " << input_count << ", output "
|
| @@ -381,20 +451,26 @@ bool MediaFoundationVideoEncodeAccelerator::InitializeInputOutputSamples() {
|
| if (hr == S_OK) {
|
| input_stream_id_ = input_ids[0];
|
| output_stream_id_ = output_ids[0];
|
| - } else if (hr == E_NOTIMPL) {
|
| + } else {
|
| input_stream_id_ = 0;
|
| output_stream_id_ = 0;
|
| - } else {
|
| - LOG(ERROR) << "Couldn't find stream ids.";
|
| - return false;
|
| }
|
| +  // Any failure here (not just E_NOTIMPL) now falls back to the default
|
| +  // stream ids instead of aborting initialization.
|
|
|
| // Initialize output parameters.
|
| hr = MFCreateMediaType(imf_output_media_type_.Receive());
|
| RETURN_ON_HR_FAILURE(hr, "Couldn't create media type", false);
|
| hr = imf_output_media_type_->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
|
| RETURN_ON_HR_FAILURE(hr, "Couldn't set media type", false);
|
| - hr = imf_output_media_type_->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
|
| + hr = imf_output_media_type_->SetGUID(
|
| + MF_MT_SUBTYPE, VideoCodecProfileToGUID(output_profile_));
|
| RETURN_ON_HR_FAILURE(hr, "Couldn't set video format", false);
|
| hr = imf_output_media_type_->SetUINT32(MF_MT_AVG_BITRATE, target_bitrate_);
|
| RETURN_ON_HR_FAILURE(hr, "Couldn't set bitrate", false);
|
| @@ -408,9 +484,11 @@ bool MediaFoundationVideoEncodeAccelerator::InitializeInputOutputSamples() {
|
| hr = imf_output_media_type_->SetUINT32(MF_MT_INTERLACE_MODE,
|
| MFVideoInterlace_Progressive);
|
| RETURN_ON_HR_FAILURE(hr, "Couldn't set interlace mode", false);
|
| - hr = imf_output_media_type_->SetUINT32(MF_MT_MPEG2_PROFILE,
|
| - eAVEncH264VProfile_Base);
|
| - RETURN_ON_HR_FAILURE(hr, "Couldn't set codec profile", false);
|
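| +  // MF_MT_MPEG2_PROFILE carries the H.264 profile and is only set for H.264.
|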
| + if (output_profile_ == H264PROFILE_BASELINE) {
|
| + hr = imf_output_media_type_->SetUINT32(MF_MT_MPEG2_PROFILE,
|
| + eAVEncH264VProfile_Base);
|
| + RETURN_ON_HR_FAILURE(hr, "Couldn't set codec profile", false);
|
| + }
|
| hr = encoder_->SetOutputType(output_stream_id_, imf_output_media_type_.get(),
|
| 0);
|
| RETURN_ON_HR_FAILURE(hr, "Couldn't set output media type", false);
|
| @@ -420,7 +498,7 @@ bool MediaFoundationVideoEncodeAccelerator::InitializeInputOutputSamples() {
|
| RETURN_ON_HR_FAILURE(hr, "Couldn't create media type", false);
|
| hr = imf_input_media_type_->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
|
| RETURN_ON_HR_FAILURE(hr, "Couldn't set media type", false);
|
| - hr = imf_input_media_type_->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_YV12);
|
| + hr = imf_input_media_type_->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12);
|
| RETURN_ON_HR_FAILURE(hr, "Couldn't set video format", false);
|
| hr = MFSetAttributeRatio(imf_input_media_type_.get(), MF_MT_FRAME_RATE,
|
| frame_rate_, 1);
|
| @@ -453,13 +531,15 @@ bool MediaFoundationVideoEncodeAccelerator::SetEncoderModes() {
|
| var.ulVal = target_bitrate_;
|
| hr = codec_api_->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var);
|
| RETURN_ON_HR_FAILURE(hr, "Couldn't set bitrate", false);
|
| - var.ulVal = eAVEncAdaptiveMode_Resolution;
|
| - hr = codec_api_->SetValue(&CODECAPI_AVEncAdaptiveMode, &var);
|
| - RETURN_ON_HR_FAILURE(hr, "Couldn't set FrameRate", false);
|
| var.vt = VT_BOOL;
|
| var.boolVal = VARIANT_TRUE;
|
| hr = codec_api_->SetValue(&CODECAPI_AVLowLatencyMode, &var);
|
| RETURN_ON_HR_FAILURE(hr, "Couldn't set LowLatencyMode", false);
|
| +  // CODECAPI_AVEncAdaptiveMode is no longer set here; if it is needed again,
|
| +  // it should only be applied when output_profile_ == H264PROFILE_BASELINE.
|
| return SUCCEEDED(hr);
|
| }
|
|
|
|
|