Chromium Code Reviews

Unified Diff: media/gpu/media_foundation_video_encode_accelerator_win.cc

Issue 2058413003: H264 HW encode using MediaFoundation (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 4 years, 6 months ago
Index: media/gpu/media_foundation_video_encode_accelerator_win.cc
diff --git a/media/gpu/media_foundation_video_encode_accelerator_win.cc b/media/gpu/media_foundation_video_encode_accelerator_win.cc
new file mode 100644
index 0000000000000000000000000000000000000000..542938e16c984b793e4f1763b201b9b95913b2f6
--- /dev/null
+++ b/media/gpu/media_foundation_video_encode_accelerator_win.cc
@@ -0,0 +1,435 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/media_foundation_video_encode_accelerator_win.h"
+
+#if !defined(OS_WIN)
+// #error This file should only be built on Windows.
ananta 2016/06/28 22:50:01 Don't think we need this. The _win suffix should be enough.
emircan 2016/07/02 00:07:38 Done.
+#else
+#pragma warning(push)
+#pragma warning(disable : 4800) // Disable C4800: forcing value to bool (performance warning).
+#endif // !defined(OS_WIN)
+
+#include <codecapi.h>
+#include <mferror.h>
+#include <mftransform.h>
+
+#include "base/threading/thread_task_runner_handle.h"
+#include "base/win/scoped_co_mem.h"
+#include "base/win/scoped_variant.h"
+#include "media/base/win/mf_helpers.h"
+#include "media/base/win/mf_initializer.h"
+#include "third_party/libyuv/include/libyuv.h"
+
+using base::win::ScopedComPtr;
+using media::mf::MediaBufferScopedPointer;
+
+namespace media {
+
+namespace {
+
+const size_t kMaxFrameRateNumerator = 30;
+const size_t kMaxFrameRateDenominator = 1;
+const size_t kMaxResolutionWidth = 4096;
+const size_t kMaxResolutionHeight = 2160;
+const size_t kNumInputBuffers = 3;
+const size_t kOneSecondInMicroseconds = 1000000;
+const size_t kOutputSampleBufferSizeRatio = 4;
+
+static const wchar_t* const kMediaFoundationVideoEncoderDLLs[] = {
+ L"mf.dll", L"mfplat.dll",
+};
+
+} // namespace
+
+struct MediaFoundationVideoEncodeAccelerator::BitstreamBufferRef {
+ BitstreamBufferRef(int32_t id,
+ std::unique_ptr<base::SharedMemory> shm,
+ size_t size)
+ : id(id), shm(std::move(shm)), size(size) {}
+ const int32_t id;
+ const std::unique_ptr<base::SharedMemory> shm;
+ const size_t size;
+
+ private:
+ DISALLOW_IMPLICIT_CONSTRUCTORS(BitstreamBufferRef);
+};
+
+MediaFoundationVideoEncodeAccelerator::MediaFoundationVideoEncodeAccelerator()
+ : win_version_(base::win::GetVersion()),
+ client_task_runner_(base::ThreadTaskRunnerHandle::Get()),
+ encoder_thread_("MFEncoderThread"),
+ encoder_task_weak_factory_(this) {
+ DVLOG(3) << __FUNCTION__;
+ encoder_weak_ptr_ = encoder_task_weak_factory_.GetWeakPtr();
+}
+
+MediaFoundationVideoEncodeAccelerator::
+ ~MediaFoundationVideoEncodeAccelerator() {
+ DVLOG(3) << __FUNCTION__;
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ Destroy();
+ DCHECK(!encoder_thread_.IsRunning());
+ DCHECK(!encoder_task_weak_factory_.HasWeakPtrs());
+}
+
+media::VideoEncodeAccelerator::SupportedProfiles
+MediaFoundationVideoEncodeAccelerator::GetSupportedProfiles() {
+ DVLOG(3) << __FUNCTION__;
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ SupportedProfiles profiles;
+ if (base::win::GetVersion() < base::win::VERSION_WIN8) {
+ DLOG(ERROR) << "Windows versions earlier than 8 are not supported.";
+ return profiles;
+ }
+
+ SupportedProfile profile;
+ profile.profile = media::H264PROFILE_BASELINE;
ananta 2016/06/28 22:50:01 Does this always have to be the baseline profile?
emircan 2016/07/02 00:07:37 OpenH264 SW only supports baseline right now. In c
+ profile.max_framerate_numerator = kMaxFrameRateNumerator;
+ profile.max_framerate_denominator = kMaxFrameRateDenominator;
+ profile.max_resolution = gfx::Size(kMaxResolutionWidth, kMaxResolutionHeight);
+ profiles.push_back(profile);
+ return profiles;
+}
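The exchange above explains that only baseline is advertised because the OpenH264 software encoder is baseline-only. Purely as a hypothetical sketch (not part of this patch), an additional profile could later be appended the same way, reusing the caps defined above:

  // Hypothetical sketch: advertise Main profile with the same caps.
  SupportedProfile main_profile;
  main_profile.profile = media::H264PROFILE_MAIN;
  main_profile.max_framerate_numerator = kMaxFrameRateNumerator;
  main_profile.max_framerate_denominator = kMaxFrameRateDenominator;
  main_profile.max_resolution =
      gfx::Size(kMaxResolutionWidth, kMaxResolutionHeight);
  profiles.push_back(main_profile);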
+
+bool MediaFoundationVideoEncodeAccelerator::Initialize(
+ media::VideoPixelFormat format,
+ const gfx::Size& input_visible_size,
+ media::VideoCodecProfile output_profile,
+ uint32_t initial_bitrate,
ananta 2016/06/28 22:50:01 The Initialize function is very big. Please split it into smaller functions.
emircan 2016/07/02 00:07:37 Done.
+ Client* client) {
+ DVLOG(3) << __FUNCTION__
+ << ": input_format=" << media::VideoPixelFormatToString(format)
+ << ", input_visible_size=" << input_visible_size.ToString()
+ << ", output_profile=" << output_profile
+ << ", initial_bitrate=" << initial_bitrate;
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ if (media::PIXEL_FORMAT_I420 != format) {
+ DLOG(ERROR) << "Input format not supported= "
+ << media::VideoPixelFormatToString(format);
+ return false;
+ }
ananta 2016/06/28 22:50:01 newline here.
emircan 2016/07/02 00:07:37 Done.
+ if (media::H264PROFILE_BASELINE != output_profile) {
+ DLOG(ERROR) << "Output profile not supported= " << output_profile;
+ return false;
+ }
+
+ for (const wchar_t* mfdll : kMediaFoundationVideoEncoderDLLs) {
+ HMODULE dll = ::GetModuleHandle(mfdll);
+ if (!dll) {
+ DLOG(ERROR) << mfdll << " is required for encoding";
+ return false;
+ }
+ }
+ media::InitializeMediaFoundation();
ananta 2016/06/28 22:50:00 newline here.
emircan 2016/07/02 00:07:37 Done.
+
+ uint32_t flags = MFT_ENUM_FLAG_HARDWARE | MFT_ENUM_FLAG_SORTANDFILTER;
+ MFT_REGISTER_TYPE_INFO input_info;
+ input_info.guidMajorType = MFMediaType_Video;
+ input_info.guidSubtype = MFVideoFormat_NV12;
+ MFT_REGISTER_TYPE_INFO output_info;
+ output_info.guidMajorType = MFMediaType_Video;
+ output_info.guidSubtype = MFVideoFormat_H264;
+
+ base::win::ScopedCoMem<CLSID> CLSIDs;
+ uint32_t count = 0;
+ HRESULT hr = MFTEnum(MFT_CATEGORY_VIDEO_ENCODER, flags, NULL, &output_info,
+ NULL, &CLSIDs, &count);
+ RETURN_ON_HR_FAILURE(hr, "Couldn't enumerate hardware encoder", false);
+ RETURN_ON_FAILURE((count > 0), "No HW encoder found", false);
+ DVLOG(3) << "HW encoder(s) found: " << count;
+ hr = encoder_.CreateInstance(CLSIDs[0]);
+ RETURN_ON_HR_FAILURE(hr, "Couldn't activate hardware encoder", false);
+
+ if (!encoder_thread_.Start()) {
+ DLOG(ERROR) << "Failed spawning encoder thread.";
+ return false;
+ }
+ encoder_thread_task_runner_ = encoder_thread_.task_runner();
ananta 2016/06/28 22:50:01 newline
emircan 2016/07/02 00:07:38 Done.
+
+ client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));
+ client_ = client_ptr_factory_->GetWeakPtr();
+ input_visible_size_ = input_visible_size;
+ frame_rate_ = kMaxFrameRateNumerator / kMaxFrameRateDenominator;
+ target_bitrate_ = initial_bitrate;
+ bitstream_buffer_size_ = input_visible_size.GetArea();
+
+ u_plane_offset_ =
+ VideoFrame::PlaneSize(PIXEL_FORMAT_I420, VideoFrame::kYPlane,
+ input_visible_size_)
+ .GetArea();
+ v_plane_offset_ =
+ u_plane_offset_ +
+ VideoFrame::PlaneSize(PIXEL_FORMAT_I420, VideoFrame::kUPlane,
+ input_visible_size_)
+ .GetArea();
+
+ hr = encoder_->GetStreamLimits(
+ &input_stream_count_min_, &input_stream_count_max_,
+ &output_stream_count_min_, &output_stream_count_max_);
+ RETURN_ON_HR_FAILURE(hr, "Couldn't query stream limits", false);
+ DVLOG(3) << "Stream limits: " << input_stream_count_min_ << ","
+ << input_stream_count_max_ << "," << output_stream_count_min_ << ","
+ << output_stream_count_max_;
+
+ base::win::ScopedComPtr<IMFMediaType> imf_output_media_type;
+ hr = MFCreateMediaType(imf_output_media_type.Receive());
+ hr &= imf_output_media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
ananta 2016/06/28 22:50:01 what happens if MFCreateMediaType fails? Additionally, the Set calls below can also fail and should be checked.
emircan 2016/07/02 00:07:38 I will add a return there if it fails.
+ hr &= imf_output_media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
+ hr &= imf_output_media_type->SetUINT32(MF_MT_AVG_BITRATE, target_bitrate_);
+ hr &= MFSetAttributeRatio(imf_output_media_type.get(), MF_MT_FRAME_RATE,
+ frame_rate_, kMaxFrameRateDenominator);
+ hr &= MFSetAttributeSize(imf_output_media_type.get(), MF_MT_FRAME_SIZE,
ananta 2016/06/28 22:50:01 Move this to the next line for consistency
emircan 2016/07/02 00:07:37 Done.
+ input_visible_size_.width(),
+ input_visible_size_.height());
+ hr &= imf_output_media_type->SetUINT32(MF_MT_INTERLACE_MODE,
+ MFVideoInterlace_Progressive);
+ hr &= imf_output_media_type->SetUINT32(MF_MT_MPEG2_PROFILE,
+ eAVEncH264VProfile_Base);
+ RETURN_ON_HR_FAILURE(hr, "Couldn't set output params", false);
+ hr = encoder_->SetOutputType(0, imf_output_media_type.get(), 0);
+ RETURN_ON_HR_FAILURE(hr, "Couldn't set output media type", false);
+
+ base::win::ScopedComPtr<IMFMediaType> imf_input_media_type;
+ hr = MFCreateMediaType(imf_input_media_type.Receive());
+ hr &= imf_input_media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
ananta 2016/06/28 22:50:01 Ditto for MFCreateMediaType and the other Set operations.
emircan 2016/07/02 00:07:38 Done.
+ hr &= imf_input_media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_YV12);
+ hr &= MFSetAttributeRatio(imf_input_media_type.get(), MF_MT_FRAME_RATE,
+ frame_rate_, kMaxFrameRateDenominator);
+ hr &= MFSetAttributeSize(imf_input_media_type.get(), MF_MT_FRAME_SIZE,
+ input_visible_size_.width(),
+ input_visible_size_.height());
+ hr &= imf_input_media_type->SetUINT32(MF_MT_INTERLACE_MODE,
+ MFVideoInterlace_Progressive);
+ RETURN_ON_HR_FAILURE(hr, "Couldn't set input params", false);
+ hr = encoder_->SetInputType(0, imf_input_media_type.get(), 0);
+ RETURN_ON_HR_FAILURE(hr, "Couldn't set input media type", false);
+
+ hr = encoder_.QueryInterface(IID_ICodecAPI, codec_api_.ReceiveVoid());
+ RETURN_ON_HR_FAILURE(hr, "Couldn't get ICodecAPI", false);
+ VARIANT var;
+ var.vt = VT_UI4;
+ var.ulVal = eAVEncCommonRateControlMode_CBR;
+ hr = codec_api_->SetValue(&CODECAPI_AVEncCommonRateControlMode, &var);
+ RETURN_ON_HR_FAILURE(hr, "Couldn't set CommonRateControlMode", false);
+ var.ulVal = target_bitrate_;
+ hr = codec_api_->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var);
+ RETURN_ON_HR_FAILURE(hr, "Couldn't set bitrate", false);
+ var.ulVal = eAVEncAdaptiveMode_FrameRate;
+ hr = codec_api_->SetValue(&CODECAPI_AVEncAdaptiveMode, &var);
+ RETURN_ON_HR_FAILURE(hr, "Couldn't set FrameRate", false);
+ var.vt = VT_BOOL;
+ var.boolVal = VARIANT_TRUE;
+ hr = codec_api_->SetValue(&CODECAPI_AVLowLatencyMode, &var);
+ RETURN_ON_HR_FAILURE(hr, "Couldn't set LowLatencyMode", false);
+
+ input_sample_.Attach(mf::CreateEmptySampleWithBuffer(
+ VideoFrame::AllocationSize(PIXEL_FORMAT_I420, input_visible_size), 2));
+ output_sample_.Attach(mf::CreateEmptySampleWithBuffer(
+ bitstream_buffer_size_ * kOutputSampleBufferSizeRatio, 2));
+
+ hr = encoder_->ProcessMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, NULL);
+ RETURN_ON_HR_FAILURE(hr, "Couldn't set ProcessMessage", false);
+
+ client_task_runner_->PostTask(
+ FROM_HERE,
+ base::Bind(&Client::RequireBitstreamBuffers, client_, kNumInputBuffers,
+ input_visible_size_, bitstream_buffer_size_));
+ return SUCCEEDED(hr);
+}
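On the review comments about MFCreateMediaType and the chained Set calls: since S_OK is 0, once hr is S_OK the "hr &=" folding can never turn into a failure code again, so RETURN_ON_HR_FAILURE never fires for a later failed Set call. A minimal sketch of per-call checking, reusing the same macro and calls as above (error strings are illustrative):

  // Sketch only: check each HRESULT instead of folding them with "hr &=".
  // Once hr is S_OK (0), "hr &= ..." stays 0, so later failures are lost.
  base::win::ScopedComPtr<IMFMediaType> imf_output_media_type;
  HRESULT hr = MFCreateMediaType(imf_output_media_type.Receive());
  RETURN_ON_HR_FAILURE(hr, "Couldn't create output media type", false);
  hr = imf_output_media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set media major type", false);
  hr = imf_output_media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set media subtype", false);
  hr = imf_output_media_type->SetUINT32(MF_MT_AVG_BITRATE, target_bitrate_);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set average bitrate", false);
  // ...and likewise for the remaining attributes, the input type, and
  // SetOutputType()/SetInputType().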
+
+void MediaFoundationVideoEncodeAccelerator::Encode(
+ const scoped_refptr<media::VideoFrame>& frame,
+ bool force_keyframe) {
+ DVLOG(3) << __FUNCTION__;
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ encoder_thread_task_runner_->PostTask(
+ FROM_HERE, base::Bind(&MediaFoundationVideoEncodeAccelerator::EncodeTask,
+ encoder_weak_ptr_, frame, force_keyframe));
+}
+
+void MediaFoundationVideoEncodeAccelerator::UseOutputBitstreamBuffer(
+ const media::BitstreamBuffer& buffer) {
+ DVLOG(3) << __FUNCTION__ << ": buffer size=" << buffer.size();
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ if (buffer.size() < bitstream_buffer_size_) {
+ DLOG(ERROR) << "Output BitstreamBuffer isn't big enough: " << buffer.size()
+ << " vs. " << bitstream_buffer_size_;
+ client_->NotifyError(kInvalidArgumentError);
+ return;
+ }
+
+ std::unique_ptr<base::SharedMemory> shm(
+ new base::SharedMemory(buffer.handle(), false));
+ if (!shm->Map(buffer.size())) {
+ DLOG(ERROR) << "Failed mapping shared memory.";
+ client_->NotifyError(kPlatformFailureError);
+ return;
+ }
+
+ std::unique_ptr<BitstreamBufferRef> buffer_ref(
+ new BitstreamBufferRef(buffer.id(), std::move(shm), buffer.size()));
+ encoder_thread_task_runner_->PostTask(
+ FROM_HERE,
+ base::Bind(
+ &MediaFoundationVideoEncodeAccelerator::UseOutputBitstreamBufferTask,
+ encoder_weak_ptr_, base::Passed(&buffer_ref)));
+}
+
+void MediaFoundationVideoEncodeAccelerator::RequestEncodingParametersChange(
+ uint32_t bitrate,
+ uint32_t framerate) {
+ DVLOG(3) << __FUNCTION__ << ": bitrate=" << bitrate
+ << ": framerate=" << framerate;
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ encoder_thread_task_runner_->PostTask(
+ FROM_HERE, base::Bind(&MediaFoundationVideoEncodeAccelerator::
+ RequestEncodingParametersChangeTask,
+ encoder_weak_ptr_, bitrate, framerate));
+}
+
+void MediaFoundationVideoEncodeAccelerator::Destroy() {
+ DVLOG(3) << __FUNCTION__;
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ // Cancel all callbacks.
+ client_ptr_factory_.reset();
+
+ if (encoder_thread_.IsRunning()) {
+ encoder_thread_task_runner_->PostTask(
+ FROM_HERE,
+ base::Bind(&MediaFoundationVideoEncodeAccelerator::DestroyTask,
+ encoder_weak_ptr_));
+ encoder_thread_.Stop();
+ } else {
+ DestroyTask();
+ }
+}
+
+void MediaFoundationVideoEncodeAccelerator::EncodeTask(
+ const scoped_refptr<media::VideoFrame>& frame,
+ bool force_keyframe) {
+ DVLOG(3) << __FUNCTION__;
+ DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
+
+ base::win::ScopedComPtr<IMFMediaBuffer> input_buffer;
+ input_sample_->GetBufferByIndex(0, input_buffer.Receive());
+ {
ananta 2016/06/28 22:50:01 newline here.
emircan 2016/07/02 00:07:37 Done.
+ MediaBufferScopedPointer scoped_buffer(input_buffer.get());
+ DCHECK(scoped_buffer.get());
+ libyuv::I420Copy(frame->visible_data(media::VideoFrame::kYPlane),
+ frame->stride(media::VideoFrame::kYPlane),
+ frame->visible_data(media::VideoFrame::kVPlane),
+ frame->stride(media::VideoFrame::kVPlane),
+ frame->visible_data(media::VideoFrame::kUPlane),
+ frame->stride(media::VideoFrame::kUPlane),
+ scoped_buffer.get(),
+ frame->stride(media::VideoFrame::kYPlane),
+ scoped_buffer.get() + u_plane_offset_,
+ frame->stride(media::VideoFrame::kUPlane),
+ scoped_buffer.get() + v_plane_offset_,
+ frame->stride(media::VideoFrame::kVPlane),
+ input_visible_size_.width(), input_visible_size_.height());
+ }
+ input_sample_->SetSampleTime(frame->timestamp().InMicroseconds() * 10);
ananta 2016/06/28 22:50:00 newline here.
emircan 2016/07/02 00:07:37 Done.
+ input_sample_->SetSampleDuration(kOneSecondInMicroseconds / frame_rate_);
+ HRESULT hr = encoder_->ProcessInput(0, input_sample_.get(), 0);
+ RETURN_ON_HR_FAILURE(hr, "Couldn't encode", );
+ DVLOG(3) << "Sent for encode " << hr;
+
+ ProcessOutput();
ananta 2016/06/28 22:50:01 Perhaps we need to check if ProcessOutput succeeds.
emircan 2016/07/02 00:07:37 I answered below.
+}
+
+void MediaFoundationVideoEncodeAccelerator::ProcessOutput() {
ananta 2016/06/28 22:50:00 Should this function be returning void? What happens if it fails?
emircan 2016/07/02 00:07:37 We are fine if it fails. - If it says MF_E_TRANSFORM_NEED_MORE_INPUT, we just return and wait for the next input frame.
+ DVLOG(3) << __FUNCTION__;
+ DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
+
+ if (bitstream_buffer_queue_.empty()) {
+ DVLOG(3) << "No bitstream buffers.";
+ return;
+ }
+
+ MFT_OUTPUT_DATA_BUFFER output_data_buffer = {0};
+ output_data_buffer.dwStreamID = 0;
+ output_data_buffer.dwStatus = 0;
+ output_data_buffer.pEvents = NULL;
+ output_data_buffer.pSample = output_sample_.get();
+ DWORD status = 0;
+ HRESULT hr = encoder_->ProcessOutput(0, 1, &output_data_buffer, &status);
+ if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
+ DVLOG(3) << "MF_E_TRANSFORM_NEED_MORE_INPUT";
+ return;
+ }
+ RETURN_ON_HR_FAILURE(hr, "Couldn't get encoded data", );
+ DVLOG(3) << "Got encoded data " << hr;
+
+ std::unique_ptr<MediaFoundationVideoEncodeAccelerator::BitstreamBufferRef>
+ buffer_ref = std::move(bitstream_buffer_queue_.front());
+ bitstream_buffer_queue_.pop_front();
+
+ base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
+ output_sample_->GetBufferByIndex(0, output_buffer.Receive());
+ DWORD size = 0;
ananta 2016/06/28 22:50:01 What happens if GetBufferByIndex fails?
emircan 2016/07/02 00:07:37 Adding hr checks.
+ output_buffer->GetCurrentLength(&size);
+ {
+ MediaBufferScopedPointer scoped_buffer(output_buffer.get());
+ memcpy(buffer_ref->shm->memory(), scoped_buffer.get(), size);
+ }
+
+ const bool keyframe = MFGetAttributeUINT32(
+ output_sample_.get(), MFSampleExtension_CleanPoint, false);
+ DVLOG(3) << "We HAVE encoded data with size:" << size << " keyframe "
+ << keyframe;
+
+ client_task_runner_->PostTask(
+ FROM_HERE,
+ base::Bind(&Client::BitstreamBufferReady, client_, buffer_ref->id, size,
+ keyframe, base::Time::Now() - base::Time()));
+ ProcessOutput();
+}
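On the GetBufferByIndex exchange above, a minimal sketch of the promised hr checks (error strings are illustrative, and the empty last macro argument mirrors the void-returning uses above):

  // Sketch only: propagate failures from the output-buffer accessors.
  base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
  HRESULT hr = output_sample_->GetBufferByIndex(0, output_buffer.Receive());
  RETURN_ON_HR_FAILURE(hr, "Couldn't get output buffer", );
  DWORD size = 0;
  hr = output_buffer->GetCurrentLength(&size);
  RETURN_ON_HR_FAILURE(hr, "Couldn't get output buffer length", );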
+
+void MediaFoundationVideoEncodeAccelerator::UseOutputBitstreamBufferTask(
+ std::unique_ptr<BitstreamBufferRef> buffer_ref) {
+ DVLOG(3) << __FUNCTION__;
+ DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
+
+ bitstream_buffer_queue_.push_back(std::move(buffer_ref));
+}
+
+void MediaFoundationVideoEncodeAccelerator::RequestEncodingParametersChangeTask(
+ uint32_t bitrate,
+ uint32_t framerate) {
+ DVLOG(3) << __FUNCTION__;
+ DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
+
+ frame_rate_ = framerate > 1 ? framerate : 1;
+ target_bitrate_ = bitrate > 1 ? bitrate : 1;
+
+ VARIANT var;
+ var.vt = VT_UI4;
+ var.ulVal = target_bitrate_;
+ HRESULT hr = codec_api_->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var);
+ RETURN_ON_HR_FAILURE(hr, "Couldn't set bitrate", );
+
+ base::win::ScopedComPtr<IMFMediaType> imf_output_media_type;
+ hr = MFCreateMediaType(imf_output_media_type.Receive());
+ hr &= imf_output_media_type->SetUINT32(MF_MT_AVG_BITRATE, target_bitrate_);
+ hr &= MFSetAttributeRatio(imf_output_media_type.get(), MF_MT_FRAME_RATE,
+ framerate, kMaxFrameRateDenominator);
+}
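Note that the rebuilt imf_output_media_type above is never handed back to the encoder in this patch set, and the folded hr values are not checked. A hedged sketch of one way the change could be applied, reusing only calls that already appear in Initialize(); whether the hardware MFT accepts a mid-stream SetOutputType is an assumption here:

  // Sketch only: rebuild a complete output type and re-apply it. The full
  // attribute set from Initialize() is repeated because a partially
  // populated type may be rejected by SetOutputType().
  base::win::ScopedComPtr<IMFMediaType> new_output_type;
  HRESULT hr = MFCreateMediaType(new_output_type.Receive());
  RETURN_ON_HR_FAILURE(hr, "Couldn't create media type", );
  new_output_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
  new_output_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
  new_output_type->SetUINT32(MF_MT_AVG_BITRATE, target_bitrate_);
  MFSetAttributeRatio(new_output_type.get(), MF_MT_FRAME_RATE, frame_rate_,
                      kMaxFrameRateDenominator);
  MFSetAttributeSize(new_output_type.get(), MF_MT_FRAME_SIZE,
                     input_visible_size_.width(), input_visible_size_.height());
  new_output_type->SetUINT32(MF_MT_INTERLACE_MODE,
                             MFVideoInterlace_Progressive);
  new_output_type->SetUINT32(MF_MT_MPEG2_PROFILE, eAVEncH264VProfile_Base);
  hr = encoder_->SetOutputType(0, new_output_type.get(), 0);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set updated output media type", );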
+
+void MediaFoundationVideoEncodeAccelerator::DestroyTask() {
+ DVLOG(3) << __FUNCTION__;
+ DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
+
+ encoder_.Release();
+}
+
+} // namespace media
