Chromium Code Reviews| OLD | NEW |
|---|---|
| (Empty) | |
| 1 // Copyright 2016 The Chromium Authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 #include "media/gpu/media_foundation_video_encode_accelerator_win.h" | |
| 6 | |
| 7 #if defined(OS_WIN) | |
#pragma warning(push)
#pragma warning(disable : 4800)  // Disable warning C4800 (forcing value to bool).
#endif  // defined(OS_WIN)
| 11 | |
| 12 #include <codecapi.h> | |
| 13 #include <mferror.h> | |
| 14 #include <mftransform.h> | |
| 15 | |
| 16 #include <utility> | |
| 17 #include <vector> | |
| 18 | |
| 19 #include "base/threading/thread_task_runner_handle.h" | |
| 20 #include "base/win/scoped_co_mem.h" | |
| 21 #include "base/win/scoped_variant.h" | |
| 22 #include "base/win/windows_version.h" | |
| 23 #include "media/base/win/mf_helpers.h" | |
| 24 #include "media/base/win/mf_initializer.h" | |
| 25 #include "third_party/libyuv/include/libyuv.h" | |
| 26 | |
| 27 using base::win::ScopedComPtr; | |
| 28 using media::mf::MediaBufferScopedPointer; | |
| 29 | |
| 30 namespace media { | |
| 31 | |
namespace {

// Maximum frame rate advertised in GetSupportedProfiles() and used as the
// initial encoder frame rate.
const size_t kMaxFrameRateNumerator = 30;
const size_t kMaxFrameRateDenominator = 1;
// Largest supported output resolution (4K UHD).
const size_t kMaxResolutionWidth = 4096;
const size_t kMaxResolutionHeight = 2160;
// Number of output buffers requested from the client via
// RequireBitstreamBuffers().
const size_t kNumInputBuffers = 3;
// Used to convert the per-frame sample duration from the frame rate.
const size_t kOneSecondInMicroseconds = 1000000;
// The reusable output sample is sized at this multiple of
// |bitstream_buffer_size_| (which is the visible area in pixels).
const size_t kOutputSampleBufferSizeRatio = 4;

// Media Foundation DLLs that must be resident before the sandbox engages;
// preloaded in PreSandboxInitialization() and verified in Initialize().
static const wchar_t* const kMediaFoundationVideoEncoderDLLs[] = {
    L"mf.dll", L"mfplat.dll",
};

}  // namespace
| 47 | |
| 48 class MediaFoundationVideoEncodeAccelerator::EncodeOutput { | |
| 49 public: | |
| 50 EncodeOutput(uint32_t size, bool key_frame, base::TimeDelta timestamp) | |
| 51 : keyframe(key_frame), capture_timestamp(timestamp), data_(size) {} | |
| 52 | |
| 53 uint8_t* memory() { return data_.data(); } | |
| 54 int size() const { return static_cast<int>(data_.size()); } | |
| 55 const bool keyframe; | |
| 56 const base::TimeDelta capture_timestamp; | |
| 57 | |
| 58 private: | |
| 59 std::vector<uint8_t> data_; | |
| 60 DISALLOW_COPY_AND_ASSIGN(EncodeOutput); | |
| 61 }; | |
| 62 | |
// Pairs a client-provided output BitstreamBuffer id with the mapped shared
// memory that backs it; handed to the encoder thread for filling.
struct MediaFoundationVideoEncodeAccelerator::BitstreamBufferRef {
  BitstreamBufferRef(int32_t id,
                     std::unique_ptr<base::SharedMemory> shm,
                     size_t size)
      : id(id), shm(std::move(shm)), size(size) {}
  // Buffer id assigned by the client; echoed back in BitstreamBufferReady().
  const int32_t id;
  // Mapped shared memory region that receives the encoded bytes.
  const std::unique_ptr<base::SharedMemory> shm;
  // Usable size of |shm| in bytes.
  const size_t size;

 private:
  DISALLOW_IMPLICIT_CONSTRUCTORS(BitstreamBufferRef);
};
| 75 | |
// Constructed on the client thread; captures its task runner so results can
// be posted back from the encoder thread.
MediaFoundationVideoEncodeAccelerator::MediaFoundationVideoEncodeAccelerator()
    : client_task_runner_(base::ThreadTaskRunnerHandle::Get()),
      encoder_thread_("MFEncoderThread"),
      encoder_task_weak_factory_(this) {}
| 80 | |
MediaFoundationVideoEncodeAccelerator::
    ~MediaFoundationVideoEncodeAccelerator() {
  DVLOG(3) << __FUNCTION__;
  DCHECK(thread_checker_.CalledOnValidThread());

  // Stop the encoder thread and cancel pending tasks before members are
  // destroyed; the DCHECKs verify Destroy() fully tore everything down.
  Destroy();
  DCHECK(!encoder_thread_.IsRunning());
  DCHECK(!encoder_task_weak_factory_.HasWeakPtrs());
}
| 90 | |
| 91 VideoEncodeAccelerator::SupportedProfiles | |
| 92 MediaFoundationVideoEncodeAccelerator::GetSupportedProfiles() { | |
| 93 DVLOG(3) << __FUNCTION__; | |
| 94 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 95 | |
| 96 SupportedProfiles profiles; | |
| 97 if (base::win::GetVersion() < base::win::VERSION_WIN8) { | |
| 98 DLOG(ERROR) << "Windows versions earlier than 8 are not supported."; | |
| 99 return profiles; | |
| 100 } | |
| 101 | |
| 102 SupportedProfile profile; | |
| 103 // More profiles can be supported here, but they should be available in SW | |
| 104 // fallback as well. | |
| 105 profile.profile = H264PROFILE_BASELINE; | |
| 106 profile.max_framerate_numerator = kMaxFrameRateNumerator; | |
| 107 profile.max_framerate_denominator = kMaxFrameRateDenominator; | |
| 108 profile.max_resolution = gfx::Size(kMaxResolutionWidth, kMaxResolutionHeight); | |
| 109 profiles.push_back(profile); | |
| 110 return profiles; | |
| 111 } | |
| 112 | |
| 113 bool MediaFoundationVideoEncodeAccelerator::Initialize( | |
| 114 VideoPixelFormat format, | |
| 115 const gfx::Size& input_visible_size, | |
| 116 VideoCodecProfile output_profile, | |
| 117 uint32_t initial_bitrate, | |
| 118 Client* client) { | |
| 119 DVLOG(3) << __FUNCTION__ | |
| 120 << ": input_format=" << VideoPixelFormatToString(format) | |
| 121 << ", input_visible_size=" << input_visible_size.ToString() | |
| 122 << ", output_profile=" << output_profile | |
| 123 << ", initial_bitrate=" << initial_bitrate; | |
| 124 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 125 | |
| 126 if (PIXEL_FORMAT_I420 != format) { | |
| 127 DLOG(ERROR) << "Input format not supported= " | |
| 128 << VideoPixelFormatToString(format); | |
| 129 return false; | |
| 130 } | |
| 131 | |
| 132 if (H264PROFILE_BASELINE != output_profile) { | |
| 133 DLOG(ERROR) << "Output profile not supported= " << output_profile; | |
| 134 return false; | |
| 135 } | |
| 136 | |
| 137 for (const wchar_t* mfdll : kMediaFoundationVideoEncoderDLLs) { | |
| 138 HMODULE dll = ::GetModuleHandle(mfdll); | |
| 139 if (!dll) { | |
| 140 DLOG(ERROR) << mfdll << " is required for encoding"; | |
| 141 return false; | |
| 142 } | |
| 143 } | |
| 144 | |
| 145 InitializeMediaFoundation(); | |
| 146 | |
| 147 uint32_t flags = MFT_ENUM_FLAG_HARDWARE | MFT_ENUM_FLAG_SORTANDFILTER; | |
| 148 MFT_REGISTER_TYPE_INFO input_info; | |
| 149 input_info.guidMajorType = MFMediaType_Video; | |
| 150 input_info.guidSubtype = MFVideoFormat_NV12; | |
| 151 MFT_REGISTER_TYPE_INFO output_info; | |
| 152 output_info.guidMajorType = MFMediaType_Video; | |
| 153 output_info.guidSubtype = MFVideoFormat_H264; | |
| 154 | |
| 155 base::win::ScopedCoMem<CLSID> CLSIDs; | |
| 156 uint32_t count = 0; | |
| 157 HRESULT hr = MFTEnum(MFT_CATEGORY_VIDEO_ENCODER, flags, NULL, &output_info, | |
|
grt (UTC plus 2)
2016/07/15 07:59:03
are we better off supporting asynchronous MFTs fro
emircan
2016/07/16 06:56:37
I found out that async is only available in the ne
| |
| 158 NULL, &CLSIDs, &count); | |
| 159 RETURN_ON_HR_FAILURE(hr, "Couldn't enumerate hardware encoder", false); | |
| 160 RETURN_ON_FAILURE((count > 0), "No HW encoder found", false); | |
| 161 DVLOG(3) << "HW encoder(s) found: " << count; | |
| 162 hr = encoder_.CreateInstance(CLSIDs[0]); | |
| 163 RETURN_ON_HR_FAILURE(hr, "Couldn't activate hardware encoder", false); | |
| 164 | |
|
grt (UTC plus 2)
2016/07/15 07:59:03
since you're using COM on the encoder thread, call
emircan
2016/07/16 06:56:38
Done.
| |
| 165 if (!encoder_thread_.Start()) { | |
| 166 DLOG(ERROR) << "Failed spawning encoder thread."; | |
| 167 return false; | |
| 168 } | |
| 169 | |
| 170 encoder_thread_task_runner_ = encoder_thread_.task_runner(); | |
| 171 | |
| 172 client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client)); | |
| 173 client_ = client_ptr_factory_->GetWeakPtr(); | |
| 174 input_visible_size_ = input_visible_size; | |
| 175 frame_rate_ = kMaxFrameRateNumerator / kMaxFrameRateDenominator; | |
| 176 target_bitrate_ = initial_bitrate; | |
| 177 bitstream_buffer_size_ = input_visible_size.GetArea(); | |
| 178 | |
| 179 u_plane_offset_ = | |
| 180 VideoFrame::PlaneSize(PIXEL_FORMAT_I420, VideoFrame::kYPlane, | |
| 181 input_visible_size_) | |
| 182 .GetArea(); | |
| 183 v_plane_offset_ = | |
| 184 u_plane_offset_ + | |
| 185 VideoFrame::PlaneSize(PIXEL_FORMAT_I420, VideoFrame::kUPlane, | |
| 186 input_visible_size_) | |
| 187 .GetArea(); | |
| 188 | |
| 189 if (!InitializeInputOutputSamples()) { | |
| 190 DLOG(ERROR) << "Failed initializing input-output samples."; | |
| 191 return false; | |
| 192 } | |
| 193 | |
| 194 if (!SetEncoderModes()) { | |
| 195 DLOG(ERROR) << "Failed setting encoder parameters."; | |
| 196 return false; | |
| 197 } | |
| 198 | |
| 199 hr = encoder_->ProcessMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, NULL); | |
| 200 RETURN_ON_HR_FAILURE(hr, "Couldn't set ProcessMessage", false); | |
| 201 | |
| 202 client_task_runner_->PostTask( | |
| 203 FROM_HERE, | |
| 204 base::Bind(&Client::RequireBitstreamBuffers, client_, kNumInputBuffers, | |
| 205 input_visible_size_, bitstream_buffer_size_)); | |
| 206 return SUCCEEDED(hr); | |
| 207 } | |
| 208 | |
| 209 void MediaFoundationVideoEncodeAccelerator::Encode( | |
| 210 const scoped_refptr<VideoFrame>& frame, | |
| 211 bool force_keyframe) { | |
| 212 DVLOG(3) << __FUNCTION__; | |
| 213 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 214 | |
| 215 encoder_thread_task_runner_->PostTask( | |
| 216 FROM_HERE, base::Bind(&MediaFoundationVideoEncodeAccelerator::EncodeTask, | |
| 217 encoder_task_weak_factory_.GetWeakPtr(), frame, | |
| 218 force_keyframe)); | |
| 219 } | |
| 220 | |
void MediaFoundationVideoEncodeAccelerator::UseOutputBitstreamBuffer(
    const BitstreamBuffer& buffer) {
  DVLOG(3) << __FUNCTION__ << ": buffer size=" << buffer.size();
  DCHECK(thread_checker_.CalledOnValidThread());

  // Reject buffers too small to hold an encoded frame of the configured size.
  if (buffer.size() < bitstream_buffer_size_) {
    DLOG(ERROR) << "Output BitstreamBuffer isn't big enough: " << buffer.size()
                << " vs. " << bitstream_buffer_size_;
    client_->NotifyError(kInvalidArgumentError);
    return;
  }

  // Map the client's shared memory read/write on the client thread.
  std::unique_ptr<base::SharedMemory> shm(
      new base::SharedMemory(buffer.handle(), false));
  if (!shm->Map(buffer.size())) {
    DLOG(ERROR) << "Failed mapping shared memory.";
    client_->NotifyError(kPlatformFailureError);
    return;
  }

  // Transfer ownership of the mapped buffer to the encoder thread.
  std::unique_ptr<BitstreamBufferRef> buffer_ref(
      new BitstreamBufferRef(buffer.id(), std::move(shm), buffer.size()));
  encoder_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(
          &MediaFoundationVideoEncodeAccelerator::UseOutputBitstreamBufferTask,
          encoder_task_weak_factory_.GetWeakPtr(), base::Passed(&buffer_ref)));
}
| 249 | |
| 250 void MediaFoundationVideoEncodeAccelerator::RequestEncodingParametersChange( | |
| 251 uint32_t bitrate, | |
| 252 uint32_t framerate) { | |
| 253 DVLOG(3) << __FUNCTION__ << ": bitrate=" << bitrate | |
| 254 << ": framerate=" << framerate; | |
| 255 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 256 | |
| 257 encoder_thread_task_runner_->PostTask( | |
| 258 FROM_HERE, | |
| 259 base::Bind(&MediaFoundationVideoEncodeAccelerator:: | |
| 260 RequestEncodingParametersChangeTask, | |
| 261 encoder_task_weak_factory_.GetWeakPtr(), bitrate, framerate)); | |
| 262 } | |
| 263 | |
| 264 void MediaFoundationVideoEncodeAccelerator::Destroy() { | |
| 265 DVLOG(3) << __FUNCTION__; | |
| 266 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 267 | |
| 268 // Cancel all callbacks. | |
| 269 client_ptr_factory_.reset(); | |
| 270 | |
| 271 if (encoder_thread_.IsRunning()) { | |
| 272 encoder_thread_task_runner_->PostTask( | |
| 273 FROM_HERE, | |
| 274 base::Bind(&MediaFoundationVideoEncodeAccelerator::DestroyTask, | |
| 275 encoder_task_weak_factory_.GetWeakPtr())); | |
| 276 encoder_thread_.Stop(); | |
| 277 } else { | |
| 278 DestroyTask(); | |
|
grt (UTC plus 2)
2016/07/15 07:59:04
you can't call this function from the client's thr
emircan
2016/07/16 06:56:37
Done. I moved the encoder_thread_.Start() to the b
| |
| 279 } | |
| 280 } | |
|
grt (UTC plus 2)
2016/07/15 07:59:04
it seems to be the responsibility of the accelerat
emircan
2016/07/16 06:56:37
Added delete. I also read through the interface, b
| |
| 281 | |
| 282 // static | |
| 283 void MediaFoundationVideoEncodeAccelerator::PreSandboxInitialization() { | |
| 284 for (const wchar_t* mfdll : kMediaFoundationVideoEncoderDLLs) | |
| 285 ::LoadLibrary(mfdll); | |
| 286 } | |
| 287 | |
| 288 bool MediaFoundationVideoEncodeAccelerator::InitializeInputOutputSamples() { | |
| 289 HRESULT hr = encoder_->GetStreamLimits( | |
| 290 &input_stream_count_min_, &input_stream_count_max_, | |
| 291 &output_stream_count_min_, &output_stream_count_max_); | |
| 292 RETURN_ON_HR_FAILURE(hr, "Couldn't query stream limits", false); | |
| 293 DVLOG(3) << "Stream limits: " << input_stream_count_min_ << "," | |
| 294 << input_stream_count_max_ << "," << output_stream_count_min_ << "," | |
| 295 << output_stream_count_max_; | |
| 296 | |
| 297 // Initialize output parameters. | |
| 298 base::win::ScopedComPtr<IMFMediaType> imf_output_media_type; | |
| 299 hr = MFCreateMediaType(imf_output_media_type.Receive()); | |
| 300 RETURN_ON_HR_FAILURE(hr, "Couldn't create media type", false); | |
| 301 hr &= imf_output_media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); | |
| 302 hr &= imf_output_media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264); | |
| 303 hr &= imf_output_media_type->SetUINT32(MF_MT_AVG_BITRATE, target_bitrate_); | |
| 304 hr &= MFSetAttributeRatio(imf_output_media_type.get(), MF_MT_FRAME_RATE, | |
| 305 frame_rate_, kMaxFrameRateDenominator); | |
| 306 hr &= MFSetAttributeSize(imf_output_media_type.get(), MF_MT_FRAME_SIZE, | |
| 307 input_visible_size_.width(), | |
| 308 input_visible_size_.height()); | |
| 309 hr &= imf_output_media_type->SetUINT32(MF_MT_INTERLACE_MODE, | |
| 310 MFVideoInterlace_Progressive); | |
| 311 hr &= imf_output_media_type->SetUINT32(MF_MT_MPEG2_PROFILE, | |
| 312 eAVEncH264VProfile_Base); | |
| 313 RETURN_ON_HR_FAILURE(hr, "Couldn't set output params", false); | |
| 314 hr = encoder_->SetOutputType(0, imf_output_media_type.get(), 0); | |
| 315 RETURN_ON_HR_FAILURE(hr, "Couldn't set output media type", false); | |
| 316 | |
| 317 // Initialize input parameters. | |
| 318 base::win::ScopedComPtr<IMFMediaType> imf_input_media_type; | |
| 319 hr = MFCreateMediaType(imf_input_media_type.Receive()); | |
| 320 RETURN_ON_HR_FAILURE(hr, "Couldn't create media type", false); | |
| 321 hr &= imf_input_media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); | |
| 322 hr &= imf_input_media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_YV12); | |
| 323 hr &= MFSetAttributeRatio(imf_input_media_type.get(), MF_MT_FRAME_RATE, | |
| 324 frame_rate_, kMaxFrameRateDenominator); | |
| 325 hr &= MFSetAttributeSize(imf_input_media_type.get(), MF_MT_FRAME_SIZE, | |
| 326 input_visible_size_.width(), | |
| 327 input_visible_size_.height()); | |
| 328 hr &= imf_input_media_type->SetUINT32(MF_MT_INTERLACE_MODE, | |
| 329 MFVideoInterlace_Progressive); | |
| 330 RETURN_ON_HR_FAILURE(hr, "Couldn't set input params", false); | |
| 331 hr = encoder_->SetInputType(0, imf_input_media_type.get(), 0); | |
| 332 RETURN_ON_HR_FAILURE(hr, "Couldn't set input media type", false); | |
| 333 | |
| 334 input_sample_.Attach(mf::CreateEmptySampleWithBuffer( | |
| 335 VideoFrame::AllocationSize(PIXEL_FORMAT_I420, input_visible_size_), 2)); | |
| 336 output_sample_.Attach(mf::CreateEmptySampleWithBuffer( | |
| 337 bitstream_buffer_size_ * kOutputSampleBufferSizeRatio, 2)); | |
| 338 | |
| 339 return SUCCEEDED(hr); | |
| 340 } | |
| 341 | |
// Tunes the encoder through ICodecAPI: CBR rate control at
// |target_bitrate_|, frame-rate-adaptive mode, and low-latency mode.
bool MediaFoundationVideoEncodeAccelerator::SetEncoderModes() {
  HRESULT hr = encoder_.QueryInterface(IID_ICodecAPI, codec_api_.ReceiveVoid());
  RETURN_ON_HR_FAILURE(hr, "Couldn't get ICodecAPI", false);
  VARIANT var;
  var.vt = VT_UI4;
  // Constant-bitrate rate control.
  var.ulVal = eAVEncCommonRateControlMode_CBR;
  hr = codec_api_->SetValue(&CODECAPI_AVEncCommonRateControlMode, &var);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set CommonRateControlMode", false);
  var.ulVal = target_bitrate_;
  hr = codec_api_->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set bitrate", false);
  // Let the encoder adapt its rate control to frame-rate changes.
  var.ulVal = eAVEncAdaptiveMode_FrameRate;
  hr = codec_api_->SetValue(&CODECAPI_AVEncAdaptiveMode, &var);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set FrameRate", false);
  // Low-latency mode: minimize internal buffering for real-time encoding.
  var.vt = VT_BOOL;
  var.boolVal = VARIANT_TRUE;
  hr = codec_api_->SetValue(&CODECAPI_AVLowLatencyMode, &var);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set LowLatencyMode", false);
  return SUCCEEDED(hr);
}
| 362 | |
// Encoder-thread worker for Encode(): copies |frame| into the reusable input
// sample and feeds it to the MFT, retrying once (then re-posting) if the
// encoder is not accepting input.
void MediaFoundationVideoEncodeAccelerator::EncodeTask(
    const scoped_refptr<VideoFrame>& frame,
    bool force_keyframe) {
  DVLOG(3) << __FUNCTION__;
  DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());

  // The encoder may already have been released by DestroyTask().
  if (!encoder_)
    return;

  base::win::ScopedComPtr<IMFMediaBuffer> input_buffer;
  input_sample_->GetBufferByIndex(0, input_buffer.Receive());

  {
    // Scoped lock/unlock of the media buffer around the pixel copy.
    MediaBufferScopedPointer scoped_buffer(input_buffer.get());
    DCHECK(scoped_buffer.get());
    // NOTE(review): the source U and V planes are deliberately passed in
    // swapped positions — presumably because the MFT input type is
    // MFVideoFormat_YV12 (V plane stored before U); confirm against
    // InitializeInputOutputSamples(). The |u_plane_offset_|/|v_plane_offset_|
    // names refer to the first/second chroma plane of the destination.
    libyuv::I420Copy(frame->visible_data(VideoFrame::kYPlane),
                     frame->stride(VideoFrame::kYPlane),
                     frame->visible_data(VideoFrame::kVPlane),
                     frame->stride(VideoFrame::kVPlane),
                     frame->visible_data(VideoFrame::kUPlane),
                     frame->stride(VideoFrame::kUPlane), scoped_buffer.get(),
                     frame->stride(VideoFrame::kYPlane),
                     scoped_buffer.get() + u_plane_offset_,
                     frame->stride(VideoFrame::kUPlane),
                     scoped_buffer.get() + v_plane_offset_,
                     frame->stride(VideoFrame::kVPlane),
                     input_visible_size_.width(), input_visible_size_.height());
  }

  // Sample time is in 100-ns units, hence microseconds * 10.
  input_sample_->SetSampleTime(frame->timestamp().InMicroseconds() * 10);
  // NOTE(review): the duration below is in microseconds, not 100-ns units
  // like the sample time above — confirm whether this is intentional.
  input_sample_->SetSampleDuration(kOneSecondInMicroseconds / frame_rate_);
  HRESULT hr = encoder_->ProcessInput(0, input_sample_.get(), 0);
  // According to MSDN, if encoder returns MF_E_NOTACCEPTING, we need to try
  // processing the output. This error indicates that encoder does not accept
  // any more input data.
  if (hr == MF_E_NOTACCEPTING) {
    DVLOG(3) << "MF_E_NOTACCEPTING";
    ProcessOutput();
    hr = encoder_->ProcessInput(0, input_sample_.get(), 0);
    if (hr == MF_E_NOTACCEPTING) {
      // Still full after draining output; re-post this frame and try later.
      encoder_thread_task_runner_->PostTask(
          FROM_HERE,
          base::Bind(&MediaFoundationVideoEncodeAccelerator::EncodeTask,
                     encoder_task_weak_factory_.GetWeakPtr(), frame,
                     force_keyframe));
    } else {
      RETURN_ON_HR_FAILURE(hr, "Couldn't encode", );
    }
  } else {
    RETURN_ON_HR_FAILURE(hr, "Couldn't encode", );
  }
  DVLOG(3) << "Sent for encode " << hr;

  ProcessOutput();
}
| 418 | |
// Drains all available encoded output from the MFT. Each encoded frame is
// either copied into a pending client BitstreamBuffer, or (if none is
// available) stashed in |encoder_output_queue_| so encoding can continue.
void MediaFoundationVideoEncodeAccelerator::ProcessOutput() {
  DVLOG(3) << __FUNCTION__;
  DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());

  MFT_OUTPUT_DATA_BUFFER output_data_buffer = {0};
  output_data_buffer.dwStreamID = 0;
  output_data_buffer.dwStatus = 0;
  output_data_buffer.pEvents = NULL;
  // Reuse the pre-allocated output sample rather than letting the MFT
  // allocate one.
  output_data_buffer.pSample = output_sample_.get();
  DWORD status = 0;
  HRESULT hr = encoder_->ProcessOutput(0, 1, &output_data_buffer, &status);
  if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
    // Nothing (more) to drain; this also terminates the recursion below.
    DVLOG(3) << "MF_E_TRANSFORM_NEED_MORE_INPUT";
    return;
  }
  RETURN_ON_HR_FAILURE(hr, "Couldn't get encoded data", );
  DVLOG(3) << "Got encoded data " << hr;

  base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
  hr = output_sample_->GetBufferByIndex(0, output_buffer.Receive());
  RETURN_ON_HR_FAILURE(hr, "Couldn't get buffer by index", );
  DWORD size = 0;
  hr = output_buffer->GetCurrentLength(&size);
  RETURN_ON_HR_FAILURE(hr, "Couldn't get buffer length", );

  // A "clean point" sample is a keyframe.
  const bool keyframe = MFGetAttributeUINT32(
      output_sample_.get(), MFSampleExtension_CleanPoint, false);
  DVLOG(3) << "We HAVE encoded data with size:" << size << " keyframe "
           << keyframe;

  if (bitstream_buffer_queue_.empty()) {
    DVLOG(3) << "No bitstream buffers.";
    // We need to copy the output so that encoding can continue.
    std::unique_ptr<EncodeOutput> encode_output(
        new EncodeOutput(size, keyframe, base::Time::Now() - base::Time()));
    {
      MediaBufferScopedPointer scoped_buffer(output_buffer.get());
      memcpy(encode_output->memory(), scoped_buffer.get(), size);
    }
    encoder_output_queue_.push_back(std::move(encode_output));
    return;
  }

  // A client buffer is waiting: fill it and notify the client directly.
  std::unique_ptr<MediaFoundationVideoEncodeAccelerator::BitstreamBufferRef>
      buffer_ref = std::move(bitstream_buffer_queue_.front());
  bitstream_buffer_queue_.pop_front();

  {
    MediaBufferScopedPointer scoped_buffer(output_buffer.get());
    memcpy(buffer_ref->shm->memory(), scoped_buffer.get(), size);
  }

  client_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&Client::BitstreamBufferReady, client_, buffer_ref->id, size,
                 keyframe, base::Time::Now() - base::Time()));

  // Keep calling ProcessOutput recursively until MF_E_TRANSFORM_NEED_MORE_INPUT
  // is returned to flush out all the output.
  ProcessOutput();
}
| 480 | |
| 481 void MediaFoundationVideoEncodeAccelerator::UseOutputBitstreamBufferTask( | |
| 482 std::unique_ptr<BitstreamBufferRef> buffer_ref) { | |
| 483 DVLOG(3) << __FUNCTION__; | |
| 484 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); | |
| 485 | |
| 486 // If there is already EncodeOutput waiting, copy its output first. | |
| 487 if (!encoder_output_queue_.empty()) { | |
| 488 std::unique_ptr<MediaFoundationVideoEncodeAccelerator::EncodeOutput> | |
| 489 encode_output = std::move(encoder_output_queue_.front()); | |
| 490 encoder_output_queue_.pop_front(); | |
| 491 ReturnBitstreamBuffer(std::move(encode_output), std::move(buffer_ref)); | |
| 492 return; | |
| 493 } | |
| 494 | |
| 495 bitstream_buffer_queue_.push_back(std::move(buffer_ref)); | |
| 496 } | |
| 497 | |
// Copies a queued EncodeOutput into the client's shared-memory buffer and
// posts BitstreamBufferReady back to the client thread.
void MediaFoundationVideoEncodeAccelerator::ReturnBitstreamBuffer(
    std::unique_ptr<EncodeOutput> encode_output,
    std::unique_ptr<MediaFoundationVideoEncodeAccelerator::BitstreamBufferRef>
        buffer_ref) {
  DVLOG(3) << __FUNCTION__;
  DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());

  memcpy(buffer_ref->shm->memory(), encode_output->memory(),
         encode_output->size());
  client_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&Client::BitstreamBufferReady, client_, buffer_ref->id,
                 encode_output->size(), encode_output->keyframe,
                 encode_output->capture_timestamp));
}
| 513 | |
| 514 void MediaFoundationVideoEncodeAccelerator::RequestEncodingParametersChangeTask( | |
| 515 uint32_t bitrate, | |
| 516 uint32_t framerate) { | |
| 517 DVLOG(3) << __FUNCTION__; | |
| 518 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); | |
| 519 | |
| 520 frame_rate_ = framerate ? framerate : 1; | |
| 521 target_bitrate_ = bitrate ? bitrate : 1; | |
| 522 | |
| 523 VARIANT var; | |
| 524 var.vt = VT_UI4; | |
| 525 var.ulVal = target_bitrate_; | |
| 526 HRESULT hr = codec_api_->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var); | |
| 527 RETURN_ON_HR_FAILURE(hr, "Couldn't set bitrate", ); | |
| 528 | |
| 529 base::win::ScopedComPtr<IMFMediaType> imf_output_media_type; | |
| 530 hr = MFCreateMediaType(imf_output_media_type.Receive()); | |
| 531 RETURN_ON_HR_FAILURE(hr, "Couldn't create output media type", ); | |
| 532 hr &= imf_output_media_type->SetUINT32(MF_MT_AVG_BITRATE, target_bitrate_); | |
|
grt (UTC plus 2)
2016/07/15 07:59:03
&= is a bitwise AND, not a logical AND. is this re
emircan
2016/07/16 06:56:37
Thanks for pointing out. Changing them to bitwise
| |
| 533 hr &= MFSetAttributeRatio(imf_output_media_type.get(), MF_MT_FRAME_RATE, | |
| 534 frame_rate_, kMaxFrameRateDenominator); | |
| 535 RETURN_ON_HR_FAILURE(hr, "Couldn't set output type params", ); | |
| 536 } | |
| 537 | |
| 538 void MediaFoundationVideoEncodeAccelerator::DestroyTask() { | |
| 539 DVLOG(3) << __FUNCTION__; | |
| 540 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); | |
| 541 | |
| 542 encoder_.Release(); | |
|
grt (UTC plus 2)
2016/07/15 07:59:04
i think you should invalidate all weak ptrs here,
emircan
2016/07/16 06:56:37
Done.
| |
| 543 } | |
| 544 | |
}  // namespace media
| OLD | NEW |