| OLD | NEW |
| (Empty) |
| 1 // Copyright 2016 The Chromium Authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 #include "media/gpu/media_foundation_video_encode_accelerator_win.h" | |
| 6 | |
| 7 #pragma warning(push) | |
#pragma warning(disable : 4800)  // Disable C4800: forcing value to bool.
| 9 | |
| 10 #include <codecapi.h> | |
| 11 #include <mferror.h> | |
| 12 #include <mftransform.h> | |
| 13 | |
| 14 #include <utility> | |
| 15 #include <vector> | |
| 16 | |
| 17 #include "base/threading/sequenced_task_runner_handle.h" | |
| 18 #include "base/win/scoped_co_mem.h" | |
| 19 #include "base/win/scoped_variant.h" | |
| 20 #include "base/win/windows_version.h" | |
| 21 #include "media/base/win/mf_helpers.h" | |
| 22 #include "media/base/win/mf_initializer.h" | |
| 23 #include "third_party/libyuv/include/libyuv.h" | |
| 24 | |
| 25 using base::win::ScopedComPtr; | |
| 26 using media::mf::MediaBufferScopedPointer; | |
| 27 | |
| 28 namespace media { | |
| 29 | |
namespace {

// Upper bounds advertised to clients in GetSupportedProfiles(); the frame
// rate ratio is also used as the initial |frame_rate_| in Initialize().
constexpr size_t kMaxFrameRateNumerator = 30;
constexpr size_t kMaxFrameRateDenominator = 1;
constexpr size_t kMaxResolutionWidth = 4096;
constexpr size_t kMaxResolutionHeight = 2160;
// Number of bitstream buffers requested from the client via
// RequireBitstreamBuffers().
constexpr size_t kNumInputBuffers = 3;
// Used to derive the per-frame sample duration from |frame_rate_|.
constexpr size_t kOneSecondInMicroseconds = 1000000;
// The reusable MF output sample is allocated this many times larger than
// |bitstream_buffer_size_| to leave headroom for the encoded stream.
constexpr size_t kOutputSampleBufferSizeRatio = 4;

// DLLs preloaded in PreSandboxInitialization(); Initialize() verifies they
// are resident before proceeding.
constexpr const wchar_t* const kMediaFoundationVideoEncoderDLLs[] = {
    L"mf.dll", L"mfplat.dll",
};

}  // namespace
| 45 | |
// Holds a copy of one encoded frame (payload plus keyframe flag and capture
// timestamp), produced in ProcessOutput() when the MFT delivered output while
// no client bitstream buffer was queued. Drained later by
// UseOutputBitstreamBufferTask() via ReturnBitstreamBuffer().
class MediaFoundationVideoEncodeAccelerator::EncodeOutput {
 public:
  EncodeOutput(uint32_t size, bool key_frame, base::TimeDelta timestamp)
      : keyframe(key_frame), capture_timestamp(timestamp), data_(size) {}

  // Writable pointer to the payload storage (|size()| bytes).
  uint8_t* memory() { return data_.data(); }

  // Payload size in bytes.
  int size() const { return static_cast<int>(data_.size()); }

  const bool keyframe;
  const base::TimeDelta capture_timestamp;

 private:
  std::vector<uint8_t> data_;

  DISALLOW_COPY_AND_ASSIGN(EncodeOutput);
};
| 63 | |
// Pairs a client-supplied bitstream buffer id with its mapped shared memory
// so encoded output can be copied into it on the encoder thread. The memory
// is mapped on the client sequence in UseOutputBitstreamBuffer().
struct MediaFoundationVideoEncodeAccelerator::BitstreamBufferRef {
  BitstreamBufferRef(int32_t id,
                     std::unique_ptr<base::SharedMemory> shm,
                     size_t size)
      : id(id), shm(std::move(shm)), size(size) {}
  const int32_t id;
  const std::unique_ptr<base::SharedMemory> shm;
  const size_t size;

 private:
  DISALLOW_IMPLICIT_CONSTRUCTORS(BitstreamBufferRef);
};
| 76 | |
// Constructed on the client sequence; |client_task_runner_| captures that
// sequence so results can be posted back to it. The encoder thread itself is
// started later, in Initialize().
MediaFoundationVideoEncodeAccelerator::MediaFoundationVideoEncodeAccelerator()
    : client_task_runner_(base::SequencedTaskRunnerHandle::Get()),
      encoder_thread_("MFEncoderThread"),
      encoder_task_weak_factory_(this) {}
| 81 | |
MediaFoundationVideoEncodeAccelerator::
    ~MediaFoundationVideoEncodeAccelerator() {
  DVLOG(3) << __FUNCTION__;
  DCHECK(sequence_checker_.CalledOnValidSequencedThread());

  // Destroy() must have run first: it stops the encoder thread and its
  // DestroyTask() invalidates the weak pointers. Verify both here.
  DCHECK(!encoder_thread_.IsRunning());
  DCHECK(!encoder_task_weak_factory_.HasWeakPtrs());
}
| 90 | |
// Reports the single supported profile (H.264 baseline, up to
// kMaxResolutionWidth x kMaxResolutionHeight at kMaxFrameRateNumerator /
// kMaxFrameRateDenominator fps). Returns an empty list on Windows versions
// earlier than 8.
VideoEncodeAccelerator::SupportedProfiles
MediaFoundationVideoEncodeAccelerator::GetSupportedProfiles() {
  DVLOG(3) << __FUNCTION__;
  DCHECK(sequence_checker_.CalledOnValidSequencedThread());

  SupportedProfiles profiles;
  if (base::win::GetVersion() < base::win::VERSION_WIN8) {
    DLOG(ERROR) << "Windows versions earlier than 8 are not supported.";
    return profiles;
  }

  SupportedProfile profile;
  // More profiles can be supported here, but they should be available in SW
  // fallback as well.
  profile.profile = H264PROFILE_BASELINE;
  profile.max_framerate_numerator = kMaxFrameRateNumerator;
  profile.max_framerate_denominator = kMaxFrameRateDenominator;
  profile.max_resolution = gfx::Size(kMaxResolutionWidth, kMaxResolutionHeight);
  profiles.push_back(profile);
  return profiles;
}
| 112 | |
| 113 bool MediaFoundationVideoEncodeAccelerator::Initialize( | |
| 114 VideoPixelFormat format, | |
| 115 const gfx::Size& input_visible_size, | |
| 116 VideoCodecProfile output_profile, | |
| 117 uint32_t initial_bitrate, | |
| 118 Client* client) { | |
| 119 DVLOG(3) << __FUNCTION__ | |
| 120 << ": input_format=" << VideoPixelFormatToString(format) | |
| 121 << ", input_visible_size=" << input_visible_size.ToString() | |
| 122 << ", output_profile=" << output_profile | |
| 123 << ", initial_bitrate=" << initial_bitrate; | |
| 124 DCHECK(sequence_checker_.CalledOnValidSequencedThread()); | |
| 125 | |
| 126 if (PIXEL_FORMAT_I420 != format) { | |
| 127 DLOG(ERROR) << "Input format not supported= " | |
| 128 << VideoPixelFormatToString(format); | |
| 129 return false; | |
| 130 } | |
| 131 | |
| 132 if (H264PROFILE_BASELINE != output_profile) { | |
| 133 DLOG(ERROR) << "Output profile not supported= " << output_profile; | |
| 134 return false; | |
| 135 } | |
| 136 | |
| 137 for (const wchar_t* mfdll : kMediaFoundationVideoEncoderDLLs) { | |
| 138 if (!::GetModuleHandle(mfdll)) { | |
| 139 DLOG(ERROR) << mfdll << " is required for encoding"; | |
| 140 return false; | |
| 141 } | |
| 142 } | |
| 143 | |
| 144 encoder_thread_.init_com_with_mta(false); | |
| 145 if (!encoder_thread_.Start()) { | |
| 146 DLOG(ERROR) << "Failed spawning encoder thread."; | |
| 147 return false; | |
| 148 } | |
| 149 encoder_thread_task_runner_ = encoder_thread_.task_runner(); | |
| 150 | |
| 151 InitializeMediaFoundation(); | |
| 152 | |
| 153 uint32_t flags = MFT_ENUM_FLAG_HARDWARE | MFT_ENUM_FLAG_SORTANDFILTER; | |
| 154 MFT_REGISTER_TYPE_INFO input_info; | |
| 155 input_info.guidMajorType = MFMediaType_Video; | |
| 156 input_info.guidSubtype = MFVideoFormat_NV12; | |
| 157 MFT_REGISTER_TYPE_INFO output_info; | |
| 158 output_info.guidMajorType = MFMediaType_Video; | |
| 159 output_info.guidSubtype = MFVideoFormat_H264; | |
| 160 | |
| 161 base::win::ScopedCoMem<CLSID> CLSIDs; | |
| 162 uint32_t count = 0; | |
| 163 HRESULT hr = MFTEnum(MFT_CATEGORY_VIDEO_ENCODER, flags, NULL, &output_info, | |
| 164 NULL, &CLSIDs, &count); | |
| 165 RETURN_ON_HR_FAILURE(hr, "Couldn't enumerate hardware encoder", false); | |
| 166 RETURN_ON_FAILURE((count > 0), "No HW encoder found", false); | |
| 167 DVLOG(3) << "HW encoder(s) found: " << count; | |
| 168 hr = encoder_.CreateInstance(CLSIDs[0]); | |
| 169 RETURN_ON_HR_FAILURE(hr, "Couldn't activate hardware encoder", false); | |
| 170 | |
| 171 client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client)); | |
| 172 client_ = client_ptr_factory_->GetWeakPtr(); | |
| 173 input_visible_size_ = input_visible_size; | |
| 174 frame_rate_ = kMaxFrameRateNumerator / kMaxFrameRateDenominator; | |
| 175 target_bitrate_ = initial_bitrate; | |
| 176 bitstream_buffer_size_ = input_visible_size.GetArea(); | |
| 177 | |
| 178 u_plane_offset_ = | |
| 179 VideoFrame::PlaneSize(PIXEL_FORMAT_I420, VideoFrame::kYPlane, | |
| 180 input_visible_size_) | |
| 181 .GetArea(); | |
| 182 v_plane_offset_ = | |
| 183 u_plane_offset_ + | |
| 184 VideoFrame::PlaneSize(PIXEL_FORMAT_I420, VideoFrame::kUPlane, | |
| 185 input_visible_size_) | |
| 186 .GetArea(); | |
| 187 | |
| 188 if (!InitializeInputOutputSamples()) { | |
| 189 DLOG(ERROR) << "Failed initializing input-output samples."; | |
| 190 return false; | |
| 191 } | |
| 192 | |
| 193 if (!SetEncoderModes()) { | |
| 194 DLOG(ERROR) << "Failed setting encoder parameters."; | |
| 195 return false; | |
| 196 } | |
| 197 | |
| 198 hr = encoder_->ProcessMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, NULL); | |
| 199 RETURN_ON_HR_FAILURE(hr, "Couldn't set ProcessMessage", false); | |
| 200 | |
| 201 client_task_runner_->PostTask( | |
| 202 FROM_HERE, | |
| 203 base::Bind(&Client::RequireBitstreamBuffers, client_, kNumInputBuffers, | |
| 204 input_visible_size_, bitstream_buffer_size_)); | |
| 205 return SUCCEEDED(hr); | |
| 206 } | |
| 207 | |
| 208 void MediaFoundationVideoEncodeAccelerator::Encode( | |
| 209 const scoped_refptr<VideoFrame>& frame, | |
| 210 bool force_keyframe) { | |
| 211 DVLOG(3) << __FUNCTION__; | |
| 212 DCHECK(sequence_checker_.CalledOnValidSequencedThread()); | |
| 213 | |
| 214 encoder_thread_task_runner_->PostTask( | |
| 215 FROM_HERE, base::Bind(&MediaFoundationVideoEncodeAccelerator::EncodeTask, | |
| 216 encoder_task_weak_factory_.GetWeakPtr(), frame, | |
| 217 force_keyframe)); | |
| 218 } | |
| 219 | |
// Accepts a client bitstream buffer: validates its size, maps its shared
// memory on the client sequence, then hands ownership to the encoder thread.
void MediaFoundationVideoEncodeAccelerator::UseOutputBitstreamBuffer(
    const BitstreamBuffer& buffer) {
  DVLOG(3) << __FUNCTION__ << ": buffer size=" << buffer.size();
  DCHECK(sequence_checker_.CalledOnValidSequencedThread());

  // Reject buffers smaller than the size requested via
  // RequireBitstreamBuffers(). We are on the client sequence here, so
  // |client_| may be called directly rather than via NotifyError().
  if (buffer.size() < bitstream_buffer_size_) {
    DLOG(ERROR) << "Output BitstreamBuffer isn't big enough: " << buffer.size()
                << " vs. " << bitstream_buffer_size_;
    client_->NotifyError(kInvalidArgumentError);
    return;
  }

  // Map here so the encoder thread only copies into already-mapped memory.
  std::unique_ptr<base::SharedMemory> shm(
      new base::SharedMemory(buffer.handle(), false));
  if (!shm->Map(buffer.size())) {
    DLOG(ERROR) << "Failed mapping shared memory.";
    client_->NotifyError(kPlatformFailureError);
    return;
  }

  std::unique_ptr<BitstreamBufferRef> buffer_ref(
      new BitstreamBufferRef(buffer.id(), std::move(shm), buffer.size()));
  encoder_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(
          &MediaFoundationVideoEncodeAccelerator::UseOutputBitstreamBufferTask,
          encoder_task_weak_factory_.GetWeakPtr(), base::Passed(&buffer_ref)));
}
| 248 | |
// Forwards a bitrate/framerate change to the encoder thread, where the codec
// state lives (see RequestEncodingParametersChangeTask()).
void MediaFoundationVideoEncodeAccelerator::RequestEncodingParametersChange(
    uint32_t bitrate,
    uint32_t framerate) {
  DVLOG(3) << __FUNCTION__ << ": bitrate=" << bitrate
           << ": framerate=" << framerate;
  DCHECK(sequence_checker_.CalledOnValidSequencedThread());

  encoder_thread_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&MediaFoundationVideoEncodeAccelerator::
                     RequestEncodingParametersChangeTask,
                 encoder_task_weak_factory_.GetWeakPtr(), bitrate, framerate));
}
| 262 | |
// Tears the accelerator down and deletes it. Must run on the client sequence.
void MediaFoundationVideoEncodeAccelerator::Destroy() {
  DVLOG(3) << __FUNCTION__;
  DCHECK(sequence_checker_.CalledOnValidSequencedThread());

  // Cancel all callbacks.
  client_ptr_factory_.reset();

  if (encoder_thread_.IsRunning()) {
    // DestroyTask() invalidates the encoder-thread weak pointers and releases
    // the MFT; Stop() then joins the thread, so the encoder thread is fully
    // quiescent before |this| is deleted below.
    encoder_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&MediaFoundationVideoEncodeAccelerator::DestroyTask,
                   encoder_task_weak_factory_.GetWeakPtr()));
    encoder_thread_.Stop();
  }

  // Self-deletion: nothing may touch members after this line.
  delete this;
}
| 280 | |
| 281 // static | |
| 282 void MediaFoundationVideoEncodeAccelerator::PreSandboxInitialization() { | |
| 283 for (const wchar_t* mfdll : kMediaFoundationVideoEncoderDLLs) | |
| 284 ::LoadLibrary(mfdll); | |
| 285 } | |
| 286 | |
// Configures the MFT's media types and allocates the reusable input/output
// samples. The output (H.264) type is set before the input (YV12) type.
// NOTE(review): the input subtype here is YV12 while EncodeTask() copies from
// I420 frames with the U/V destination pointers swapped — these two must stay
// consistent; confirm when changing either.
bool MediaFoundationVideoEncodeAccelerator::InitializeInputOutputSamples() {
  DCHECK(sequence_checker_.CalledOnValidSequencedThread());

  HRESULT hr = encoder_->GetStreamLimits(
      &input_stream_count_min_, &input_stream_count_max_,
      &output_stream_count_min_, &output_stream_count_max_);
  RETURN_ON_HR_FAILURE(hr, "Couldn't query stream limits", false);
  DVLOG(3) << "Stream limits: " << input_stream_count_min_ << ","
           << input_stream_count_max_ << "," << output_stream_count_min_ << ","
           << output_stream_count_max_;

  // Initialize output parameters.
  base::win::ScopedComPtr<IMFMediaType> imf_output_media_type;
  hr = MFCreateMediaType(imf_output_media_type.Receive());
  RETURN_ON_HR_FAILURE(hr, "Couldn't create media type", false);
  hr = imf_output_media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set media type", false);
  hr = imf_output_media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set video format", false);
  hr = imf_output_media_type->SetUINT32(MF_MT_AVG_BITRATE, target_bitrate_);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set bitrate", false);
  hr = MFSetAttributeRatio(imf_output_media_type.get(), MF_MT_FRAME_RATE,
                           frame_rate_, kMaxFrameRateDenominator);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set frame rate", false);
  hr = MFSetAttributeSize(imf_output_media_type.get(), MF_MT_FRAME_SIZE,
                          input_visible_size_.width(),
                          input_visible_size_.height());
  RETURN_ON_HR_FAILURE(hr, "Couldn't set frame size", false);
  hr = imf_output_media_type->SetUINT32(MF_MT_INTERLACE_MODE,
                                        MFVideoInterlace_Progressive);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set interlace mode", false);
  hr = imf_output_media_type->SetUINT32(MF_MT_MPEG2_PROFILE,
                                        eAVEncH264VProfile_Base);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set codec profile", false);
  hr = encoder_->SetOutputType(0, imf_output_media_type.get(), 0);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set output media type", false);

  // Initialize input parameters.
  base::win::ScopedComPtr<IMFMediaType> imf_input_media_type;
  hr = MFCreateMediaType(imf_input_media_type.Receive());
  RETURN_ON_HR_FAILURE(hr, "Couldn't create media type", false);
  hr = imf_input_media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set media type", false);
  hr = imf_input_media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_YV12);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set video format", false);
  hr = MFSetAttributeRatio(imf_input_media_type.get(), MF_MT_FRAME_RATE,
                           frame_rate_, kMaxFrameRateDenominator);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set frame rate", false);
  hr = MFSetAttributeSize(imf_input_media_type.get(), MF_MT_FRAME_SIZE,
                          input_visible_size_.width(),
                          input_visible_size_.height());
  RETURN_ON_HR_FAILURE(hr, "Couldn't set frame size", false);
  hr = imf_input_media_type->SetUINT32(MF_MT_INTERLACE_MODE,
                                       MFVideoInterlace_Progressive);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set interlace mode", false);
  hr = encoder_->SetInputType(0, imf_input_media_type.get(), 0);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set input media type", false);

  // Preallocate one input and one output sample, reused for every frame.
  // The output buffer is oversized by kOutputSampleBufferSizeRatio to leave
  // headroom for the encoded stream.
  input_sample_.Attach(mf::CreateEmptySampleWithBuffer(
      VideoFrame::AllocationSize(PIXEL_FORMAT_I420, input_visible_size_), 2));
  output_sample_.Attach(mf::CreateEmptySampleWithBuffer(
      bitstream_buffer_size_ * kOutputSampleBufferSizeRatio, 2));

  return SUCCEEDED(hr);
}
| 352 | |
// Configures codec properties through ICodecAPI: CBR rate control at
// |target_bitrate_|, frame-rate-based adaptive mode, and low-latency mode.
bool MediaFoundationVideoEncodeAccelerator::SetEncoderModes() {
  DCHECK(sequence_checker_.CalledOnValidSequencedThread());

  HRESULT hr = encoder_.QueryInterface(IID_ICodecAPI, codec_api_.ReceiveVoid());
  RETURN_ON_HR_FAILURE(hr, "Couldn't get ICodecAPI", false);
  // |var| is reused for each property; only the payload (and |vt| for the
  // last, boolean-typed property) changes between SetValue() calls.
  VARIANT var;
  var.vt = VT_UI4;
  var.ulVal = eAVEncCommonRateControlMode_CBR;
  hr = codec_api_->SetValue(&CODECAPI_AVEncCommonRateControlMode, &var);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set CommonRateControlMode", false);
  var.ulVal = target_bitrate_;
  hr = codec_api_->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set bitrate", false);
  var.ulVal = eAVEncAdaptiveMode_FrameRate;
  hr = codec_api_->SetValue(&CODECAPI_AVEncAdaptiveMode, &var);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set FrameRate", false);
  var.vt = VT_BOOL;
  var.boolVal = VARIANT_TRUE;
  hr = codec_api_->SetValue(&CODECAPI_AVLowLatencyMode, &var);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set LowLatencyMode", false);
  return SUCCEEDED(hr);
}
| 375 | |
// Reports |error| to the client on the client sequence. Only callable from
// the encoder thread (per the DCHECK); client-sequence code calls
// |client_|->NotifyError() directly instead.
void MediaFoundationVideoEncodeAccelerator::NotifyError(
    VideoEncodeAccelerator::Error error) {
  DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
  client_task_runner_->PostTask(
      FROM_HERE, base::Bind(&Client::NotifyError, client_, error));
}
| 382 | |
| 383 void MediaFoundationVideoEncodeAccelerator::EncodeTask( | |
| 384 const scoped_refptr<VideoFrame>& frame, | |
| 385 bool force_keyframe) { | |
| 386 DVLOG(3) << __FUNCTION__; | |
| 387 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); | |
| 388 | |
| 389 base::win::ScopedComPtr<IMFMediaBuffer> input_buffer; | |
| 390 input_sample_->GetBufferByIndex(0, input_buffer.Receive()); | |
| 391 | |
| 392 { | |
| 393 MediaBufferScopedPointer scoped_buffer(input_buffer.get()); | |
| 394 DCHECK(scoped_buffer.get()); | |
| 395 libyuv::I420Copy(frame->visible_data(VideoFrame::kYPlane), | |
| 396 frame->stride(VideoFrame::kYPlane), | |
| 397 frame->visible_data(VideoFrame::kVPlane), | |
| 398 frame->stride(VideoFrame::kVPlane), | |
| 399 frame->visible_data(VideoFrame::kUPlane), | |
| 400 frame->stride(VideoFrame::kUPlane), scoped_buffer.get(), | |
| 401 frame->stride(VideoFrame::kYPlane), | |
| 402 scoped_buffer.get() + u_plane_offset_, | |
| 403 frame->stride(VideoFrame::kUPlane), | |
| 404 scoped_buffer.get() + v_plane_offset_, | |
| 405 frame->stride(VideoFrame::kVPlane), | |
| 406 input_visible_size_.width(), input_visible_size_.height()); | |
| 407 } | |
| 408 | |
| 409 input_sample_->SetSampleTime(frame->timestamp().InMicroseconds() * 10); | |
| 410 input_sample_->SetSampleDuration(kOneSecondInMicroseconds / frame_rate_); | |
| 411 HRESULT hr = encoder_->ProcessInput(0, input_sample_.get(), 0); | |
| 412 // According to MSDN, if encoder returns MF_E_NOTACCEPTING, we need to try | |
| 413 // processing the output. This error indicates that encoder does not accept | |
| 414 // any more input data. | |
| 415 if (hr == MF_E_NOTACCEPTING) { | |
| 416 DVLOG(3) << "MF_E_NOTACCEPTING"; | |
| 417 ProcessOutput(); | |
| 418 hr = encoder_->ProcessInput(0, input_sample_.get(), 0); | |
| 419 if (!SUCCEEDED(hr)) { | |
| 420 NotifyError(kPlatformFailureError); | |
| 421 RETURN_ON_HR_FAILURE(hr, "Couldn't encode", ); | |
| 422 } | |
| 423 } else if (!SUCCEEDED(hr)) { | |
| 424 NotifyError(kPlatformFailureError); | |
| 425 RETURN_ON_HR_FAILURE(hr, "Couldn't encode", ); | |
| 426 } | |
| 427 DVLOG(3) << "Sent for encode " << hr; | |
| 428 | |
| 429 ProcessOutput(); | |
| 430 } | |
| 431 | |
// Drains one encoded sample from the MFT. If a client bitstream buffer is
// queued, the payload is copied into it and BitstreamBufferReady is posted;
// otherwise the payload is cached in |encoder_output_queue_| so the MFT can
// keep producing. Recurses until MF_E_TRANSFORM_NEED_MORE_INPUT.
void MediaFoundationVideoEncodeAccelerator::ProcessOutput() {
  DVLOG(3) << __FUNCTION__;
  DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());

  MFT_OUTPUT_DATA_BUFFER output_data_buffer = {0};
  output_data_buffer.dwStreamID = 0;
  output_data_buffer.dwStatus = 0;
  output_data_buffer.pEvents = NULL;
  // The transform writes into the preallocated |output_sample_|.
  output_data_buffer.pSample = output_sample_.get();
  DWORD status = 0;
  HRESULT hr = encoder_->ProcessOutput(0, 1, &output_data_buffer, &status);
  if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
    // No encoded data available yet; this is not an error.
    DVLOG(3) << "MF_E_TRANSFORM_NEED_MORE_INPUT";
    return;
  }
  RETURN_ON_HR_FAILURE(hr, "Couldn't get encoded data", );
  DVLOG(3) << "Got encoded data " << hr;

  base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
  hr = output_sample_->GetBufferByIndex(0, output_buffer.Receive());
  RETURN_ON_HR_FAILURE(hr, "Couldn't get buffer by index", );
  DWORD size = 0;
  hr = output_buffer->GetCurrentLength(&size);
  RETURN_ON_HR_FAILURE(hr, "Couldn't get buffer length", );

  // A clean-point sample can be decoded independently, i.e. it is a keyframe.
  const bool keyframe = MFGetAttributeUINT32(
      output_sample_.get(), MFSampleExtension_CleanPoint, false);
  DVLOG(3) << "We HAVE encoded data with size:" << size << " keyframe "
           << keyframe;

  if (bitstream_buffer_queue_.empty()) {
    DVLOG(3) << "No bitstream buffers.";
    // We need to copy the output so that encoding can continue.
    // NOTE(review): the reported timestamp is the wall-clock time at drain,
    // not the input frame's timestamp — confirm this is intended.
    std::unique_ptr<EncodeOutput> encode_output(
        new EncodeOutput(size, keyframe, base::Time::Now() - base::Time()));
    {
      MediaBufferScopedPointer scoped_buffer(output_buffer.get());
      memcpy(encode_output->memory(), scoped_buffer.get(), size);
    }
    encoder_output_queue_.push_back(std::move(encode_output));
    return;
  }

  std::unique_ptr<MediaFoundationVideoEncodeAccelerator::BitstreamBufferRef>
      buffer_ref = std::move(bitstream_buffer_queue_.front());
  bitstream_buffer_queue_.pop_front();

  {
    MediaBufferScopedPointer scoped_buffer(output_buffer.get());
    memcpy(buffer_ref->shm->memory(), scoped_buffer.get(), size);
  }

  client_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&Client::BitstreamBufferReady, client_, buffer_ref->id, size,
                 keyframe, base::Time::Now() - base::Time()));

  // Keep calling ProcessOutput recursively until MF_E_TRANSFORM_NEED_MORE_INPUT
  // is returned to flush out all the output.
  ProcessOutput();
}
| 493 | |
| 494 void MediaFoundationVideoEncodeAccelerator::UseOutputBitstreamBufferTask( | |
| 495 std::unique_ptr<BitstreamBufferRef> buffer_ref) { | |
| 496 DVLOG(3) << __FUNCTION__; | |
| 497 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); | |
| 498 | |
| 499 // If there is already EncodeOutput waiting, copy its output first. | |
| 500 if (!encoder_output_queue_.empty()) { | |
| 501 std::unique_ptr<MediaFoundationVideoEncodeAccelerator::EncodeOutput> | |
| 502 encode_output = std::move(encoder_output_queue_.front()); | |
| 503 encoder_output_queue_.pop_front(); | |
| 504 ReturnBitstreamBuffer(std::move(encode_output), std::move(buffer_ref)); | |
| 505 return; | |
| 506 } | |
| 507 | |
| 508 bitstream_buffer_queue_.push_back(std::move(buffer_ref)); | |
| 509 } | |
| 510 | |
// Copies a cached |encode_output| into |buffer_ref|'s shared memory and
// posts BitstreamBufferReady to the client.
void MediaFoundationVideoEncodeAccelerator::ReturnBitstreamBuffer(
    std::unique_ptr<EncodeOutput> encode_output,
    std::unique_ptr<MediaFoundationVideoEncodeAccelerator::BitstreamBufferRef>
        buffer_ref) {
  DVLOG(3) << __FUNCTION__;
  DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());

  memcpy(buffer_ref->shm->memory(), encode_output->memory(),
         encode_output->size());
  client_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&Client::BitstreamBufferReady, client_, buffer_ref->id,
                 encode_output->size(), encode_output->keyframe,
                 encode_output->capture_timestamp));
}
| 526 | |
// Encoder-thread half of RequestEncodingParametersChange(): updates the
// cached rates and pushes the new bitrate to the codec.
void MediaFoundationVideoEncodeAccelerator::RequestEncodingParametersChangeTask(
    uint32_t bitrate,
    uint32_t framerate) {
  DVLOG(3) << __FUNCTION__;
  DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());

  // Clamp both to at least 1: |frame_rate_| divides kOneSecondInMicroseconds
  // in EncodeTask(), and a zero bitrate would be meaningless.
  frame_rate_ = framerate ? framerate : 1;
  target_bitrate_ = bitrate ? bitrate : 1;

  VARIANT var;
  var.vt = VT_UI4;
  var.ulVal = target_bitrate_;
  HRESULT hr = codec_api_->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set bitrate", );

  // NOTE(review): |imf_output_media_type| is configured below but never
  // passed to the encoder (no SetOutputType() call), so the frame-rate
  // change does not appear to reach the MFT — confirm whether this is
  // intentional or an omission.
  base::win::ScopedComPtr<IMFMediaType> imf_output_media_type;
  hr = MFCreateMediaType(imf_output_media_type.Receive());
  RETURN_ON_HR_FAILURE(hr, "Couldn't create output media type", );
  hr = imf_output_media_type->SetUINT32(MF_MT_AVG_BITRATE, target_bitrate_);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set bitrate", );
  hr = MFSetAttributeRatio(imf_output_media_type.get(), MF_MT_FRAME_RATE,
                           frame_rate_, kMaxFrameRateDenominator);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set output type params", );
}
| 551 | |
// Runs on the encoder thread during Destroy(): cancels pending encoder-thread
// callbacks and releases the MFT before the thread is joined.
void MediaFoundationVideoEncodeAccelerator::DestroyTask() {
  DVLOG(3) << __FUNCTION__;
  DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());

  // Cancel all encoder thread callbacks.
  encoder_task_weak_factory_.InvalidateWeakPtrs();

  encoder_.Release();
}
| 561 | |
}  // namespace media
| OLD | NEW |