OLD | NEW |
(Empty) | |
| 1 // Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. |
| 4 |
| 5 #include "media/gpu/media_foundation_video_encode_accelerator_win.h" |
| 6 |
| 7 #pragma warning(push) |
#pragma warning(disable : 4800)  // Disable C4800 ("forcing value to bool"),
                                 // which some included headers trigger.
| 9 |
| 10 #include <codecapi.h> |
| 11 #include <mferror.h> |
| 12 #include <mftransform.h> |
| 13 |
| 14 #include <utility> |
| 15 #include <vector> |
| 16 |
| 17 #include "base/threading/sequenced_task_runner_handle.h" |
| 18 #include "base/win/scoped_co_mem.h" |
| 19 #include "base/win/scoped_variant.h" |
| 20 #include "base/win/windows_version.h" |
| 21 #include "media/base/win/mf_helpers.h" |
| 22 #include "media/base/win/mf_initializer.h" |
| 23 #include "third_party/libyuv/include/libyuv.h" |
| 24 |
| 25 using base::win::ScopedComPtr; |
| 26 using media::mf::MediaBufferScopedPointer; |
| 27 |
| 28 namespace media { |
| 29 |
| 30 namespace { |
| 31 |
| 32 const size_t kMaxFrameRateNumerator = 30; |
| 33 const size_t kMaxFrameRateDenominator = 1; |
| 34 const size_t kMaxResolutionWidth = 4096; |
| 35 const size_t kMaxResolutionHeight = 2160; |
| 36 const size_t kNumInputBuffers = 3; |
| 37 const size_t kOneSecondInMicroseconds = 1000000; |
| 38 const size_t kOutputSampleBufferSizeRatio = 4; |
| 39 |
| 40 constexpr const wchar_t* const kMediaFoundationVideoEncoderDLLs[] = { |
| 41 L"mf.dll", L"mfplat.dll", |
| 42 }; |
| 43 |
| 44 } // namespace |
| 45 |
| 46 class MediaFoundationVideoEncodeAccelerator::EncodeOutput { |
| 47 public: |
| 48 EncodeOutput(uint32_t size, bool key_frame, base::TimeDelta timestamp) |
| 49 : keyframe(key_frame), capture_timestamp(timestamp), data_(size) {} |
| 50 |
| 51 uint8_t* memory() { return data_.data(); } |
| 52 |
| 53 int size() const { return static_cast<int>(data_.size()); } |
| 54 |
| 55 const bool keyframe; |
| 56 const base::TimeDelta capture_timestamp; |
| 57 |
| 58 private: |
| 59 std::vector<uint8_t> data_; |
| 60 |
| 61 DISALLOW_COPY_AND_ASSIGN(EncodeOutput); |
| 62 }; |
| 63 |
// Bookkeeping for a client-provided output BitstreamBuffer: its id, the
// shared memory backing it (mapped by the caller before construction), and
// its size in bytes.
struct MediaFoundationVideoEncodeAccelerator::BitstreamBufferRef {
  BitstreamBufferRef(int32_t id,
                     std::unique_ptr<base::SharedMemory> shm,
                     size_t size)
      : id(id), shm(std::move(shm)), size(size) {}
  // Id used to identify this buffer back to the client.
  const int32_t id;
  // Mapped shared memory region the encoded data is copied into.
  const std::unique_ptr<base::SharedMemory> shm;
  const size_t size;

 private:
  DISALLOW_IMPLICIT_CONSTRUCTORS(BitstreamBufferRef);
};
| 76 |
// Constructed on the client sequence; the encoder thread is only started
// later, in Initialize().
MediaFoundationVideoEncodeAccelerator::MediaFoundationVideoEncodeAccelerator()
    : client_task_runner_(base::SequencedTaskRunnerHandle::Get()),
      encoder_thread_("MFEncoderThread"),
      encoder_task_weak_factory_(this) {}
| 81 |
MediaFoundationVideoEncodeAccelerator::
    ~MediaFoundationVideoEncodeAccelerator() {
  DVLOG(3) << __FUNCTION__;
  DCHECK(sequence_checker_.CalledOnValidSequencedThread());

  // Destroy() must already have stopped the encoder thread and invalidated
  // every encoder-thread weak pointer before |this| is deleted.
  DCHECK(!encoder_thread_.IsRunning());
  DCHECK(!encoder_task_weak_factory_.HasWeakPtrs());
}
| 90 |
| 91 VideoEncodeAccelerator::SupportedProfiles |
| 92 MediaFoundationVideoEncodeAccelerator::GetSupportedProfiles() { |
| 93 DVLOG(3) << __FUNCTION__; |
| 94 DCHECK(sequence_checker_.CalledOnValidSequencedThread()); |
| 95 |
| 96 SupportedProfiles profiles; |
| 97 if (base::win::GetVersion() < base::win::VERSION_WIN8) { |
| 98 DLOG(ERROR) << "Windows versions earlier than 8 are not supported."; |
| 99 return profiles; |
| 100 } |
| 101 |
| 102 SupportedProfile profile; |
| 103 // More profiles can be supported here, but they should be available in SW |
| 104 // fallback as well. |
| 105 profile.profile = H264PROFILE_BASELINE; |
| 106 profile.max_framerate_numerator = kMaxFrameRateNumerator; |
| 107 profile.max_framerate_denominator = kMaxFrameRateDenominator; |
| 108 profile.max_resolution = gfx::Size(kMaxResolutionWidth, kMaxResolutionHeight); |
| 109 profiles.push_back(profile); |
| 110 return profiles; |
| 111 } |
| 112 |
| 113 bool MediaFoundationVideoEncodeAccelerator::Initialize( |
| 114 VideoPixelFormat format, |
| 115 const gfx::Size& input_visible_size, |
| 116 VideoCodecProfile output_profile, |
| 117 uint32_t initial_bitrate, |
| 118 Client* client) { |
| 119 DVLOG(3) << __FUNCTION__ |
| 120 << ": input_format=" << VideoPixelFormatToString(format) |
| 121 << ", input_visible_size=" << input_visible_size.ToString() |
| 122 << ", output_profile=" << output_profile |
| 123 << ", initial_bitrate=" << initial_bitrate; |
| 124 DCHECK(sequence_checker_.CalledOnValidSequencedThread()); |
| 125 |
| 126 if (PIXEL_FORMAT_I420 != format) { |
| 127 DLOG(ERROR) << "Input format not supported= " |
| 128 << VideoPixelFormatToString(format); |
| 129 return false; |
| 130 } |
| 131 |
| 132 if (H264PROFILE_BASELINE != output_profile) { |
| 133 DLOG(ERROR) << "Output profile not supported= " << output_profile; |
| 134 return false; |
| 135 } |
| 136 |
| 137 for (const wchar_t* mfdll : kMediaFoundationVideoEncoderDLLs) { |
| 138 if (!::GetModuleHandle(mfdll)) { |
| 139 DLOG(ERROR) << mfdll << " is required for encoding"; |
| 140 return false; |
| 141 } |
| 142 } |
| 143 |
| 144 encoder_thread_.init_com_with_mta(false); |
| 145 if (!encoder_thread_.Start()) { |
| 146 DLOG(ERROR) << "Failed spawning encoder thread."; |
| 147 return false; |
| 148 } |
| 149 encoder_thread_task_runner_ = encoder_thread_.task_runner(); |
| 150 |
| 151 InitializeMediaFoundation(); |
| 152 |
| 153 uint32_t flags = MFT_ENUM_FLAG_HARDWARE | MFT_ENUM_FLAG_SORTANDFILTER; |
| 154 MFT_REGISTER_TYPE_INFO input_info; |
| 155 input_info.guidMajorType = MFMediaType_Video; |
| 156 input_info.guidSubtype = MFVideoFormat_NV12; |
| 157 MFT_REGISTER_TYPE_INFO output_info; |
| 158 output_info.guidMajorType = MFMediaType_Video; |
| 159 output_info.guidSubtype = MFVideoFormat_H264; |
| 160 |
| 161 base::win::ScopedCoMem<CLSID> CLSIDs; |
| 162 uint32_t count = 0; |
| 163 HRESULT hr = MFTEnum(MFT_CATEGORY_VIDEO_ENCODER, flags, NULL, &output_info, |
| 164 NULL, &CLSIDs, &count); |
| 165 RETURN_ON_HR_FAILURE(hr, "Couldn't enumerate hardware encoder", false); |
| 166 RETURN_ON_FAILURE((count > 0), "No HW encoder found", false); |
| 167 DVLOG(3) << "HW encoder(s) found: " << count; |
| 168 hr = encoder_.CreateInstance(CLSIDs[0]); |
| 169 RETURN_ON_HR_FAILURE(hr, "Couldn't activate hardware encoder", false); |
| 170 |
| 171 client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client)); |
| 172 client_ = client_ptr_factory_->GetWeakPtr(); |
| 173 input_visible_size_ = input_visible_size; |
| 174 frame_rate_ = kMaxFrameRateNumerator / kMaxFrameRateDenominator; |
| 175 target_bitrate_ = initial_bitrate; |
| 176 bitstream_buffer_size_ = input_visible_size.GetArea(); |
| 177 |
| 178 u_plane_offset_ = |
| 179 VideoFrame::PlaneSize(PIXEL_FORMAT_I420, VideoFrame::kYPlane, |
| 180 input_visible_size_) |
| 181 .GetArea(); |
| 182 v_plane_offset_ = |
| 183 u_plane_offset_ + |
| 184 VideoFrame::PlaneSize(PIXEL_FORMAT_I420, VideoFrame::kUPlane, |
| 185 input_visible_size_) |
| 186 .GetArea(); |
| 187 |
| 188 if (!InitializeInputOutputSamples()) { |
| 189 DLOG(ERROR) << "Failed initializing input-output samples."; |
| 190 return false; |
| 191 } |
| 192 |
| 193 if (!SetEncoderModes()) { |
| 194 DLOG(ERROR) << "Failed setting encoder parameters."; |
| 195 return false; |
| 196 } |
| 197 |
| 198 hr = encoder_->ProcessMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, NULL); |
| 199 RETURN_ON_HR_FAILURE(hr, "Couldn't set ProcessMessage", false); |
| 200 |
| 201 client_task_runner_->PostTask( |
| 202 FROM_HERE, |
| 203 base::Bind(&Client::RequireBitstreamBuffers, client_, kNumInputBuffers, |
| 204 input_visible_size_, bitstream_buffer_size_)); |
| 205 return SUCCEEDED(hr); |
| 206 } |
| 207 |
| 208 void MediaFoundationVideoEncodeAccelerator::Encode( |
| 209 const scoped_refptr<VideoFrame>& frame, |
| 210 bool force_keyframe) { |
| 211 DVLOG(3) << __FUNCTION__; |
| 212 DCHECK(sequence_checker_.CalledOnValidSequencedThread()); |
| 213 |
| 214 encoder_thread_task_runner_->PostTask( |
| 215 FROM_HERE, base::Bind(&MediaFoundationVideoEncodeAccelerator::EncodeTask, |
| 216 encoder_task_weak_factory_.GetWeakPtr(), frame, |
| 217 force_keyframe)); |
| 218 } |
| 219 |
| 220 void MediaFoundationVideoEncodeAccelerator::UseOutputBitstreamBuffer( |
| 221 const BitstreamBuffer& buffer) { |
| 222 DVLOG(3) << __FUNCTION__ << ": buffer size=" << buffer.size(); |
| 223 DCHECK(sequence_checker_.CalledOnValidSequencedThread()); |
| 224 |
| 225 if (buffer.size() < bitstream_buffer_size_) { |
| 226 DLOG(ERROR) << "Output BitstreamBuffer isn't big enough: " << buffer.size() |
| 227 << " vs. " << bitstream_buffer_size_; |
| 228 client_->NotifyError(kInvalidArgumentError); |
| 229 return; |
| 230 } |
| 231 |
| 232 std::unique_ptr<base::SharedMemory> shm( |
| 233 new base::SharedMemory(buffer.handle(), false)); |
| 234 if (!shm->Map(buffer.size())) { |
| 235 DLOG(ERROR) << "Failed mapping shared memory."; |
| 236 client_->NotifyError(kPlatformFailureError); |
| 237 return; |
| 238 } |
| 239 |
| 240 std::unique_ptr<BitstreamBufferRef> buffer_ref( |
| 241 new BitstreamBufferRef(buffer.id(), std::move(shm), buffer.size())); |
| 242 encoder_thread_task_runner_->PostTask( |
| 243 FROM_HERE, |
| 244 base::Bind( |
| 245 &MediaFoundationVideoEncodeAccelerator::UseOutputBitstreamBufferTask, |
| 246 encoder_task_weak_factory_.GetWeakPtr(), base::Passed(&buffer_ref))); |
| 247 } |
| 248 |
| 249 void MediaFoundationVideoEncodeAccelerator::RequestEncodingParametersChange( |
| 250 uint32_t bitrate, |
| 251 uint32_t framerate) { |
| 252 DVLOG(3) << __FUNCTION__ << ": bitrate=" << bitrate |
| 253 << ": framerate=" << framerate; |
| 254 DCHECK(sequence_checker_.CalledOnValidSequencedThread()); |
| 255 |
| 256 encoder_thread_task_runner_->PostTask( |
| 257 FROM_HERE, |
| 258 base::Bind(&MediaFoundationVideoEncodeAccelerator:: |
| 259 RequestEncodingParametersChangeTask, |
| 260 encoder_task_weak_factory_.GetWeakPtr(), bitrate, framerate)); |
| 261 } |
| 262 |
// Tears the encoder down and deletes |this|. Called on the client sequence;
// the object must not be touched after this returns.
void MediaFoundationVideoEncodeAccelerator::Destroy() {
  DVLOG(3) << __FUNCTION__;
  DCHECK(sequence_checker_.CalledOnValidSequencedThread());

  // Cancel all callbacks.
  client_ptr_factory_.reset();

  if (encoder_thread_.IsRunning()) {
    // DestroyTask() invalidates the encoder-thread weak pointers and releases
    // the MFT; Stop() then joins the thread, so deleting |this| below is safe.
    encoder_thread_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&MediaFoundationVideoEncodeAccelerator::DestroyTask,
                   encoder_task_weak_factory_.GetWeakPtr()));
    encoder_thread_.Stop();
  }

  delete this;
}
| 280 |
| 281 // static |
| 282 void MediaFoundationVideoEncodeAccelerator::PreSandboxInitialization() { |
| 283 for (const wchar_t* mfdll : kMediaFoundationVideoEncoderDLLs) |
| 284 ::LoadLibrary(mfdll); |
| 285 } |
| 286 |
// Negotiates the MFT's output (H.264) and input (YV12) media types and
// pre-allocates the input/output samples that are reused for every frame.
// Returns false at the first failing Media Foundation call.
bool MediaFoundationVideoEncodeAccelerator::InitializeInputOutputSamples() {
  DCHECK(sequence_checker_.CalledOnValidSequencedThread());

  HRESULT hr = encoder_->GetStreamLimits(
      &input_stream_count_min_, &input_stream_count_max_,
      &output_stream_count_min_, &output_stream_count_max_);
  RETURN_ON_HR_FAILURE(hr, "Couldn't query stream limits", false);
  DVLOG(3) << "Stream limits: " << input_stream_count_min_ << ","
           << input_stream_count_max_ << "," << output_stream_count_min_ << ","
           << output_stream_count_max_;

  // Initialize output parameters. The output type is configured before the
  // input type (encoder MFTs typically require this order).
  base::win::ScopedComPtr<IMFMediaType> imf_output_media_type;
  hr = MFCreateMediaType(imf_output_media_type.Receive());
  RETURN_ON_HR_FAILURE(hr, "Couldn't create media type", false);
  hr = imf_output_media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set media type", false);
  hr = imf_output_media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set video format", false);
  hr = imf_output_media_type->SetUINT32(MF_MT_AVG_BITRATE, target_bitrate_);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set bitrate", false);
  hr = MFSetAttributeRatio(imf_output_media_type.get(), MF_MT_FRAME_RATE,
                           frame_rate_, kMaxFrameRateDenominator);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set frame rate", false);
  hr = MFSetAttributeSize(imf_output_media_type.get(), MF_MT_FRAME_SIZE,
                          input_visible_size_.width(),
                          input_visible_size_.height());
  RETURN_ON_HR_FAILURE(hr, "Couldn't set frame size", false);
  hr = imf_output_media_type->SetUINT32(MF_MT_INTERLACE_MODE,
                                        MFVideoInterlace_Progressive);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set interlace mode", false);
  hr = imf_output_media_type->SetUINT32(MF_MT_MPEG2_PROFILE,
                                        eAVEncH264VProfile_Base);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set codec profile", false);
  hr = encoder_->SetOutputType(0, imf_output_media_type.get(), 0);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set output media type", false);

  // Initialize input parameters. YV12 matches the U/V-swapped I420 copy that
  // EncodeTask() performs into the input sample.
  base::win::ScopedComPtr<IMFMediaType> imf_input_media_type;
  hr = MFCreateMediaType(imf_input_media_type.Receive());
  RETURN_ON_HR_FAILURE(hr, "Couldn't create media type", false);
  hr = imf_input_media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set media type", false);
  hr = imf_input_media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_YV12);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set video format", false);
  hr = MFSetAttributeRatio(imf_input_media_type.get(), MF_MT_FRAME_RATE,
                           frame_rate_, kMaxFrameRateDenominator);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set frame rate", false);
  hr = MFSetAttributeSize(imf_input_media_type.get(), MF_MT_FRAME_SIZE,
                          input_visible_size_.width(),
                          input_visible_size_.height());
  RETURN_ON_HR_FAILURE(hr, "Couldn't set frame size", false);
  hr = imf_input_media_type->SetUINT32(MF_MT_INTERLACE_MODE,
                                       MFVideoInterlace_Progressive);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set interlace mode", false);
  hr = encoder_->SetInputType(0, imf_input_media_type.get(), 0);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set input media type", false);

  // Pre-allocate the samples reused by every ProcessInput/ProcessOutput call.
  // The output buffer is oversized by kOutputSampleBufferSizeRatio to leave
  // headroom for frames that compress poorly.
  // NOTE(review): the CreateEmptySampleWithBuffer() results are not
  // null-checked here — confirm allocation failure is handled downstream.
  input_sample_.Attach(mf::CreateEmptySampleWithBuffer(
      VideoFrame::AllocationSize(PIXEL_FORMAT_I420, input_visible_size_), 2));
  output_sample_.Attach(mf::CreateEmptySampleWithBuffer(
      bitstream_buffer_size_ * kOutputSampleBufferSizeRatio, 2));

  return SUCCEEDED(hr);
}
| 352 |
// Configures codec-level options through ICodecAPI: CBR rate control at
// |target_bitrate_|, frame-rate-driven adaptive mode, and low-latency mode.
bool MediaFoundationVideoEncodeAccelerator::SetEncoderModes() {
  DCHECK(sequence_checker_.CalledOnValidSequencedThread());

  HRESULT hr = encoder_.QueryInterface(IID_ICodecAPI, codec_api_.ReceiveVoid());
  RETURN_ON_HR_FAILURE(hr, "Couldn't get ICodecAPI", false);
  // |var| is reused across the successive SetValue() calls below; only the
  // fields that change are rewritten.
  VARIANT var;
  var.vt = VT_UI4;
  var.ulVal = eAVEncCommonRateControlMode_CBR;
  hr = codec_api_->SetValue(&CODECAPI_AVEncCommonRateControlMode, &var);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set CommonRateControlMode", false);
  var.ulVal = target_bitrate_;
  hr = codec_api_->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set bitrate", false);
  var.ulVal = eAVEncAdaptiveMode_FrameRate;
  hr = codec_api_->SetValue(&CODECAPI_AVEncAdaptiveMode, &var);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set FrameRate", false);
  var.vt = VT_BOOL;
  var.boolVal = VARIANT_TRUE;
  hr = codec_api_->SetValue(&CODECAPI_AVLowLatencyMode, &var);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set LowLatencyMode", false);
  return SUCCEEDED(hr);
}
| 375 |
| 376 void MediaFoundationVideoEncodeAccelerator::NotifyError( |
| 377 VideoEncodeAccelerator::Error error) { |
| 378 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); |
| 379 client_task_runner_->PostTask( |
| 380 FROM_HERE, base::Bind(&Client::NotifyError, client_, error)); |
| 381 } |
| 382 |
| 383 void MediaFoundationVideoEncodeAccelerator::EncodeTask( |
| 384 const scoped_refptr<VideoFrame>& frame, |
| 385 bool force_keyframe) { |
| 386 DVLOG(3) << __FUNCTION__; |
| 387 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); |
| 388 |
| 389 if (!encoder_) |
| 390 return; |
| 391 |
| 392 base::win::ScopedComPtr<IMFMediaBuffer> input_buffer; |
| 393 input_sample_->GetBufferByIndex(0, input_buffer.Receive()); |
| 394 |
| 395 { |
| 396 MediaBufferScopedPointer scoped_buffer(input_buffer.get()); |
| 397 DCHECK(scoped_buffer.get()); |
| 398 libyuv::I420Copy(frame->visible_data(VideoFrame::kYPlane), |
| 399 frame->stride(VideoFrame::kYPlane), |
| 400 frame->visible_data(VideoFrame::kVPlane), |
| 401 frame->stride(VideoFrame::kVPlane), |
| 402 frame->visible_data(VideoFrame::kUPlane), |
| 403 frame->stride(VideoFrame::kUPlane), scoped_buffer.get(), |
| 404 frame->stride(VideoFrame::kYPlane), |
| 405 scoped_buffer.get() + u_plane_offset_, |
| 406 frame->stride(VideoFrame::kUPlane), |
| 407 scoped_buffer.get() + v_plane_offset_, |
| 408 frame->stride(VideoFrame::kVPlane), |
| 409 input_visible_size_.width(), input_visible_size_.height()); |
| 410 } |
| 411 |
| 412 input_sample_->SetSampleTime(frame->timestamp().InMicroseconds() * 10); |
| 413 input_sample_->SetSampleDuration(kOneSecondInMicroseconds / frame_rate_); |
| 414 HRESULT hr = encoder_->ProcessInput(0, input_sample_.get(), 0); |
| 415 // According to MSDN, if encoder returns MF_E_NOTACCEPTING, we need to try |
| 416 // processing the output. This error indicates that encoder does not accept |
| 417 // any more input data. |
| 418 if (hr == MF_E_NOTACCEPTING) { |
| 419 DVLOG(3) << "MF_E_NOTACCEPTING"; |
| 420 ProcessOutput(); |
| 421 hr = encoder_->ProcessInput(0, input_sample_.get(), 0); |
| 422 if (hr == MF_E_NOTACCEPTING) { |
| 423 encoder_thread_task_runner_->PostTask( |
| 424 FROM_HERE, |
| 425 base::Bind(&MediaFoundationVideoEncodeAccelerator::EncodeTask, |
| 426 encoder_task_weak_factory_.GetWeakPtr(), frame, |
| 427 force_keyframe)); |
| 428 } else if (!SUCCEEDED(hr)) { |
| 429 NotifyError(kPlatformFailureError); |
| 430 RETURN_ON_HR_FAILURE(hr, "Couldn't encode", ); |
| 431 } |
| 432 } else if (!SUCCEEDED(hr)) { |
| 433 NotifyError(kPlatformFailureError); |
| 434 RETURN_ON_HR_FAILURE(hr, "Couldn't encode", ); |
| 435 } |
| 436 DVLOG(3) << "Sent for encode " << hr; |
| 437 |
| 438 ProcessOutput(); |
| 439 } |
| 440 |
// Pulls one encoded sample out of the MFT. If a client bitstream buffer is
// queued the output is copied into it and BitstreamBufferReady is posted;
// otherwise the output is cached in |encoder_output_queue_| so encoding can
// continue. Runs on |encoder_thread_|.
void MediaFoundationVideoEncodeAccelerator::ProcessOutput() {
  DVLOG(3) << __FUNCTION__;
  DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());

  // The MFT writes into the pre-allocated |output_sample_|.
  MFT_OUTPUT_DATA_BUFFER output_data_buffer = {0};
  output_data_buffer.dwStreamID = 0;
  output_data_buffer.dwStatus = 0;
  output_data_buffer.pEvents = NULL;
  output_data_buffer.pSample = output_sample_.get();
  DWORD status = 0;
  HRESULT hr = encoder_->ProcessOutput(0, 1, &output_data_buffer, &status);
  // MF_E_TRANSFORM_NEED_MORE_INPUT simply means no output is ready yet.
  if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
    DVLOG(3) << "MF_E_TRANSFORM_NEED_MORE_INPUT";
    return;
  }
  RETURN_ON_HR_FAILURE(hr, "Couldn't get encoded data", );
  DVLOG(3) << "Got encoded data " << hr;

  base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
  hr = output_sample_->GetBufferByIndex(0, output_buffer.Receive());
  RETURN_ON_HR_FAILURE(hr, "Couldn't get buffer by index", );
  DWORD size = 0;
  hr = output_buffer->GetCurrentLength(&size);
  RETURN_ON_HR_FAILURE(hr, "Couldn't get buffer length", );

  // MFSampleExtension_CleanPoint (defaulting to false) is used as the
  // keyframe indicator.
  const bool keyframe = MFGetAttributeUINT32(
      output_sample_.get(), MFSampleExtension_CleanPoint, false);
  DVLOG(3) << "We HAVE encoded data with size:" << size << " keyframe "
           << keyframe;

  if (bitstream_buffer_queue_.empty()) {
    DVLOG(3) << "No bitstream buffers.";
    // We need to copy the output so that encoding can continue.
    std::unique_ptr<EncodeOutput> encode_output(
        new EncodeOutput(size, keyframe, base::Time::Now() - base::Time()));
    {
      MediaBufferScopedPointer scoped_buffer(output_buffer.get());
      memcpy(encode_output->memory(), scoped_buffer.get(), size);
    }
    encoder_output_queue_.push_back(std::move(encode_output));
    return;
  }

  std::unique_ptr<MediaFoundationVideoEncodeAccelerator::BitstreamBufferRef>
      buffer_ref = std::move(bitstream_buffer_queue_.front());
  bitstream_buffer_queue_.pop_front();

  {
    MediaBufferScopedPointer scoped_buffer(output_buffer.get());
    memcpy(buffer_ref->shm->memory(), scoped_buffer.get(), size);
  }

  client_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&Client::BitstreamBufferReady, client_, buffer_ref->id, size,
                 keyframe, base::Time::Now() - base::Time()));

  // Keep calling ProcessOutput recursively until MF_E_TRANSFORM_NEED_MORE_INPUT
  // is returned to flush out all the output.
  ProcessOutput();
}
| 502 |
| 503 void MediaFoundationVideoEncodeAccelerator::UseOutputBitstreamBufferTask( |
| 504 std::unique_ptr<BitstreamBufferRef> buffer_ref) { |
| 505 DVLOG(3) << __FUNCTION__; |
| 506 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); |
| 507 |
| 508 // If there is already EncodeOutput waiting, copy its output first. |
| 509 if (!encoder_output_queue_.empty()) { |
| 510 std::unique_ptr<MediaFoundationVideoEncodeAccelerator::EncodeOutput> |
| 511 encode_output = std::move(encoder_output_queue_.front()); |
| 512 encoder_output_queue_.pop_front(); |
| 513 ReturnBitstreamBuffer(std::move(encode_output), std::move(buffer_ref)); |
| 514 return; |
| 515 } |
| 516 |
| 517 bitstream_buffer_queue_.push_back(std::move(buffer_ref)); |
| 518 } |
| 519 |
| 520 void MediaFoundationVideoEncodeAccelerator::ReturnBitstreamBuffer( |
| 521 std::unique_ptr<EncodeOutput> encode_output, |
| 522 std::unique_ptr<MediaFoundationVideoEncodeAccelerator::BitstreamBufferRef> |
| 523 buffer_ref) { |
| 524 DVLOG(3) << __FUNCTION__; |
| 525 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); |
| 526 |
| 527 memcpy(buffer_ref->shm->memory(), encode_output->memory(), |
| 528 encode_output->size()); |
| 529 client_task_runner_->PostTask( |
| 530 FROM_HERE, |
| 531 base::Bind(&Client::BitstreamBufferReady, client_, buffer_ref->id, |
| 532 encode_output->size(), encode_output->keyframe, |
| 533 encode_output->capture_timestamp)); |
| 534 } |
| 535 |
// Applies new bitrate/framerate targets on the encoder thread. A zero value
// for either parameter is clamped to 1.
void MediaFoundationVideoEncodeAccelerator::RequestEncodingParametersChangeTask(
    uint32_t bitrate,
    uint32_t framerate) {
  DVLOG(3) << __FUNCTION__;
  DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());

  frame_rate_ = framerate ? framerate : 1;
  target_bitrate_ = bitrate ? bitrate : 1;

  // The new bitrate is applied immediately through ICodecAPI.
  VARIANT var;
  var.vt = VT_UI4;
  var.ulVal = target_bitrate_;
  HRESULT hr = codec_api_->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set bitrate", );

  // NOTE(review): this media type is populated with the new bitrate and frame
  // rate but is never handed to the encoder (no SetOutputType() call in this
  // function) — confirm whether the frame-rate change actually takes effect.
  base::win::ScopedComPtr<IMFMediaType> imf_output_media_type;
  hr = MFCreateMediaType(imf_output_media_type.Receive());
  RETURN_ON_HR_FAILURE(hr, "Couldn't create output media type", );
  hr = imf_output_media_type->SetUINT32(MF_MT_AVG_BITRATE, target_bitrate_);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set bitrate", );
  hr = MFSetAttributeRatio(imf_output_media_type.get(), MF_MT_FRAME_RATE,
                           frame_rate_, kMaxFrameRateDenominator);
  RETURN_ON_HR_FAILURE(hr, "Couldn't set output type params", );
}
| 560 |
// Final encoder-thread task, posted by Destroy() just before the thread is
// joined.
void MediaFoundationVideoEncodeAccelerator::DestroyTask() {
  DVLOG(3) << __FUNCTION__;
  DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());

  // Cancel all encoder thread callbacks.
  encoder_task_weak_factory_.InvalidateWeakPtrs();

  // Release the MFT; EncodeTask() checks |encoder_| before using it.
  encoder_.Release();
}
| 570 |
}  // namespace media
OLD | NEW |