Chromium Code Reviews

Side by Side Diff: media/gpu/media_foundation_video_encode_accelerator_win.cc

Issue 2058413003: H264 HW encode using MediaFoundation (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Rebase Created 4 years, 5 months ago
1 // Copyright 2016 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "media/gpu/media_foundation_video_encode_accelerator_win.h"
6
7 #if defined(OS_WIN)
8 #pragma warning(push)
9 #pragma warning(disable : 4800) // Disable warning 4800 (forcing value to bool).
10 #endif // defined(OS_WIN)
11
12 #include <codecapi.h>
13 #include <mferror.h>
14 #include <mftransform.h>
15
16 #include "base/threading/thread_task_runner_handle.h"
17 #include "base/win/scoped_co_mem.h"
18 #include "base/win/scoped_variant.h"
19 #include "base/win/windows_version.h"
20 #include "media/base/win/mf_helpers.h"
21 #include "media/base/win/mf_initializer.h"
22 #include "third_party/libyuv/include/libyuv.h"
23
24 using base::win::ScopedComPtr;
25 using media::mf::MediaBufferScopedPointer;
26
27 namespace media {
28
29 namespace {
30
31 const size_t kMaxFrameRateNumerator = 30;
32 const size_t kMaxFrameRateDenominator = 1;
33 const size_t kMaxResolutionWidth = 4096;
34 const size_t kMaxResolutionHeight = 2160;
35 const size_t kNumInputBuffers = 3;
36 const size_t kOneSecondInMicroseconds = 1000000;
37 const size_t kOutputSampleBufferSizeRatio = 4;
38
39 static const wchar_t* const kMediaFoundationVideoEncoderDLLs[] = {
40 L"mf.dll", L"mfplat.dll",
41 };
42
43 } // namespace
44
45 class MediaFoundationVideoEncodeAccelerator::EncodeOutput {
46 public:
47 EncodeOutput(int buffer_size, bool key_frame, base::TimeDelta timestamp)
grt (UTC plus 2) 2016/07/14 20:07:24 a DWORD is passed in as the size, so i suspect uint32_t is a better fit here.
emircan 2016/07/15 04:58:30 Done.
48 : size(buffer_size), keyframe(key_frame), capture_timestamp(timestamp) {
grt (UTC plus 2) 2016/07/14 20:07:25 , data_(size) and remove data_.resize(size); below
emircan 2016/07/15 04:58:31 Done.
49 data_.resize(size);
50 }
51
52 uint8_t* memory() { return data_.data(); }
53 const int size;
grt (UTC plus 2) 2016/07/14 20:07:25 this member is redundant. remove it and replace it with data_.size().
emircan 2016/07/15 04:58:31 Done.
54 const bool keyframe;
55 const base::TimeDelta capture_timestamp;
56
57 private:
58 std::vector<uint8_t> data_;
grt (UTC plus 2) 2016/07/14 20:07:25 #include <vector>
emircan 2016/07/15 04:58:30 Done.
59 DISALLOW_IMPLICIT_CONSTRUCTORS(EncodeOutput);
grt (UTC plus 2) 2016/07/14 20:07:25 shouldn't this be COPY_AND_ASSIGN since the implicit default constructor is already suppressed by the declared one?
emircan 2016/07/15 04:58:30 Done. I initially had this as a struct, and forgot to update the macro when it became a class.
60 };
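A sketch of how EncodeOutput might look with the suggestions above applied (uint32_t size parameter, |data_| sized in the member-initializer list, size derived from the vector, DISALLOW_COPY_AND_ASSIGN); this is an illustration, not necessarily the exact code of the next patch set:

// Sketch only: EncodeOutput with the review suggestions applied.
class MediaFoundationVideoEncodeAccelerator::EncodeOutput {
 public:
  EncodeOutput(uint32_t size, bool key_frame, base::TimeDelta timestamp)
      : keyframe(key_frame), capture_timestamp(timestamp), data_(size) {}

  uint8_t* memory() { return data_.data(); }
  int size() const { return static_cast<int>(data_.size()); }

  const bool keyframe;
  const base::TimeDelta capture_timestamp;

 private:
  // Backing store for the encoded bytes; needs #include <vector>.
  std::vector<uint8_t> data_;

  DISALLOW_COPY_AND_ASSIGN(EncodeOutput);
};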
61
62 struct MediaFoundationVideoEncodeAccelerator::BitstreamBufferRef {
63 BitstreamBufferRef(int32_t id,
64 std::unique_ptr<base::SharedMemory> shm,
65 size_t size)
66 : id(id), shm(std::move(shm)), size(size) {}
67 const int32_t id;
68 const std::unique_ptr<base::SharedMemory> shm;
69 const size_t size;
70
71 private:
72 DISALLOW_IMPLICIT_CONSTRUCTORS(BitstreamBufferRef);
73 };
74
75 MediaFoundationVideoEncodeAccelerator::MediaFoundationVideoEncodeAccelerator()
76 : client_task_runner_(base::ThreadTaskRunnerHandle::Get()),
77 encoder_thread_("MFEncoderThread"),
78 encoder_task_weak_factory_(this) {
79 DVLOG(3) << __FUNCTION__;
80 encoder_weak_ptr_ = encoder_task_weak_factory_.GetWeakPtr();
grt (UTC plus 2) 2016/07/14 20:07:24 remove this member and call encoder_task_weak_factory_.GetWeakPtr() where it's needed.
emircan 2016/07/15 04:58:30 Done.
81 }
82
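With |encoder_weak_ptr_| removed as suggested above, each post to the encoder thread can ask the factory for a weak pointer at the call site; a sketch using the Encode() post as an example:

// Sketch: mint the weak pointer where the task is posted instead of caching it.
encoder_thread_task_runner_->PostTask(
    FROM_HERE, base::Bind(&MediaFoundationVideoEncodeAccelerator::EncodeTask,
                          encoder_task_weak_factory_.GetWeakPtr(), frame,
                          force_keyframe));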
83 MediaFoundationVideoEncodeAccelerator::
84 ~MediaFoundationVideoEncodeAccelerator() {
85 DVLOG(3) << __FUNCTION__;
86 DCHECK(thread_checker_.CalledOnValidThread());
87
88 Destroy();
89 DCHECK(!encoder_thread_.IsRunning());
90 DCHECK(!encoder_task_weak_factory_.HasWeakPtrs());
91 }
92
93 media::VideoEncodeAccelerator::SupportedProfiles
94 MediaFoundationVideoEncodeAccelerator::GetSupportedProfiles() {
95 DVLOG(3) << __FUNCTION__;
96 DCHECK(thread_checker_.CalledOnValidThread());
97
98 SupportedProfiles profiles;
99 if (base::win::GetVersion() < base::win::VERSION_WIN8) {
100 DLOG(ERROR) << "Windows versions earlier than 8 are not supported.";
101 return profiles;
102 }
103
104 SupportedProfile profile;
105 // More profiles can be supported here, but they should be available in SW
106 // fallback as well.
107 profile.profile = media::H264PROFILE_BASELINE;
108 profile.max_framerate_numerator = kMaxFrameRateNumerator;
109 profile.max_framerate_denominator = kMaxFrameRateDenominator;
110 profile.max_resolution = gfx::Size(kMaxResolutionWidth, kMaxResolutionHeight);
111 profiles.push_back(profile);
112 return profiles;
113 }
114
115 bool MediaFoundationVideoEncodeAccelerator::Initialize(
116 media::VideoPixelFormat format,
117 const gfx::Size& input_visible_size,
118 media::VideoCodecProfile output_profile,
119 uint32_t initial_bitrate,
120 Client* client) {
121 DVLOG(3) << __FUNCTION__
122 << ": input_format=" << media::VideoPixelFormatToString(format)
123 << ", input_visible_size=" << input_visible_size.ToString()
124 << ", output_profile=" << output_profile
125 << ", initial_bitrate=" << initial_bitrate;
126 DCHECK(thread_checker_.CalledOnValidThread());
127
128 if (media::PIXEL_FORMAT_I420 != format) {
129 DLOG(ERROR) << "Input format not supported= "
130 << media::VideoPixelFormatToString(format);
131 return false;
132 }
133
134 if (media::H264PROFILE_BASELINE != output_profile) {
135 DLOG(ERROR) << "Output profile not supported= " << output_profile;
136 return false;
137 }
138
139 for (const wchar_t* mfdll : kMediaFoundationVideoEncoderDLLs) {
140 HMODULE dll = ::GetModuleHandle(mfdll);
141 if (!dll) {
142 DLOG(ERROR) << mfdll << " is required for encoding";
143 return false;
144 }
145 }
146
147 media::InitializeMediaFoundation();
148
149 uint32_t flags = MFT_ENUM_FLAG_HARDWARE | MFT_ENUM_FLAG_SORTANDFILTER;
150 MFT_REGISTER_TYPE_INFO input_info;
151 input_info.guidMajorType = MFMediaType_Video;
152 input_info.guidSubtype = MFVideoFormat_NV12;
153 MFT_REGISTER_TYPE_INFO output_info;
154 output_info.guidMajorType = MFMediaType_Video;
155 output_info.guidSubtype = MFVideoFormat_H264;
156
157 base::win::ScopedCoMem<CLSID> CLSIDs;
158 uint32_t count = 0;
159 HRESULT hr = MFTEnum(MFT_CATEGORY_VIDEO_ENCODER, flags, NULL, &output_info,
160 NULL, &CLSIDs, &count);
161 RETURN_ON_HR_FAILURE(hr, "Couldn't enumerate hardware encoder", false);
162 RETURN_ON_FAILURE((count > 0), "No HW encoder found", false);
163 DVLOG(3) << "HW encoder(s) found: " << count;
164 hr = encoder_.CreateInstance(CLSIDs[0]);
165 RETURN_ON_HR_FAILURE(hr, "Couldn't activate hardware encoder", false);
166
167 if (!encoder_thread_.Start()) {
168 DLOG(ERROR) << "Failed spawning encoder thread.";
169 return false;
170 }
171
172 encoder_thread_task_runner_ = encoder_thread_.task_runner();
173
174 client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));
175 client_ = client_ptr_factory_->GetWeakPtr();
176 input_visible_size_ = input_visible_size;
177 frame_rate_ = kMaxFrameRateNumerator / kMaxFrameRateDenominator;
178 target_bitrate_ = initial_bitrate;
179 bitstream_buffer_size_ = input_visible_size.GetArea();
180
181 u_plane_offset_ =
182 VideoFrame::PlaneSize(PIXEL_FORMAT_I420, VideoFrame::kYPlane,
183 input_visible_size_)
184 .GetArea();
185 v_plane_offset_ =
186 u_plane_offset_ +
187 VideoFrame::PlaneSize(PIXEL_FORMAT_I420, VideoFrame::kUPlane,
188 input_visible_size_)
189 .GetArea();
190
191 if (!InitializeInputOutputSamples()) {
192 DLOG(ERROR) << "Failed initializing input-output samples.";
193 return false;
194 }
195
196 if (!SetEncoderModes()) {
197 DLOG(ERROR) << "Failed setting encoder parameters.";
198 return false;
199 }
200
201 hr = encoder_->ProcessMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, NULL);
202 RETURN_ON_HR_FAILURE(hr, "Couldn't set ProcessMessage", false);
203
204 client_task_runner_->PostTask(
205 FROM_HERE,
206 base::Bind(&Client::RequireBitstreamBuffers, client_, kNumInputBuffers,
207 input_visible_size_, bitstream_buffer_size_));
208 return SUCCEEDED(hr);
209 }
210
211 void MediaFoundationVideoEncodeAccelerator::Encode(
212 const scoped_refptr<media::VideoFrame>& frame,
213 bool force_keyframe) {
214 DVLOG(3) << __FUNCTION__;
215 DCHECK(thread_checker_.CalledOnValidThread());
216
217 encoder_thread_task_runner_->PostTask(
218 FROM_HERE, base::Bind(&MediaFoundationVideoEncodeAccelerator::EncodeTask,
219 encoder_weak_ptr_, frame, force_keyframe));
220 }
221
222 void MediaFoundationVideoEncodeAccelerator::UseOutputBitstreamBuffer(
223 const media::BitstreamBuffer& buffer) {
224 DVLOG(3) << __FUNCTION__ << ": buffer size=" << buffer.size();
225 DCHECK(thread_checker_.CalledOnValidThread());
226
227 if (buffer.size() < bitstream_buffer_size_) {
228 DLOG(ERROR) << "Output BitstreamBuffer isn't big enough: " << buffer.size()
229 << " vs. " << bitstream_buffer_size_;
230 client_->NotifyError(kInvalidArgumentError);
231 return;
232 }
233
234 std::unique_ptr<base::SharedMemory> shm(
235 new base::SharedMemory(buffer.handle(), false));
236 if (!shm->Map(buffer.size())) {
237 DLOG(ERROR) << "Failed mapping shared memory.";
238 client_->NotifyError(kPlatformFailureError);
239 return;
240 }
241
242 std::unique_ptr<BitstreamBufferRef> buffer_ref(
243 new BitstreamBufferRef(buffer.id(), std::move(shm), buffer.size()));
grt (UTC plus 2) 2016/07/14 20:07:24 #include <utility>
emircan 2016/07/15 04:58:31 Done.
244 encoder_thread_task_runner_->PostTask(
245 FROM_HERE,
246 base::Bind(
247 &MediaFoundationVideoEncodeAccelerator::UseOutputBitstreamBufferTask,
248 encoder_weak_ptr_, base::Passed(&buffer_ref)));
249 }
250
251 void MediaFoundationVideoEncodeAccelerator::RequestEncodingParametersChange(
252 uint32_t bitrate,
253 uint32_t framerate) {
254 DVLOG(3) << __FUNCTION__ << ": bitrate=" << bitrate
255 << ": framerate=" << framerate;
256 DCHECK(thread_checker_.CalledOnValidThread());
257
258 encoder_thread_task_runner_->PostTask(
259 FROM_HERE, base::Bind(&MediaFoundationVideoEncodeAccelerator::
260 RequestEncodingParametersChangeTask,
261 encoder_weak_ptr_, bitrate, framerate));
262 }
263
264 void MediaFoundationVideoEncodeAccelerator::Destroy() {
265 DVLOG(3) << __FUNCTION__;
266 DCHECK(thread_checker_.CalledOnValidThread());
267
268 // Cancel all callbacks.
269 client_ptr_factory_.reset();
270
271 if (encoder_thread_.IsRunning()) {
272 encoder_thread_task_runner_->PostTask(
273 FROM_HERE,
274 base::Bind(&MediaFoundationVideoEncodeAccelerator::DestroyTask,
275 encoder_weak_ptr_));
276 encoder_thread_.Stop();
277 } else {
278 DestroyTask();
279 }
280 }
281
282 // static
283 void MediaFoundationVideoEncodeAccelerator::PreSandboxInitialization() {
284 for (const wchar_t* mfdll : kMediaFoundationVideoEncoderDLLs)
285 ::LoadLibrary(mfdll);
286 }
287
288 bool MediaFoundationVideoEncodeAccelerator::InitializeInputOutputSamples() {
289 HRESULT hr = encoder_->GetStreamLimits(
290 &input_stream_count_min_, &input_stream_count_max_,
291 &output_stream_count_min_, &output_stream_count_max_);
292 RETURN_ON_HR_FAILURE(hr, "Couldn't query stream limits", false);
293 DVLOG(3) << "Stream limits: " << input_stream_count_min_ << ","
294 << input_stream_count_max_ << "," << output_stream_count_min_ << ","
295 << output_stream_count_max_;
296
297 // Initialize output parameters.
298 base::win::ScopedComPtr<IMFMediaType> imf_output_media_type;
299 hr = MFCreateMediaType(imf_output_media_type.Receive());
300 RETURN_ON_HR_FAILURE(hr, "Couldn't create media type", false);
301 hr &= imf_output_media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
302 hr &= imf_output_media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
303 hr &= imf_output_media_type->SetUINT32(MF_MT_AVG_BITRATE, target_bitrate_);
304 hr &= MFSetAttributeRatio(imf_output_media_type.get(), MF_MT_FRAME_RATE,
305 frame_rate_, kMaxFrameRateDenominator);
306 hr &= MFSetAttributeSize(imf_output_media_type.get(), MF_MT_FRAME_SIZE,
307 input_visible_size_.width(),
308 input_visible_size_.height());
309 hr &= imf_output_media_type->SetUINT32(MF_MT_INTERLACE_MODE,
310 MFVideoInterlace_Progressive);
311 hr &= imf_output_media_type->SetUINT32(MF_MT_MPEG2_PROFILE,
312 eAVEncH264VProfile_Base);
313 RETURN_ON_HR_FAILURE(hr, "Couldn't set output params", false);
314 hr = encoder_->SetOutputType(0, imf_output_media_type.get(), 0);
315 RETURN_ON_HR_FAILURE(hr, "Couldn't set output media type", false);
316
317 // Initialize input parameters.
318 base::win::ScopedComPtr<IMFMediaType> imf_input_media_type;
319 hr = MFCreateMediaType(imf_input_media_type.Receive());
320 RETURN_ON_HR_FAILURE(hr, "Couldn't create media type", false);
321 hr &= imf_input_media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
322 hr &= imf_input_media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_YV12);
323 hr &= MFSetAttributeRatio(imf_input_media_type.get(), MF_MT_FRAME_RATE,
324 frame_rate_, kMaxFrameRateDenominator);
325 hr &= MFSetAttributeSize(imf_input_media_type.get(), MF_MT_FRAME_SIZE,
326 input_visible_size_.width(),
327 input_visible_size_.height());
328 hr &= imf_input_media_type->SetUINT32(MF_MT_INTERLACE_MODE,
329 MFVideoInterlace_Progressive);
330 RETURN_ON_HR_FAILURE(hr, "Couldn't set input params", false);
331 hr = encoder_->SetInputType(0, imf_input_media_type.get(), 0);
332 RETURN_ON_HR_FAILURE(hr, "Couldn't set input media type", false);
333
334 input_sample_.Attach(mf::CreateEmptySampleWithBuffer(
335 VideoFrame::AllocationSize(PIXEL_FORMAT_I420, input_visible_size_), 2));
336 output_sample_.Attach(mf::CreateEmptySampleWithBuffer(
337 bitstream_buffer_size_ * kOutputSampleBufferSizeRatio, 2));
338
339 return SUCCEEDED(hr);
340 }
341
342 bool MediaFoundationVideoEncodeAccelerator::SetEncoderModes() {
343 HRESULT hr = encoder_.QueryInterface(IID_ICodecAPI, codec_api_.ReceiveVoid());
344 RETURN_ON_HR_FAILURE(hr, "Couldn't get ICodecAPI", false);
345 VARIANT var;
346 var.vt = VT_UI4;
347 var.ulVal = eAVEncCommonRateControlMode_CBR;
348 hr = codec_api_->SetValue(&CODECAPI_AVEncCommonRateControlMode, &var);
349 RETURN_ON_HR_FAILURE(hr, "Couldn't set CommonRateControlMode", false);
350 var.ulVal = target_bitrate_;
351 hr = codec_api_->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var);
352 RETURN_ON_HR_FAILURE(hr, "Couldn't set bitrate", false);
353 var.ulVal = eAVEncAdaptiveMode_FrameRate;
354 hr = codec_api_->SetValue(&CODECAPI_AVEncAdaptiveMode, &var);
355 RETURN_ON_HR_FAILURE(hr, "Couldn't set FrameRate", false);
356 var.vt = VT_BOOL;
357 var.boolVal = VARIANT_TRUE;
358 hr = codec_api_->SetValue(&CODECAPI_AVLowLatencyMode, &var);
359 RETURN_ON_HR_FAILURE(hr, "Couldn't set LowLatencyMode", false);
360 return SUCCEEDED(hr);
361 }
362
363 void MediaFoundationVideoEncodeAccelerator::EncodeTask(
grt (UTC plus 2) 2016/07/14 20:07:24 to avoid the trap of accidentally touching members from the wrong thread, consider splitting this up.
emircan 2016/07/15 04:58:31 Sorry, I am a little confused by the split you are suggesting.
364 const scoped_refptr<media::VideoFrame>& frame,
365 bool force_keyframe) {
366 DVLOG(3) << __FUNCTION__;
367 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
368
369 if (!encoder_)
370 return;
371
372 base::win::ScopedComPtr<IMFMediaBuffer> input_buffer;
373 input_sample_->GetBufferByIndex(0, input_buffer.Receive());
374
375 {
376 MediaBufferScopedPointer scoped_buffer(input_buffer.get());
377 DCHECK(scoped_buffer.get());
378 libyuv::I420Copy(frame->visible_data(media::VideoFrame::kYPlane),
379 frame->stride(media::VideoFrame::kYPlane),
380 frame->visible_data(media::VideoFrame::kVPlane),
381 frame->stride(media::VideoFrame::kVPlane),
382 frame->visible_data(media::VideoFrame::kUPlane),
383 frame->stride(media::VideoFrame::kUPlane),
384 scoped_buffer.get(),
385 frame->stride(media::VideoFrame::kYPlane),
386 scoped_buffer.get() + u_plane_offset_,
387 frame->stride(media::VideoFrame::kUPlane),
388 scoped_buffer.get() + v_plane_offset_,
389 frame->stride(media::VideoFrame::kVPlane),
390 input_visible_size_.width(), input_visible_size_.height());
391 }
392
393 input_sample_->SetSampleTime(frame->timestamp().InMicroseconds() * 10);
394 input_sample_->SetSampleDuration(kOneSecondInMicroseconds / frame_rate_);
395 HRESULT hr = encoder_->ProcessInput(0, input_sample_.get(), 0);
396 // According to MSDN, if the encoder returns MF_E_NOTACCEPTING, we need to try
397 // processing the output first. This error indicates that the encoder does not
398 // accept any more input data.
399 if (hr == MF_E_NOTACCEPTING) {
400 DVLOG(3) << "MF_E_NOTACCEPTING";
401 ProcessOutput();
402 hr = encoder_->ProcessInput(0, input_sample_.get(), 0);
403 if (hr == MF_E_NOTACCEPTING) {
404 encoder_thread_task_runner_->PostTask(
405 FROM_HERE,
406 base::Bind(&MediaFoundationVideoEncodeAccelerator::EncodeTask,
407 encoder_weak_ptr_, frame, force_keyframe));
408 } else {
409 RETURN_ON_HR_FAILURE(hr, "Couldn't encode", );
410 }
411 } else {
412 RETURN_ON_HR_FAILURE(hr, "Couldn't encode", );
413 }
414 DVLOG(3) << "Sent for encode " << hr;
415
416 ProcessOutput();
417 }
418
419 void MediaFoundationVideoEncodeAccelerator::ProcessOutput() {
420 DVLOG(3) << __FUNCTION__;
421 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
422
423 MFT_OUTPUT_DATA_BUFFER output_data_buffer = {0};
424 output_data_buffer.dwStreamID = 0;
425 output_data_buffer.dwStatus = 0;
426 output_data_buffer.pEvents = NULL;
427 output_data_buffer.pSample = output_sample_.get();
428 DWORD status = 0;
429 HRESULT hr = encoder_->ProcessOutput(0, 1, &output_data_buffer, &status);
430 if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
431 DVLOG(3) << "MF_E_TRANSFORM_NEED_MORE_INPUT";
432 return;
433 }
434 RETURN_ON_HR_FAILURE(hr, "Couldn't get encoded data", );
435 DVLOG(3) << "Got encoded data " << hr;
436
437 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
438 hr = output_sample_->GetBufferByIndex(0, output_buffer.Receive());
439 RETURN_ON_HR_FAILURE(hr, "Couldn't get buffer by index", );
440 DWORD size = 0;
441 hr = output_buffer->GetCurrentLength(&size);
442 RETURN_ON_HR_FAILURE(hr, "Couldn't get buffer length", );
443
444 const bool keyframe = MFGetAttributeUINT32(
445 output_sample_.get(), MFSampleExtension_CleanPoint, false);
446 DVLOG(3) << "We HAVE encoded data with size:" << size << " keyframe "
447 << keyframe;
448
449 if (bitstream_buffer_queue_.empty()) {
450 DVLOG(3) << "No bitstream buffers.";
451 // We need to copy the output so that encoding can continue.
452 std::unique_ptr<EncodeOutput> encode_output(
453 new EncodeOutput(size, keyframe, base::Time::Now() - base::Time()));
454 {
455 MediaBufferScopedPointer scoped_buffer(output_buffer.get());
456 memcpy(encode_output->memory(), scoped_buffer.get(), size);
457 }
458 encoder_output_queue_.push_back(std::move(encode_output));
459 return;
460 }
461
462 std::unique_ptr<MediaFoundationVideoEncodeAccelerator::BitstreamBufferRef>
463 buffer_ref = std::move(bitstream_buffer_queue_.front());
464 bitstream_buffer_queue_.pop_front();
465
466 {
467 MediaBufferScopedPointer scoped_buffer(output_buffer.get());
468 memcpy(buffer_ref->shm->memory(), scoped_buffer.get(), size);
469 }
470
471 client_task_runner_->PostTask(
472 FROM_HERE,
473 base::Bind(&Client::BitstreamBufferReady, client_, buffer_ref->id, size,
474 keyframe, base::Time::Now() - base::Time()));
475
476 // Keep calling ProcessOutput recursively until MF_E_TRANSFORM_NEED_MORE_INPUT
477 // is returned to flush out all the output.
478 ProcessOutput();
479 }
480
481 void MediaFoundationVideoEncodeAccelerator::UseOutputBitstreamBufferTask(
482 std::unique_ptr<BitstreamBufferRef> buffer_ref) {
483 DVLOG(3) << __FUNCTION__;
484 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
485
486 // If there is already EncodeOutput waiting, copy its output first.
487 if (!encoder_output_queue_.empty()) {
488 std::unique_ptr<MediaFoundationVideoEncodeAccelerator::EncodeOutput>
489 encode_output = std::move(encoder_output_queue_.front());
490 encoder_output_queue_.pop_front();
491 ReturnBitstreamBuffer(std::move(encode_output), std::move(buffer_ref));
492 return;
493 }
494
495 bitstream_buffer_queue_.push_back(std::move(buffer_ref));
496 }
497
498 void MediaFoundationVideoEncodeAccelerator::ReturnBitstreamBuffer(
499 std::unique_ptr<EncodeOutput> encode_output,
500 std::unique_ptr<MediaFoundationVideoEncodeAccelerator::BitstreamBufferRef>
501 buffer_ref) {
502 DVLOG(3) << __FUNCTION__;
503 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
504
505 memcpy(buffer_ref->shm->memory(), encode_output->memory(),
506 encode_output->size);
507 client_task_runner_->PostTask(
508 FROM_HERE,
509 base::Bind(&Client::BitstreamBufferReady, client_, buffer_ref->id,
510 encode_output->size, encode_output->keyframe,
511 encode_output->capture_timestamp));
512 }
513
514 void MediaFoundationVideoEncodeAccelerator::RequestEncodingParametersChangeTask(
515 uint32_t bitrate,
516 uint32_t framerate) {
517 DVLOG(3) << __FUNCTION__;
518 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
519
520 frame_rate_ = framerate > 1 ? framerate : 1;
grt (UTC plus 2) 2016/07/14 20:07:25 either: std::max(1, framerate); since it says what you mean.
emircan 2016/07/15 04:58:31 Changed both lines: frame_rate_ = framerate ? framerate : 1;
521 target_bitrate_ = bitrate > 1 ? bitrate : 1;
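One way to write the clamp discussed above, matching the reply: both values are uint32_t, so only zero needs to be guarded against.

// Avoid a zero frame rate or bitrate; any positive value passes through unchanged.
frame_rate_ = framerate ? framerate : 1;
target_bitrate_ = bitrate ? bitrate : 1;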
522
523 VARIANT var;
524 var.vt = VT_UI4;
525 var.ulVal = target_bitrate_;
526 HRESULT hr = codec_api_->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var);
527 RETURN_ON_HR_FAILURE(hr, "Couldn't set bitrate", );
528
529 base::win::ScopedComPtr<IMFMediaType> imf_output_media_type;
530 hr = MFCreateMediaType(imf_output_media_type.Receive());
531 hr &= imf_output_media_type->SetUINT32(MF_MT_AVG_BITRATE, target_bitrate_);
grt (UTC plus 2) 2016/07/14 20:07:25 do you really want to invoke a method on |imf_output_media_type| if MFCreateMediaType() failed?
emircan 2016/07/15 04:58:30 Added an early return for that case.
532 hr &= MFSetAttributeRatio(imf_output_media_type.get(), MF_MT_FRAME_RATE,
533 framerate, kMaxFrameRateDenominator);
534 RETURN_ON_HR_FAILURE(hr, "Couldn't set output type params", );
535 }
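A sketch of the early return mentioned in the reply above: stop before touching |imf_output_media_type| if MFCreateMediaType() fails. This is a fragment of RequestEncodingParametersChangeTask(), reusing the |hr| already declared there, and checks each attribute individually rather than accumulating into |hr|:

base::win::ScopedComPtr<IMFMediaType> imf_output_media_type;
hr = MFCreateMediaType(imf_output_media_type.Receive());
// Early return: don't call methods on a media type that was never created.
RETURN_ON_HR_FAILURE(hr, "Couldn't create media type", );
hr = imf_output_media_type->SetUINT32(MF_MT_AVG_BITRATE, target_bitrate_);
RETURN_ON_HR_FAILURE(hr, "Couldn't set average bitrate", );
hr = MFSetAttributeRatio(imf_output_media_type.get(), MF_MT_FRAME_RATE,
                         framerate, kMaxFrameRateDenominator);
RETURN_ON_HR_FAILURE(hr, "Couldn't set frame rate", );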
536
537 void MediaFoundationVideoEncodeAccelerator::DestroyTask() {
538 DVLOG(3) << __FUNCTION__;
539 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
540
541 encoder_.Release();
542 }
543
544 } // namespace media