1 // Copyright 2014 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "content/common/gpu/media/vaapi_video_encode_accelerator.h" | |
6 | |
7 #include <string.h> | |
8 | |
#include <algorithm>
#include <memory>
10 #include <utility> | |
11 | |
12 #include "base/bind.h" | |
13 #include "base/callback.h" | |
14 #include "base/macros.h" | |
15 #include "base/metrics/histogram.h" | |
16 #include "base/numerics/safe_conversions.h" | |
17 #include "content/common/gpu/media/h264_dpb.h" | |
18 #include "content/common/gpu/media/shared_memory_region.h" | |
19 #include "media/base/bind_to_current_loop.h" | |
20 #include "third_party/libva/va/va_enc_h264.h" | |
21 | |
22 #define DVLOGF(level) DVLOG(level) << __FUNCTION__ << "(): " | |
23 | |
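// Sets the encoder to the error state, logs |msg| and notifies the client of
// |error|.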
24 #define NOTIFY_ERROR(error, msg) \ | |
25 do { \ | |
26 SetState(kError); \ | |
27 LOG(ERROR) << msg; \ | |
28 LOG(ERROR) << "Calling NotifyError(" << error << ")";\ | |
29 NotifyError(error); \ | |
30 } while (0) | |
31 | |
32 namespace content { | |
33 | |
34 namespace { | |
// Two surfaces are needed for each frame: one for the input data and one for
// the reconstructed picture, which is later used as a reference.
37 const size_t kMinSurfacesToEncode = 2; | |
38 | |
39 // Subjectively chosen. | |
40 const size_t kNumInputBuffers = 4; | |
41 const size_t kMaxNumReferenceFrames = 4; | |
42 | |
43 // We need up to kMaxNumReferenceFrames surfaces for reference, plus one | |
44 // for input and one for encode (which will be added to the set of reference | |
45 // frames for subsequent frames). Actual execution of HW encode is done | |
46 // in parallel, and we want to process more frames in the meantime. | |
47 // To have kNumInputBuffers in flight, we need a full set of reference + | |
48 // encode surfaces (i.e. kMaxNumReferenceFrames + kMinSurfacesToEncode), and | |
49 // (kNumInputBuffers - 1) of kMinSurfacesToEncode for the remaining frames | |
50 // in flight. | |
51 const size_t kNumSurfaces = kMaxNumReferenceFrames + kMinSurfacesToEncode + | |
52 kMinSurfacesToEncode * (kNumInputBuffers - 1); | |
53 | |
// An IDR every 2048 frames, an I frame every 256, and no B frames.
// The IDR period is chosen to equal MaxFrameNum, so it must be a power of 2.
56 const int kIDRPeriod = 2048; | |
57 const int kIPeriod = 256; | |
58 const int kIPPeriod = 1; | |
59 | |
60 const int kDefaultFramerate = 30; | |
61 | |
62 // HRD parameters (ch. E.2.2 in spec). | |
63 const int kBitRateScale = 0; // bit_rate_scale for SPS HRD parameters. | |
64 const int kCPBSizeScale = 0; // cpb_size_scale for SPS HRD parameters. | |
65 | |
66 const int kDefaultQP = 26; | |
67 // All Intel codecs can do at least 4.1. | |
68 const int kDefaultLevelIDC = 41; | |
69 const int kChromaFormatIDC = 1; // 4:2:0 | |
70 | |
71 // Arbitrarily chosen bitrate window size for rate control, in ms. | |
72 const int kCPBWindowSizeMs = 1500; | |
73 | |
74 // UMA errors that the VaapiVideoEncodeAccelerator class reports. | |
75 enum VAVEAEncoderFailure { | |
76 VAAPI_ERROR = 0, | |
  // UMA requires that the max value be greater than 1.
78 VAVEA_ENCODER_FAILURES_MAX = 2, | |
79 }; | |
80 | |
}  // namespace
82 | |
83 // Round |value| up to |alignment|, which must be a power of 2. | |
84 static inline size_t RoundUpToPowerOf2(size_t value, size_t alignment) { | |
85 // Check that |alignment| is a power of 2. | |
86 DCHECK((alignment + (alignment - 1)) == (alignment | (alignment - 1))); | |
87 return ((value + (alignment - 1)) & ~(alignment - 1)); | |
88 } | |
89 | |
90 static void ReportToUMA(VAVEAEncoderFailure failure) { | |
91 UMA_HISTOGRAM_ENUMERATION( | |
92 "Media.VAVEA.EncoderFailure", | |
93 failure, | |
94 VAVEA_ENCODER_FAILURES_MAX); | |
95 } | |
96 | |
97 struct VaapiVideoEncodeAccelerator::InputFrameRef { | |
98 InputFrameRef(const scoped_refptr<media::VideoFrame>& frame, | |
99 bool force_keyframe) | |
100 : frame(frame), force_keyframe(force_keyframe) {} | |
101 const scoped_refptr<media::VideoFrame> frame; | |
102 const bool force_keyframe; | |
103 }; | |
104 | |
105 struct VaapiVideoEncodeAccelerator::BitstreamBufferRef { | |
106 BitstreamBufferRef(int32_t id, std::unique_ptr<SharedMemoryRegion> shm) | |
107 : id(id), shm(std::move(shm)) {} | |
108 const int32_t id; | |
109 const std::unique_ptr<SharedMemoryRegion> shm; | |
110 }; | |
111 | |
112 media::VideoEncodeAccelerator::SupportedProfiles | |
113 VaapiVideoEncodeAccelerator::GetSupportedProfiles() { | |
114 return VaapiWrapper::GetSupportedEncodeProfiles(); | |
115 } | |
116 | |
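// Returns log2 of |x|, which must be a power of two; used to derive the
// log2_max_frame_num_minus4 and log2_max_pic_order_cnt_lsb_minus4 SPS fields.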
117 static unsigned int Log2OfPowerOf2(unsigned int x) { | |
118 CHECK_GT(x, 0u); | |
119 DCHECK_EQ(x & (x - 1), 0u); | |
120 | |
  unsigned int log = 0;
122 while (x > 1) { | |
123 x >>= 1; | |
124 ++log; | |
125 } | |
126 return log; | |
127 } | |
128 | |
129 VaapiVideoEncodeAccelerator::VaapiVideoEncodeAccelerator() | |
130 : profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN), | |
131 mb_width_(0), | |
132 mb_height_(0), | |
133 output_buffer_byte_size_(0), | |
134 state_(kUninitialized), | |
135 frame_num_(0), | |
136 idr_pic_id_(0), | |
137 bitrate_(0), | |
138 framerate_(0), | |
139 cpb_size_(0), | |
140 encoding_parameters_changed_(false), | |
141 encoder_thread_("VAVEAEncoderThread"), | |
142 child_task_runner_(base::ThreadTaskRunnerHandle::Get()), | |
143 weak_this_ptr_factory_(this) { | |
144 DVLOGF(4); | |
145 weak_this_ = weak_this_ptr_factory_.GetWeakPtr(); | |
146 | |
147 max_ref_idx_l0_size_ = kMaxNumReferenceFrames; | |
148 qp_ = kDefaultQP; | |
149 idr_period_ = kIDRPeriod; | |
150 i_period_ = kIPeriod; | |
151 ip_period_ = kIPPeriod; | |
152 } | |
153 | |
154 VaapiVideoEncodeAccelerator::~VaapiVideoEncodeAccelerator() { | |
155 DVLOGF(4); | |
156 DCHECK(child_task_runner_->BelongsToCurrentThread()); | |
157 DCHECK(!encoder_thread_.IsRunning()); | |
158 } | |
159 | |
160 bool VaapiVideoEncodeAccelerator::Initialize( | |
161 media::VideoPixelFormat format, | |
162 const gfx::Size& input_visible_size, | |
163 media::VideoCodecProfile output_profile, | |
164 uint32_t initial_bitrate, | |
165 Client* client) { | |
166 DCHECK(child_task_runner_->BelongsToCurrentThread()); | |
167 DCHECK(!encoder_thread_.IsRunning()); | |
168 DCHECK_EQ(state_, kUninitialized); | |
169 | |
170 DVLOGF(1) << "Initializing VAVEA, input_format: " | |
171 << media::VideoPixelFormatToString(format) | |
172 << ", input_visible_size: " << input_visible_size.ToString() | |
173 << ", output_profile: " << output_profile | |
174 << ", initial_bitrate: " << initial_bitrate; | |
175 | |
176 client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client)); | |
177 client_ = client_ptr_factory_->GetWeakPtr(); | |
178 | |
179 const SupportedProfiles& profiles = GetSupportedProfiles(); | |
  auto profile = std::find_if(profiles.begin(), profiles.end(),
181 [output_profile](const SupportedProfile& profile) { | |
182 return profile.profile == output_profile; | |
183 }); | |
184 if (profile == profiles.end()) { | |
185 DVLOGF(1) << "Unsupported output profile " << output_profile; | |
186 return false; | |
187 } | |
188 if (input_visible_size.width() > profile->max_resolution.width() || | |
189 input_visible_size.height() > profile->max_resolution.height()) { | |
190 DVLOGF(1) << "Input size too big: " << input_visible_size.ToString() | |
191 << ", max supported size: " << profile->max_resolution.ToString(); | |
192 return false; | |
193 } | |
194 | |
195 if (format != media::PIXEL_FORMAT_I420) { | |
196 DVLOGF(1) << "Unsupported input format: " | |
197 << media::VideoPixelFormatToString(format); | |
198 return false; | |
199 } | |
200 | |
201 profile_ = output_profile; | |
202 visible_size_ = input_visible_size; | |
  // 4:2:0 requires even (2-aligned) frame dimensions.
204 DCHECK_EQ(visible_size_.width() % 2, 0); | |
205 DCHECK_EQ(visible_size_.height() % 2, 0); | |
206 coded_size_ = gfx::Size(RoundUpToPowerOf2(visible_size_.width(), 16), | |
207 RoundUpToPowerOf2(visible_size_.height(), 16)); | |
208 mb_width_ = coded_size_.width() / 16; | |
209 mb_height_ = coded_size_.height() / 16; | |
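  // Use one byte per pixel of the coded area as the output buffer size
  // estimate for a compressed frame.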
210 output_buffer_byte_size_ = coded_size_.GetArea(); | |
211 | |
212 UpdateRates(initial_bitrate, kDefaultFramerate); | |
213 | |
214 vaapi_wrapper_ = | |
215 VaapiWrapper::CreateForVideoCodec(VaapiWrapper::kEncode, output_profile, | |
216 base::Bind(&ReportToUMA, VAAPI_ERROR)); | |
217 if (!vaapi_wrapper_.get()) { | |
218 DVLOGF(1) << "Failed initializing VAAPI for profile " << output_profile; | |
219 return false; | |
220 } | |
221 | |
222 if (!encoder_thread_.Start()) { | |
223 LOG(ERROR) << "Failed to start encoder thread"; | |
224 return false; | |
225 } | |
226 encoder_thread_task_runner_ = encoder_thread_.task_runner(); | |
227 | |
228 // Finish the remaining initialization on the encoder thread. | |
229 encoder_thread_task_runner_->PostTask( | |
230 FROM_HERE, base::Bind(&VaapiVideoEncodeAccelerator::InitializeTask, | |
231 base::Unretained(this))); | |
232 | |
233 return true; | |
234 } | |
235 | |
236 void VaapiVideoEncodeAccelerator::InitializeTask() { | |
237 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); | |
238 DCHECK_EQ(state_, kUninitialized); | |
239 DVLOGF(4); | |
240 | |
241 va_surface_release_cb_ = media::BindToCurrentLoop( | |
242 base::Bind(&VaapiVideoEncodeAccelerator::RecycleVASurfaceID, | |
243 base::Unretained(this))); | |
244 | |
245 if (!vaapi_wrapper_->CreateSurfaces(VA_RT_FORMAT_YUV420, coded_size_, | |
246 kNumSurfaces, | |
247 &available_va_surface_ids_)) { | |
248 NOTIFY_ERROR(kPlatformFailureError, "Failed creating VASurfaces"); | |
249 return; | |
250 } | |
251 | |
252 UpdateSPS(); | |
253 GeneratePackedSPS(); | |
254 | |
255 UpdatePPS(); | |
256 GeneratePackedPPS(); | |
257 | |
258 child_task_runner_->PostTask( | |
259 FROM_HERE, | |
260 base::Bind(&Client::RequireBitstreamBuffers, client_, kNumInputBuffers, | |
261 coded_size_, output_buffer_byte_size_)); | |
262 | |
263 SetState(kEncoding); | |
264 } | |
265 | |
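// Called when a VASurface is no longer in use. Returns it to the pool of
// available surfaces and tries to resume encoding of pending frames.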
266 void VaapiVideoEncodeAccelerator::RecycleVASurfaceID( | |
267 VASurfaceID va_surface_id) { | |
268 DVLOGF(4) << "va_surface_id: " << va_surface_id; | |
269 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); | |
270 | |
271 available_va_surface_ids_.push_back(va_surface_id); | |
272 EncodeFrameTask(); | |
273 } | |
274 | |
275 void VaapiVideoEncodeAccelerator::BeginFrame(bool force_keyframe) { | |
276 current_pic_ = new H264Picture(); | |
277 | |
278 // If the current picture is an IDR picture, frame_num shall be equal to 0. | |
279 if (force_keyframe) | |
280 frame_num_ = 0; | |
281 | |
282 current_pic_->frame_num = frame_num_++; | |
283 frame_num_ %= idr_period_; | |
284 | |
285 if (current_pic_->frame_num == 0) { | |
286 current_pic_->idr = true; | |
287 // H264 spec mandates idr_pic_id to differ between two consecutive IDRs. | |
288 idr_pic_id_ ^= 1; | |
289 ref_pic_list0_.clear(); | |
290 } | |
291 | |
292 if (current_pic_->frame_num % i_period_ == 0) | |
293 current_pic_->type = media::H264SliceHeader::kISlice; | |
294 else | |
295 current_pic_->type = media::H264SliceHeader::kPSlice; | |
296 | |
297 if (current_pic_->type != media::H264SliceHeader::kBSlice) | |
298 current_pic_->ref = true; | |
299 | |
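  // With pic_order_cnt_type 0, progressive frames and no B frames, the POC
  // simply advances by two per frame.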
300 current_pic_->pic_order_cnt = current_pic_->frame_num * 2; | |
301 current_pic_->top_field_order_cnt = current_pic_->pic_order_cnt; | |
302 current_pic_->pic_order_cnt_lsb = current_pic_->pic_order_cnt; | |
303 | |
304 current_encode_job_->keyframe = current_pic_->idr; | |
305 | |
306 DVLOGF(4) << "Starting a new frame, type: " << current_pic_->type | |
307 << (force_keyframe ? " (forced keyframe)" : "") | |
308 << " frame_num: " << current_pic_->frame_num | |
309 << " POC: " << current_pic_->pic_order_cnt; | |
310 } | |
311 | |
312 void VaapiVideoEncodeAccelerator::EndFrame() { | |
313 DCHECK(current_pic_); | |
  // Store the picture in the reference picture list and keep the list at or
  // below its maximum size, dropping the oldest references.
316 if (current_pic_->ref) | |
317 ref_pic_list0_.push_front(current_encode_job_->recon_surface); | |
318 size_t max_num_ref_frames = | |
319 base::checked_cast<size_t>(current_sps_.max_num_ref_frames); | |
320 while (ref_pic_list0_.size() > max_num_ref_frames) | |
321 ref_pic_list0_.pop_back(); | |
322 | |
323 submitted_encode_jobs_.push(make_linked_ptr(current_encode_job_.release())); | |
324 } | |
325 | |
326 static void InitVAPicture(VAPictureH264* va_pic) { | |
327 memset(va_pic, 0, sizeof(*va_pic)); | |
328 va_pic->picture_id = VA_INVALID_ID; | |
329 va_pic->flags = VA_PICTURE_H264_INVALID; | |
330 } | |
331 | |
332 bool VaapiVideoEncodeAccelerator::SubmitFrameParameters() { | |
333 DCHECK(current_pic_); | |
334 VAEncSequenceParameterBufferH264 seq_param; | |
335 memset(&seq_param, 0, sizeof(seq_param)); | |
336 | |
337 #define SPS_TO_SP(a) seq_param.a = current_sps_.a; | |
338 SPS_TO_SP(seq_parameter_set_id); | |
339 SPS_TO_SP(level_idc); | |
340 | |
341 seq_param.intra_period = i_period_; | |
342 seq_param.intra_idr_period = idr_period_; | |
343 seq_param.ip_period = ip_period_; | |
344 seq_param.bits_per_second = bitrate_; | |
345 | |
346 SPS_TO_SP(max_num_ref_frames); | |
347 seq_param.picture_width_in_mbs = mb_width_; | |
348 seq_param.picture_height_in_mbs = mb_height_; | |
349 | |
350 #define SPS_TO_SP_FS(a) seq_param.seq_fields.bits.a = current_sps_.a; | |
351 SPS_TO_SP_FS(chroma_format_idc); | |
352 SPS_TO_SP_FS(frame_mbs_only_flag); | |
353 SPS_TO_SP_FS(log2_max_frame_num_minus4); | |
354 SPS_TO_SP_FS(pic_order_cnt_type); | |
355 SPS_TO_SP_FS(log2_max_pic_order_cnt_lsb_minus4); | |
356 #undef SPS_TO_SP_FS | |
357 | |
358 SPS_TO_SP(bit_depth_luma_minus8); | |
359 SPS_TO_SP(bit_depth_chroma_minus8); | |
360 | |
361 SPS_TO_SP(frame_cropping_flag); | |
362 if (current_sps_.frame_cropping_flag) { | |
363 SPS_TO_SP(frame_crop_left_offset); | |
364 SPS_TO_SP(frame_crop_right_offset); | |
365 SPS_TO_SP(frame_crop_top_offset); | |
366 SPS_TO_SP(frame_crop_bottom_offset); | |
367 } | |
368 | |
369 SPS_TO_SP(vui_parameters_present_flag); | |
370 #define SPS_TO_SP_VF(a) seq_param.vui_fields.bits.a = current_sps_.a; | |
371 SPS_TO_SP_VF(timing_info_present_flag); | |
372 #undef SPS_TO_SP_VF | |
373 SPS_TO_SP(num_units_in_tick); | |
374 SPS_TO_SP(time_scale); | |
375 #undef SPS_TO_SP | |
376 | |
377 if (!vaapi_wrapper_->SubmitBuffer(VAEncSequenceParameterBufferType, | |
378 sizeof(seq_param), | |
379 &seq_param)) | |
380 return false; | |
381 | |
382 VAEncPictureParameterBufferH264 pic_param; | |
383 memset(&pic_param, 0, sizeof(pic_param)); | |
384 | |
385 pic_param.CurrPic.picture_id = current_encode_job_->recon_surface->id(); | |
386 pic_param.CurrPic.TopFieldOrderCnt = current_pic_->top_field_order_cnt; | |
387 pic_param.CurrPic.BottomFieldOrderCnt = current_pic_->bottom_field_order_cnt; | |
388 pic_param.CurrPic.flags = 0; | |
389 | |
390 for (size_t i = 0; i < arraysize(pic_param.ReferenceFrames); ++i) | |
391 InitVAPicture(&pic_param.ReferenceFrames[i]); | |
392 | |
393 DCHECK_LE(ref_pic_list0_.size(), arraysize(pic_param.ReferenceFrames)); | |
394 RefPicList::const_iterator iter = ref_pic_list0_.begin(); | |
395 for (size_t i = 0; | |
396 i < arraysize(pic_param.ReferenceFrames) && iter != ref_pic_list0_.end(); | |
397 ++iter, ++i) { | |
398 pic_param.ReferenceFrames[i].picture_id = (*iter)->id(); | |
399 pic_param.ReferenceFrames[i].flags = 0; | |
400 } | |
401 | |
402 pic_param.coded_buf = current_encode_job_->coded_buffer; | |
403 pic_param.pic_parameter_set_id = current_pps_.pic_parameter_set_id; | |
404 pic_param.seq_parameter_set_id = current_pps_.seq_parameter_set_id; | |
405 pic_param.frame_num = current_pic_->frame_num; | |
406 pic_param.pic_init_qp = qp_; | |
407 pic_param.num_ref_idx_l0_active_minus1 = max_ref_idx_l0_size_ - 1; | |
408 pic_param.pic_fields.bits.idr_pic_flag = current_pic_->idr; | |
409 pic_param.pic_fields.bits.reference_pic_flag = current_pic_->ref; | |
410 #define PPS_TO_PP_PF(a) pic_param.pic_fields.bits.a = current_pps_.a; | |
411 PPS_TO_PP_PF(entropy_coding_mode_flag); | |
412 PPS_TO_PP_PF(transform_8x8_mode_flag); | |
413 PPS_TO_PP_PF(deblocking_filter_control_present_flag); | |
414 #undef PPS_TO_PP_PF | |
415 | |
416 if (!vaapi_wrapper_->SubmitBuffer(VAEncPictureParameterBufferType, | |
417 sizeof(pic_param), | |
418 &pic_param)) | |
419 return false; | |
420 | |
421 VAEncSliceParameterBufferH264 slice_param; | |
422 memset(&slice_param, 0, sizeof(slice_param)); | |
423 | |
424 slice_param.num_macroblocks = mb_width_ * mb_height_; | |
425 slice_param.macroblock_info = VA_INVALID_ID; | |
426 slice_param.slice_type = current_pic_->type; | |
427 slice_param.pic_parameter_set_id = current_pps_.pic_parameter_set_id; | |
428 slice_param.idr_pic_id = idr_pic_id_; | |
429 slice_param.pic_order_cnt_lsb = current_pic_->pic_order_cnt_lsb; | |
430 slice_param.num_ref_idx_active_override_flag = true; | |
431 | |
432 for (size_t i = 0; i < arraysize(slice_param.RefPicList0); ++i) | |
433 InitVAPicture(&slice_param.RefPicList0[i]); | |
434 | |
435 for (size_t i = 0; i < arraysize(slice_param.RefPicList1); ++i) | |
436 InitVAPicture(&slice_param.RefPicList1[i]); | |
437 | |
438 DCHECK_LE(ref_pic_list0_.size(), arraysize(slice_param.RefPicList0)); | |
439 iter = ref_pic_list0_.begin(); | |
440 for (size_t i = 0; | |
441 i < arraysize(slice_param.RefPicList0) && iter != ref_pic_list0_.end(); | |
442 ++iter, ++i) { | |
443 InitVAPicture(&slice_param.RefPicList0[i]); | |
444 slice_param.RefPicList0[i].picture_id = (*iter)->id(); | |
445 slice_param.RefPicList0[i].flags = 0; | |
446 } | |
447 | |
448 if (!vaapi_wrapper_->SubmitBuffer(VAEncSliceParameterBufferType, | |
449 sizeof(slice_param), | |
450 &slice_param)) | |
451 return false; | |
452 | |
453 VAEncMiscParameterRateControl rate_control_param; | |
454 memset(&rate_control_param, 0, sizeof(rate_control_param)); | |
455 rate_control_param.bits_per_second = bitrate_; | |
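  // target_percentage expresses the target bitrate as a percentage of
  // bits_per_second (the maximum bitrate) in VA-API rate control.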
456 rate_control_param.target_percentage = 90; | |
457 rate_control_param.window_size = kCPBWindowSizeMs; | |
458 rate_control_param.initial_qp = qp_; | |
459 rate_control_param.rc_flags.bits.disable_frame_skip = true; | |
460 | |
461 if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer( | |
462 VAEncMiscParameterTypeRateControl, | |
463 sizeof(rate_control_param), | |
464 &rate_control_param)) | |
465 return false; | |
466 | |
467 VAEncMiscParameterFrameRate framerate_param; | |
468 memset(&framerate_param, 0, sizeof(framerate_param)); | |
469 framerate_param.framerate = framerate_; | |
470 if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer( | |
471 VAEncMiscParameterTypeFrameRate, | |
472 sizeof(framerate_param), | |
473 &framerate_param)) | |
474 return false; | |
475 | |
476 VAEncMiscParameterHRD hrd_param; | |
477 memset(&hrd_param, 0, sizeof(hrd_param)); | |
478 hrd_param.buffer_size = cpb_size_; | |
479 hrd_param.initial_buffer_fullness = cpb_size_ / 2; | |
480 if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer(VAEncMiscParameterTypeHRD, | |
481 sizeof(hrd_param), | |
482 &hrd_param)) | |
483 return false; | |
484 | |
485 return true; | |
486 } | |
487 | |
488 bool VaapiVideoEncodeAccelerator::SubmitHeadersIfNeeded() { | |
489 DCHECK(current_pic_); | |
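  // Packed SPS/PPS headers are submitted only along with I (IDR) frames.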
490 if (current_pic_->type != media::H264SliceHeader::kISlice) | |
491 return true; | |
492 | |
  // Submit SPS.
494 VAEncPackedHeaderParameterBuffer par_buffer; | |
495 memset(&par_buffer, 0, sizeof(par_buffer)); | |
496 par_buffer.type = VAEncPackedHeaderSequence; | |
497 par_buffer.bit_length = packed_sps_.BytesInBuffer() * 8; | |
498 | |
499 if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderParameterBufferType, | |
500 sizeof(par_buffer), | |
501 &par_buffer)) | |
502 return false; | |
503 | |
504 if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderDataBufferType, | |
505 packed_sps_.BytesInBuffer(), | |
506 packed_sps_.data())) | |
507 return false; | |
508 | |
509 // Submit PPS. | |
510 memset(&par_buffer, 0, sizeof(par_buffer)); | |
511 par_buffer.type = VAEncPackedHeaderPicture; | |
512 par_buffer.bit_length = packed_pps_.BytesInBuffer() * 8; | |
513 | |
514 if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderParameterBufferType, | |
515 sizeof(par_buffer), | |
516 &par_buffer)) | |
517 return false; | |
518 | |
519 if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderDataBufferType, | |
520 packed_pps_.BytesInBuffer(), | |
521 packed_pps_.data())) | |
522 return false; | |
523 | |
524 return true; | |
525 } | |
526 | |
527 bool VaapiVideoEncodeAccelerator::ExecuteEncode() { | |
528 DCHECK(current_pic_); | |
529 DVLOGF(3) << "Encoding frame_num: " << current_pic_->frame_num; | |
530 return vaapi_wrapper_->ExecuteAndDestroyPendingBuffers( | |
531 current_encode_job_->input_surface->id()); | |
532 } | |
533 | |
534 bool VaapiVideoEncodeAccelerator::UploadFrame( | |
535 const scoped_refptr<media::VideoFrame>& frame) { | |
536 return vaapi_wrapper_->UploadVideoFrameToSurface( | |
537 frame, current_encode_job_->input_surface->id()); | |
538 } | |
539 | |
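// Downloads the coded data of the oldest submitted encode job into the oldest
// available bitstream buffer and returns it to the client, if both exist.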
540 void VaapiVideoEncodeAccelerator::TryToReturnBitstreamBuffer() { | |
541 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); | |
542 | |
543 if (state_ != kEncoding) | |
544 return; | |
545 | |
546 if (submitted_encode_jobs_.empty() || available_bitstream_buffers_.empty()) | |
547 return; | |
548 | |
549 linked_ptr<BitstreamBufferRef> buffer = available_bitstream_buffers_.front(); | |
550 available_bitstream_buffers_.pop(); | |
551 | |
552 uint8_t* target_data = reinterpret_cast<uint8_t*>(buffer->shm->memory()); | |
553 | |
554 linked_ptr<EncodeJob> encode_job = submitted_encode_jobs_.front(); | |
555 submitted_encode_jobs_.pop(); | |
556 | |
557 size_t data_size = 0; | |
558 if (!vaapi_wrapper_->DownloadAndDestroyCodedBuffer( | |
559 encode_job->coded_buffer, encode_job->input_surface->id(), | |
560 target_data, buffer->shm->size(), &data_size)) { | |
561 NOTIFY_ERROR(kPlatformFailureError, "Failed downloading coded buffer"); | |
562 return; | |
563 } | |
564 | |
565 DVLOGF(3) << "Returning bitstream buffer " | |
566 << (encode_job->keyframe ? "(keyframe)" : "") | |
567 << " id: " << buffer->id << " size: " << data_size; | |
568 | |
569 child_task_runner_->PostTask( | |
570 FROM_HERE, base::Bind(&Client::BitstreamBufferReady, client_, buffer->id, | |
571 data_size, encode_job->keyframe)); | |
572 } | |
573 | |
574 void VaapiVideoEncodeAccelerator::Encode( | |
575 const scoped_refptr<media::VideoFrame>& frame, | |
576 bool force_keyframe) { | |
577 DVLOGF(3) << "Frame timestamp: " << frame->timestamp().InMilliseconds() | |
578 << " force_keyframe: " << force_keyframe; | |
579 DCHECK(child_task_runner_->BelongsToCurrentThread()); | |
580 | |
581 encoder_thread_task_runner_->PostTask( | |
582 FROM_HERE, base::Bind(&VaapiVideoEncodeAccelerator::EncodeTask, | |
583 base::Unretained(this), frame, force_keyframe)); | |
584 } | |
585 | |
586 bool VaapiVideoEncodeAccelerator::PrepareNextJob() { | |
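  // Each encode job needs one surface for the input frame and one for the
  // reconstructed picture.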
587 if (available_va_surface_ids_.size() < kMinSurfacesToEncode) | |
588 return false; | |
589 | |
590 DCHECK(!current_encode_job_); | |
591 current_encode_job_.reset(new EncodeJob()); | |
592 | |
  if (!vaapi_wrapper_->CreateCodedBuffer(output_buffer_byte_size_,
                                         &current_encode_job_->coded_buffer)) {
595 NOTIFY_ERROR(kPlatformFailureError, "Failed creating coded buffer"); | |
596 return false; | |
597 } | |
598 | |
599 current_encode_job_->input_surface = new VASurface( | |
600 available_va_surface_ids_.back(), coded_size_, | |
601 vaapi_wrapper_->va_surface_format(), va_surface_release_cb_); | |
602 available_va_surface_ids_.pop_back(); | |
603 | |
604 current_encode_job_->recon_surface = new VASurface( | |
605 available_va_surface_ids_.back(), coded_size_, | |
606 vaapi_wrapper_->va_surface_format(), va_surface_release_cb_); | |
607 available_va_surface_ids_.pop_back(); | |
608 | |
  // Reference surfaces are needed until the job is done, but they may be
  // dropped from ref_pic_list0_ when it is full at the end of job submission.
  // Keep references to them along with the job and release them only after
  // the job has synced.
612 current_encode_job_->reference_surfaces = ref_pic_list0_; | |
613 | |
614 return true; | |
615 } | |
616 | |
617 void VaapiVideoEncodeAccelerator::EncodeTask( | |
618 const scoped_refptr<media::VideoFrame>& frame, | |
619 bool force_keyframe) { | |
620 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); | |
621 DCHECK_NE(state_, kUninitialized); | |
622 | |
623 encoder_input_queue_.push( | |
624 make_linked_ptr(new InputFrameRef(frame, force_keyframe))); | |
625 EncodeFrameTask(); | |
626 } | |
627 | |
628 void VaapiVideoEncodeAccelerator::EncodeFrameTask() { | |
629 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); | |
630 | |
631 if (state_ != kEncoding || encoder_input_queue_.empty()) | |
632 return; | |
633 | |
634 if (!PrepareNextJob()) { | |
635 DVLOGF(4) << "Not ready for next frame yet"; | |
636 return; | |
637 } | |
638 | |
639 linked_ptr<InputFrameRef> frame_ref = encoder_input_queue_.front(); | |
640 encoder_input_queue_.pop(); | |
641 | |
642 if (!UploadFrame(frame_ref->frame)) { | |
643 NOTIFY_ERROR(kPlatformFailureError, "Failed uploading source frame to HW."); | |
644 return; | |
645 } | |
646 | |
647 BeginFrame(frame_ref->force_keyframe || encoding_parameters_changed_); | |
648 encoding_parameters_changed_ = false; | |
649 | |
650 if (!SubmitFrameParameters()) { | |
651 NOTIFY_ERROR(kPlatformFailureError, "Failed submitting frame parameters."); | |
652 return; | |
653 } | |
654 | |
655 if (!SubmitHeadersIfNeeded()) { | |
656 NOTIFY_ERROR(kPlatformFailureError, "Failed submitting frame headers."); | |
657 return; | |
658 } | |
659 | |
660 if (!ExecuteEncode()) { | |
661 NOTIFY_ERROR(kPlatformFailureError, "Failed submitting encode job to HW."); | |
662 return; | |
663 } | |
664 | |
665 EndFrame(); | |
666 TryToReturnBitstreamBuffer(); | |
667 } | |
668 | |
669 void VaapiVideoEncodeAccelerator::UseOutputBitstreamBuffer( | |
670 const media::BitstreamBuffer& buffer) { | |
671 DVLOGF(4) << "id: " << buffer.id(); | |
672 DCHECK(child_task_runner_->BelongsToCurrentThread()); | |
673 | |
674 if (buffer.size() < output_buffer_byte_size_) { | |
675 NOTIFY_ERROR(kInvalidArgumentError, "Provided bitstream buffer too small"); | |
676 return; | |
677 } | |
678 | |
679 std::unique_ptr<SharedMemoryRegion> shm( | |
680 new SharedMemoryRegion(buffer, false)); | |
681 if (!shm->Map()) { | |
682 NOTIFY_ERROR(kPlatformFailureError, "Failed mapping shared memory."); | |
683 return; | |
684 } | |
685 | |
686 std::unique_ptr<BitstreamBufferRef> buffer_ref( | |
687 new BitstreamBufferRef(buffer.id(), std::move(shm))); | |
688 | |
689 encoder_thread_task_runner_->PostTask( | |
690 FROM_HERE, | |
691 base::Bind(&VaapiVideoEncodeAccelerator::UseOutputBitstreamBufferTask, | |
692 base::Unretained(this), base::Passed(&buffer_ref))); | |
693 } | |
694 | |
695 void VaapiVideoEncodeAccelerator::UseOutputBitstreamBufferTask( | |
696 std::unique_ptr<BitstreamBufferRef> buffer_ref) { | |
697 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); | |
698 DCHECK_NE(state_, kUninitialized); | |
699 | |
700 available_bitstream_buffers_.push(make_linked_ptr(buffer_ref.release())); | |
701 TryToReturnBitstreamBuffer(); | |
702 } | |
703 | |
704 void VaapiVideoEncodeAccelerator::RequestEncodingParametersChange( | |
705 uint32_t bitrate, | |
706 uint32_t framerate) { | |
707 DVLOGF(2) << "bitrate: " << bitrate << " framerate: " << framerate; | |
708 DCHECK(child_task_runner_->BelongsToCurrentThread()); | |
709 | |
710 encoder_thread_task_runner_->PostTask( | |
711 FROM_HERE, | |
712 base::Bind( | |
713 &VaapiVideoEncodeAccelerator::RequestEncodingParametersChangeTask, | |
714 base::Unretained(this), bitrate, framerate)); | |
715 } | |
716 | |
717 void VaapiVideoEncodeAccelerator::UpdateRates(uint32_t bitrate, | |
718 uint32_t framerate) { | |
719 if (encoder_thread_.IsRunning()) | |
720 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); | |
721 DCHECK_NE(bitrate, 0u); | |
722 DCHECK_NE(framerate, 0u); | |
723 bitrate_ = bitrate; | |
724 framerate_ = framerate; | |
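  // The CPB size corresponds to the number of bits that may be produced over
  // one rate control window at the target bitrate.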
725 cpb_size_ = bitrate_ * kCPBWindowSizeMs / 1000; | |
726 } | |
727 | |
728 void VaapiVideoEncodeAccelerator::RequestEncodingParametersChangeTask( | |
729 uint32_t bitrate, | |
730 uint32_t framerate) { | |
731 DVLOGF(2) << "bitrate: " << bitrate << " framerate: " << framerate; | |
732 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); | |
733 DCHECK_NE(state_, kUninitialized); | |
734 | |
  // Workaround: as part of initial setup, the WebRTC video encoder may
  // temporarily provide a zero bitrate or framerate, which VA-API does not
  // accept, so clamp both to at least 1.
  // TODO: This code is shared with v4l2_video_encode_accelerator.cc; perhaps
  // it could be pulled up into RTCVideoEncoder.
740 if (bitrate < 1) | |
741 bitrate = 1; | |
742 if (framerate < 1) | |
743 framerate = 1; | |
744 | |
745 if (bitrate_ == bitrate && framerate_ == framerate) | |
746 return; | |
747 | |
748 UpdateRates(bitrate, framerate); | |
749 | |
750 UpdateSPS(); | |
751 GeneratePackedSPS(); | |
752 | |
753 // Submit new parameters along with next frame that will be processed. | |
754 encoding_parameters_changed_ = true; | |
755 } | |
756 | |
757 void VaapiVideoEncodeAccelerator::Destroy() { | |
758 DCHECK(child_task_runner_->BelongsToCurrentThread()); | |
759 | |
760 // Can't call client anymore after Destroy() returns. | |
761 client_ptr_factory_.reset(); | |
762 weak_this_ptr_factory_.InvalidateWeakPtrs(); | |
763 | |
764 // Early-exit encoder tasks if they are running and join the thread. | |
765 if (encoder_thread_.IsRunning()) { | |
766 encoder_thread_.message_loop()->PostTask( | |
767 FROM_HERE, | |
768 base::Bind(&VaapiVideoEncodeAccelerator::DestroyTask, | |
769 base::Unretained(this))); | |
770 encoder_thread_.Stop(); | |
771 } | |
772 | |
773 delete this; | |
774 } | |
775 | |
776 void VaapiVideoEncodeAccelerator::DestroyTask() { | |
777 DVLOGF(2); | |
778 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); | |
779 SetState(kError); | |
780 } | |
781 | |
782 void VaapiVideoEncodeAccelerator::UpdateSPS() { | |
  memset(&current_sps_, 0, sizeof(media::H264SPS));
784 | |
785 // Spec A.2 and A.3. | |
786 switch (profile_) { | |
787 case media::H264PROFILE_BASELINE: | |
788 // Due to crbug.com/345569, we don't distinguish between constrained | |
789 // and non-constrained baseline profiles. Since many codecs can't do | |
790 // non-constrained, and constrained is usually what we mean (and it's a | |
791 // subset of non-constrained), default to it. | |
792 current_sps_.profile_idc = media::H264SPS::kProfileIDCBaseline; | |
793 current_sps_.constraint_set0_flag = true; | |
794 break; | |
795 case media::H264PROFILE_MAIN: | |
796 current_sps_.profile_idc = media::H264SPS::kProfileIDCMain; | |
797 current_sps_.constraint_set1_flag = true; | |
798 break; | |
799 case media::H264PROFILE_HIGH: | |
800 current_sps_.profile_idc = media::H264SPS::kProfileIDCHigh; | |
801 break; | |
802 default: | |
803 NOTIMPLEMENTED(); | |
804 return; | |
805 } | |
806 | |
807 current_sps_.level_idc = kDefaultLevelIDC; | |
808 current_sps_.seq_parameter_set_id = 0; | |
809 current_sps_.chroma_format_idc = kChromaFormatIDC; | |
810 | |
811 DCHECK_GE(idr_period_, 1u << 4); | |
812 current_sps_.log2_max_frame_num_minus4 = Log2OfPowerOf2(idr_period_) - 4; | |
813 current_sps_.pic_order_cnt_type = 0; | |
814 current_sps_.log2_max_pic_order_cnt_lsb_minus4 = | |
815 Log2OfPowerOf2(idr_period_ * 2) - 4; | |
816 current_sps_.max_num_ref_frames = max_ref_idx_l0_size_; | |
817 | |
818 current_sps_.frame_mbs_only_flag = true; | |
819 | |
820 DCHECK_GT(mb_width_, 0u); | |
821 DCHECK_GT(mb_height_, 0u); | |
822 current_sps_.pic_width_in_mbs_minus1 = mb_width_ - 1; | |
823 DCHECK(current_sps_.frame_mbs_only_flag); | |
824 current_sps_.pic_height_in_map_units_minus1 = mb_height_ - 1; | |
825 | |
826 if (visible_size_ != coded_size_) { | |
827 // Visible size differs from coded size, fill crop information. | |
828 current_sps_.frame_cropping_flag = true; | |
829 DCHECK(!current_sps_.separate_colour_plane_flag); | |
830 // Spec table 6-1. Only 4:2:0 for now. | |
831 DCHECK_EQ(current_sps_.chroma_format_idc, 1); | |
832 // Spec 7.4.2.1.1. Crop is in crop units, which is 2 pixels for 4:2:0. | |
833 const unsigned int crop_unit_x = 2; | |
834 const unsigned int crop_unit_y = 2 * (2 - current_sps_.frame_mbs_only_flag); | |
835 current_sps_.frame_crop_left_offset = 0; | |
836 current_sps_.frame_crop_right_offset = | |
837 (coded_size_.width() - visible_size_.width()) / crop_unit_x; | |
838 current_sps_.frame_crop_top_offset = 0; | |
839 current_sps_.frame_crop_bottom_offset = | |
840 (coded_size_.height() - visible_size_.height()) / crop_unit_y; | |
841 } | |
842 | |
843 current_sps_.vui_parameters_present_flag = true; | |
844 current_sps_.timing_info_present_flag = true; | |
845 current_sps_.num_units_in_tick = 1; | |
846 current_sps_.time_scale = framerate_ * 2; // See equation D-2 in spec. | |
847 current_sps_.fixed_frame_rate_flag = true; | |
848 | |
849 current_sps_.nal_hrd_parameters_present_flag = true; | |
850 // H.264 spec ch. E.2.2. | |
851 current_sps_.cpb_cnt_minus1 = 0; | |
852 current_sps_.bit_rate_scale = kBitRateScale; | |
853 current_sps_.cpb_size_scale = kCPBSizeScale; | |
854 current_sps_.bit_rate_value_minus1[0] = | |
855 (bitrate_ >> | |
856 (kBitRateScale + media::H264SPS::kBitRateScaleConstantTerm)) - 1; | |
857 current_sps_.cpb_size_value_minus1[0] = | |
858 (cpb_size_ >> | |
859 (kCPBSizeScale + media::H264SPS::kCPBSizeScaleConstantTerm)) - 1; | |
860 current_sps_.cbr_flag[0] = true; | |
861 current_sps_.initial_cpb_removal_delay_length_minus_1 = | |
862 media::H264SPS::kDefaultInitialCPBRemovalDelayLength - 1; | |
863 current_sps_.cpb_removal_delay_length_minus1 = | |
864 media::H264SPS::kDefaultInitialCPBRemovalDelayLength - 1; | |
865 current_sps_.dpb_output_delay_length_minus1 = | |
866 media::H264SPS::kDefaultDPBOutputDelayLength - 1; | |
867 current_sps_.time_offset_length = media::H264SPS::kDefaultTimeOffsetLength; | |
868 current_sps_.low_delay_hrd_flag = false; | |
869 } | |
870 | |
871 void VaapiVideoEncodeAccelerator::GeneratePackedSPS() { | |
872 packed_sps_.Reset(); | |
873 | |
874 packed_sps_.BeginNALU(media::H264NALU::kSPS, 3); | |
875 | |
876 packed_sps_.AppendBits(8, current_sps_.profile_idc); | |
877 packed_sps_.AppendBool(current_sps_.constraint_set0_flag); | |
878 packed_sps_.AppendBool(current_sps_.constraint_set1_flag); | |
879 packed_sps_.AppendBool(current_sps_.constraint_set2_flag); | |
880 packed_sps_.AppendBool(current_sps_.constraint_set3_flag); | |
881 packed_sps_.AppendBool(current_sps_.constraint_set4_flag); | |
882 packed_sps_.AppendBool(current_sps_.constraint_set5_flag); | |
883 packed_sps_.AppendBits(2, 0); // reserved_zero_2bits | |
884 packed_sps_.AppendBits(8, current_sps_.level_idc); | |
885 packed_sps_.AppendUE(current_sps_.seq_parameter_set_id); | |
886 | |
887 if (current_sps_.profile_idc == media::H264SPS::kProfileIDCHigh) { | |
888 packed_sps_.AppendUE(current_sps_.chroma_format_idc); | |
889 if (current_sps_.chroma_format_idc == 3) | |
890 packed_sps_.AppendBool(current_sps_.separate_colour_plane_flag); | |
891 packed_sps_.AppendUE(current_sps_.bit_depth_luma_minus8); | |
892 packed_sps_.AppendUE(current_sps_.bit_depth_chroma_minus8); | |
893 packed_sps_.AppendBool(current_sps_.qpprime_y_zero_transform_bypass_flag); | |
894 packed_sps_.AppendBool(current_sps_.seq_scaling_matrix_present_flag); | |
895 CHECK(!current_sps_.seq_scaling_matrix_present_flag); | |
896 } | |
897 | |
898 packed_sps_.AppendUE(current_sps_.log2_max_frame_num_minus4); | |
899 packed_sps_.AppendUE(current_sps_.pic_order_cnt_type); | |
900 if (current_sps_.pic_order_cnt_type == 0) | |
901 packed_sps_.AppendUE(current_sps_.log2_max_pic_order_cnt_lsb_minus4); | |
902 else if (current_sps_.pic_order_cnt_type == 1) { | |
    NOTREACHED();  // pic_order_cnt_type == 1 is not supported by this encoder.
904 } | |
905 | |
906 packed_sps_.AppendUE(current_sps_.max_num_ref_frames); | |
907 packed_sps_.AppendBool(current_sps_.gaps_in_frame_num_value_allowed_flag); | |
908 packed_sps_.AppendUE(current_sps_.pic_width_in_mbs_minus1); | |
909 packed_sps_.AppendUE(current_sps_.pic_height_in_map_units_minus1); | |
910 | |
911 packed_sps_.AppendBool(current_sps_.frame_mbs_only_flag); | |
912 if (!current_sps_.frame_mbs_only_flag) | |
913 packed_sps_.AppendBool(current_sps_.mb_adaptive_frame_field_flag); | |
914 | |
915 packed_sps_.AppendBool(current_sps_.direct_8x8_inference_flag); | |
916 | |
917 packed_sps_.AppendBool(current_sps_.frame_cropping_flag); | |
918 if (current_sps_.frame_cropping_flag) { | |
919 packed_sps_.AppendUE(current_sps_.frame_crop_left_offset); | |
920 packed_sps_.AppendUE(current_sps_.frame_crop_right_offset); | |
921 packed_sps_.AppendUE(current_sps_.frame_crop_top_offset); | |
922 packed_sps_.AppendUE(current_sps_.frame_crop_bottom_offset); | |
923 } | |
924 | |
925 packed_sps_.AppendBool(current_sps_.vui_parameters_present_flag); | |
926 if (current_sps_.vui_parameters_present_flag) { | |
927 packed_sps_.AppendBool(false); // aspect_ratio_info_present_flag | |
928 packed_sps_.AppendBool(false); // overscan_info_present_flag | |
929 packed_sps_.AppendBool(false); // video_signal_type_present_flag | |
930 packed_sps_.AppendBool(false); // chroma_loc_info_present_flag | |
931 | |
932 packed_sps_.AppendBool(current_sps_.timing_info_present_flag); | |
933 if (current_sps_.timing_info_present_flag) { | |
934 packed_sps_.AppendBits(32, current_sps_.num_units_in_tick); | |
935 packed_sps_.AppendBits(32, current_sps_.time_scale); | |
936 packed_sps_.AppendBool(current_sps_.fixed_frame_rate_flag); | |
937 } | |
938 | |
939 packed_sps_.AppendBool(current_sps_.nal_hrd_parameters_present_flag); | |
940 if (current_sps_.nal_hrd_parameters_present_flag) { | |
941 packed_sps_.AppendUE(current_sps_.cpb_cnt_minus1); | |
942 packed_sps_.AppendBits(4, current_sps_.bit_rate_scale); | |
943 packed_sps_.AppendBits(4, current_sps_.cpb_size_scale); | |
944 CHECK_LT(base::checked_cast<size_t>(current_sps_.cpb_cnt_minus1), | |
945 arraysize(current_sps_.bit_rate_value_minus1)); | |
946 for (int i = 0; i <= current_sps_.cpb_cnt_minus1; ++i) { | |
947 packed_sps_.AppendUE(current_sps_.bit_rate_value_minus1[i]); | |
948 packed_sps_.AppendUE(current_sps_.cpb_size_value_minus1[i]); | |
949 packed_sps_.AppendBool(current_sps_.cbr_flag[i]); | |
950 } | |
951 packed_sps_.AppendBits( | |
952 5, current_sps_.initial_cpb_removal_delay_length_minus_1); | |
953 packed_sps_.AppendBits(5, current_sps_.cpb_removal_delay_length_minus1); | |
954 packed_sps_.AppendBits(5, current_sps_.dpb_output_delay_length_minus1); | |
955 packed_sps_.AppendBits(5, current_sps_.time_offset_length); | |
956 } | |
957 | |
958 packed_sps_.AppendBool(false); // vcl_hrd_parameters_flag | |
959 if (current_sps_.nal_hrd_parameters_present_flag) | |
960 packed_sps_.AppendBool(current_sps_.low_delay_hrd_flag); | |
961 | |
962 packed_sps_.AppendBool(false); // pic_struct_present_flag | |
963 packed_sps_.AppendBool(true); // bitstream_restriction_flag | |
964 | |
965 packed_sps_.AppendBool(false); // motion_vectors_over_pic_boundaries_flag | |
966 packed_sps_.AppendUE(2); // max_bytes_per_pic_denom | |
967 packed_sps_.AppendUE(1); // max_bits_per_mb_denom | |
968 packed_sps_.AppendUE(16); // log2_max_mv_length_horizontal | |
969 packed_sps_.AppendUE(16); // log2_max_mv_length_vertical | |
970 | |
971 // Explicitly set max_num_reorder_frames to 0 to allow the decoder to | |
972 // output pictures early. | |
973 packed_sps_.AppendUE(0); // max_num_reorder_frames | |
974 | |
975 // The value of max_dec_frame_buffering shall be greater than or equal to | |
976 // max_num_ref_frames. | |
977 const unsigned int max_dec_frame_buffering = | |
978 current_sps_.max_num_ref_frames; | |
979 packed_sps_.AppendUE(max_dec_frame_buffering); | |
980 } | |
981 | |
982 packed_sps_.FinishNALU(); | |
983 } | |
984 | |
985 void VaapiVideoEncodeAccelerator::UpdatePPS() { | |
  memset(&current_pps_, 0, sizeof(media::H264PPS));
987 | |
988 current_pps_.seq_parameter_set_id = current_sps_.seq_parameter_set_id; | |
989 current_pps_.pic_parameter_set_id = 0; | |
990 | |
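  // CABAC entropy coding is only allowed in Main profile and above; Baseline
  // must use CAVLC.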
991 current_pps_.entropy_coding_mode_flag = | |
992 current_sps_.profile_idc >= media::H264SPS::kProfileIDCMain; | |
993 | |
994 CHECK_GT(max_ref_idx_l0_size_, 0u); | |
995 current_pps_.num_ref_idx_l0_default_active_minus1 = max_ref_idx_l0_size_ - 1; | |
996 current_pps_.num_ref_idx_l1_default_active_minus1 = 0; | |
997 DCHECK_LE(qp_, 51u); | |
998 current_pps_.pic_init_qp_minus26 = qp_ - 26; | |
999 current_pps_.deblocking_filter_control_present_flag = true; | |
1000 current_pps_.transform_8x8_mode_flag = | |
1001 (current_sps_.profile_idc == media::H264SPS::kProfileIDCHigh); | |
1002 } | |
1003 | |
1004 void VaapiVideoEncodeAccelerator::GeneratePackedPPS() { | |
1005 packed_pps_.Reset(); | |
1006 | |
1007 packed_pps_.BeginNALU(media::H264NALU::kPPS, 3); | |
1008 | |
1009 packed_pps_.AppendUE(current_pps_.pic_parameter_set_id); | |
1010 packed_pps_.AppendUE(current_pps_.seq_parameter_set_id); | |
1011 packed_pps_.AppendBool(current_pps_.entropy_coding_mode_flag); | |
1012 packed_pps_.AppendBool( | |
1013 current_pps_.bottom_field_pic_order_in_frame_present_flag); | |
1014 CHECK_EQ(current_pps_.num_slice_groups_minus1, 0); | |
1015 packed_pps_.AppendUE(current_pps_.num_slice_groups_minus1); | |
1016 | |
1017 packed_pps_.AppendUE(current_pps_.num_ref_idx_l0_default_active_minus1); | |
1018 packed_pps_.AppendUE(current_pps_.num_ref_idx_l1_default_active_minus1); | |
1019 | |
1020 packed_pps_.AppendBool(current_pps_.weighted_pred_flag); | |
1021 packed_pps_.AppendBits(2, current_pps_.weighted_bipred_idc); | |
1022 | |
1023 packed_pps_.AppendSE(current_pps_.pic_init_qp_minus26); | |
1024 packed_pps_.AppendSE(current_pps_.pic_init_qs_minus26); | |
1025 packed_pps_.AppendSE(current_pps_.chroma_qp_index_offset); | |
1026 | |
1027 packed_pps_.AppendBool(current_pps_.deblocking_filter_control_present_flag); | |
1028 packed_pps_.AppendBool(current_pps_.constrained_intra_pred_flag); | |
1029 packed_pps_.AppendBool(current_pps_.redundant_pic_cnt_present_flag); | |
1030 | |
1031 packed_pps_.AppendBool(current_pps_.transform_8x8_mode_flag); | |
1032 packed_pps_.AppendBool(current_pps_.pic_scaling_matrix_present_flag); | |
1033 DCHECK(!current_pps_.pic_scaling_matrix_present_flag); | |
1034 packed_pps_.AppendSE(current_pps_.second_chroma_qp_index_offset); | |
1035 | |
1036 packed_pps_.FinishNALU(); | |
1037 } | |
1038 | |
1039 void VaapiVideoEncodeAccelerator::SetState(State state) { | |
1040 // Only touch state on encoder thread, unless it's not running. | |
1041 if (encoder_thread_.IsRunning() && | |
1042 !encoder_thread_task_runner_->BelongsToCurrentThread()) { | |
1043 encoder_thread_task_runner_->PostTask( | |
1044 FROM_HERE, base::Bind(&VaapiVideoEncodeAccelerator::SetState, | |
1045 base::Unretained(this), state)); | |
1046 return; | |
1047 } | |
1048 | |
1049 DVLOGF(1) << "setting state to: " << state; | |
1050 state_ = state; | |
1051 } | |
1052 | |
1053 void VaapiVideoEncodeAccelerator::NotifyError(Error error) { | |
1054 if (!child_task_runner_->BelongsToCurrentThread()) { | |
1055 child_task_runner_->PostTask( | |
1056 FROM_HERE, base::Bind(&VaapiVideoEncodeAccelerator::NotifyError, | |
1057 weak_this_, error)); | |
1058 return; | |
1059 } | |
1060 | |
1061 if (client_) { | |
1062 client_->NotifyError(error); | |
1063 client_ptr_factory_.reset(); | |
1064 } | |
1065 } | |
1066 | |
1067 VaapiVideoEncodeAccelerator::EncodeJob::EncodeJob() | |
1068 : coded_buffer(VA_INVALID_ID), keyframe(false) { | |
1069 } | |
1070 | |
1071 VaapiVideoEncodeAccelerator::EncodeJob::~EncodeJob() { | |
1072 } | |
1073 | |
1074 } // namespace content | |