OLD | NEW |
---|---|
(Empty) | |
1 // Copyright (c) 2014 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "content/common/gpu/media/vaapi_video_encode_accelerator.h" | |
6 | |
7 #include "base/bind.h" | |
8 #include "base/callback.h" | |
9 #include "base/command_line.h" | |
10 #include "base/message_loop/message_loop_proxy.h" | |
11 #include "base/metrics/histogram.h" | |
12 #include "base/numerics/safe_conversions.h" | |
13 #include "content/common/gpu/media/h264_dpb.h" | |
14 #include "content/public/common/content_switches.h" | |
15 #include "media/base/bind_to_current_loop.h" | |
16 #include "third_party/libva/va/va_enc_h264.h" | |
17 | |
// Logs |msg| at DVLOG |level|, prefixed with the name of the calling function.
#define DVLOGF(level) DVLOG(level) << __FUNCTION__ << "(): "

// Transitions the encoder to the error state, logs |msg| and reports |error|
// to the client via NotifyError(). Used for unrecoverable failures; tasks on
// the encoder thread check state_ and early-exit once in kError.
#define NOTIFY_ERROR(error, msg)                         \
  do {                                                   \
    SetState(kError);                                    \
    DVLOGF(1) << msg;                                    \
    DVLOGF(1) << "Calling NotifyError(" << error << ")"; \
    NotifyError(error);                                  \
  } while (0)
27 | |
28 namespace content { | |
29 | |
30 static inline size_t RoundUpToPowerOf2(size_t value, size_t alignment) { | |
xhwang
2014/06/20 07:34:39
Add a comment? It's not obvious what it's doing...
Pawel Osciak
2014/06/20 10:53:15
Done.
| |
31 // Check that |alignment| is a power of 2. | |
32 DCHECK((alignment + (alignment - 1)) == (alignment | (alignment - 1))); | |
33 return ((value + (alignment - 1)) & ~(alignment - 1)); | |
34 } | |
35 | |
// Records an encoder |failure| in the Media.VAVEA.EncoderFailure UMA
// histogram for metrics collection.
static void ReportToUMA(
    VaapiVideoEncodeAccelerator::VAVEAEncoderFailure failure) {
  UMA_HISTOGRAM_ENUMERATION(
      "Media.VAVEA.EncoderFailure",
      failure,
      VaapiVideoEncodeAccelerator::VAVEA_ENCODER_FAILURES_MAX);
}
43 | |
// An input video frame queued for encoding, together with the client's
// request to encode it as a keyframe.
struct VaapiVideoEncodeAccelerator::InputFrameRef {
  InputFrameRef(const scoped_refptr<media::VideoFrame>& frame,
                bool force_keyframe)
      : frame(frame), force_keyframe(force_keyframe) {}
  const scoped_refptr<media::VideoFrame> frame;  // Source frame to encode.
  const bool force_keyframe;  // True if this frame must start with a keyframe.
};
51 | |
// A client-provided output bitstream buffer. Takes ownership of the mapped
// shared memory |shm| backing the buffer (named "Ref" for consistency with
// similar structs used elsewhere, per review discussion).
struct VaapiVideoEncodeAccelerator::BitstreamBufferRef {
  BitstreamBufferRef(int32 id, scoped_ptr<base::SharedMemory> shm, size_t size)
      : id(id), shm(shm.Pass()), size(size) {}
  const int32 id;  // Client-assigned buffer id.
  const scoped_ptr<base::SharedMemory> shm;  // Owned mapping of the buffer.
  const size_t size;  // Usable size of the buffer in bytes.
};
59 | |
60 // static | |
61 std::vector<media::VideoEncodeAccelerator::SupportedProfile> | |
62 VaapiVideoEncodeAccelerator::GetSupportedProfiles() { | |
63 std::vector<SupportedProfile> profiles; | |
64 | |
65 const CommandLine* cmd_line = CommandLine::ForCurrentProcess(); | |
66 if (!cmd_line->HasSwitch(switches::kEnableVaapiAcceleratedVideoEncode)) | |
67 return profiles; | |
68 | |
69 SupportedProfile profile; | |
70 profile.profile = media::H264PROFILE_MAIN; | |
71 profile.max_resolution.SetSize(1920, 1088); | |
72 profile.max_framerate.numerator = kDefaultFramerate; | |
73 profile.max_framerate.denominator = 1; | |
74 profiles.push_back(profile); | |
75 | |
76 // This is actually only constrained (see crbug.com/345569). | |
77 profile.profile = media::H264PROFILE_BASELINE; | |
78 profiles.push_back(profile); | |
79 | |
80 profile.profile = media::H264PROFILE_HIGH; | |
81 profiles.push_back(profile); | |
82 | |
83 return profiles; | |
84 } | |
85 | |
86 static unsigned int Log2OfPowerOf2(unsigned int x) { | |
87 CHECK_GT(x, 0); | |
88 DCHECK_EQ(x & (x - 1), 0); | |
89 | |
90 int log = 0; | |
91 while (x) { | |
92 x >>= 1; | |
93 ++log; | |
94 } | |
95 return log; | |
96 } | |
97 | |
// Constructed on the child thread; |child_message_loop_proxy_| captures that
// loop for later task posting.
VaapiVideoEncodeAccelerator::VaapiVideoEncodeAccelerator(Display* x_display)
    : profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN),
      mb_width_(0),
      mb_height_(0),
      output_buffer_byte_size_(0),
      x_display_(x_display),
      state_(kUninitialized),
      frame_num_(0),
      last_idr_frame_num_(0),
      bitrate_(0),
      framerate_(0),
      cpb_size_(0),
      encoding_parameters_changed_(false),
      encoder_thread_("VAVEAEncoderThread"),
      child_message_loop_proxy_(base::MessageLoopProxy::current()),
      weak_this_ptr_factory_(this) {
  DVLOGF(4);
  // |weak_this_| is for use from the child thread (per review discussion);
  // created once here so it can be handed out later.
  weak_this_ = weak_this_ptr_factory_.GetWeakPtr();

  // Fixed encoding parameters -- presumably constants declared in the
  // header; they are not reconfigured at runtime except via UpdateRates().
  max_ref_idx_l0_size_ = kMaxNumReferenceFrames;
  qp_ = kDefaultQP;
  idr_period_ = kIDRPeriod;
  i_period_ = kIPeriod;
  ip_period_ = kIPPeriod;
}
123 | |
// Must run on the child thread, and only after Destroy() has joined the
// encoder thread (hence the IsRunning() check).
VaapiVideoEncodeAccelerator::~VaapiVideoEncodeAccelerator() {
  DVLOGF(4);
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!encoder_thread_.IsRunning());
}
129 | |
// Client API entry point, called on the child thread. Validates the encode
// configuration, creates the VaapiWrapper and starts the encoder thread; the
// remaining setup continues asynchronously in InitializeTask(). Returns
// false on any synchronous failure.
bool VaapiVideoEncodeAccelerator::Initialize(
    media::VideoFrame::Format format,
    const gfx::Size& input_visible_size,
    media::VideoCodecProfile output_profile,
    uint32 initial_bitrate,
    Client* client) {
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!encoder_thread_.IsRunning());
  DCHECK_EQ(state_, kUninitialized);

  DVLOGF(1) << "Initializing VAVEA, input_format: "
            << media::VideoFrame::FormatToString(format)
            << ", input_visible_size: " << input_visible_size.ToString()
            << ", output_profile: " << output_profile
            << ", initial_bitrate: " << initial_bitrate;

  client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));
  client_ = client_ptr_factory_->GetWeakPtr();

  // NOTE(review): only [BASELINE, MAIN] is accepted here, although
  // GetSupportedProfiles() also advertises H264PROFILE_HIGH and UpdateSPS()
  // handles it -- confirm whether HIGH should be accepted.
  if (output_profile < media::H264PROFILE_BASELINE ||
      output_profile > media::H264PROFILE_MAIN) {
    DVLOGF(1) << "Unsupported output profile: " << output_profile;
    return false;
  }

  if (format != media::VideoFrame::I420) {
    DVLOGF(1) << "Unsupported input format: "
              << media::VideoFrame::FormatToString(format);
    return false;
  }

  profile_ = output_profile;
  visible_size_ = input_visible_size;
  // 4:2:0 format has to be 2-aligned.
  DCHECK_EQ(visible_size_.width() % 2, 0);
  DCHECK_EQ(visible_size_.height() % 2, 0);
  // Encoding operates on 16x16 macroblocks; round the coded size up to a
  // macroblock multiple. The difference is signalled via SPS cropping (see
  // UpdateSPS()).
  coded_size_ = gfx::Size(RoundUpToPowerOf2(visible_size_.width(), 16),
                          RoundUpToPowerOf2(visible_size_.height(), 16));
  mb_width_ = coded_size_.width() / 16;
  mb_height_ = coded_size_.height() / 16;
  // One byte per pixel of coded area for each output buffer.
  output_buffer_byte_size_ = coded_size_.GetArea();

  UpdateRates(initial_bitrate, kDefaultFramerate);

  vaapi_wrapper_ = VaapiWrapper::Create(VaapiWrapper::kEncode,
                                        output_profile,
                                        x_display_,
                                        base::Bind(&ReportToUMA, VAAPI_ERROR));
  if (!vaapi_wrapper_) {
    DVLOGF(1) << "Failed initializing VAAPI";
    return false;
  }

  if (!encoder_thread_.Start()) {
    DVLOGF(1) << "Failed to start encoder thread";
    return false;
  }
  encoder_thread_proxy_ = encoder_thread_.message_loop_proxy();

  // Finish the remaining initialization on the encoder thread.
  // base::Unretained is safe: the encoder thread is joined in Destroy()
  // before |this| is deleted (per review discussion).
  encoder_thread_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&VaapiVideoEncodeAccelerator::InitializeTask,
                 base::Unretained(this)));

  return true;
}
197 | |
// Second half of Initialize(), run on the encoder thread: creates the VA
// surface pool, prepares the stream headers and asks the client for output
// buffers, then enters the kEncoding state.
void VaapiVideoEncodeAccelerator::InitializeTask() {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_EQ(state_, kUninitialized);
  DVLOGF(4);

  // Surface recycling must happen on this thread; BindToCurrentLoop makes
  // the callback bounce back to this loop from wherever it is invoked.
  va_surface_release_cb_ = media::BindToCurrentLoop(
      base::Bind(&VaapiVideoEncodeAccelerator::RecycleVASurfaceID,
                 base::Unretained(this)));

  if (!vaapi_wrapper_->CreateSurfaces(
          coded_size_, kNumSurfaces, &available_va_surface_ids_)) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed creating VASurfaces");
    return;
  }

  // Build the SPS/PPS structures and their packed (bitstream) forms, to be
  // submitted with keyframes.
  UpdateSPS();
  GeneratePackedSPS();

  UpdatePPS();
  GeneratePackedPPS();

  child_message_loop_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&Client::RequireBitstreamBuffers,
                 client_,
                 kNumInputBuffers,
                 coded_size_,
                 output_buffer_byte_size_));

  SetState(kEncoding);
}
229 | |
// Invoked (on the encoder thread, via |va_surface_release_cb_|) when a
// VASurface is no longer referenced: returns its id to the free pool and
// retries encoding in case a frame was waiting for surfaces.
void VaapiVideoEncodeAccelerator::RecycleVASurfaceID(
    VASurfaceID va_surface_id) {
  DVLOGF(4) << "va_surface_id: " << va_surface_id;
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());

  available_va_surface_ids_.push_back(va_surface_id);
  EncodeFrameTask();
}
238 | |
// Sets up |current_pic_| for the frame about to be encoded, deciding its
// type (IDR/I/P) from its position in the GOP and |force_keyframe|.
void VaapiVideoEncodeAccelerator::BeginFrame(bool force_keyframe) {
  memset(&current_pic_, 0, sizeof(current_pic_));

  // |frame_num_| counts frames within the current IDR period (wraps below).
  current_pic_.frame_num = frame_num_++;
  frame_num_ %= idr_period_;

  // NOTE(review): a forced keyframe produces an I slice but not an IDR --
  // confirm that is the intended behavior.
  if (current_pic_.frame_num % i_period_ == 0 || force_keyframe)
    current_pic_.type = media::H264SliceHeader::kISlice;
  else
    current_pic_.type = media::H264SliceHeader::kPSlice;

  // An IDR picture invalidates all previous reference pictures.
  if (current_pic_.frame_num % idr_period_ == 0) {
    current_pic_.idr = true;
    last_idr_frame_num_ = current_pic_.frame_num;
    ref_pic_list0_.clear();
  }

  // This encoder never produces B slices, so every picture is a reference.
  if (current_pic_.type != media::H264SliceHeader::kBSlice)
    current_pic_.ref = true;

  // Progressive frames with POC advancing by 2 per frame; top field order
  // count and the POC LSB mirror it.
  current_pic_.pic_order_cnt = current_pic_.frame_num * 2;
  current_pic_.top_field_order_cnt = current_pic_.pic_order_cnt;
  current_pic_.pic_order_cnt_lsb = current_pic_.pic_order_cnt;

  current_encode_job_->keyframe =
      (current_pic_.type == media::H264SliceHeader::kISlice);

  DVLOGF(4) << "Starting a new frame, type: " << current_pic_.type
            << (force_keyframe ? " (forced keyframe)" : "")
            << " frame_num: " << current_pic_.frame_num
            << " POC: " << current_pic_.pic_order_cnt;
}
271 | |
// Finalizes the current frame after submission: updates the reference
// picture list and queues the job for output retrieval.
void VaapiVideoEncodeAccelerator::EndFrame() {
  // Store the picture on the list of reference pictures and keep the list
  // below maximum size, dropping oldest references.
  if (current_pic_.ref)
    ref_pic_list0_.push_front(current_encode_job_->recon_surface);
  size_t max_num_ref_frames =
      base::checked_cast<size_t>(current_sps_.max_num_ref_frames);
  while (ref_pic_list0_.size() > max_num_ref_frames)
    ref_pic_list0_.pop_back();

  // The queued job is picked up by TryToReturnBitstreamBuffer() once an
  // output buffer is available.
  submitted_encode_jobs_.push(make_linked_ptr(current_encode_job_.release()));
}
284 | |
// Resets |va_pic| to an empty/invalid reference entry.
static void InitVAPicture(VAPictureH264* va_pic) {
  memset(va_pic, 0, sizeof(*va_pic));
  va_pic->picture_id = VA_INVALID_ID;
  va_pic->flags = VA_PICTURE_H264_INVALID;
}
290 | |
// Fills and submits the sequence, picture, slice and misc (rate control,
// framerate, HRD) parameter buffers for the current frame. Returns false if
// any submission fails.
bool VaapiVideoEncodeAccelerator::SubmitFrameParameters() {
  VAEncSequenceParameterBufferH264 seq_param;
  memset(&seq_param, 0, sizeof(seq_param));

// Copies the named field from |current_sps_| into |seq_param| verbatim.
#define SPS_TO_SP(a) seq_param.a = current_sps_.a;
  SPS_TO_SP(seq_parameter_set_id);
  SPS_TO_SP(level_idc);

  seq_param.intra_period = i_period_;
  seq_param.intra_idr_period = idr_period_;
  seq_param.ip_period = ip_period_;
  seq_param.bits_per_second = bitrate_;

  SPS_TO_SP(max_num_ref_frames);
  seq_param.picture_width_in_mbs = mb_width_;
  seq_param.picture_height_in_mbs = mb_height_;

// Copies the named SPS field into the seq_fields bitfield of |seq_param|.
#define SPS_TO_SP_FS(a) seq_param.seq_fields.bits.a = current_sps_.a;
  SPS_TO_SP_FS(chroma_format_idc);
  SPS_TO_SP_FS(frame_mbs_only_flag);
  SPS_TO_SP_FS(log2_max_frame_num_minus4);
  SPS_TO_SP_FS(pic_order_cnt_type);
  SPS_TO_SP_FS(log2_max_pic_order_cnt_lsb_minus4);
#undef SPS_TO_SP_FS

  SPS_TO_SP(bit_depth_luma_minus8);
  SPS_TO_SP(bit_depth_chroma_minus8);

  SPS_TO_SP(frame_cropping_flag);
  if (current_sps_.frame_cropping_flag) {
    SPS_TO_SP(frame_crop_left_offset);
    SPS_TO_SP(frame_crop_right_offset);
    SPS_TO_SP(frame_crop_top_offset);
    SPS_TO_SP(frame_crop_bottom_offset);
  }

  SPS_TO_SP(vui_parameters_present_flag);
// Copies the named SPS field into the vui_fields bitfield of |seq_param|.
#define SPS_TO_SP_VF(a) seq_param.vui_fields.bits.a = current_sps_.a;
  SPS_TO_SP_VF(timing_info_present_flag);
#undef SPS_TO_SP_VF
  SPS_TO_SP(num_units_in_tick);
  SPS_TO_SP(time_scale);
#undef SPS_TO_SP

  if (!vaapi_wrapper_->SubmitBuffer(VAEncSequenceParameterBufferType,
                                    sizeof(seq_param),
                                    &seq_param))
    return false;

  VAEncPictureParameterBufferH264 pic_param;
  memset(&pic_param, 0, sizeof(pic_param));

  // The reconstructed picture is written to the job's recon surface.
  pic_param.CurrPic.picture_id = current_encode_job_->recon_surface->id();
  pic_param.CurrPic.TopFieldOrderCnt = current_pic_.top_field_order_cnt;
  pic_param.CurrPic.BottomFieldOrderCnt = current_pic_.bottom_field_order_cnt;
  pic_param.CurrPic.flags = 0;

  // Mark every reference slot invalid, then fill in the active references.
  for (size_t i = 0; i < arraysize(pic_param.ReferenceFrames); ++i)
    InitVAPicture(&pic_param.ReferenceFrames[i]);

  DCHECK_LE(ref_pic_list0_.size(), arraysize(pic_param.ReferenceFrames));
  RefPicList::const_iterator iter = ref_pic_list0_.begin();
  for (size_t i = 0;
       i < arraysize(pic_param.ReferenceFrames) && iter != ref_pic_list0_.end();
       ++iter, ++i) {
    pic_param.ReferenceFrames[i].picture_id = (*iter)->id();
    pic_param.ReferenceFrames[i].flags = 0;
  }

  pic_param.coded_buf = current_encode_job_->coded_buffer;
  pic_param.pic_parameter_set_id = current_pps_.pic_parameter_set_id;
  pic_param.seq_parameter_set_id = current_pps_.seq_parameter_set_id;
  pic_param.frame_num = current_pic_.frame_num;
  pic_param.pic_init_qp = qp_;
  pic_param.num_ref_idx_l0_active_minus1 = max_ref_idx_l0_size_ - 1;
  pic_param.pic_fields.bits.idr_pic_flag = current_pic_.idr;
  pic_param.pic_fields.bits.reference_pic_flag = current_pic_.ref;
// Copies the named PPS field into the pic_fields bitfield of |pic_param|.
#define PPS_TO_PP_PF(a) pic_param.pic_fields.bits.a = current_pps_.a;
  PPS_TO_PP_PF(entropy_coding_mode_flag);
  PPS_TO_PP_PF(transform_8x8_mode_flag);
  PPS_TO_PP_PF(deblocking_filter_control_present_flag);
#undef PPS_TO_PP_PF

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPictureParameterBufferType,
                                    sizeof(pic_param),
                                    &pic_param))
    return false;

  VAEncSliceParameterBufferH264 slice_param;
  memset(&slice_param, 0, sizeof(slice_param));

  // A single slice spanning the whole frame.
  slice_param.num_macroblocks = mb_width_ * mb_height_;
  slice_param.macroblock_info = VA_INVALID_ID;
  slice_param.slice_type = current_pic_.type;
  slice_param.pic_parameter_set_id = current_pps_.pic_parameter_set_id;
  slice_param.idr_pic_id = last_idr_frame_num_;
  slice_param.pic_order_cnt_lsb = current_pic_.pic_order_cnt_lsb;
  slice_param.num_ref_idx_active_override_flag = true;

  for (size_t i = 0; i < arraysize(slice_param.RefPicList0); ++i)
    InitVAPicture(&slice_param.RefPicList0[i]);

  // RefPicList1 would only be used for B slices, which are never produced
  // here; leave all of its entries invalid.
  for (size_t i = 0; i < arraysize(slice_param.RefPicList1); ++i)
    InitVAPicture(&slice_param.RefPicList1[i]);

  DCHECK_LE(ref_pic_list0_.size(), arraysize(slice_param.RefPicList0));
  iter = ref_pic_list0_.begin();
  for (size_t i = 0;
       i < arraysize(slice_param.RefPicList0) && iter != ref_pic_list0_.end();
       ++iter, ++i) {
    InitVAPicture(&slice_param.RefPicList0[i]);
    slice_param.RefPicList0[i].picture_id = (*iter)->id();
    slice_param.RefPicList0[i].flags = 0;
  }

  if (!vaapi_wrapper_->SubmitBuffer(VAEncSliceParameterBufferType,
                                    sizeof(slice_param),
                                    &slice_param))
    return false;

  VAEncMiscParameterRateControl rate_control_param;
  memset(&rate_control_param, 0, sizeof(rate_control_param));
  rate_control_param.bits_per_second = bitrate_;
  rate_control_param.target_percentage = 90;
  rate_control_param.window_size = kCPBWindowSizeMs;
  rate_control_param.initial_qp = qp_;
  rate_control_param.rc_flags.bits.disable_frame_skip = true;

  if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer(
          VAEncMiscParameterTypeRateControl,
          sizeof(rate_control_param),
          &rate_control_param))
    return false;

  VAEncMiscParameterFrameRate framerate_param;
  memset(&framerate_param, 0, sizeof(framerate_param));
  framerate_param.framerate = framerate_;
  if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer(
          VAEncMiscParameterTypeFrameRate,
          sizeof(framerate_param),
          &framerate_param))
    return false;

  VAEncMiscParameterHRD hrd_param;
  memset(&hrd_param, 0, sizeof(hrd_param));
  hrd_param.buffer_size = cpb_size_;
  // Assume the coded picture buffer starts half full.
  hrd_param.initial_buffer_fullness = cpb_size_ / 2;
  if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer(VAEncMiscParameterTypeHRD,
                                                  sizeof(hrd_param),
                                                  &hrd_param))
    return false;

  return true;
}
445 | |
// Submits the packed SPS and PPS headers when the current picture is an
// I slice; no-op otherwise. Returns false if any submission fails.
bool VaapiVideoEncodeAccelerator::SubmitHeadersIfNeeded() {
  if (current_pic_.type != media::H264SliceHeader::kISlice)
    return true;

  // Submit SPS.
  VAEncPackedHeaderParameterBuffer par_buffer;
  memset(&par_buffer, 0, sizeof(par_buffer));
  par_buffer.type = VAEncPackedHeaderSequence;
  par_buffer.bit_length = packed_sps_.BytesInBuffer() * 8;

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderParameterBufferType,
                                    sizeof(par_buffer),
                                    &par_buffer))
    return false;

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderDataBufferType,
                                    packed_sps_.BytesInBuffer(),
                                    packed_sps_.data()))
    return false;

  // Submit PPS.
  memset(&par_buffer, 0, sizeof(par_buffer));
  par_buffer.type = VAEncPackedHeaderPicture;
  par_buffer.bit_length = packed_pps_.BytesInBuffer() * 8;

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderParameterBufferType,
                                    sizeof(par_buffer),
                                    &par_buffer))
    return false;

  if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderDataBufferType,
                                    packed_pps_.BytesInBuffer(),
                                    packed_pps_.data()))
    return false;

  return true;
}
483 | |
// Executes (and destroys) all previously submitted parameter buffers for the
// current job's input surface, starting the encode.
bool VaapiVideoEncodeAccelerator::ExecuteEncode() {
  DVLOGF(3) << "Encoding frame_num: " << current_pic_.frame_num;
  return vaapi_wrapper_->ExecuteAndDestroyPendingBuffers(
      current_encode_job_->input_surface->id());
}
489 | |
// Copies |frame| into the current job's input VASurface.
bool VaapiVideoEncodeAccelerator::UploadFrame(
    const scoped_refptr<media::VideoFrame>& frame) {
  return vaapi_wrapper_->UploadVideoFrameToSurface(
      frame, current_encode_job_->input_surface->id());
}
495 | |
496 void VaapiVideoEncodeAccelerator::TryToReturnBitstreamBuffer() { | |
497 DCHECK(encoder_thread_proxy_->BelongsToCurrentThread()); | |
498 | |
499 if (state_ != kEncoding) | |
500 return; | |
501 | |
502 if (submitted_encode_jobs_.empty() || available_bitstream_buffers_.empty()) | |
503 return; | |
504 | |
505 linked_ptr<BitstreamBufferRef> buffer = available_bitstream_buffers_.front(); | |
506 available_bitstream_buffers_.pop(); | |
507 | |
508 uint8* target_data = reinterpret_cast<uint8*>(buffer->shm->memory()); | |
509 | |
510 linked_ptr<EncodeJob> encode_job = submitted_encode_jobs_.front(); | |
511 submitted_encode_jobs_.pop(); | |
512 | |
513 size_t data_size = 0; | |
514 if (!vaapi_wrapper_->DownloadAndDestroyCodedBuffer( | |
515 encode_job->coded_buffer, | |
516 encode_job->input_surface->id(), | |
517 target_data, | |
518 buffer->size, | |
519 &data_size)) { | |
520 NOTIFY_ERROR(kPlatformFailureError, "Failed downloading coded buffer"); | |
521 return; | |
522 } | |
523 | |
524 DVLOGF(3) << "Returning bitstream buffer " | |
525 << (encode_job->keyframe ? "(keyframe)" : "") | |
526 << " id: " << buffer->id << " size: " << data_size; | |
527 | |
528 child_message_loop_proxy_->PostTask(FROM_HERE, | |
529 base::Bind(&Client::BitstreamBufferReady, | |
530 client_, | |
531 buffer->id, | |
532 data_size, | |
533 encode_job->keyframe)); | |
534 } | |
535 | |
// Client API entry point, called on the child thread: hands |frame| off to
// the encoder thread for encoding.
void VaapiVideoEncodeAccelerator::Encode(
    const scoped_refptr<media::VideoFrame>& frame,
    bool force_keyframe) {
  DVLOGF(3) << "Frame timestamp: " << frame->timestamp().InMilliseconds()
            << " force_keyframe: " << force_keyframe;
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  // base::Unretained is safe: the encoder thread is joined in Destroy()
  // before |this| is deleted.
  encoder_thread_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&VaapiVideoEncodeAccelerator::EncodeTask,
                 base::Unretained(this),
                 frame,
                 force_keyframe));
}
550 | |
// Sets up |current_encode_job_| for the next frame. Returns false if not
// enough VA surfaces are free yet (the caller retries when surfaces are
// recycled) or if creating the coded buffer fails (an error is notified).
bool VaapiVideoEncodeAccelerator::PrepareNextJob() {
  if (available_va_surface_ids_.size() < kMinSurfacesToEncode)
    return false;

  DCHECK(!current_encode_job_);
  current_encode_job_.reset(new EncodeJob());

  if (!vaapi_wrapper_->CreateCodedBuffer(output_buffer_byte_size_,
                                         &current_encode_job_->coded_buffer)) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed creating coded buffer");
    return false;
  }

  // Each job consumes two surfaces: one for the input frame and one for the
  // reconstructed picture. They return their ids to the free pool via
  // |va_surface_release_cb_| when the last reference is dropped.
  current_encode_job_->input_surface =
      new VASurface(available_va_surface_ids_.back(), va_surface_release_cb_);
  available_va_surface_ids_.pop_back();

  current_encode_job_->recon_surface =
      new VASurface(available_va_surface_ids_.back(), va_surface_release_cb_);
  available_va_surface_ids_.pop_back();

  // Reference surfaces are needed until the job is done, but they get
  // removed from ref_pic_list0_ when it's full at the end of job submission.
  // Keep refs to them along with the job and only release after sync.
  current_encode_job_->reference_surfaces = ref_pic_list0_;

  return true;
}
579 | |
// Encoder-thread counterpart of Encode(): queues the frame and attempts to
// start encoding immediately.
void VaapiVideoEncodeAccelerator::EncodeTask(
    const scoped_refptr<media::VideoFrame>& frame,
    bool force_keyframe) {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_NE(state_, kUninitialized);

  encoder_input_queue_.push(
      make_linked_ptr(new InputFrameRef(frame, force_keyframe)));
  EncodeFrameTask();
}
590 | |
// Encodes one queued frame if possible: uploads it to HW, submits all
// parameters and headers and kicks off the encode. No-op when not in
// kEncoding state, when nothing is queued, or when resources are not
// available yet (retried from RecycleVASurfaceID()).
void VaapiVideoEncodeAccelerator::EncodeFrameTask() {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());

  if (state_ != kEncoding || encoder_input_queue_.empty())
    return;

  if (!PrepareNextJob()) {
    DVLOGF(4) << "Not ready for next frame yet";
    return;
  }

  linked_ptr<InputFrameRef> frame_ref = encoder_input_queue_.front();
  encoder_input_queue_.pop();

  if (!UploadFrame(frame_ref->frame)) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed uploading source frame to HW.");
    return;
  }

  // A pending parameter change forces a keyframe so the new headers take
  // effect from this frame.
  BeginFrame(frame_ref->force_keyframe || encoding_parameters_changed_);
  encoding_parameters_changed_ = false;

  if (!SubmitFrameParameters()) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed submitting frame parameters.");
    return;
  }

  if (!SubmitHeadersIfNeeded()) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed submitting frame headers.");
    return;
  }

  if (!ExecuteEncode()) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed submitting encode job to HW.");
    return;
  }

  EndFrame();
  TryToReturnBitstreamBuffer();
}
631 | |
// Client API entry point, called on the child thread: validates and maps the
// client-provided output |buffer|, then hands it to the encoder thread.
void VaapiVideoEncodeAccelerator::UseOutputBitstreamBuffer(
    const media::BitstreamBuffer& buffer) {
  DVLOGF(4) << "id: " << buffer.id();
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  // Buffers must be able to hold a full coded frame (see Initialize()).
  if (buffer.size() < output_buffer_byte_size_) {
    NOTIFY_ERROR(kInvalidArgumentError, "Provided bitstream buffer too small");
    return;
  }

  scoped_ptr<base::SharedMemory> shm(
      new base::SharedMemory(buffer.handle(), false));
  if (!shm->Map(buffer.size())) {
    NOTIFY_ERROR(kPlatformFailureError, "Failed mapping shared memory.");
    return;
  }

  scoped_ptr<BitstreamBufferRef> buffer_ref(
      new BitstreamBufferRef(buffer.id(), shm.Pass(), buffer.size()));

  // base::Unretained is safe: the encoder thread is joined in Destroy()
  // before |this| is deleted.
  encoder_thread_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&VaapiVideoEncodeAccelerator::UseOutputBitstreamBufferTask,
                 base::Unretained(this),
                 base::Passed(&buffer_ref)));
}
658 | |
// Encoder-thread counterpart of UseOutputBitstreamBuffer(): makes the buffer
// available and tries to return any pending encoded output in it.
void VaapiVideoEncodeAccelerator::UseOutputBitstreamBufferTask(
    scoped_ptr<BitstreamBufferRef> buffer_ref) {
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_NE(state_, kUninitialized);

  available_bitstream_buffers_.push(make_linked_ptr(buffer_ref.release()));
  TryToReturnBitstreamBuffer();
}
667 | |
// Client API entry point, called on the child thread: forwards the new
// bitrate/framerate to the encoder thread.
void VaapiVideoEncodeAccelerator::RequestEncodingParametersChange(
    uint32 bitrate,
    uint32 framerate) {
  DVLOGF(2) << "bitrate: " << bitrate << " framerate: " << framerate;
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  // base::Unretained is safe: the encoder thread is joined in Destroy()
  // before |this| is deleted.
  encoder_thread_proxy_->PostTask(
      FROM_HERE,
      base::Bind(
          &VaapiVideoEncodeAccelerator::RequestEncodingParametersChangeTask,
          base::Unretained(this),
          bitrate,
          framerate));
}
682 | |
// Updates |bitrate_|, |framerate_| and the derived CPB size. Called on the
// child thread from Initialize() (before the encoder thread starts) and on
// the encoder thread afterwards, hence the conditional thread check.
void VaapiVideoEncodeAccelerator::UpdateRates(uint32 bitrate,
                                              uint32 framerate) {
  if (encoder_thread_.IsRunning())
    DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_NE(bitrate, 0);
  DCHECK_NE(framerate, 0);
  bitrate_ = base::checked_cast<unsigned int>(bitrate);
  framerate_ = base::checked_cast<unsigned int>(framerate);
  // CPB sized to hold kCPBWindowSizeMs worth of stream at the new bitrate.
  cpb_size_ = bitrate_ * kCPBWindowSizeMs / 1000;
}
693 | |
// Applies a bitrate/framerate change on the encoder thread. Only the SPS
// depends on these values here (HRD/VUI fields), so the PPS is left
// untouched; the new parameters take effect with the next encoded frame.
void VaapiVideoEncodeAccelerator::RequestEncodingParametersChangeTask(
    uint32 bitrate,
    uint32 framerate) {
  DVLOGF(2) << "bitrate: " << bitrate << " framerate: " << framerate;
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  DCHECK_NE(state_, kUninitialized);

  UpdateRates(bitrate, framerate);

  UpdateSPS();
  GeneratePackedSPS();

  // Submit new parameters along with next frame that will be processed.
  encoding_parameters_changed_ = true;
}
709 | |
// Client API entry point, called on the child thread: stops all encoder
// activity and deletes |this|.
void VaapiVideoEncodeAccelerator::Destroy() {
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  // Can't call client anymore after Destroy() returns.
  client_ptr_factory_.reset();
  weak_this_ptr_factory_.InvalidateWeakPtrs();

  // Early-exit encoder tasks if they are running and join the thread.
  if (encoder_thread_.IsRunning()) {
    encoder_thread_.message_loop()->PostTask(
        FROM_HERE,
        base::Bind(&VaapiVideoEncodeAccelerator::DestroyTask,
                   base::Unretained(this)));
    // Stop() joins the thread, so no encoder-thread task can touch |this|
    // after this point, making the delete below safe.
    encoder_thread_.Stop();
  }

  delete this;
}
728 | |
// Posted as the last task before the encoder thread is joined; switching to
// kError makes any tasks still queued behind it early-exit.
void VaapiVideoEncodeAccelerator::DestroyTask() {
  DVLOGF(2);
  DCHECK(encoder_thread_proxy_->BelongsToCurrentThread());
  SetState(kError);
}
734 | |
// Regenerates |current_sps_| from the current profile and rate settings.
// Callers follow this with GeneratePackedSPS() to refresh the packed header.
void VaapiVideoEncodeAccelerator::UpdateSPS() {
  memset(&current_sps_, 0, sizeof(media::H264SPS));

  // Spec A.2 and A.3.
  switch (profile_) {
    case media::H264PROFILE_BASELINE:
      // Due to crbug.com/345569, we don't distinguish between constrained
      // and non-constrained baseline profiles. Since many codecs can't do
      // non-constrained, and constrained is usually what we mean (and it's a
      // subset of non-constrained), default to it.
      current_sps_.profile_idc = media::H264SPS::kProfileIDCBaseline;
      current_sps_.constraint_set0_flag = true;
      break;
    case media::H264PROFILE_MAIN:
      current_sps_.profile_idc = media::H264SPS::kProfileIDCMain;
      current_sps_.constraint_set1_flag = true;
      break;
    case media::H264PROFILE_HIGH:
      current_sps_.profile_idc = media::H264SPS::kProfileIDCHigh;
      break;
    default:
      NOTIMPLEMENTED();
      return;
  }

  current_sps_.level_idc = kDefaultLevelIDC;
  current_sps_.seq_parameter_set_id = 0;
  current_sps_.chroma_format_idc = kChromaFormatIDC;

  // Derive the frame_num and POC LSB wrap points from |idr_period_|, which
  // must be a power of 2 (checked in Log2OfPowerOf2) no smaller than 1 << 4
  // so the minus4 fields are non-negative.
  DCHECK_GE(idr_period_, 1 << 4);
  current_sps_.log2_max_frame_num_minus4 = Log2OfPowerOf2(idr_period_) - 4;
  current_sps_.pic_order_cnt_type = 0;
  current_sps_.log2_max_pic_order_cnt_lsb_minus4 =
      Log2OfPowerOf2(idr_period_ * 2) - 4;
  current_sps_.max_num_ref_frames = max_ref_idx_l0_size_;

  // Progressive (frame-only) encoding.
  current_sps_.frame_mbs_only_flag = true;

  DCHECK_GT(mb_width_, 0);
  DCHECK_GT(mb_height_, 0);
  current_sps_.pic_width_in_mbs_minus1 = mb_width_ - 1;
  DCHECK(current_sps_.frame_mbs_only_flag);
  current_sps_.pic_height_in_map_units_minus1 = mb_height_ - 1;

  if (visible_size_ != coded_size_) {
    // Visible size differs from coded size, fill crop information.
    current_sps_.frame_cropping_flag = true;
    DCHECK(!current_sps_.separate_colour_plane_flag);
    // Spec table 6-1. Only 4:2:0 for now.
    DCHECK_EQ(current_sps_.chroma_format_idc, 1);
    // Spec 7.4.2.1.1. Crop is in crop units, which is 2 pixels for 4:2:0.
    const unsigned int crop_unit_x = 2;
    const unsigned int crop_unit_y = 2 * (2 - current_sps_.frame_mbs_only_flag);
    current_sps_.frame_crop_left_offset = 0;
    current_sps_.frame_crop_right_offset =
        (coded_size_.width() - visible_size_.width()) / crop_unit_x;
    current_sps_.frame_crop_top_offset = 0;
    current_sps_.frame_crop_bottom_offset =
        (coded_size_.height() - visible_size_.height()) / crop_unit_y;
  }

  // VUI timing information advertises the target framerate.
  current_sps_.vui_parameters_present_flag = true;
  current_sps_.timing_info_present_flag = true;
  current_sps_.num_units_in_tick = 1;
  current_sps_.time_scale = framerate_ * 2;  // See equation D-2 in spec.
  current_sps_.fixed_frame_rate_flag = true;

  current_sps_.nal_hrd_parameters_present_flag = true;
  // H.264 spec ch. E.2.2.
  current_sps_.cpb_cnt_minus1 = 0;
  current_sps_.bit_rate_scale = kBitRateScale;
  current_sps_.cpb_size_scale = kCPBSizeScale;
  current_sps_.bit_rate_value_minus1[0] =
      (bitrate_ >>
       (kBitRateScale + media::H264SPS::kBitRateScaleConstantTerm)) - 1;
  current_sps_.cpb_size_value_minus1[0] =
      (cpb_size_ >>
       (kCPBSizeScale + media::H264SPS::kCPBSizeScaleConstantTerm)) - 1;
  current_sps_.cbr_flag[0] = true;
  current_sps_.initial_cpb_removal_delay_length_minus_1 =
      media::H264SPS::kDefaultInitialCPBRemovalDelayLength - 1;
  current_sps_.cpb_removal_delay_length_minus1 =
      media::H264SPS::kDefaultInitialCPBRemovalDelayLength - 1;
  current_sps_.dpb_output_delay_length_minus1 =
      media::H264SPS::kDefaultDPBOutputDelayLength - 1;
  current_sps_.time_offset_length = media::H264SPS::kDefaultTimeOffsetLength;
  current_sps_.low_delay_hrd_flag = false;
}
823 | |
824 void VaapiVideoEncodeAccelerator::GeneratePackedSPS() { | |
825 packed_sps_.Reset(); | |
826 | |
827 packed_sps_.BeginNALU(media::H264NALU::kSPS, 3); | |
828 | |
829 packed_sps_.AppendBits(8, current_sps_.profile_idc); | |
830 packed_sps_.AppendBool(current_sps_.constraint_set0_flag); | |
831 packed_sps_.AppendBool(current_sps_.constraint_set1_flag); | |
832 packed_sps_.AppendBool(current_sps_.constraint_set2_flag); | |
833 packed_sps_.AppendBool(current_sps_.constraint_set3_flag); | |
834 packed_sps_.AppendBool(current_sps_.constraint_set4_flag); | |
835 packed_sps_.AppendBool(current_sps_.constraint_set5_flag); | |
836 packed_sps_.AppendBits(2, 0); // reserved_zero_2bits | |
837 packed_sps_.AppendBits(8, current_sps_.level_idc); | |
838 packed_sps_.AppendUE(current_sps_.seq_parameter_set_id); | |
839 | |
840 if (current_sps_.profile_idc == media::H264SPS::kProfileIDCHigh) { | |
841 packed_sps_.AppendUE(current_sps_.chroma_format_idc); | |
842 if (current_sps_.chroma_format_idc == 3) | |
843 packed_sps_.AppendBool(current_sps_.separate_colour_plane_flag); | |
844 packed_sps_.AppendUE(current_sps_.bit_depth_luma_minus8); | |
845 packed_sps_.AppendUE(current_sps_.bit_depth_chroma_minus8); | |
846 packed_sps_.AppendBool(current_sps_.qpprime_y_zero_transform_bypass_flag); | |
847 packed_sps_.AppendBool(current_sps_.seq_scaling_matrix_present_flag); | |
848 CHECK(!current_sps_.seq_scaling_matrix_present_flag); | |
849 } | |
850 | |
851 packed_sps_.AppendUE(current_sps_.log2_max_frame_num_minus4); | |
852 packed_sps_.AppendUE(current_sps_.pic_order_cnt_type); | |
853 if (current_sps_.pic_order_cnt_type == 0) | |
854 packed_sps_.AppendUE(current_sps_.log2_max_pic_order_cnt_lsb_minus4); | |
855 else if (current_sps_.pic_order_cnt_type == 1) { | |
856 CHECK(1); | |
857 } | |
858 | |
859 packed_sps_.AppendUE(current_sps_.max_num_ref_frames); | |
860 packed_sps_.AppendBool(current_sps_.gaps_in_frame_num_value_allowed_flag); | |
861 packed_sps_.AppendUE(current_sps_.pic_width_in_mbs_minus1); | |
862 packed_sps_.AppendUE(current_sps_.pic_height_in_map_units_minus1); | |
863 | |
864 packed_sps_.AppendBool(current_sps_.frame_mbs_only_flag); | |
865 if (!current_sps_.frame_mbs_only_flag) | |
866 packed_sps_.AppendBool(current_sps_.mb_adaptive_frame_field_flag); | |
867 | |
868 packed_sps_.AppendBool(current_sps_.direct_8x8_inference_flag); | |
869 | |
870 packed_sps_.AppendBool(current_sps_.frame_cropping_flag); | |
871 if (current_sps_.frame_cropping_flag) { | |
872 packed_sps_.AppendUE(current_sps_.frame_crop_left_offset); | |
873 packed_sps_.AppendUE(current_sps_.frame_crop_right_offset); | |
874 packed_sps_.AppendUE(current_sps_.frame_crop_top_offset); | |
875 packed_sps_.AppendUE(current_sps_.frame_crop_bottom_offset); | |
876 } | |
877 | |
878 packed_sps_.AppendBool(current_sps_.vui_parameters_present_flag); | |
879 if (current_sps_.vui_parameters_present_flag) { | |
880 packed_sps_.AppendBool(false); // aspect_ratio_info_present_flag | |
881 packed_sps_.AppendBool(false); // overscan_info_present_flag | |
882 packed_sps_.AppendBool(false); // video_signal_type_present_flag | |
883 packed_sps_.AppendBool(false); // chroma_loc_info_present_flag | |
884 | |
885 packed_sps_.AppendBool(current_sps_.timing_info_present_flag); | |
886 if (current_sps_.timing_info_present_flag) { | |
887 packed_sps_.AppendBits(32, current_sps_.num_units_in_tick); | |
888 packed_sps_.AppendBits(32, current_sps_.time_scale); | |
889 packed_sps_.AppendBool(current_sps_.fixed_frame_rate_flag); | |
890 } | |
891 | |
892 packed_sps_.AppendBool(current_sps_.nal_hrd_parameters_present_flag); | |
893 if (current_sps_.nal_hrd_parameters_present_flag) { | |
894 packed_sps_.AppendUE(current_sps_.cpb_cnt_minus1); | |
895 packed_sps_.AppendBits(4, current_sps_.bit_rate_scale); | |
896 packed_sps_.AppendBits(4, current_sps_.cpb_size_scale); | |
897 CHECK_LT(base::checked_cast<size_t>(current_sps_.cpb_cnt_minus1), | |
898 arraysize(current_sps_.bit_rate_value_minus1)); | |
899 for (int i = 0; i <= current_sps_.cpb_cnt_minus1; ++i) { | |
900 packed_sps_.AppendUE(current_sps_.bit_rate_value_minus1[i]); | |
901 packed_sps_.AppendUE(current_sps_.cpb_size_value_minus1[i]); | |
902 packed_sps_.AppendBool(current_sps_.cbr_flag[i]); | |
903 } | |
904 packed_sps_.AppendBits( | |
905 5, current_sps_.initial_cpb_removal_delay_length_minus_1); | |
906 packed_sps_.AppendBits(5, current_sps_.cpb_removal_delay_length_minus1); | |
907 packed_sps_.AppendBits(5, current_sps_.dpb_output_delay_length_minus1); | |
908 packed_sps_.AppendBits(5, current_sps_.time_offset_length); | |
909 } | |
910 | |
911 packed_sps_.AppendBool(false); // vcl_hrd_parameters_flag | |
912 if (current_sps_.nal_hrd_parameters_present_flag) | |
913 packed_sps_.AppendBool(current_sps_.low_delay_hrd_flag); | |
914 | |
915 packed_sps_.AppendBool(false); // pic_struct_present_flag | |
916 packed_sps_.AppendBool(false); // bitstream_restriction_flag | |
917 } | |
918 | |
919 packed_sps_.FinishNALU(); | |
920 } | |
921 | |
922 void VaapiVideoEncodeAccelerator::UpdatePPS() { | |
923 memset(¤t_pps_, 0, sizeof(media::H264PPS)); | |
924 | |
925 current_pps_.seq_parameter_set_id = current_sps_.seq_parameter_set_id; | |
926 current_pps_.pic_parameter_set_id = 0; | |
927 | |
928 current_pps_.entropy_coding_mode_flag = | |
929 current_sps_.profile_idc >= media::H264SPS::kProfileIDCMain; | |
930 | |
931 CHECK_GT(max_ref_idx_l0_size_, 0); | |
932 current_pps_.num_ref_idx_l0_default_active_minus1 = max_ref_idx_l0_size_ - 1; | |
933 current_pps_.num_ref_idx_l1_default_active_minus1 = 0; | |
934 DCHECK_LE(qp_, 51); | |
935 current_pps_.pic_init_qp_minus26 = qp_ - 26; | |
936 current_pps_.deblocking_filter_control_present_flag = true; | |
937 current_pps_.transform_8x8_mode_flag = | |
938 (current_sps_.profile_idc == media::H264SPS::kProfileIDCHigh); | |
939 } | |
940 | |
// Serializes |current_pps_| into a packed (bit-string) PPS NALU in
// |packed_pps_|. The append order below mirrors the pic_parameter_set_rbsp()
// syntax of the H.264 spec (section 7.3.2.2) exactly and must not be
// reordered.
void VaapiVideoEncodeAccelerator::GeneratePackedPPS() {
  packed_pps_.Reset();

  // Second argument is presumably nal_ref_idc (3 == highest); the PPS is a
  // reference NALU.
  packed_pps_.BeginNALU(media::H264NALU::kPPS, 3);

  packed_pps_.AppendUE(current_pps_.pic_parameter_set_id);
  packed_pps_.AppendUE(current_pps_.seq_parameter_set_id);
  packed_pps_.AppendBool(current_pps_.entropy_coding_mode_flag);
  packed_pps_.AppendBool(
      current_pps_.bottom_field_pic_order_in_frame_present_flag);
  // Slice groups (FMO) would require emitting additional slice-group-map
  // syntax that is not supported here.
  CHECK_EQ(current_pps_.num_slice_groups_minus1, 0);
  packed_pps_.AppendUE(current_pps_.num_slice_groups_minus1);

  packed_pps_.AppendUE(current_pps_.num_ref_idx_l0_default_active_minus1);
  packed_pps_.AppendUE(current_pps_.num_ref_idx_l1_default_active_minus1);

  packed_pps_.AppendBool(current_pps_.weighted_pred_flag);
  packed_pps_.AppendBits(2, current_pps_.weighted_bipred_idc);

  // QP/QS offsets are signed Exp-Golomb (se(v)) values.
  packed_pps_.AppendSE(current_pps_.pic_init_qp_minus26);
  packed_pps_.AppendSE(current_pps_.pic_init_qs_minus26);
  packed_pps_.AppendSE(current_pps_.chroma_qp_index_offset);

  packed_pps_.AppendBool(current_pps_.deblocking_filter_control_present_flag);
  packed_pps_.AppendBool(current_pps_.constrained_intra_pred_flag);
  packed_pps_.AppendBool(current_pps_.redundant_pic_cnt_present_flag);

  packed_pps_.AppendBool(current_pps_.transform_8x8_mode_flag);
  packed_pps_.AppendBool(current_pps_.pic_scaling_matrix_present_flag);
  // Scaling matrices are never emitted, so no scaling-list syntax follows.
  DCHECK(!current_pps_.pic_scaling_matrix_present_flag);
  packed_pps_.AppendSE(current_pps_.second_chroma_qp_index_offset);

  packed_pps_.FinishNALU();
}
975 | |
976 void VaapiVideoEncodeAccelerator::SetState(State state) { | |
977 // Only touch state on encoder thread, unless it's not running. | |
978 if (encoder_thread_.IsRunning() && | |
979 !encoder_thread_proxy_->BelongsToCurrentThread()) { | |
980 encoder_thread_proxy_->PostTask( | |
981 FROM_HERE, | |
982 base::Bind(&VaapiVideoEncodeAccelerator::SetState, | |
983 base::Unretained(this), | |
984 state)); | |
985 return; | |
986 } | |
987 | |
988 DVLOGF(1) << "setting state to: " << state; | |
989 state_ = state; | |
990 } | |
991 | |
992 void VaapiVideoEncodeAccelerator::NotifyError(Error error) { | |
993 if (!child_message_loop_proxy_->BelongsToCurrentThread()) { | |
994 child_message_loop_proxy_->PostTask( | |
995 FROM_HERE, | |
996 base::Bind( | |
997 &VaapiVideoEncodeAccelerator::NotifyError, weak_this_, error)); | |
998 return; | |
999 } | |
1000 | |
1001 if (client_) { | |
1002 client_->NotifyError(error); | |
1003 client_ptr_factory_.reset(); | |
1004 } | |
1005 } | |
1006 | |
// Creates an empty job: no coded buffer attached yet (VA_INVALID_ID) and
// not marked as a keyframe.
VaapiVideoEncodeAccelerator::EncodeJob::EncodeJob()
    : coded_buffer(VA_INVALID_ID), keyframe(false) {
}
1010 | |
1011 } // namespace content | |
OLD | NEW |