Chromium Code Reviews

Side by Side Diff: content/common/gpu/media/vaapi_video_encode_accelerator.cc

Issue 1882373004: Migrate content/common/gpu/media code to media/gpu (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Fix several more bot-identified build issues Created 4 years, 8 months ago
1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "content/common/gpu/media/vaapi_video_encode_accelerator.h"
6
7 #include <string.h>
8 #include <utility>
9
10 #include "base/bind.h"
11 #include "base/callback.h"
12 #include "base/macros.h"
13 #include "base/metrics/histogram.h"
14 #include "base/numerics/safe_conversions.h"
15 #include "content/common/gpu/media/h264_dpb.h"
16 #include "content/common/gpu/media/shared_memory_region.h"
17 #include "media/base/bind_to_current_loop.h"
18 #include "third_party/libva/va/va_enc_h264.h"
19
20 #define DVLOGF(level) DVLOG(level) << __FUNCTION__ << "(): "
21
22 #define NOTIFY_ERROR(error, msg) \
23 do { \
24 SetState(kError); \
25 LOG(ERROR) << msg; \
26 LOG(ERROR) << "Calling NotifyError(" << error << ")";\
27 NotifyError(error); \
28 } while (0)
29
30 namespace content {
31
32 namespace {
33 // Need 2 surfaces for each frame: one for input data and one for the
34 // reconstructed picture, which is later used for reference.
35 const size_t kMinSurfacesToEncode = 2;
36
37 // Subjectively chosen.
38 const size_t kNumInputBuffers = 4;
39 const size_t kMaxNumReferenceFrames = 4;
40
41 // We need up to kMaxNumReferenceFrames surfaces for reference, plus one
42 // for input and one for encode (which will be added to the set of reference
43 // frames for subsequent frames). Actual execution of HW encode is done
44 // in parallel, and we want to process more frames in the meantime.
45 // To have kNumInputBuffers in flight, we need a full set of reference +
46 // encode surfaces (i.e. kMaxNumReferenceFrames + kMinSurfacesToEncode), and
47 // (kNumInputBuffers - 1) of kMinSurfacesToEncode for the remaining frames
48 // in flight.
49 const size_t kNumSurfaces = kMaxNumReferenceFrames + kMinSurfacesToEncode +
50 kMinSurfacesToEncode * (kNumInputBuffers - 1);
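// With the constants above this works out to 4 + 2 + 2 * (4 - 1) = 12
// surfaces in total. An illustrative compile-time check of that arithmetic
// (illustrative only, not code from this patch) would be:
//   static_assert(kNumSurfaces == 12, "unexpected surface pool size");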
51
52 // An IDR every 2048 frames, an I frame every 256 and no B frames.
53 // We choose IDR period to equal MaxFrameNum so it must be a power of 2.
54 const int kIDRPeriod = 2048;
55 const int kIPeriod = 256;
56 const int kIPPeriod = 1;
57
58 const int kDefaultFramerate = 30;
59
60 // HRD parameters (ch. E.2.2 in spec).
61 const int kBitRateScale = 0; // bit_rate_scale for SPS HRD parameters.
62 const int kCPBSizeScale = 0; // cpb_size_scale for SPS HRD parameters.
63
64 const int kDefaultQP = 26;
65 // All Intel codecs can do at least 4.1.
66 const int kDefaultLevelIDC = 41;
67 const int kChromaFormatIDC = 1; // 4:2:0
68
69 // Arbitrarily chosen bitrate window size for rate control, in ms.
70 const int kCPBWindowSizeMs = 1500;
71
72 // UMA errors that the VaapiVideoEncodeAccelerator class reports.
73 enum VAVEAEncoderFailure {
74 VAAPI_ERROR = 0,
75 // UMA requires the max value to be greater than 1.
76 VAVEA_ENCODER_FAILURES_MAX = 2,
77 };
78
79 }  // namespace
80
81 // Round |value| up to |alignment|, which must be a power of 2.
82 static inline size_t RoundUpToPowerOf2(size_t value, size_t alignment) {
83 // Check that |alignment| is a power of 2.
84 DCHECK((alignment + (alignment - 1)) == (alignment | (alignment - 1)));
85 return ((value + (alignment - 1)) & ~(alignment - 1));
86 }
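// For illustration, assuming a hypothetical 1280x720 input (not a value taken
// from this patch): RoundUpToPowerOf2(1280, 16) == 1280 and
// RoundUpToPowerOf2(720, 16) == 736, so Initialize() below would derive a
// coded size of 1280x736, i.e. mb_width_ == 80 and mb_height_ == 46.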
87
88 static void ReportToUMA(VAVEAEncoderFailure failure) {
89 UMA_HISTOGRAM_ENUMERATION(
90 "Media.VAVEA.EncoderFailure",
91 failure,
92 VAVEA_ENCODER_FAILURES_MAX);
93 }
94
95 struct VaapiVideoEncodeAccelerator::InputFrameRef {
96 InputFrameRef(const scoped_refptr<media::VideoFrame>& frame,
97 bool force_keyframe)
98 : frame(frame), force_keyframe(force_keyframe) {}
99 const scoped_refptr<media::VideoFrame> frame;
100 const bool force_keyframe;
101 };
102
103 struct VaapiVideoEncodeAccelerator::BitstreamBufferRef {
104 BitstreamBufferRef(int32_t id, std::unique_ptr<SharedMemoryRegion> shm)
105 : id(id), shm(std::move(shm)) {}
106 const int32_t id;
107 const std::unique_ptr<SharedMemoryRegion> shm;
108 };
109
110 media::VideoEncodeAccelerator::SupportedProfiles
111 VaapiVideoEncodeAccelerator::GetSupportedProfiles() {
112 return VaapiWrapper::GetSupportedEncodeProfiles();
113 }
114
115 static unsigned int Log2OfPowerOf2(unsigned int x) {
116 CHECK_GT(x, 0u);
117 DCHECK_EQ(x & (x - 1), 0u);
118
119 int log = 0;
120 while (x > 1) {
121 x >>= 1;
122 ++log;
123 }
124 return log;
125 }
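// For example, Log2OfPowerOf2(kIDRPeriod) == Log2OfPowerOf2(2048) == 11, so
// UpdateSPS() below derives log2_max_frame_num_minus4 == 11 - 4 == 7 and
// log2_max_pic_order_cnt_lsb_minus4 == Log2OfPowerOf2(4096) - 4 == 8.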
126
127 VaapiVideoEncodeAccelerator::VaapiVideoEncodeAccelerator()
128 : profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN),
129 mb_width_(0),
130 mb_height_(0),
131 output_buffer_byte_size_(0),
132 state_(kUninitialized),
133 frame_num_(0),
134 idr_pic_id_(0),
135 bitrate_(0),
136 framerate_(0),
137 cpb_size_(0),
138 encoding_parameters_changed_(false),
139 encoder_thread_("VAVEAEncoderThread"),
140 child_task_runner_(base::ThreadTaskRunnerHandle::Get()),
141 weak_this_ptr_factory_(this) {
142 DVLOGF(4);
143 weak_this_ = weak_this_ptr_factory_.GetWeakPtr();
144
145 max_ref_idx_l0_size_ = kMaxNumReferenceFrames;
146 qp_ = kDefaultQP;
147 idr_period_ = kIDRPeriod;
148 i_period_ = kIPeriod;
149 ip_period_ = kIPPeriod;
150 }
151
152 VaapiVideoEncodeAccelerator::~VaapiVideoEncodeAccelerator() {
153 DVLOGF(4);
154 DCHECK(child_task_runner_->BelongsToCurrentThread());
155 DCHECK(!encoder_thread_.IsRunning());
156 }
157
158 bool VaapiVideoEncodeAccelerator::Initialize(
159 media::VideoPixelFormat format,
160 const gfx::Size& input_visible_size,
161 media::VideoCodecProfile output_profile,
162 uint32_t initial_bitrate,
163 Client* client) {
164 DCHECK(child_task_runner_->BelongsToCurrentThread());
165 DCHECK(!encoder_thread_.IsRunning());
166 DCHECK_EQ(state_, kUninitialized);
167
168 DVLOGF(1) << "Initializing VAVEA, input_format: "
169 << media::VideoPixelFormatToString(format)
170 << ", input_visible_size: " << input_visible_size.ToString()
171 << ", output_profile: " << output_profile
172 << ", initial_bitrate: " << initial_bitrate;
173
174 client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));
175 client_ = client_ptr_factory_->GetWeakPtr();
176
177 const SupportedProfiles& profiles = GetSupportedProfiles();
178 auto profile = std::find_if(profiles.begin(), profiles.end(),
179 [output_profile](const SupportedProfile& profile) {
180 return profile.profile == output_profile;
181 });
182 if (profile == profiles.end()) {
183 DVLOGF(1) << "Unsupported output profile " << output_profile;
184 return false;
185 }
186 if (input_visible_size.width() > profile->max_resolution.width() ||
187 input_visible_size.height() > profile->max_resolution.height()) {
188 DVLOGF(1) << "Input size too big: " << input_visible_size.ToString()
189 << ", max supported size: " << profile->max_resolution.ToString();
190 return false;
191 }
192
193 if (format != media::PIXEL_FORMAT_I420) {
194 DVLOGF(1) << "Unsupported input format: "
195 << media::VideoPixelFormatToString(format);
196 return false;
197 }
198
199 profile_ = output_profile;
200 visible_size_ = input_visible_size;
201 // 4:2:0 format has to be 2-aligned.
202 DCHECK_EQ(visible_size_.width() % 2, 0);
203 DCHECK_EQ(visible_size_.height() % 2, 0);
204 coded_size_ = gfx::Size(RoundUpToPowerOf2(visible_size_.width(), 16),
205 RoundUpToPowerOf2(visible_size_.height(), 16));
206 mb_width_ = coded_size_.width() / 16;
207 mb_height_ = coded_size_.height() / 16;
208 output_buffer_byte_size_ = coded_size_.GetArea();
209
210 UpdateRates(initial_bitrate, kDefaultFramerate);
211
212 vaapi_wrapper_ =
213 VaapiWrapper::CreateForVideoCodec(VaapiWrapper::kEncode, output_profile,
214 base::Bind(&ReportToUMA, VAAPI_ERROR));
215 if (!vaapi_wrapper_.get()) {
216 DVLOGF(1) << "Failed initializing VAAPI for profile " << output_profile;
217 return false;
218 }
219
220 if (!encoder_thread_.Start()) {
221 LOG(ERROR) << "Failed to start encoder thread";
222 return false;
223 }
224 encoder_thread_task_runner_ = encoder_thread_.task_runner();
225
226 // Finish the remaining initialization on the encoder thread.
227 encoder_thread_task_runner_->PostTask(
228 FROM_HERE, base::Bind(&VaapiVideoEncodeAccelerator::InitializeTask,
229 base::Unretained(this)));
230
231 return true;
232 }
233
234 void VaapiVideoEncodeAccelerator::InitializeTask() {
235 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
236 DCHECK_EQ(state_, kUninitialized);
237 DVLOGF(4);
238
239 va_surface_release_cb_ = media::BindToCurrentLoop(
240 base::Bind(&VaapiVideoEncodeAccelerator::RecycleVASurfaceID,
241 base::Unretained(this)));
242
243 if (!vaapi_wrapper_->CreateSurfaces(VA_RT_FORMAT_YUV420, coded_size_,
244 kNumSurfaces,
245 &available_va_surface_ids_)) {
246 NOTIFY_ERROR(kPlatformFailureError, "Failed creating VASurfaces");
247 return;
248 }
249
250 UpdateSPS();
251 GeneratePackedSPS();
252
253 UpdatePPS();
254 GeneratePackedPPS();
255
256 child_task_runner_->PostTask(
257 FROM_HERE,
258 base::Bind(&Client::RequireBitstreamBuffers, client_, kNumInputBuffers,
259 coded_size_, output_buffer_byte_size_));
260
261 SetState(kEncoding);
262 }
263
264 void VaapiVideoEncodeAccelerator::RecycleVASurfaceID(
265 VASurfaceID va_surface_id) {
266 DVLOGF(4) << "va_surface_id: " << va_surface_id;
267 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
268
269 available_va_surface_ids_.push_back(va_surface_id);
270 EncodeFrameTask();
271 }
272
273 void VaapiVideoEncodeAccelerator::BeginFrame(bool force_keyframe) {
274 current_pic_ = new H264Picture();
275
276 // If the current picture is an IDR picture, frame_num shall be equal to 0.
277 if (force_keyframe)
278 frame_num_ = 0;
279
280 current_pic_->frame_num = frame_num_++;
281 frame_num_ %= idr_period_;
282
283 if (current_pic_->frame_num == 0) {
284 current_pic_->idr = true;
285 // The H.264 spec mandates that idr_pic_id differ between two consecutive IDRs.
286 idr_pic_id_ ^= 1;
287 ref_pic_list0_.clear();
288 }
289
290 if (current_pic_->frame_num % i_period_ == 0)
291 current_pic_->type = media::H264SliceHeader::kISlice;
292 else
293 current_pic_->type = media::H264SliceHeader::kPSlice;
294
295 if (current_pic_->type != media::H264SliceHeader::kBSlice)
296 current_pic_->ref = true;
297
298 current_pic_->pic_order_cnt = current_pic_->frame_num * 2;
299 current_pic_->top_field_order_cnt = current_pic_->pic_order_cnt;
300 current_pic_->pic_order_cnt_lsb = current_pic_->pic_order_cnt;
301
302 current_encode_job_->keyframe = current_pic_->idr;
303
304 DVLOGF(4) << "Starting a new frame, type: " << current_pic_->type
305 << (force_keyframe ? " (forced keyframe)" : "")
306 << " frame_num: " << current_pic_->frame_num
307 << " POC: " << current_pic_->pic_order_cnt;
308 }
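// To illustrate the resulting frame pattern with the constants above
// (kIDRPeriod == 2048, kIPeriod == 256, no B frames): frame_num 0 is an IDR,
// frame_num 256, 512, ... are plain I slices, every other frame is a P slice,
// and pic_order_cnt is simply 2 * frame_num since no reordering is done.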
309
310 void VaapiVideoEncodeAccelerator::EndFrame() {
311 DCHECK(current_pic_);
312 // Store the picture on the list of reference pictures and keep the list
313 // below maximum size, dropping oldest references.
314 if (current_pic_->ref)
315 ref_pic_list0_.push_front(current_encode_job_->recon_surface);
316 size_t max_num_ref_frames =
317 base::checked_cast<size_t>(current_sps_.max_num_ref_frames);
318 while (ref_pic_list0_.size() > max_num_ref_frames)
319 ref_pic_list0_.pop_back();
320
321 submitted_encode_jobs_.push(make_linked_ptr(current_encode_job_.release()));
322 }
323
324 static void InitVAPicture(VAPictureH264* va_pic) {
325 memset(va_pic, 0, sizeof(*va_pic));
326 va_pic->picture_id = VA_INVALID_ID;
327 va_pic->flags = VA_PICTURE_H264_INVALID;
328 }
329
330 bool VaapiVideoEncodeAccelerator::SubmitFrameParameters() {
331 DCHECK(current_pic_);
332 VAEncSequenceParameterBufferH264 seq_param;
333 memset(&seq_param, 0, sizeof(seq_param));
334
335 #define SPS_TO_SP(a) seq_param.a = current_sps_.a;
336 SPS_TO_SP(seq_parameter_set_id);
337 SPS_TO_SP(level_idc);
338
339 seq_param.intra_period = i_period_;
340 seq_param.intra_idr_period = idr_period_;
341 seq_param.ip_period = ip_period_;
342 seq_param.bits_per_second = bitrate_;
343
344 SPS_TO_SP(max_num_ref_frames);
345 seq_param.picture_width_in_mbs = mb_width_;
346 seq_param.picture_height_in_mbs = mb_height_;
347
348 #define SPS_TO_SP_FS(a) seq_param.seq_fields.bits.a = current_sps_.a;
349 SPS_TO_SP_FS(chroma_format_idc);
350 SPS_TO_SP_FS(frame_mbs_only_flag);
351 SPS_TO_SP_FS(log2_max_frame_num_minus4);
352 SPS_TO_SP_FS(pic_order_cnt_type);
353 SPS_TO_SP_FS(log2_max_pic_order_cnt_lsb_minus4);
354 #undef SPS_TO_SP_FS
355
356 SPS_TO_SP(bit_depth_luma_minus8);
357 SPS_TO_SP(bit_depth_chroma_minus8);
358
359 SPS_TO_SP(frame_cropping_flag);
360 if (current_sps_.frame_cropping_flag) {
361 SPS_TO_SP(frame_crop_left_offset);
362 SPS_TO_SP(frame_crop_right_offset);
363 SPS_TO_SP(frame_crop_top_offset);
364 SPS_TO_SP(frame_crop_bottom_offset);
365 }
366
367 SPS_TO_SP(vui_parameters_present_flag);
368 #define SPS_TO_SP_VF(a) seq_param.vui_fields.bits.a = current_sps_.a;
369 SPS_TO_SP_VF(timing_info_present_flag);
370 #undef SPS_TO_SP_VF
371 SPS_TO_SP(num_units_in_tick);
372 SPS_TO_SP(time_scale);
373 #undef SPS_TO_SP
374
375 if (!vaapi_wrapper_->SubmitBuffer(VAEncSequenceParameterBufferType,
376 sizeof(seq_param),
377 &seq_param))
378 return false;
379
380 VAEncPictureParameterBufferH264 pic_param;
381 memset(&pic_param, 0, sizeof(pic_param));
382
383 pic_param.CurrPic.picture_id = current_encode_job_->recon_surface->id();
384 pic_param.CurrPic.TopFieldOrderCnt = current_pic_->top_field_order_cnt;
385 pic_param.CurrPic.BottomFieldOrderCnt = current_pic_->bottom_field_order_cnt;
386 pic_param.CurrPic.flags = 0;
387
388 for (size_t i = 0; i < arraysize(pic_param.ReferenceFrames); ++i)
389 InitVAPicture(&pic_param.ReferenceFrames[i]);
390
391 DCHECK_LE(ref_pic_list0_.size(), arraysize(pic_param.ReferenceFrames));
392 RefPicList::const_iterator iter = ref_pic_list0_.begin();
393 for (size_t i = 0;
394 i < arraysize(pic_param.ReferenceFrames) && iter != ref_pic_list0_.end();
395 ++iter, ++i) {
396 pic_param.ReferenceFrames[i].picture_id = (*iter)->id();
397 pic_param.ReferenceFrames[i].flags = 0;
398 }
399
400 pic_param.coded_buf = current_encode_job_->coded_buffer;
401 pic_param.pic_parameter_set_id = current_pps_.pic_parameter_set_id;
402 pic_param.seq_parameter_set_id = current_pps_.seq_parameter_set_id;
403 pic_param.frame_num = current_pic_->frame_num;
404 pic_param.pic_init_qp = qp_;
405 pic_param.num_ref_idx_l0_active_minus1 = max_ref_idx_l0_size_ - 1;
406 pic_param.pic_fields.bits.idr_pic_flag = current_pic_->idr;
407 pic_param.pic_fields.bits.reference_pic_flag = current_pic_->ref;
408 #define PPS_TO_PP_PF(a) pic_param.pic_fields.bits.a = current_pps_.a;
409 PPS_TO_PP_PF(entropy_coding_mode_flag);
410 PPS_TO_PP_PF(transform_8x8_mode_flag);
411 PPS_TO_PP_PF(deblocking_filter_control_present_flag);
412 #undef PPS_TO_PP_PF
413
414 if (!vaapi_wrapper_->SubmitBuffer(VAEncPictureParameterBufferType,
415 sizeof(pic_param),
416 &pic_param))
417 return false;
418
419 VAEncSliceParameterBufferH264 slice_param;
420 memset(&slice_param, 0, sizeof(slice_param));
421
422 slice_param.num_macroblocks = mb_width_ * mb_height_;
423 slice_param.macroblock_info = VA_INVALID_ID;
424 slice_param.slice_type = current_pic_->type;
425 slice_param.pic_parameter_set_id = current_pps_.pic_parameter_set_id;
426 slice_param.idr_pic_id = idr_pic_id_;
427 slice_param.pic_order_cnt_lsb = current_pic_->pic_order_cnt_lsb;
428 slice_param.num_ref_idx_active_override_flag = true;
429
430 for (size_t i = 0; i < arraysize(slice_param.RefPicList0); ++i)
431 InitVAPicture(&slice_param.RefPicList0[i]);
432
433 for (size_t i = 0; i < arraysize(slice_param.RefPicList1); ++i)
434 InitVAPicture(&slice_param.RefPicList1[i]);
435
436 DCHECK_LE(ref_pic_list0_.size(), arraysize(slice_param.RefPicList0));
437 iter = ref_pic_list0_.begin();
438 for (size_t i = 0;
439 i < arraysize(slice_param.RefPicList0) && iter != ref_pic_list0_.end();
440 ++iter, ++i) {
441 InitVAPicture(&slice_param.RefPicList0[i]);
442 slice_param.RefPicList0[i].picture_id = (*iter)->id();
443 slice_param.RefPicList0[i].flags = 0;
444 }
445
446 if (!vaapi_wrapper_->SubmitBuffer(VAEncSliceParameterBufferType,
447 sizeof(slice_param),
448 &slice_param))
449 return false;
450
451 VAEncMiscParameterRateControl rate_control_param;
452 memset(&rate_control_param, 0, sizeof(rate_control_param));
453 rate_control_param.bits_per_second = bitrate_;
454 rate_control_param.target_percentage = 90;
455 rate_control_param.window_size = kCPBWindowSizeMs;
456 rate_control_param.initial_qp = qp_;
457 rate_control_param.rc_flags.bits.disable_frame_skip = true;
458
459 if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer(
460 VAEncMiscParameterTypeRateControl,
461 sizeof(rate_control_param),
462 &rate_control_param))
463 return false;
464
465 VAEncMiscParameterFrameRate framerate_param;
466 memset(&framerate_param, 0, sizeof(framerate_param));
467 framerate_param.framerate = framerate_;
468 if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer(
469 VAEncMiscParameterTypeFrameRate,
470 sizeof(framerate_param),
471 &framerate_param))
472 return false;
473
474 VAEncMiscParameterHRD hrd_param;
475 memset(&hrd_param, 0, sizeof(hrd_param));
476 hrd_param.buffer_size = cpb_size_;
477 hrd_param.initial_buffer_fullness = cpb_size_ / 2;
478 if (!vaapi_wrapper_->SubmitVAEncMiscParamBuffer(VAEncMiscParameterTypeHRD,
479 sizeof(hrd_param),
480 &hrd_param))
481 return false;
482
483 return true;
484 }
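// In summary, each frame queues one sequence, one picture and one slice
// parameter buffer plus the rate control, framerate and HRD misc parameter
// buffers; together with any packed headers from SubmitHeadersIfNeeded(),
// they stay pending until ExecuteEncode() calls
// ExecuteAndDestroyPendingBuffers() on the input surface.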
485
486 bool VaapiVideoEncodeAccelerator::SubmitHeadersIfNeeded() {
487 DCHECK(current_pic_);
488 if (current_pic_->type != media::H264SliceHeader::kISlice)
489 return true;
490
491 // Submit SPS.
492 VAEncPackedHeaderParameterBuffer par_buffer;
493 memset(&par_buffer, 0, sizeof(par_buffer));
494 par_buffer.type = VAEncPackedHeaderSequence;
495 par_buffer.bit_length = packed_sps_.BytesInBuffer() * 8;
496
497 if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderParameterBufferType,
498 sizeof(par_buffer),
499 &par_buffer))
500 return false;
501
502 if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderDataBufferType,
503 packed_sps_.BytesInBuffer(),
504 packed_sps_.data()))
505 return false;
506
507 // Submit PPS.
508 memset(&par_buffer, 0, sizeof(par_buffer));
509 par_buffer.type = VAEncPackedHeaderPicture;
510 par_buffer.bit_length = packed_pps_.BytesInBuffer() * 8;
511
512 if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderParameterBufferType,
513 sizeof(par_buffer),
514 &par_buffer))
515 return false;
516
517 if (!vaapi_wrapper_->SubmitBuffer(VAEncPackedHeaderDataBufferType,
518 packed_pps_.BytesInBuffer(),
519 packed_pps_.data()))
520 return false;
521
522 return true;
523 }
524
525 bool VaapiVideoEncodeAccelerator::ExecuteEncode() {
526 DCHECK(current_pic_);
527 DVLOGF(3) << "Encoding frame_num: " << current_pic_->frame_num;
528 return vaapi_wrapper_->ExecuteAndDestroyPendingBuffers(
529 current_encode_job_->input_surface->id());
530 }
531
532 bool VaapiVideoEncodeAccelerator::UploadFrame(
533 const scoped_refptr<media::VideoFrame>& frame) {
534 return vaapi_wrapper_->UploadVideoFrameToSurface(
535 frame, current_encode_job_->input_surface->id());
536 }
537
538 void VaapiVideoEncodeAccelerator::TryToReturnBitstreamBuffer() {
539 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
540
541 if (state_ != kEncoding)
542 return;
543
544 if (submitted_encode_jobs_.empty() || available_bitstream_buffers_.empty())
545 return;
546
547 linked_ptr<BitstreamBufferRef> buffer = available_bitstream_buffers_.front();
548 available_bitstream_buffers_.pop();
549
550 uint8_t* target_data = reinterpret_cast<uint8_t*>(buffer->shm->memory());
551
552 linked_ptr<EncodeJob> encode_job = submitted_encode_jobs_.front();
553 submitted_encode_jobs_.pop();
554
555 size_t data_size = 0;
556 if (!vaapi_wrapper_->DownloadAndDestroyCodedBuffer(
557 encode_job->coded_buffer, encode_job->input_surface->id(),
558 target_data, buffer->shm->size(), &data_size)) {
559 NOTIFY_ERROR(kPlatformFailureError, "Failed downloading coded buffer");
560 return;
561 }
562
563 DVLOGF(3) << "Returning bitstream buffer "
564 << (encode_job->keyframe ? "(keyframe)" : "")
565 << " id: " << buffer->id << " size: " << data_size;
566
567 child_task_runner_->PostTask(
568 FROM_HERE, base::Bind(&Client::BitstreamBufferReady, client_, buffer->id,
569 data_size, encode_job->keyframe));
570 }
571
572 void VaapiVideoEncodeAccelerator::Encode(
573 const scoped_refptr<media::VideoFrame>& frame,
574 bool force_keyframe) {
575 DVLOGF(3) << "Frame timestamp: " << frame->timestamp().InMilliseconds()
576 << " force_keyframe: " << force_keyframe;
577 DCHECK(child_task_runner_->BelongsToCurrentThread());
578
579 encoder_thread_task_runner_->PostTask(
580 FROM_HERE, base::Bind(&VaapiVideoEncodeAccelerator::EncodeTask,
581 base::Unretained(this), frame, force_keyframe));
582 }
583
584 bool VaapiVideoEncodeAccelerator::PrepareNextJob() {
585 if (available_va_surface_ids_.size() < kMinSurfacesToEncode)
586 return false;
587
588 DCHECK(!current_encode_job_);
589 current_encode_job_.reset(new EncodeJob());
590
591 if (!vaapi_wrapper_->CreateCodedBuffer(output_buffer_byte_size_,
592 &current_encode_job_->coded_buffer)) {
593 NOTIFY_ERROR(kPlatformFailureError, "Failed creating coded buffer");
594 return false;
595 }
596
597 current_encode_job_->input_surface = new VASurface(
598 available_va_surface_ids_.back(), coded_size_,
599 vaapi_wrapper_->va_surface_format(), va_surface_release_cb_);
600 available_va_surface_ids_.pop_back();
601
602 current_encode_job_->recon_surface = new VASurface(
603 available_va_surface_ids_.back(), coded_size_,
604 vaapi_wrapper_->va_surface_format(), va_surface_release_cb_);
605 available_va_surface_ids_.pop_back();
606
607 // Reference surfaces are needed until the job is done, but they get
608 // removed from ref_pic_list0_ when it's full at the end of job submission.
609 // Keep refs to them along with the job and only release after sync.
610 current_encode_job_->reference_surfaces = ref_pic_list0_;
611
612 return true;
613 }
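// Note that each job consumes exactly kMinSurfacesToEncode (2) surfaces from
// available_va_surface_ids_ (one for input and one for the reconstructed
// picture), which is why the early return above requires at least that many
// free surfaces.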
614
615 void VaapiVideoEncodeAccelerator::EncodeTask(
616 const scoped_refptr<media::VideoFrame>& frame,
617 bool force_keyframe) {
618 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
619 DCHECK_NE(state_, kUninitialized);
620
621 encoder_input_queue_.push(
622 make_linked_ptr(new InputFrameRef(frame, force_keyframe)));
623 EncodeFrameTask();
624 }
625
626 void VaapiVideoEncodeAccelerator::EncodeFrameTask() {
627 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
628
629 if (state_ != kEncoding || encoder_input_queue_.empty())
630 return;
631
632 if (!PrepareNextJob()) {
633 DVLOGF(4) << "Not ready for next frame yet";
634 return;
635 }
636
637 linked_ptr<InputFrameRef> frame_ref = encoder_input_queue_.front();
638 encoder_input_queue_.pop();
639
640 if (!UploadFrame(frame_ref->frame)) {
641 NOTIFY_ERROR(kPlatformFailureError, "Failed uploading source frame to HW.");
642 return;
643 }
644
645 BeginFrame(frame_ref->force_keyframe || encoding_parameters_changed_);
646 encoding_parameters_changed_ = false;
647
648 if (!SubmitFrameParameters()) {
649 NOTIFY_ERROR(kPlatformFailureError, "Failed submitting frame parameters.");
650 return;
651 }
652
653 if (!SubmitHeadersIfNeeded()) {
654 NOTIFY_ERROR(kPlatformFailureError, "Failed submitting frame headers.");
655 return;
656 }
657
658 if (!ExecuteEncode()) {
659 NOTIFY_ERROR(kPlatformFailureError, "Failed submitting encode job to HW.");
660 return;
661 }
662
663 EndFrame();
664 TryToReturnBitstreamBuffer();
665 }
666
667 void VaapiVideoEncodeAccelerator::UseOutputBitstreamBuffer(
668 const media::BitstreamBuffer& buffer) {
669 DVLOGF(4) << "id: " << buffer.id();
670 DCHECK(child_task_runner_->BelongsToCurrentThread());
671
672 if (buffer.size() < output_buffer_byte_size_) {
673 NOTIFY_ERROR(kInvalidArgumentError, "Provided bitstream buffer too small");
674 return;
675 }
676
677 std::unique_ptr<SharedMemoryRegion> shm(
678 new SharedMemoryRegion(buffer, false));
679 if (!shm->Map()) {
680 NOTIFY_ERROR(kPlatformFailureError, "Failed mapping shared memory.");
681 return;
682 }
683
684 std::unique_ptr<BitstreamBufferRef> buffer_ref(
685 new BitstreamBufferRef(buffer.id(), std::move(shm)));
686
687 encoder_thread_task_runner_->PostTask(
688 FROM_HERE,
689 base::Bind(&VaapiVideoEncodeAccelerator::UseOutputBitstreamBufferTask,
690 base::Unretained(this), base::Passed(&buffer_ref)));
691 }
692
693 void VaapiVideoEncodeAccelerator::UseOutputBitstreamBufferTask(
694 std::unique_ptr<BitstreamBufferRef> buffer_ref) {
695 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
696 DCHECK_NE(state_, kUninitialized);
697
698 available_bitstream_buffers_.push(make_linked_ptr(buffer_ref.release()));
699 TryToReturnBitstreamBuffer();
700 }
701
702 void VaapiVideoEncodeAccelerator::RequestEncodingParametersChange(
703 uint32_t bitrate,
704 uint32_t framerate) {
705 DVLOGF(2) << "bitrate: " << bitrate << " framerate: " << framerate;
706 DCHECK(child_task_runner_->BelongsToCurrentThread());
707
708 encoder_thread_task_runner_->PostTask(
709 FROM_HERE,
710 base::Bind(
711 &VaapiVideoEncodeAccelerator::RequestEncodingParametersChangeTask,
712 base::Unretained(this), bitrate, framerate));
713 }
714
715 void VaapiVideoEncodeAccelerator::UpdateRates(uint32_t bitrate,
716 uint32_t framerate) {
717 if (encoder_thread_.IsRunning())
718 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
719 DCHECK_NE(bitrate, 0u);
720 DCHECK_NE(framerate, 0u);
721 bitrate_ = bitrate;
722 framerate_ = framerate;
723 cpb_size_ = bitrate_ * kCPBWindowSizeMs / 1000;
724 }
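// For example, with a hypothetical 1 Mbps target bitrate and
// kCPBWindowSizeMs == 1500, cpb_size_ becomes 1000000 * 1500 / 1000 ==
// 1500000 bits, i.e. the coded picture buffer covers 1.5 seconds at the
// target rate.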
725
726 void VaapiVideoEncodeAccelerator::RequestEncodingParametersChangeTask(
727 uint32_t bitrate,
728 uint32_t framerate) {
729 DVLOGF(2) << "bitrate: " << bitrate << " framerate: " << framerate;
730 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
731 DCHECK_NE(state_, kUninitialized);
732
733 // This is a workaround for the WebRTC video encoder temporarily providing
734 // a zero bitrate and framerate during initial setup, which VAAPI does not
735 // accept.
736 // TODO: This code is shared with v4l2_video_encode_accelerator.cc; perhaps
737 // it could be pulled up into RTCVideoEncoder.
738 if (bitrate < 1)
739 bitrate = 1;
740 if (framerate < 1)
741 framerate = 1;
742
743 if (bitrate_ == bitrate && framerate_ == framerate)
744 return;
745
746 UpdateRates(bitrate, framerate);
747
748 UpdateSPS();
749 GeneratePackedSPS();
750
751 // Submit the new parameters along with the next frame to be processed.
752 encoding_parameters_changed_ = true;
753 }
754
755 void VaapiVideoEncodeAccelerator::Destroy() {
756 DCHECK(child_task_runner_->BelongsToCurrentThread());
757
758 // Can't call client anymore after Destroy() returns.
759 client_ptr_factory_.reset();
760 weak_this_ptr_factory_.InvalidateWeakPtrs();
761
762 // Early-exit encoder tasks if they are running and join the thread.
763 if (encoder_thread_.IsRunning()) {
764 encoder_thread_.message_loop()->PostTask(
765 FROM_HERE,
766 base::Bind(&VaapiVideoEncodeAccelerator::DestroyTask,
767 base::Unretained(this)));
768 encoder_thread_.Stop();
769 }
770
771 delete this;
772 }
773
774 void VaapiVideoEncodeAccelerator::DestroyTask() {
775 DVLOGF(2);
776 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
777 SetState(kError);
778 }
779
780 void VaapiVideoEncodeAccelerator::UpdateSPS() {
781 memset(&current_sps_, 0, sizeof(media::H264SPS));
782
783 // Spec A.2 and A.3.
784 switch (profile_) {
785 case media::H264PROFILE_BASELINE:
786 // Due to crbug.com/345569, we don't distinguish between constrained
787 // and non-constrained baseline profiles. Since many codecs can't do
788 // non-constrained, and constrained is usually what we mean (and it's a
789 // subset of non-constrained), default to it.
790 current_sps_.profile_idc = media::H264SPS::kProfileIDCBaseline;
791 current_sps_.constraint_set0_flag = true;
792 break;
793 case media::H264PROFILE_MAIN:
794 current_sps_.profile_idc = media::H264SPS::kProfileIDCMain;
795 current_sps_.constraint_set1_flag = true;
796 break;
797 case media::H264PROFILE_HIGH:
798 current_sps_.profile_idc = media::H264SPS::kProfileIDCHigh;
799 break;
800 default:
801 NOTIMPLEMENTED();
802 return;
803 }
804
805 current_sps_.level_idc = kDefaultLevelIDC;
806 current_sps_.seq_parameter_set_id = 0;
807 current_sps_.chroma_format_idc = kChromaFormatIDC;
808
809 DCHECK_GE(idr_period_, 1u << 4);
810 current_sps_.log2_max_frame_num_minus4 = Log2OfPowerOf2(idr_period_) - 4;
811 current_sps_.pic_order_cnt_type = 0;
812 current_sps_.log2_max_pic_order_cnt_lsb_minus4 =
813 Log2OfPowerOf2(idr_period_ * 2) - 4;
814 current_sps_.max_num_ref_frames = max_ref_idx_l0_size_;
815
816 current_sps_.frame_mbs_only_flag = true;
817
818 DCHECK_GT(mb_width_, 0u);
819 DCHECK_GT(mb_height_, 0u);
820 current_sps_.pic_width_in_mbs_minus1 = mb_width_ - 1;
821 DCHECK(current_sps_.frame_mbs_only_flag);
822 current_sps_.pic_height_in_map_units_minus1 = mb_height_ - 1;
823
824 if (visible_size_ != coded_size_) {
825 // Visible size differs from coded size; fill in the crop information.
826 current_sps_.frame_cropping_flag = true;
827 DCHECK(!current_sps_.separate_colour_plane_flag);
828 // Spec table 6-1. Only 4:2:0 for now.
829 DCHECK_EQ(current_sps_.chroma_format_idc, 1);
830 // Spec 7.4.2.1.1. Crop is in crop units, which is 2 pixels for 4:2:0.
831 const unsigned int crop_unit_x = 2;
832 const unsigned int crop_unit_y = 2 * (2 - current_sps_.frame_mbs_only_flag);
833 current_sps_.frame_crop_left_offset = 0;
834 current_sps_.frame_crop_right_offset =
835 (coded_size_.width() - visible_size_.width()) / crop_unit_x;
836 current_sps_.frame_crop_top_offset = 0;
837 current_sps_.frame_crop_bottom_offset =
838 (coded_size_.height() - visible_size_.height()) / crop_unit_y;
839 }
840
841 current_sps_.vui_parameters_present_flag = true;
842 current_sps_.timing_info_present_flag = true;
843 current_sps_.num_units_in_tick = 1;
844 current_sps_.time_scale = framerate_ * 2; // See equation D-2 in spec.
845 current_sps_.fixed_frame_rate_flag = true;
846
847 current_sps_.nal_hrd_parameters_present_flag = true;
848 // H.264 spec ch. E.2.2.
849 current_sps_.cpb_cnt_minus1 = 0;
850 current_sps_.bit_rate_scale = kBitRateScale;
851 current_sps_.cpb_size_scale = kCPBSizeScale;
852 current_sps_.bit_rate_value_minus1[0] =
853 (bitrate_ >>
854 (kBitRateScale + media::H264SPS::kBitRateScaleConstantTerm)) - 1;
855 current_sps_.cpb_size_value_minus1[0] =
856 (cpb_size_ >>
857 (kCPBSizeScale + media::H264SPS::kCPBSizeScaleConstantTerm)) - 1;
858 current_sps_.cbr_flag[0] = true;
859 current_sps_.initial_cpb_removal_delay_length_minus_1 =
860 media::H264SPS::kDefaultInitialCPBRemovalDelayLength - 1;
861 current_sps_.cpb_removal_delay_length_minus1 =
862 media::H264SPS::kDefaultInitialCPBRemovalDelayLength - 1;
863 current_sps_.dpb_output_delay_length_minus1 =
864 media::H264SPS::kDefaultDPBOutputDelayLength - 1;
865 current_sps_.time_offset_length = media::H264SPS::kDefaultTimeOffsetLength;
866 current_sps_.low_delay_hrd_flag = false;
867 }
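// Worked example for the values above, assuming a hypothetical 1280x720 input
// encoded at 1 Mbps: coded_size_ is 1280x736, so only frame_crop_bottom_offset
// is nonzero, (736 - 720) / 2 == 8 crop units. With bit_rate_scale == 0 the
// spec's 2^(6 + bit_rate_scale) bit rate granularity is 64 bps, giving
// bit_rate_value_minus1[0] == (1000000 >> 6) - 1 == 15624.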
868
869 void VaapiVideoEncodeAccelerator::GeneratePackedSPS() {
870 packed_sps_.Reset();
871
872 packed_sps_.BeginNALU(media::H264NALU::kSPS, 3);
873
874 packed_sps_.AppendBits(8, current_sps_.profile_idc);
875 packed_sps_.AppendBool(current_sps_.constraint_set0_flag);
876 packed_sps_.AppendBool(current_sps_.constraint_set1_flag);
877 packed_sps_.AppendBool(current_sps_.constraint_set2_flag);
878 packed_sps_.AppendBool(current_sps_.constraint_set3_flag);
879 packed_sps_.AppendBool(current_sps_.constraint_set4_flag);
880 packed_sps_.AppendBool(current_sps_.constraint_set5_flag);
881 packed_sps_.AppendBits(2, 0); // reserved_zero_2bits
882 packed_sps_.AppendBits(8, current_sps_.level_idc);
883 packed_sps_.AppendUE(current_sps_.seq_parameter_set_id);
884
885 if (current_sps_.profile_idc == media::H264SPS::kProfileIDCHigh) {
886 packed_sps_.AppendUE(current_sps_.chroma_format_idc);
887 if (current_sps_.chroma_format_idc == 3)
888 packed_sps_.AppendBool(current_sps_.separate_colour_plane_flag);
889 packed_sps_.AppendUE(current_sps_.bit_depth_luma_minus8);
890 packed_sps_.AppendUE(current_sps_.bit_depth_chroma_minus8);
891 packed_sps_.AppendBool(current_sps_.qpprime_y_zero_transform_bypass_flag);
892 packed_sps_.AppendBool(current_sps_.seq_scaling_matrix_present_flag);
893 CHECK(!current_sps_.seq_scaling_matrix_present_flag);
894 }
895
896 packed_sps_.AppendUE(current_sps_.log2_max_frame_num_minus4);
897 packed_sps_.AppendUE(current_sps_.pic_order_cnt_type);
898 if (current_sps_.pic_order_cnt_type == 0)
899 packed_sps_.AppendUE(current_sps_.log2_max_pic_order_cnt_lsb_minus4);
900 else if (current_sps_.pic_order_cnt_type == 1) {
901 CHECK(1);
902 }
903
904 packed_sps_.AppendUE(current_sps_.max_num_ref_frames);
905 packed_sps_.AppendBool(current_sps_.gaps_in_frame_num_value_allowed_flag);
906 packed_sps_.AppendUE(current_sps_.pic_width_in_mbs_minus1);
907 packed_sps_.AppendUE(current_sps_.pic_height_in_map_units_minus1);
908
909 packed_sps_.AppendBool(current_sps_.frame_mbs_only_flag);
910 if (!current_sps_.frame_mbs_only_flag)
911 packed_sps_.AppendBool(current_sps_.mb_adaptive_frame_field_flag);
912
913 packed_sps_.AppendBool(current_sps_.direct_8x8_inference_flag);
914
915 packed_sps_.AppendBool(current_sps_.frame_cropping_flag);
916 if (current_sps_.frame_cropping_flag) {
917 packed_sps_.AppendUE(current_sps_.frame_crop_left_offset);
918 packed_sps_.AppendUE(current_sps_.frame_crop_right_offset);
919 packed_sps_.AppendUE(current_sps_.frame_crop_top_offset);
920 packed_sps_.AppendUE(current_sps_.frame_crop_bottom_offset);
921 }
922
923 packed_sps_.AppendBool(current_sps_.vui_parameters_present_flag);
924 if (current_sps_.vui_parameters_present_flag) {
925 packed_sps_.AppendBool(false); // aspect_ratio_info_present_flag
926 packed_sps_.AppendBool(false); // overscan_info_present_flag
927 packed_sps_.AppendBool(false); // video_signal_type_present_flag
928 packed_sps_.AppendBool(false); // chroma_loc_info_present_flag
929
930 packed_sps_.AppendBool(current_sps_.timing_info_present_flag);
931 if (current_sps_.timing_info_present_flag) {
932 packed_sps_.AppendBits(32, current_sps_.num_units_in_tick);
933 packed_sps_.AppendBits(32, current_sps_.time_scale);
934 packed_sps_.AppendBool(current_sps_.fixed_frame_rate_flag);
935 }
936
937 packed_sps_.AppendBool(current_sps_.nal_hrd_parameters_present_flag);
938 if (current_sps_.nal_hrd_parameters_present_flag) {
939 packed_sps_.AppendUE(current_sps_.cpb_cnt_minus1);
940 packed_sps_.AppendBits(4, current_sps_.bit_rate_scale);
941 packed_sps_.AppendBits(4, current_sps_.cpb_size_scale);
942 CHECK_LT(base::checked_cast<size_t>(current_sps_.cpb_cnt_minus1),
943 arraysize(current_sps_.bit_rate_value_minus1));
944 for (int i = 0; i <= current_sps_.cpb_cnt_minus1; ++i) {
945 packed_sps_.AppendUE(current_sps_.bit_rate_value_minus1[i]);
946 packed_sps_.AppendUE(current_sps_.cpb_size_value_minus1[i]);
947 packed_sps_.AppendBool(current_sps_.cbr_flag[i]);
948 }
949 packed_sps_.AppendBits(
950 5, current_sps_.initial_cpb_removal_delay_length_minus_1);
951 packed_sps_.AppendBits(5, current_sps_.cpb_removal_delay_length_minus1);
952 packed_sps_.AppendBits(5, current_sps_.dpb_output_delay_length_minus1);
953 packed_sps_.AppendBits(5, current_sps_.time_offset_length);
954 }
955
956 packed_sps_.AppendBool(false); // vcl_hrd_parameters_flag
957 if (current_sps_.nal_hrd_parameters_present_flag)
958 packed_sps_.AppendBool(current_sps_.low_delay_hrd_flag);
959
960 packed_sps_.AppendBool(false); // pic_struct_present_flag
961 packed_sps_.AppendBool(true); // bitstream_restriction_flag
962
963 packed_sps_.AppendBool(false); // motion_vectors_over_pic_boundaries_flag
964 packed_sps_.AppendUE(2); // max_bytes_per_pic_denom
965 packed_sps_.AppendUE(1); // max_bits_per_mb_denom
966 packed_sps_.AppendUE(16); // log2_max_mv_length_horizontal
967 packed_sps_.AppendUE(16); // log2_max_mv_length_vertical
968
969 // Explicitly set max_num_reorder_frames to 0 to allow the decoder to
970 // output pictures early.
971 packed_sps_.AppendUE(0); // max_num_reorder_frames
972
973 // The value of max_dec_frame_buffering shall be greater than or equal to
974 // max_num_ref_frames.
975 const unsigned int max_dec_frame_buffering =
976 current_sps_.max_num_ref_frames;
977 packed_sps_.AppendUE(max_dec_frame_buffering);
978 }
979
980 packed_sps_.FinishNALU();
981 }
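// AppendUE()/AppendSE() write unsigned/signed Exp-Golomb codes as defined in
// ch. 9.1 of the spec. For instance, AppendUE(0) emits the single bit "1",
// AppendUE(2) emits "011", and AppendSE(-1) maps -1 to code number 2 before
// encoding it the same way.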
982
983 void VaapiVideoEncodeAccelerator::UpdatePPS() {
984 memset(&current_pps_, 0, sizeof(media::H264PPS));
985
986 current_pps_.seq_parameter_set_id = current_sps_.seq_parameter_set_id;
987 current_pps_.pic_parameter_set_id = 0;
988
989 current_pps_.entropy_coding_mode_flag =
990 current_sps_.profile_idc >= media::H264SPS::kProfileIDCMain;
991
992 CHECK_GT(max_ref_idx_l0_size_, 0u);
993 current_pps_.num_ref_idx_l0_default_active_minus1 = max_ref_idx_l0_size_ - 1;
994 current_pps_.num_ref_idx_l1_default_active_minus1 = 0;
995 DCHECK_LE(qp_, 51u);
996 current_pps_.pic_init_qp_minus26 = qp_ - 26;
997 current_pps_.deblocking_filter_control_present_flag = true;
998 current_pps_.transform_8x8_mode_flag =
999 (current_sps_.profile_idc == media::H264SPS::kProfileIDCHigh);
1000 }
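// With the defaults above (kDefaultQP == 26), pic_init_qp_minus26 is 0, and
// entropy_coding_mode_flag (CABAC) is enabled only for Main and High
// profiles; Baseline falls back to CAVLC.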
1001
1002 void VaapiVideoEncodeAccelerator::GeneratePackedPPS() {
1003 packed_pps_.Reset();
1004
1005 packed_pps_.BeginNALU(media::H264NALU::kPPS, 3);
1006
1007 packed_pps_.AppendUE(current_pps_.pic_parameter_set_id);
1008 packed_pps_.AppendUE(current_pps_.seq_parameter_set_id);
1009 packed_pps_.AppendBool(current_pps_.entropy_coding_mode_flag);
1010 packed_pps_.AppendBool(
1011 current_pps_.bottom_field_pic_order_in_frame_present_flag);
1012 CHECK_EQ(current_pps_.num_slice_groups_minus1, 0);
1013 packed_pps_.AppendUE(current_pps_.num_slice_groups_minus1);
1014
1015 packed_pps_.AppendUE(current_pps_.num_ref_idx_l0_default_active_minus1);
1016 packed_pps_.AppendUE(current_pps_.num_ref_idx_l1_default_active_minus1);
1017
1018 packed_pps_.AppendBool(current_pps_.weighted_pred_flag);
1019 packed_pps_.AppendBits(2, current_pps_.weighted_bipred_idc);
1020
1021 packed_pps_.AppendSE(current_pps_.pic_init_qp_minus26);
1022 packed_pps_.AppendSE(current_pps_.pic_init_qs_minus26);
1023 packed_pps_.AppendSE(current_pps_.chroma_qp_index_offset);
1024
1025 packed_pps_.AppendBool(current_pps_.deblocking_filter_control_present_flag);
1026 packed_pps_.AppendBool(current_pps_.constrained_intra_pred_flag);
1027 packed_pps_.AppendBool(current_pps_.redundant_pic_cnt_present_flag);
1028
1029 packed_pps_.AppendBool(current_pps_.transform_8x8_mode_flag);
1030 packed_pps_.AppendBool(current_pps_.pic_scaling_matrix_present_flag);
1031 DCHECK(!current_pps_.pic_scaling_matrix_present_flag);
1032 packed_pps_.AppendSE(current_pps_.second_chroma_qp_index_offset);
1033
1034 packed_pps_.FinishNALU();
1035 }
1036
1037 void VaapiVideoEncodeAccelerator::SetState(State state) {
1038 // Only touch state on encoder thread, unless it's not running.
1039 if (encoder_thread_.IsRunning() &&
1040 !encoder_thread_task_runner_->BelongsToCurrentThread()) {
1041 encoder_thread_task_runner_->PostTask(
1042 FROM_HERE, base::Bind(&VaapiVideoEncodeAccelerator::SetState,
1043 base::Unretained(this), state));
1044 return;
1045 }
1046
1047 DVLOGF(1) << "setting state to: " << state;
1048 state_ = state;
1049 }
1050
1051 void VaapiVideoEncodeAccelerator::NotifyError(Error error) {
1052 if (!child_task_runner_->BelongsToCurrentThread()) {
1053 child_task_runner_->PostTask(
1054 FROM_HERE, base::Bind(&VaapiVideoEncodeAccelerator::NotifyError,
1055 weak_this_, error));
1056 return;
1057 }
1058
1059 if (client_) {
1060 client_->NotifyError(error);
1061 client_ptr_factory_.reset();
1062 }
1063 }
1064
1065 VaapiVideoEncodeAccelerator::EncodeJob::EncodeJob()
1066 : coded_buffer(VA_INVALID_ID), keyframe(false) {
1067 }
1068
1069 VaapiVideoEncodeAccelerator::EncodeJob::~EncodeJob() {
1070 }
1071
1072 } // namespace content