Chromium Code Reviews

Side by Side Diff: content/common/gpu/media/v4l2_video_encode_accelerator.cc

Issue 1882373004: Migrate content/common/gpu/media code to media/gpu (Closed)
Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Squash and rebase (created 4 years, 7 months ago)
1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "content/common/gpu/media/v4l2_video_encode_accelerator.h"
6
7 #include <fcntl.h>
8 #include <linux/videodev2.h>
9 #include <poll.h>
10 #include <string.h>
11 #include <sys/eventfd.h>
12 #include <sys/ioctl.h>
13 #include <sys/mman.h>
14
15 #include <utility>
16
17 #include "base/callback.h"
18 #include "base/command_line.h"
19 #include "base/macros.h"
20 #include "base/numerics/safe_conversions.h"
21 #include "base/thread_task_runner_handle.h"
22 #include "base/trace_event/trace_event.h"
23 #include "content/common/gpu/media/shared_memory_region.h"
24 #include "media/base/bind_to_current_loop.h"
25 #include "media/base/bitstream_buffer.h"
26
27 #define NOTIFY_ERROR(x) \
28 do { \
29 LOG(ERROR) << "Setting error state: " << x; \
30 SetErrorState(x); \
31 } while (0)
32
33 #define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value, type_str) \
34 do { \
35 if (device_->Ioctl(type, arg) != 0) { \
36 PLOG(ERROR) << __func__ << "(): ioctl() failed: " << type_str; \
37 NOTIFY_ERROR(kPlatformFailureError); \
38 return value; \
39 } \
40 } while (0)
41
42 #define IOCTL_OR_ERROR_RETURN(type, arg) \
43 IOCTL_OR_ERROR_RETURN_VALUE(type, arg, ((void)0), #type)
44
45 #define IOCTL_OR_ERROR_RETURN_FALSE(type, arg) \
46 IOCTL_OR_ERROR_RETURN_VALUE(type, arg, false, #type)
47
48 #define IOCTL_OR_LOG_ERROR(type, arg) \
49 do { \
50 if (device_->Ioctl(type, arg) != 0) \
51 PLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
52 } while (0)
53
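// A note on the V4L2 memory-to-memory terminology used throughout this file:
// the OUTPUT queue carries the raw input frames to be encoded, while the
// CAPTURE queue carries the encoded bitstream produced by the device.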
54 namespace content {
55
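// Tracks a client-provided output bitstream buffer: its id and the mapped
// shared memory region the encoded stream is copied into.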
56 struct V4L2VideoEncodeAccelerator::BitstreamBufferRef {
57 BitstreamBufferRef(int32_t id, std::unique_ptr<SharedMemoryRegion> shm)
58 : id(id), shm(std::move(shm)) {}
59 const int32_t id;
60 const std::unique_ptr<SharedMemoryRegion> shm;
61 };
62
63 V4L2VideoEncodeAccelerator::InputRecord::InputRecord() : at_device(false) {
64 }
65
66 V4L2VideoEncodeAccelerator::InputRecord::~InputRecord() {
67 }
68
69 V4L2VideoEncodeAccelerator::OutputRecord::OutputRecord()
70 : at_device(false), address(NULL), length(0) {
71 }
72
73 V4L2VideoEncodeAccelerator::OutputRecord::~OutputRecord() {
74 }
75
76 V4L2VideoEncodeAccelerator::ImageProcessorInputRecord::
77 ImageProcessorInputRecord()
78 : force_keyframe(false) {}
79
80 V4L2VideoEncodeAccelerator::ImageProcessorInputRecord::
81 ~ImageProcessorInputRecord() {}
82
83 V4L2VideoEncodeAccelerator::V4L2VideoEncodeAccelerator(
84 const scoped_refptr<V4L2Device>& device)
85 : child_task_runner_(base::ThreadTaskRunnerHandle::Get()),
86 output_buffer_byte_size_(0),
87 device_input_format_(media::PIXEL_FORMAT_UNKNOWN),
88 input_planes_count_(0),
89 output_format_fourcc_(0),
90 encoder_state_(kUninitialized),
91 stream_header_size_(0),
92 device_(device),
93 input_streamon_(false),
94 input_buffer_queued_count_(0),
95 input_memory_type_(V4L2_MEMORY_USERPTR),
96 output_streamon_(false),
97 output_buffer_queued_count_(0),
98 encoder_thread_("V4L2EncoderThread"),
99 device_poll_thread_("V4L2EncoderDevicePollThread"),
100 weak_this_ptr_factory_(this) {
101 weak_this_ = weak_this_ptr_factory_.GetWeakPtr();
102 }
103
104 V4L2VideoEncodeAccelerator::~V4L2VideoEncodeAccelerator() {
105 DCHECK(!encoder_thread_.IsRunning());
106 DCHECK(!device_poll_thread_.IsRunning());
107 DVLOG(4) << __func__;
108
109 DestroyInputBuffers();
110 DestroyOutputBuffers();
111 }
112
113 bool V4L2VideoEncodeAccelerator::Initialize(
114 media::VideoPixelFormat input_format,
115 const gfx::Size& input_visible_size,
116 media::VideoCodecProfile output_profile,
117 uint32_t initial_bitrate,
118 Client* client) {
119 DVLOG(3) << __func__
120 << ": input_format=" << media::VideoPixelFormatToString(input_format)
121 << ", input_visible_size=" << input_visible_size.ToString()
122 << ", output_profile=" << output_profile
123 << ", initial_bitrate=" << initial_bitrate;
124
125 visible_size_ = input_visible_size;
126
127 client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));
128 client_ = client_ptr_factory_->GetWeakPtr();
129
130 DCHECK(child_task_runner_->BelongsToCurrentThread());
131 DCHECK_EQ(encoder_state_, kUninitialized);
132
133 struct v4l2_capability caps;
134 memset(&caps, 0, sizeof(caps));
135 const __u32 kCapsRequired = V4L2_CAP_VIDEO_M2M_MPLANE | V4L2_CAP_STREAMING;
136 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps);
137 if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
138 LOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP: "
139 "caps check failed: 0x" << std::hex << caps.capabilities;
140 return false;
141 }
142
143 if (!SetFormats(input_format, output_profile)) {
144 DLOG(ERROR) << "Failed setting up formats";
145 return false;
146 }
147
148 if (input_format != device_input_format_) {
149 DVLOG(1) << "Input format not supported by the HW, will convert to "
150 << media::VideoPixelFormatToString(device_input_format_);
151
152 scoped_refptr<V4L2Device> device =
153 V4L2Device::Create(V4L2Device::kImageProcessor);
154 image_processor_.reset(new V4L2ImageProcessor(device));
155
156 // Convert from input_format to device_input_format_, keeping the size
157 // at visible_size_ and requiring the output buffers to be of at least
158 // input_allocated_size_. Unretained is safe because |this| owns the image
159 // processor, and there will be no callbacks after the processor is destroyed.
160 if (!image_processor_->Initialize(
161 input_format, device_input_format_, V4L2_MEMORY_USERPTR,
162 visible_size_, visible_size_, visible_size_, input_allocated_size_,
163 kImageProcBufferCount,
164 base::Bind(&V4L2VideoEncodeAccelerator::ImageProcessorError,
165 base::Unretained(this)))) {
166 LOG(ERROR) << "Failed initializing image processor";
167 return false;
168 }
169 // The output of the image processor is the input of the encoder. The
170 // processor's output coded width must equal the encoder's input coded width.
171 // The processor's output coded height may be larger, but not smaller, than
172 // the encoder's input coded height. For example, if the encoder's input size
173 // is 320x193, a processor output of 320x208 is acceptable.
174 if (image_processor_->output_allocated_size().width() !=
175 input_allocated_size_.width() ||
176 image_processor_->output_allocated_size().height() <
177 input_allocated_size_.height()) {
178 LOG(ERROR) << "Invalid image processor output coded size "
179 << image_processor_->output_allocated_size().ToString()
180 << ", encode input coded size is "
181 << input_allocated_size_.ToString();
182 return false;
183 }
184
185 for (int i = 0; i < kImageProcBufferCount; i++) {
186 std::vector<base::ScopedFD> fds =
187 image_processor_->GetDmabufsForOutputBuffer(i);
188 if (fds.size() == 0) {
189 LOG(ERROR) << __func__ << ": failed to get fds of image processor.";
190 return false;
191 }
192 image_processor_output_buffer_map_.push_back(std::move(fds));
193 free_image_processor_output_buffers_.push_back(i);
194 }
195 }
196
197 if (!InitControls())
198 return false;
199
200 if (!CreateOutputBuffers())
201 return false;
202
203 if (!encoder_thread_.Start()) {
204 LOG(ERROR) << "Initialize(): encoder thread failed to start";
205 return false;
206 }
207
208 RequestEncodingParametersChange(initial_bitrate, kInitialFramerate);
209
210 encoder_state_ = kInitialized;
211
212 child_task_runner_->PostTask(
213 FROM_HERE,
214 base::Bind(&Client::RequireBitstreamBuffers, client_, kInputBufferCount,
215 image_processor_.get()
216 ? image_processor_->input_allocated_size()
217 : input_allocated_size_,
218 output_buffer_byte_size_));
219 return true;
220 }
221
222 void V4L2VideoEncodeAccelerator::ImageProcessorError() {
223 LOG(ERROR) << "Image processor error";
224 NOTIFY_ERROR(kPlatformFailureError);
225 }
226
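// Client-side entry point for encoding a frame. If an image processor is in
// use for format conversion, the frame is handed to it first (or queued until
// a processor output buffer becomes free); otherwise it is posted directly to
// the encoder thread.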
227 void V4L2VideoEncodeAccelerator::Encode(
228 const scoped_refptr<media::VideoFrame>& frame,
229 bool force_keyframe) {
230 DVLOG(3) << "Encode(): force_keyframe=" << force_keyframe;
231 DCHECK(child_task_runner_->BelongsToCurrentThread());
232
233 if (image_processor_) {
234 if (free_image_processor_output_buffers_.size() > 0) {
235 int output_buffer_index = free_image_processor_output_buffers_.back();
236 free_image_processor_output_buffers_.pop_back();
237 // Unretained is safe because |this| owns the image processor, and there
238 // will be no callbacks after the processor is destroyed.
239 image_processor_->Process(
240 frame, output_buffer_index,
241 base::Bind(&V4L2VideoEncodeAccelerator::FrameProcessed,
242 base::Unretained(this), force_keyframe,
243 frame->timestamp()));
244 } else {
245 ImageProcessorInputRecord record;
246 record.frame = frame;
247 record.force_keyframe = force_keyframe;
248 image_processor_input_queue_.push(record);
249 }
250 } else {
251 encoder_thread_.message_loop()->PostTask(
252 FROM_HERE,
253 base::Bind(&V4L2VideoEncodeAccelerator::EncodeTask,
254 base::Unretained(this),
255 frame,
256 force_keyframe));
257 }
258 }
259
260 void V4L2VideoEncodeAccelerator::UseOutputBitstreamBuffer(
261 const media::BitstreamBuffer& buffer) {
262 DVLOG(3) << "UseOutputBitstreamBuffer(): id=" << buffer.id();
263 DCHECK(child_task_runner_->BelongsToCurrentThread());
264
265 if (buffer.size() < output_buffer_byte_size_) {
266 NOTIFY_ERROR(kInvalidArgumentError);
267 return;
268 }
269
270 std::unique_ptr<SharedMemoryRegion> shm(
271 new SharedMemoryRegion(buffer, false));
272 if (!shm->Map()) {
273 NOTIFY_ERROR(kPlatformFailureError);
274 return;
275 }
276
277 std::unique_ptr<BitstreamBufferRef> buffer_ref(
278 new BitstreamBufferRef(buffer.id(), std::move(shm)));
279 encoder_thread_.message_loop()->PostTask(
280 FROM_HERE,
281 base::Bind(&V4L2VideoEncodeAccelerator::UseOutputBitstreamBufferTask,
282 base::Unretained(this),
283 base::Passed(&buffer_ref)));
284 }
285
286 void V4L2VideoEncodeAccelerator::RequestEncodingParametersChange(
287 uint32_t bitrate,
288 uint32_t framerate) {
289 DVLOG(3) << "RequestEncodingParametersChange(): bitrate=" << bitrate
290 << ", framerate=" << framerate;
291 DCHECK(child_task_runner_->BelongsToCurrentThread());
292
293 encoder_thread_.message_loop()->PostTask(
294 FROM_HERE,
295 base::Bind(
296 &V4L2VideoEncodeAccelerator::RequestEncodingParametersChangeTask,
297 base::Unretained(this),
298 bitrate,
299 framerate));
300 }
301
302 void V4L2VideoEncodeAccelerator::Destroy() {
303 DVLOG(3) << "Destroy()";
304 DCHECK(child_task_runner_->BelongsToCurrentThread());
305
306 // We're destroying; cancel all callbacks.
307 client_ptr_factory_.reset();
308 weak_this_ptr_factory_.InvalidateWeakPtrs();
309
310 if (image_processor_.get())
311 image_processor_.release()->Destroy();
312
313 // If the encoder thread is running, destroy using posted task.
314 if (encoder_thread_.IsRunning()) {
315 encoder_thread_.message_loop()->PostTask(
316 FROM_HERE,
317 base::Bind(&V4L2VideoEncodeAccelerator::DestroyTask,
318 base::Unretained(this)));
319 // DestroyTask() will put the encoder into kError state and cause all tasks
320 // to no-op.
321 encoder_thread_.Stop();
322 } else {
323 // Otherwise, call the destroy task directly.
324 DestroyTask();
325 }
326
327 // Set to kError state just in case.
328 encoder_state_ = kError;
329
330 delete this;
331 }
332
333 media::VideoEncodeAccelerator::SupportedProfiles
334 V4L2VideoEncodeAccelerator::GetSupportedProfiles() {
335 SupportedProfiles profiles;
336 SupportedProfile profile;
337 profile.max_framerate_numerator = 30;
338 profile.max_framerate_denominator = 1;
339
340 gfx::Size min_resolution;
341 v4l2_fmtdesc fmtdesc;
342 memset(&fmtdesc, 0, sizeof(fmtdesc));
343 fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
344 for (; device_->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0; ++fmtdesc.index) {
345 device_->GetSupportedResolution(fmtdesc.pixelformat,
346 &min_resolution, &profile.max_resolution);
347 switch (fmtdesc.pixelformat) {
348 case V4L2_PIX_FMT_H264:
349 profile.profile = media::H264PROFILE_MAIN;
350 profiles.push_back(profile);
351 break;
352 case V4L2_PIX_FMT_VP8:
353 profile.profile = media::VP8PROFILE_ANY;
354 profiles.push_back(profile);
355 break;
356 case V4L2_PIX_FMT_VP9:
357 profile.profile = media::VP9PROFILE_PROFILE0;
358 profiles.push_back(profile);
359 profile.profile = media::VP9PROFILE_PROFILE1;
360 profiles.push_back(profile);
361 profile.profile = media::VP9PROFILE_PROFILE2;
362 profiles.push_back(profile);
363 profile.profile = media::VP9PROFILE_PROFILE3;
364 profiles.push_back(profile);
365 break;
366 }
367 }
368
369 return profiles;
370 }
371
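// Completion callback from the image processor. Wraps the dmabufs of the
// processed output buffer in a VideoFrame and posts it to the encoder thread;
// the frame's destruction observer later returns the buffer for reuse.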
372 void V4L2VideoEncodeAccelerator::FrameProcessed(bool force_keyframe,
373 base::TimeDelta timestamp,
374 int output_buffer_index) {
375 DCHECK(child_task_runner_->BelongsToCurrentThread());
376 DVLOG(3) << "FrameProcessed(): force_keyframe=" << force_keyframe
377 << ", output_buffer_index=" << output_buffer_index;
378 DCHECK_GE(output_buffer_index, 0);
379 DCHECK_LT(static_cast<size_t>(output_buffer_index),
380 image_processor_output_buffer_map_.size());
381
382 std::vector<base::ScopedFD>& scoped_fds =
383 image_processor_output_buffer_map_[output_buffer_index];
384 std::vector<int> fds;
385 for (auto& fd : scoped_fds) {
386 fds.push_back(fd.get());
387 }
388 scoped_refptr<media::VideoFrame> output_frame =
389 media::VideoFrame::WrapExternalDmabufs(
390 device_input_format_, image_processor_->output_allocated_size(),
391 gfx::Rect(visible_size_), visible_size_, fds, timestamp);
392 if (!output_frame) {
393 NOTIFY_ERROR(kPlatformFailureError);
394 return;
395 }
396 output_frame->AddDestructionObserver(media::BindToCurrentLoop(
397 base::Bind(&V4L2VideoEncodeAccelerator::ReuseImageProcessorOutputBuffer,
398 weak_this_, output_buffer_index)));
399
400 encoder_thread_.message_loop()->PostTask(
401 FROM_HERE,
402 base::Bind(&V4L2VideoEncodeAccelerator::EncodeTask,
403 base::Unretained(this), output_frame, force_keyframe));
404 }
405
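// Called (via the frame destruction observer) when an image processor output
// buffer is no longer referenced; recycles it and resumes any input frames
// that were queued while no processor output buffer was available.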
406 void V4L2VideoEncodeAccelerator::ReuseImageProcessorOutputBuffer(
407 int output_buffer_index) {
408 DCHECK(child_task_runner_->BelongsToCurrentThread());
409 DVLOG(3) << __func__ << ": output_buffer_index=" << output_buffer_index;
410 free_image_processor_output_buffers_.push_back(output_buffer_index);
411 if (!image_processor_input_queue_.empty()) {
412 ImageProcessorInputRecord record = image_processor_input_queue_.front();
413 image_processor_input_queue_.pop();
414 Encode(record.frame, record.force_keyframe);
415 }
416 }
417
418 void V4L2VideoEncodeAccelerator::EncodeTask(
419 const scoped_refptr<media::VideoFrame>& frame,
420 bool force_keyframe) {
421 DVLOG(3) << "EncodeTask(): force_keyframe=" << force_keyframe;
422 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
423 DCHECK_NE(encoder_state_, kUninitialized);
424
425 if (encoder_state_ == kError) {
426 DVLOG(2) << "EncodeTask(): early out: kError state";
427 return;
428 }
429
430 encoder_input_queue_.push(frame);
431 Enqueue();
432
433 if (force_keyframe) {
434 // TODO(posciak): this presently makes for slightly imprecise encoding
435 // parameter updates. To precisely align the parameter updates with the
436 // incoming input frame, we should queue the parameters together with the
437 // frame onto encoder_input_queue_ and apply them when the input is about
438 // to be queued to the codec.
439 std::vector<struct v4l2_ext_control> ctrls;
440 struct v4l2_ext_control ctrl;
441 memset(&ctrl, 0, sizeof(ctrl));
442 ctrl.id = V4L2_CID_MPEG_VIDEO_FORCE_KEY_FRAME;
443 ctrls.push_back(ctrl);
444 if (!SetExtCtrls(ctrls)) {
445 // Some platforms still use the old control. Fall back to it until they
446 // are updated.
447 ctrls.clear();
448 memset(&ctrl, 0, sizeof(ctrl));
449 ctrl.id = V4L2_CID_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE;
450 ctrl.value = V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_I_FRAME;
451 ctrls.push_back(ctrl);
452 if (!SetExtCtrls(ctrls)) {
453 LOG(ERROR) << "Failed requesting keyframe";
454 NOTIFY_ERROR(kPlatformFailureError);
455 return;
456 }
457 }
458 }
459 }
460
461 void V4L2VideoEncodeAccelerator::UseOutputBitstreamBufferTask(
462 std::unique_ptr<BitstreamBufferRef> buffer_ref) {
463 DVLOG(3) << "UseOutputBitstreamBufferTask(): id=" << buffer_ref->id;
464 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
465
466 encoder_output_queue_.push_back(
467 linked_ptr<BitstreamBufferRef>(buffer_ref.release()));
468 Enqueue();
469
470 if (encoder_state_ == kInitialized) {
471 // Finish setting up our OUTPUT queue. See: Initialize().
472 // VIDIOC_REQBUFS on OUTPUT queue.
473 if (!CreateInputBuffers())
474 return;
475 if (!StartDevicePoll())
476 return;
477 encoder_state_ = kEncoding;
478 }
479 }
480
481 void V4L2VideoEncodeAccelerator::DestroyTask() {
482 DVLOG(3) << "DestroyTask()";
483
484 // DestroyTask() should run regardless of encoder_state_.
485
486 // Stop streaming and the device_poll_thread_.
487 StopDevicePoll();
488
489 // Set our state to kError, and early-out all tasks.
490 encoder_state_ = kError;
491 }
492
493 void V4L2VideoEncodeAccelerator::ServiceDeviceTask() {
494 DVLOG(3) << "ServiceDeviceTask()";
495 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
496 DCHECK_NE(encoder_state_, kUninitialized);
497 DCHECK_NE(encoder_state_, kInitialized);
498
499 if (encoder_state_ == kError) {
500 DVLOG(2) << "ServiceDeviceTask(): early out: kError state";
501 return;
502 }
503
504 Dequeue();
505 Enqueue();
506
507 // Clear the interrupt fd.
508 if (!device_->ClearDevicePollInterrupt())
509 return;
510
511 // Device can be polled as soon as either input or output buffers are queued.
512 bool poll_device =
513 (input_buffer_queued_count_ + output_buffer_queued_count_ > 0);
514
515 // ServiceDeviceTask() should only ever be scheduled from DevicePollTask(),
516 // so either:
517 // * device_poll_thread_ is running normally
518 // * device_poll_thread_ scheduled us, but then a DestroyTask() shut it down,
519 // in which case we're in kError state, and we should have early-outed
520 // already.
521 DCHECK(device_poll_thread_.message_loop());
522 // Queue the DevicePollTask() now.
523 device_poll_thread_.message_loop()->PostTask(
524 FROM_HERE,
525 base::Bind(&V4L2VideoEncodeAccelerator::DevicePollTask,
526 base::Unretained(this),
527 poll_device));
528
529 DVLOG(2) << __func__ << ": buffer counts: ENC["
530 << encoder_input_queue_.size() << "] => DEVICE["
531 << free_input_buffers_.size() << "+"
532 << input_buffer_queued_count_ << "/"
533 << input_buffer_map_.size() << "->"
534 << free_output_buffers_.size() << "+"
535 << output_buffer_queued_count_ << "/"
536 << output_buffer_map_.size() << "] => OUT["
537 << encoder_output_queue_.size() << "]";
538 }
539
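// Queue as many pending input (OUTPUT) and output (CAPTURE) buffers to the
// device as possible, starting streaming on a queue the first time it
// transitions from empty to non-empty.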
540 void V4L2VideoEncodeAccelerator::Enqueue() {
541 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
542
543 DVLOG(3) << "Enqueue() "
544 << "free_input_buffers: " << free_input_buffers_.size()
545 << "input_queue: " << encoder_input_queue_.size();
546
547 // Enqueue all the inputs we can.
548 const int old_inputs_queued = input_buffer_queued_count_;
549 // while (!ready_input_buffers_.empty()) {
550 while (!encoder_input_queue_.empty() && !free_input_buffers_.empty()) {
551 if (!EnqueueInputRecord())
552 return;
553 }
554 if (old_inputs_queued == 0 && input_buffer_queued_count_ != 0) {
555 // We just started up a previously empty queue.
556 // Queue state changed; signal interrupt.
557 if (!device_->SetDevicePollInterrupt())
558 return;
559 // Start VIDIOC_STREAMON if we haven't yet.
560 if (!input_streamon_) {
561 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
562 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
563 input_streamon_ = true;
564 }
565 }
566
567 // Enqueue all the outputs we can.
568 const int old_outputs_queued = output_buffer_queued_count_;
569 while (!free_output_buffers_.empty() && !encoder_output_queue_.empty()) {
570 if (!EnqueueOutputRecord())
571 return;
572 }
573 if (old_outputs_queued == 0 && output_buffer_queued_count_ != 0) {
574 // We just started up a previously empty queue.
575 // Queue state changed; signal interrupt.
576 if (!device_->SetDevicePollInterrupt())
577 return;
578 // Start VIDIOC_STREAMON if we haven't yet.
579 if (!output_streamon_) {
580 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
581 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
582 output_streamon_ = true;
583 }
584 }
585 }
586
587 void V4L2VideoEncodeAccelerator::Dequeue() {
588 DVLOG(3) << "Dequeue()";
589 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
590
591 // Dequeue completed input (VIDEO_OUTPUT) buffers, and recycle to the free
592 // list.
593 struct v4l2_buffer dqbuf;
594 struct v4l2_plane planes[VIDEO_MAX_PLANES];
595 while (input_buffer_queued_count_ > 0) {
596 DVLOG(4) << "inputs queued: " << input_buffer_queued_count_;
597 DCHECK(input_streamon_);
598 memset(&dqbuf, 0, sizeof(dqbuf));
599 memset(&planes, 0, sizeof(planes));
600 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
601 dqbuf.memory = input_memory_type_;
602 dqbuf.m.planes = planes;
603 dqbuf.length = input_planes_count_;
604 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
605 if (errno == EAGAIN) {
606 // EAGAIN if we're just out of buffers to dequeue.
607 break;
608 }
609 PLOG(ERROR) << "Dequeue(): ioctl() failed: VIDIOC_DQBUF";
610 NOTIFY_ERROR(kPlatformFailureError);
611 return;
612 }
613 InputRecord& input_record = input_buffer_map_[dqbuf.index];
614 DCHECK(input_record.at_device);
615 input_record.at_device = false;
616
617 input_record.frame = NULL;
618 free_input_buffers_.push_back(dqbuf.index);
619 input_buffer_queued_count_--;
620 }
621
622 // Dequeue completed output (VIDEO_CAPTURE) buffers, and recycle to the
623 // free list. Notify the client that an output buffer is complete.
624 while (output_buffer_queued_count_ > 0) {
625 DCHECK(output_streamon_);
626 memset(&dqbuf, 0, sizeof(dqbuf));
627 memset(planes, 0, sizeof(planes));
628 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
629 dqbuf.memory = V4L2_MEMORY_MMAP;
630 dqbuf.m.planes = planes;
631 dqbuf.length = 1;
632 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
633 if (errno == EAGAIN) {
634 // EAGAIN if we're just out of buffers to dequeue.
635 break;
636 }
637 PLOG(ERROR) << "Dequeue(): ioctl() failed: VIDIOC_DQBUF";
638 NOTIFY_ERROR(kPlatformFailureError);
639 return;
640 }
641 const bool key_frame = ((dqbuf.flags & V4L2_BUF_FLAG_KEYFRAME) != 0);
642 OutputRecord& output_record = output_buffer_map_[dqbuf.index];
643 DCHECK(output_record.at_device);
644 DCHECK(output_record.buffer_ref.get());
645
646 void* output_data = output_record.address;
647 size_t output_size = dqbuf.m.planes[0].bytesused;
648 // This shouldn't happen, but just in case. We should be able to recover
649 // after the next keyframe, after showing some corruption.
650 DCHECK_LE(output_size, output_buffer_byte_size_);
651 if (output_size > output_buffer_byte_size_)
652 output_size = output_buffer_byte_size_;
653 uint8_t* target_data =
654 reinterpret_cast<uint8_t*>(output_record.buffer_ref->shm->memory());
655 if (output_format_fourcc_ == V4L2_PIX_FMT_H264) {
656 if (stream_header_size_ == 0) {
657 // Assume that the first buffer dequeued is the stream header.
658 stream_header_size_ = output_size;
659 stream_header_.reset(new uint8_t[stream_header_size_]);
660 memcpy(stream_header_.get(), output_data, stream_header_size_);
661 }
662 if (key_frame &&
663 output_buffer_byte_size_ - stream_header_size_ >= output_size) {
664 // Insert stream header before every keyframe.
665 memcpy(target_data, stream_header_.get(), stream_header_size_);
666 memcpy(target_data + stream_header_size_, output_data, output_size);
667 output_size += stream_header_size_;
668 } else {
669 memcpy(target_data, output_data, output_size);
670 }
671 } else {
672 memcpy(target_data, output_data, output_size);
673 }
674
675 DVLOG(3) << "Dequeue(): returning "
676 "bitstream_buffer_id=" << output_record.buffer_ref->id
677 << ", size=" << output_size << ", key_frame=" << key_frame;
678 child_task_runner_->PostTask(
679 FROM_HERE,
680 base::Bind(&Client::BitstreamBufferReady, client_,
681 output_record.buffer_ref->id, output_size, key_frame));
682 output_record.at_device = false;
683 output_record.buffer_ref.reset();
684 free_output_buffers_.push_back(dqbuf.index);
685 output_buffer_queued_count_--;
686 }
687 }
688
689 bool V4L2VideoEncodeAccelerator::EnqueueInputRecord() {
690 DVLOG(3) << "EnqueueInputRecord()";
691 DCHECK(!free_input_buffers_.empty());
692 DCHECK(!encoder_input_queue_.empty());
693
694 // Enqueue an input (VIDEO_OUTPUT) buffer.
695 scoped_refptr<media::VideoFrame> frame = encoder_input_queue_.front();
696 const int index = free_input_buffers_.back();
697 InputRecord& input_record = input_buffer_map_[index];
698 DCHECK(!input_record.at_device);
699 struct v4l2_buffer qbuf;
700 struct v4l2_plane qbuf_planes[VIDEO_MAX_PLANES];
701 memset(&qbuf, 0, sizeof(qbuf));
702 memset(qbuf_planes, 0, sizeof(qbuf_planes));
703 qbuf.index = index;
704 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
705 qbuf.m.planes = qbuf_planes;
706
707 DCHECK_EQ(device_input_format_, frame->format());
708 for (size_t i = 0; i < input_planes_count_; ++i) {
709 qbuf.m.planes[i].bytesused =
710 base::checked_cast<__u32>(media::VideoFrame::PlaneSize(
711 frame->format(), i, input_allocated_size_).GetArea());
712
713 switch (input_memory_type_) {
714 case V4L2_MEMORY_USERPTR:
715 qbuf.m.planes[i].length = qbuf.m.planes[i].bytesused;
716 qbuf.m.planes[i].m.userptr =
717 reinterpret_cast<unsigned long>(frame->data(i));
718 DCHECK(qbuf.m.planes[i].m.userptr);
719 break;
720
721 case V4L2_MEMORY_DMABUF:
722 qbuf.m.planes[i].m.fd = frame->dmabuf_fd(i);
723 DCHECK_NE(qbuf.m.planes[i].m.fd, -1);
724 break;
725
726 default:
727 NOTREACHED();
728 return false;
729 }
730 }
731
732 qbuf.memory = input_memory_type_;
733 qbuf.length = input_planes_count_;
734
735 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
736 input_record.at_device = true;
737 input_record.frame = frame;
738 encoder_input_queue_.pop();
739 free_input_buffers_.pop_back();
740 input_buffer_queued_count_++;
741 return true;
742 }
743
744 bool V4L2VideoEncodeAccelerator::EnqueueOutputRecord() {
745 DVLOG(3) << "EnqueueOutputRecord()";
746 DCHECK(!free_output_buffers_.empty());
747 DCHECK(!encoder_output_queue_.empty());
748
749 // Enqueue an output (VIDEO_CAPTURE) buffer.
750 linked_ptr<BitstreamBufferRef> output_buffer = encoder_output_queue_.back();
751 const int index = free_output_buffers_.back();
752 OutputRecord& output_record = output_buffer_map_[index];
753 DCHECK(!output_record.at_device);
754 DCHECK(!output_record.buffer_ref.get());
755 struct v4l2_buffer qbuf;
756 struct v4l2_plane qbuf_planes[1];
757 memset(&qbuf, 0, sizeof(qbuf));
758 memset(qbuf_planes, 0, sizeof(qbuf_planes));
759 qbuf.index = index;
760 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
761 qbuf.memory = V4L2_MEMORY_MMAP;
762 qbuf.m.planes = qbuf_planes;
763 qbuf.length = 1;
764 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
765 output_record.at_device = true;
766 output_record.buffer_ref = output_buffer;
767 encoder_output_queue_.pop_back();
768 free_output_buffers_.pop_back();
769 output_buffer_queued_count_++;
770 return true;
771 }
772
773 bool V4L2VideoEncodeAccelerator::StartDevicePoll() {
774 DVLOG(3) << "StartDevicePoll()";
775 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
776 DCHECK(!device_poll_thread_.IsRunning());
777
778 // Start up the device poll thread and schedule its first DevicePollTask().
779 if (!device_poll_thread_.Start()) {
780 LOG(ERROR) << "StartDevicePoll(): Device thread failed to start";
781 NOTIFY_ERROR(kPlatformFailureError);
782 return false;
783 }
784 // Enqueue a poll task with no devices to poll on -- it will wait only on the
785 // interrupt fd.
786 device_poll_thread_.message_loop()->PostTask(
787 FROM_HERE,
788 base::Bind(&V4L2VideoEncodeAccelerator::DevicePollTask,
789 base::Unretained(this),
790 false));
791
792 return true;
793 }
794
795 bool V4L2VideoEncodeAccelerator::StopDevicePoll() {
796 DVLOG(3) << "StopDevicePoll()";
797
798 // Signal the DevicePollTask() to stop, and stop the device poll thread.
799 if (!device_->SetDevicePollInterrupt())
800 return false;
801 device_poll_thread_.Stop();
802 // Clear the interrupt now, to be sure.
803 if (!device_->ClearDevicePollInterrupt())
804 return false;
805
806 if (input_streamon_) {
807 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
808 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
809 }
810 input_streamon_ = false;
811
812 if (output_streamon_) {
813 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
814 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
815 }
816 output_streamon_ = false;
817
818 // Reset all our accounting info.
819 while (!encoder_input_queue_.empty())
820 encoder_input_queue_.pop();
821 free_input_buffers_.clear();
822 for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
823 InputRecord& input_record = input_buffer_map_[i];
824 input_record.at_device = false;
825 input_record.frame = NULL;
826 free_input_buffers_.push_back(i);
827 }
828 input_buffer_queued_count_ = 0;
829
830 free_output_buffers_.clear();
831 for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
832 OutputRecord& output_record = output_buffer_map_[i];
833 output_record.at_device = false;
834 output_record.buffer_ref.reset();
835 free_output_buffers_.push_back(i);
836 }
837 output_buffer_queued_count_ = 0;
838
839 encoder_output_queue_.clear();
840
841 DVLOG(3) << "StopDevicePoll(): device poll stopped";
842 return true;
843 }
844
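// Runs on device_poll_thread_. Blocks in the device Poll() call until the
// device or the interrupt fd signals, then schedules ServiceDeviceTask() on
// the encoder thread, where all encoder state is actually touched.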
845 void V4L2VideoEncodeAccelerator::DevicePollTask(bool poll_device) {
846 DVLOG(3) << "DevicePollTask()";
847 DCHECK_EQ(device_poll_thread_.message_loop(), base::MessageLoop::current());
848
849 bool event_pending;
850 if (!device_->Poll(poll_device, &event_pending)) {
851 NOTIFY_ERROR(kPlatformFailureError);
852 return;
853 }
854
855 // All processing should happen on ServiceDeviceTask(), since we shouldn't
856 // touch encoder state from this thread.
857 encoder_thread_.message_loop()->PostTask(
858 FROM_HERE,
859 base::Bind(&V4L2VideoEncodeAccelerator::ServiceDeviceTask,
860 base::Unretained(this)));
861 }
862
863 void V4L2VideoEncodeAccelerator::NotifyError(Error error) {
864 DVLOG(1) << "NotifyError(): error=" << error;
865
866 if (!child_task_runner_->BelongsToCurrentThread()) {
867 child_task_runner_->PostTask(
868 FROM_HERE, base::Bind(&V4L2VideoEncodeAccelerator::NotifyError,
869 weak_this_, error));
870 return;
871 }
872
873 if (client_) {
874 client_->NotifyError(error);
875 client_ptr_factory_.reset();
876 }
877 }
878
879 void V4L2VideoEncodeAccelerator::SetErrorState(Error error) {
880 // We can touch encoder_state_ only if this is the encoder thread or the
881 // encoder thread isn't running.
882 if (encoder_thread_.message_loop() != NULL &&
883 encoder_thread_.message_loop() != base::MessageLoop::current()) {
884 encoder_thread_.message_loop()->PostTask(
885 FROM_HERE, base::Bind(&V4L2VideoEncodeAccelerator::SetErrorState,
886 base::Unretained(this), error));
887 return;
888 }
889
890 // Post NotifyError only if we are already initialized, as the API does
891 // not allow doing so before that.
892 if (encoder_state_ != kError && encoder_state_ != kUninitialized)
893 NotifyError(error);
894
895 encoder_state_ = kError;
896 }
897
898 void V4L2VideoEncodeAccelerator::RequestEncodingParametersChangeTask(
899 uint32_t bitrate,
900 uint32_t framerate) {
901 DVLOG(3) << "RequestEncodingParametersChangeTask(): bitrate=" << bitrate
902 << ", framerate=" << framerate;
903 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
904
905 if (bitrate < 1)
906 bitrate = 1;
907 if (framerate < 1)
908 framerate = 1;
909
910 std::vector<struct v4l2_ext_control> ctrls;
911 struct v4l2_ext_control ctrl;
912 memset(&ctrl, 0, sizeof(ctrl));
913 ctrl.id = V4L2_CID_MPEG_VIDEO_BITRATE;
914 ctrl.value = bitrate;
915 ctrls.push_back(ctrl);
916 if (!SetExtCtrls(ctrls)) {
917 LOG(ERROR) << "Failed changing bitrate";
918 NOTIFY_ERROR(kPlatformFailureError);
919 return;
920 }
921
922 struct v4l2_streamparm parms;
923 memset(&parms, 0, sizeof(parms));
924 parms.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
925 // Note that we are provided "frames per second" but V4L2 expects "time per
926 // frame"; hence we provide the reciprocal of the framerate here.
927 parms.parm.output.timeperframe.numerator = 1;
928 parms.parm.output.timeperframe.denominator = framerate;
929 IOCTL_OR_ERROR_RETURN(VIDIOC_S_PARM, &parms);
930 }
931
932 bool V4L2VideoEncodeAccelerator::SetOutputFormat(
933 media::VideoCodecProfile output_profile) {
934 DCHECK(child_task_runner_->BelongsToCurrentThread());
935 DCHECK(!input_streamon_);
936 DCHECK(!output_streamon_);
937
938 output_format_fourcc_ =
939 V4L2Device::VideoCodecProfileToV4L2PixFmt(output_profile, false);
940 if (!output_format_fourcc_) {
941 LOG(ERROR) << "Initialize(): invalid output_profile=" << output_profile;
942 return false;
943 }
944
945 output_buffer_byte_size_ = kOutputBufferSize;
946
947 struct v4l2_format format;
948 memset(&format, 0, sizeof(format));
949 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
950 format.fmt.pix_mp.width = visible_size_.width();
951 format.fmt.pix_mp.height = visible_size_.height();
952 format.fmt.pix_mp.pixelformat = output_format_fourcc_;
953 format.fmt.pix_mp.plane_fmt[0].sizeimage =
954 base::checked_cast<__u32>(output_buffer_byte_size_);
955 format.fmt.pix_mp.num_planes = 1;
956 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
957
958 // Device might have adjusted the required output size.
959 size_t adjusted_output_buffer_size =
960 base::checked_cast<size_t>(format.fmt.pix_mp.plane_fmt[0].sizeimage);
961 output_buffer_byte_size_ = adjusted_output_buffer_size;
962
963 return true;
964 }
965
966 bool V4L2VideoEncodeAccelerator::NegotiateInputFormat(
967 media::VideoPixelFormat input_format) {
968 DVLOG(3) << "NegotiateInputFormat()";
969 DCHECK(child_task_runner_->BelongsToCurrentThread());
970 DCHECK(!input_streamon_);
971 DCHECK(!output_streamon_);
972
973 device_input_format_ = media::PIXEL_FORMAT_UNKNOWN;
974 input_planes_count_ = 0;
975
976 uint32_t input_format_fourcc =
977 V4L2Device::VideoPixelFormatToV4L2PixFmt(input_format);
978 if (!input_format_fourcc) {
979 LOG(ERROR) << "Unsupported input format: " << input_format_fourcc;
980 return false;
981 }
982
983 size_t input_planes_count = media::VideoFrame::NumPlanes(input_format);
984 DCHECK_LE(input_planes_count, static_cast<size_t>(VIDEO_MAX_PLANES));
985
986 // First see if the device can use the provided input_format directly.
987 struct v4l2_format format;
988 memset(&format, 0, sizeof(format));
989 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
990 format.fmt.pix_mp.width = visible_size_.width();
991 format.fmt.pix_mp.height = visible_size_.height();
992 format.fmt.pix_mp.pixelformat = input_format_fourcc;
993 format.fmt.pix_mp.num_planes = input_planes_count;
994 if (device_->Ioctl(VIDIOC_S_FMT, &format) != 0) {
995 // Error or format unsupported by device, try to negotiate a fallback.
996 input_format_fourcc = device_->PreferredInputFormat();
997 input_format =
998 V4L2Device::V4L2PixFmtToVideoPixelFormat(input_format_fourcc);
999 if (input_format == media::PIXEL_FORMAT_UNKNOWN) {
1000 LOG(ERROR) << "Unsupported input format: " << input_format_fourcc;
1001 return false;
1002 }
1003
1004 input_planes_count = media::VideoFrame::NumPlanes(input_format);
1005 DCHECK_LE(input_planes_count, static_cast<size_t>(VIDEO_MAX_PLANES));
1006
1007 // Device might have adjusted parameters, reset them along with the format.
1008 memset(&format, 0, sizeof(format));
1009 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1010 format.fmt.pix_mp.width = visible_size_.width();
1011 format.fmt.pix_mp.height = visible_size_.height();
1012 format.fmt.pix_mp.pixelformat = input_format_fourcc;
1013 format.fmt.pix_mp.num_planes = input_planes_count;
1014 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
1015 DCHECK_EQ(format.fmt.pix_mp.num_planes, input_planes_count);
1016 }
1017
1018 // Take device-adjusted sizes for allocated size. If the size is adjusted
1019 // down, it means the input is too big and the hardware does not support it.
1020 input_allocated_size_ = V4L2Device::CodedSizeFromV4L2Format(format);
1021 if (!gfx::Rect(input_allocated_size_).Contains(gfx::Rect(visible_size_))) {
1022 DVLOG(1) << "Input size too big " << visible_size_.ToString()
1023 << ", adjusted to " << input_allocated_size_.ToString();
1024 return false;
1025 }
1026
1027 device_input_format_ = input_format;
1028 input_planes_count_ = input_planes_count;
1029 return true;
1030 }
1031
1032 bool V4L2VideoEncodeAccelerator::SetFormats(
1033 media::VideoPixelFormat input_format,
1034 media::VideoCodecProfile output_profile) {
1035 DVLOG(3) << "SetFormats()";
1036 DCHECK(child_task_runner_->BelongsToCurrentThread());
1037 DCHECK(!input_streamon_);
1038 DCHECK(!output_streamon_);
1039
1040 if (!SetOutputFormat(output_profile))
1041 return false;
1042
1043 if (!NegotiateInputFormat(input_format))
1044 return false;
1045
1046 struct v4l2_crop crop;
1047 memset(&crop, 0, sizeof(crop));
1048 crop.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1049 crop.c.left = 0;
1050 crop.c.top = 0;
1051 crop.c.width = visible_size_.width();
1052 crop.c.height = visible_size_.height();
1053 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_CROP, &crop);
1054
1055 // The width and height might have been adjusted by the driver.
1056 // Read them back and update visible_size_.
1057 if (device_->Ioctl(VIDIOC_G_CROP, &crop) != 0) {
1058 // Some devices don't support G_CROP yet, so treat the failure as
1059 // non-fatal for now.
1060 // TODO(kcwu): NOTIFY_ERROR and return false after all devices support it.
1061 PLOG(WARNING) << "SetFormats(): ioctl() VIDIOC_G_CROP failed";
1062 return true;
1063 }
1064 visible_size_.SetSize(crop.c.width, crop.c.height);
1065 DVLOG(3) << "After adjustment by the driver, visible_size_="
1066 << visible_size_.ToString();
1067
1068 return true;
1069 }
1070
1071 bool V4L2VideoEncodeAccelerator::SetExtCtrls(
1072 std::vector<struct v4l2_ext_control> ctrls) {
1073 struct v4l2_ext_controls ext_ctrls;
1074 memset(&ext_ctrls, 0, sizeof(ext_ctrls));
1075 ext_ctrls.ctrl_class = V4L2_CTRL_CLASS_MPEG;
1076 ext_ctrls.count = ctrls.size();
1077 ext_ctrls.controls = &ctrls[0];
1078 return device_->Ioctl(VIDIOC_S_EXT_CTRLS, &ext_ctrls) == 0;
1079 }
1080
1081 bool V4L2VideoEncodeAccelerator::InitControls() {
1082 std::vector<struct v4l2_ext_control> ctrls;
1083 struct v4l2_ext_control ctrl;
1084
1085 // Enable frame-level bitrate control. This is the only mandatory control.
1086 memset(&ctrl, 0, sizeof(ctrl));
1087 ctrl.id = V4L2_CID_MPEG_VIDEO_FRAME_RC_ENABLE;
1088 ctrl.value = 1;
1089 ctrls.push_back(ctrl);
1090 if (!SetExtCtrls(ctrls)) {
1091 LOG(ERROR) << "Failed enabling bitrate control";
1092 NOTIFY_ERROR(kPlatformFailureError);
1093 return false;
1094 }
1095
1096 // Optional controls.
1097 ctrls.clear();
1098 if (output_format_fourcc_ == V4L2_PIX_FMT_H264) {
1099 // No B-frames, for lowest decoding latency.
1100 memset(&ctrl, 0, sizeof(ctrl));
1101 ctrl.id = V4L2_CID_MPEG_VIDEO_B_FRAMES;
1102 ctrl.value = 0;
1103 ctrls.push_back(ctrl);
1104
1105 // Quantization parameter maximum value (for variable bitrate control).
1106 memset(&ctrl, 0, sizeof(ctrl));
1107 ctrl.id = V4L2_CID_MPEG_VIDEO_H264_MAX_QP;
1108 ctrl.value = 51;
1109 ctrls.push_back(ctrl);
1110
1111 // Use H.264 level 4.0 to match the supported max resolution.
1112 memset(&ctrl, 0, sizeof(ctrl));
1113 ctrl.id = V4L2_CID_MPEG_VIDEO_H264_LEVEL;
1114 ctrl.value = V4L2_MPEG_VIDEO_H264_LEVEL_4_0;
1115 ctrls.push_back(ctrl);
1116
1117 // Separate stream header so we can cache it and insert it into the stream.
1118 memset(&ctrl, 0, sizeof(ctrl));
1119 ctrl.id = V4L2_CID_MPEG_VIDEO_HEADER_MODE;
1120 ctrl.value = V4L2_MPEG_VIDEO_HEADER_MODE_SEPARATE;
1121 ctrls.push_back(ctrl);
1122 }
1123
1124 // Enable macroblock-level bitrate control.
1125 memset(&ctrl, 0, sizeof(ctrl));
1126 ctrl.id = V4L2_CID_MPEG_VIDEO_MB_RC_ENABLE;
1127 ctrl.value = 1;
1128 ctrls.push_back(ctrl);
1129
1130 // Disable periodic key frames.
1131 memset(&ctrl, 0, sizeof(ctrl));
1132 ctrl.id = V4L2_CID_MPEG_VIDEO_GOP_SIZE;
1133 ctrl.value = 0;
1134 ctrls.push_back(ctrl);
1135
1136 // Ignore return value as these controls are optional.
1137 SetExtCtrls(ctrls);
1138
1139 // Optional Exynos-specific controls.
1140 ctrls.clear();
1141 // Enable "tight" bitrate mode. For this to work properly, frame- and mb-level
1142 // bitrate controls have to be enabled as well.
1143 memset(&ctrl, 0, sizeof(ctrl));
1144 ctrl.id = V4L2_CID_MPEG_MFC51_VIDEO_RC_REACTION_COEFF;
1145 ctrl.value = 1;
1146 ctrls.push_back(ctrl);
1147
1148 // Force bitrate control to average over a GOP (for tight bitrate
1149 // tolerance).
1150 memset(&ctrl, 0, sizeof(ctrl));
1151 ctrl.id = V4L2_CID_MPEG_MFC51_VIDEO_RC_FIXED_TARGET_BIT;
1152 ctrl.value = 1;
1153 ctrls.push_back(ctrl);
1154
1155 // Ignore return value as these controls are optional.
1156 SetExtCtrls(ctrls);
1157
1158 return true;
1159 }
1160
1161 bool V4L2VideoEncodeAccelerator::CreateInputBuffers() {
1162 DVLOG(3) << "CreateInputBuffers()";
1163 // This function runs on encoder_thread_ after output buffers have been
1164 // provided by the client.
1165 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
1166 DCHECK(!input_streamon_);
1167
1168 struct v4l2_requestbuffers reqbufs;
1169 memset(&reqbufs, 0, sizeof(reqbufs));
1170 // The driver will modify this to the appropriate number of buffers.
1171 reqbufs.count = 1;
1172 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1173 // TODO(posciak): Once we start doing zero-copy, we should decide based on
1174 // the current pipeline setup which memory type to use. This should probably
1175 // be decided based on an argument to Initialize().
1176 if (image_processor_.get())
1177 input_memory_type_ = V4L2_MEMORY_DMABUF;
1178 else
1179 input_memory_type_ = V4L2_MEMORY_USERPTR;
1180
1181 reqbufs.memory = input_memory_type_;
1182 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
1183
1184 DCHECK(input_buffer_map_.empty());
1185 input_buffer_map_.resize(reqbufs.count);
1186 for (size_t i = 0; i < input_buffer_map_.size(); ++i)
1187 free_input_buffers_.push_back(i);
1188
1189 return true;
1190 }
1191
1192 bool V4L2VideoEncodeAccelerator::CreateOutputBuffers() {
1193 DVLOG(3) << "CreateOutputBuffers()";
1194 DCHECK(child_task_runner_->BelongsToCurrentThread());
1195 DCHECK(!output_streamon_);
1196
1197 struct v4l2_requestbuffers reqbufs;
1198 memset(&reqbufs, 0, sizeof(reqbufs));
1199 reqbufs.count = kOutputBufferCount;
1200 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1201 reqbufs.memory = V4L2_MEMORY_MMAP;
1202 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
1203
1204 DCHECK(output_buffer_map_.empty());
1205 output_buffer_map_.resize(reqbufs.count);
1206 for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
1207 struct v4l2_plane planes[1];
1208 struct v4l2_buffer buffer;
1209 memset(&buffer, 0, sizeof(buffer));
1210 memset(planes, 0, sizeof(planes));
1211 buffer.index = i;
1212 buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1213 buffer.memory = V4L2_MEMORY_MMAP;
1214 buffer.m.planes = planes;
1215 buffer.length = arraysize(planes);
1216 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
1217 void* address = device_->Mmap(NULL,
1218 buffer.m.planes[0].length,
1219 PROT_READ | PROT_WRITE,
1220 MAP_SHARED,
1221 buffer.m.planes[0].m.mem_offset);
1222 if (address == MAP_FAILED) {
1223 PLOG(ERROR) << "CreateOutputBuffers(): mmap() failed";
1224 return false;
1225 }
1226 output_buffer_map_[i].address = address;
1227 output_buffer_map_[i].length = buffer.m.planes[0].length;
1228 free_output_buffers_.push_back(i);
1229 }
1230
1231 return true;
1232 }
1233
1234 void V4L2VideoEncodeAccelerator::DestroyInputBuffers() {
1235 DVLOG(3) << "DestroyInputBuffers()";
1236 DCHECK(child_task_runner_->BelongsToCurrentThread());
1237 DCHECK(!input_streamon_);
1238
1239 struct v4l2_requestbuffers reqbufs;
1240 memset(&reqbufs, 0, sizeof(reqbufs));
1241 reqbufs.count = 0;
1242 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1243 reqbufs.memory = input_memory_type_;
1244 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);
1245
1246 input_buffer_map_.clear();
1247 free_input_buffers_.clear();
1248 }
1249
1250 void V4L2VideoEncodeAccelerator::DestroyOutputBuffers() {
1251 DVLOG(3) << "DestroyOutputBuffers()";
1252 DCHECK(child_task_runner_->BelongsToCurrentThread());
1253 DCHECK(!output_streamon_);
1254
1255 for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
1256 if (output_buffer_map_[i].address != NULL)
1257 device_->Munmap(output_buffer_map_[i].address,
1258 output_buffer_map_[i].length);
1259 }
1260
1261 struct v4l2_requestbuffers reqbufs;
1262 memset(&reqbufs, 0, sizeof(reqbufs));
1263 reqbufs.count = 0;
1264 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1265 reqbufs.memory = V4L2_MEMORY_MMAP;
1266 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);
1267
1268 output_buffer_map_.clear();
1269 free_output_buffers_.clear();
1270 }
1271
1272 } // namespace content
