| OLD | NEW |
| (Empty) |
| 1 // Copyright 2014 The Chromium Authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 #include <fcntl.h> | |
| 6 #include <linux/videodev2.h> | |
| 7 #include <poll.h> | |
| 8 #include <string.h> | |
| 9 #include <sys/eventfd.h> | |
| 10 #include <sys/ioctl.h> | |
| 11 #include <sys/mman.h> | |
| 12 #include <utility> | |
| 13 | |
| 14 #include "base/callback.h" | |
| 15 #include "base/command_line.h" | |
| 16 #include "base/macros.h" | |
| 17 #include "base/numerics/safe_conversions.h" | |
| 18 #include "base/thread_task_runner_handle.h" | |
| 19 #include "base/trace_event/trace_event.h" | |
| 20 #include "content/common/gpu/media/shared_memory_region.h" | |
| 21 #include "content/common/gpu/media/v4l2_video_encode_accelerator.h" | |
| 22 #include "media/base/bind_to_current_loop.h" | |
| 23 #include "media/base/bitstream_buffer.h" | |
| 24 | |
// Logs |x| and transitions the encoder into the error state via
// SetErrorState().
#define NOTIFY_ERROR(x) \
  do { \
    LOG(ERROR) << "Setting error state:" << x; \
    SetErrorState(x); \
  } while (0)

// Issues |type| on device_; on failure logs the errno message, raises
// kPlatformFailureError and returns |value| from the enclosing function.
#define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value, type_str) \
  do { \
    if (device_->Ioctl(type, arg) != 0) { \
      PLOG(ERROR) << __func__ << "(): ioctl() failed: " << type_str; \
      NOTIFY_ERROR(kPlatformFailureError); \
      return value; \
    } \
  } while (0)

// Variant for void-returning functions.
#define IOCTL_OR_ERROR_RETURN(type, arg) \
  IOCTL_OR_ERROR_RETURN_VALUE(type, arg, ((void)0), #type)

// Variant for bool-returning functions.
#define IOCTL_OR_ERROR_RETURN_FALSE(type, arg) \
  IOCTL_OR_ERROR_RETURN_VALUE(type, arg, false, #type)

// Best-effort ioctl: logs on failure but does not enter the error state.
#define IOCTL_OR_LOG_ERROR(type, arg) \
  do { \
    if (device_->Ioctl(type, arg) != 0) \
      PLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
  } while (0)
| 51 | |
| 52 namespace content { | |
| 53 | |
// Pairs a client-assigned bitstream buffer id with the mapped shared-memory
// region backing it; both are immutable for the lifetime of the ref.
struct V4L2VideoEncodeAccelerator::BitstreamBufferRef {
  BitstreamBufferRef(int32_t id, std::unique_ptr<SharedMemoryRegion> shm)
      : id(id), shm(std::move(shm)) {}
  const int32_t id;   // Client-visible bitstream buffer id.
  const std::unique_ptr<SharedMemoryRegion> shm;  // Mapped output memory.
};
| 60 | |
// Input records start out owned by us, i.e. not queued to the device.
V4L2VideoEncodeAccelerator::InputRecord::InputRecord() : at_device(false) {
}

V4L2VideoEncodeAccelerator::InputRecord::~InputRecord() {
}

// Output records start unqueued, with no mapping established yet.
V4L2VideoEncodeAccelerator::OutputRecord::OutputRecord()
    : at_device(false), address(NULL), length(0) {
}

V4L2VideoEncodeAccelerator::OutputRecord::~OutputRecord() {
}
| 73 | |
// Frames waiting for a free image-processor output buffer default to not
// forcing a keyframe.
V4L2VideoEncodeAccelerator::ImageProcessorInputRecord::
    ImageProcessorInputRecord()
    : force_keyframe(false) {}

V4L2VideoEncodeAccelerator::ImageProcessorInputRecord::
    ~ImageProcessorInputRecord() {}
| 80 | |
// Constructed on the child (client) thread; captures that thread's task
// runner for posting client callbacks later.
V4L2VideoEncodeAccelerator::V4L2VideoEncodeAccelerator(
    const scoped_refptr<V4L2Device>& device)
    : child_task_runner_(base::ThreadTaskRunnerHandle::Get()),
      output_buffer_byte_size_(0),
      device_input_format_(media::PIXEL_FORMAT_UNKNOWN),
      input_planes_count_(0),
      output_format_fourcc_(0),
      encoder_state_(kUninitialized),
      stream_header_size_(0),
      device_(device),
      input_streamon_(false),
      input_buffer_queued_count_(0),
      // Default to USERPTR input; DMABUF is used when frames arrive from the
      // image processor (see FrameProcessed()).
      input_memory_type_(V4L2_MEMORY_USERPTR),
      output_streamon_(false),
      output_buffer_queued_count_(0),
      encoder_thread_("V4L2EncoderThread"),
      device_poll_thread_("V4L2EncoderDevicePollThread"),
      weak_this_ptr_factory_(this) {
  // Cache a weak pointer usable from callbacks bound on this thread.
  weak_this_ = weak_this_ptr_factory_.GetWeakPtr();
}
| 101 | |
// Destructor; Destroy() must have already stopped both worker threads, so
// buffer teardown here cannot race with them.
V4L2VideoEncodeAccelerator::~V4L2VideoEncodeAccelerator() {
  DCHECK(!encoder_thread_.IsRunning());
  DCHECK(!device_poll_thread_.IsRunning());
  DVLOG(4) << __func__;

  DestroyInputBuffers();
  DestroyOutputBuffers();
}
| 110 | |
// Initializes the encoder on the child thread: verifies device capabilities,
// negotiates formats, optionally sets up an image processor for pixel-format
// conversion, allocates output buffers and starts the encoder thread.
// Returns false on failure (ioctl failures additionally NOTIFY_ERROR).
bool V4L2VideoEncodeAccelerator::Initialize(
    media::VideoPixelFormat input_format,
    const gfx::Size& input_visible_size,
    media::VideoCodecProfile output_profile,
    uint32_t initial_bitrate,
    Client* client) {
  DVLOG(3) << __func__
           << ": input_format=" << media::VideoPixelFormatToString(input_format)
           << ", input_visible_size=" << input_visible_size.ToString()
           << ", output_profile=" << output_profile
           << ", initial_bitrate=" << initial_bitrate;

  visible_size_ = input_visible_size;

  client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));
  client_ = client_ptr_factory_->GetWeakPtr();

  DCHECK(child_task_runner_->BelongsToCurrentThread());
  DCHECK_EQ(encoder_state_, kUninitialized);

  // The device must support multi-planar memory-to-memory streaming I/O,
  // which everything below relies on.
  struct v4l2_capability caps;
  memset(&caps, 0, sizeof(caps));
  const __u32 kCapsRequired = V4L2_CAP_VIDEO_M2M_MPLANE | V4L2_CAP_STREAMING;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps);
  if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
    LOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP: "
                  "caps check failed: 0x" << std::hex << caps.capabilities;
    return false;
  }

  // Negotiates device_input_format_ / output_format_fourcc_ and related
  // geometry state.
  if (!SetFormats(input_format, output_profile)) {
    DLOG(ERROR) << "Failed setting up formats";
    return false;
  }

  // If the HW cannot consume the client's pixel format directly, insert a
  // V4L2 image processor to convert into device_input_format_.
  if (input_format != device_input_format_) {
    DVLOG(1) << "Input format not supported by the HW, will convert to "
             << media::VideoPixelFormatToString(device_input_format_);

    scoped_refptr<V4L2Device> device =
        V4L2Device::Create(V4L2Device::kImageProcessor);
    image_processor_.reset(new V4L2ImageProcessor(device));

    // Convert from input_format to device_input_format_, keeping the size
    // at visible_size_ and requiring the output buffers to be of at least
    // input_allocated_size_.
    if (!image_processor_->Initialize(
            input_format, device_input_format_, visible_size_, visible_size_,
            input_allocated_size_, kImageProcBufferCount,
            base::Bind(&V4L2VideoEncodeAccelerator::ImageProcessorError,
                       weak_this_))) {
      LOG(ERROR) << "Failed initializing image processor";
      return false;
    }

    // Collect the dmabuf fds of every image-processor output buffer; these
    // are wrapped into VideoFrames in FrameProcessed().
    for (int i = 0; i < kImageProcBufferCount; i++) {
      std::vector<base::ScopedFD> fds =
          image_processor_->GetDmabufsForOutputBuffer(i);
      if (fds.size() == 0) {
        LOG(ERROR) << __func__ << ": failed to get fds of image processor.";
        return false;
      }
      image_processor_output_buffer_map_.push_back(std::move(fds));
      free_image_processor_output_buffers_.push_back(i);
    }
  }

  if (!InitControls())
    return false;

  if (!CreateOutputBuffers())
    return false;

  if (!encoder_thread_.Start()) {
    LOG(ERROR) << "Initialize(): encoder thread failed to start";
    return false;
  }

  RequestEncodingParametersChange(initial_bitrate, kInitialFramerate);

  encoder_state_ = kInitialized;

  // Tell the client how many input buffers we need and how large each
  // buffer must be; input size depends on whether conversion is in place.
  child_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&Client::RequireBitstreamBuffers, client_, kInputBufferCount,
                 image_processor_.get()
                     ? image_processor_->input_allocated_size()
                     : input_allocated_size_,
                 output_buffer_byte_size_));
  return true;
}
| 202 | |
// Error callback handed to the image processor; treats any conversion
// failure as a fatal platform error for the whole encoder.
void V4L2VideoEncodeAccelerator::ImageProcessorError() {
  LOG(ERROR) << "Image processor error";
  NOTIFY_ERROR(kPlatformFailureError);
}
| 207 | |
// Client entry point for a frame to encode (child thread). If an image
// processor is in use the frame is either converted immediately (when a free
// output buffer exists) or parked in image_processor_input_queue_ until
// ReuseImageProcessorOutputBuffer() frees one. Otherwise the frame goes
// straight to the encoder thread.
void V4L2VideoEncodeAccelerator::Encode(
    const scoped_refptr<media::VideoFrame>& frame,
    bool force_keyframe) {
  DVLOG(3) << "Encode(): force_keyframe=" << force_keyframe;
  DCHECK(child_task_runner_->BelongsToCurrentThread());

  if (image_processor_) {
    if (free_image_processor_output_buffers_.size() > 0) {
      int output_buffer_index = free_image_processor_output_buffers_.back();
      free_image_processor_output_buffers_.pop_back();
      // FrameProcessed() is invoked (on this thread) once conversion is done.
      image_processor_->Process(
          frame, output_buffer_index,
          base::Bind(&V4L2VideoEncodeAccelerator::FrameProcessed, weak_this_,
                     force_keyframe, frame->timestamp()));
    } else {
      // No free conversion buffer: queue the frame together with its
      // keyframe flag for later.
      ImageProcessorInputRecord record;
      record.frame = frame;
      record.force_keyframe = force_keyframe;
      image_processor_input_queue_.push(record);
    }
  } else {
    // base::Unretained is safe: the encoder thread is stopped before |this|
    // is deleted in Destroy().
    encoder_thread_.message_loop()->PostTask(
        FROM_HERE,
        base::Bind(&V4L2VideoEncodeAccelerator::EncodeTask,
                   base::Unretained(this),
                   frame,
                   force_keyframe));
  }
}
| 237 | |
// Accepts a client-provided bitstream (output) buffer: validates its size,
// maps its shared memory, and hands it to the encoder thread.
void V4L2VideoEncodeAccelerator::UseOutputBitstreamBuffer(
    const media::BitstreamBuffer& buffer) {
  DVLOG(3) << "UseOutputBitstreamBuffer(): id=" << buffer.id();
  DCHECK(child_task_runner_->BelongsToCurrentThread());

  // The buffer must be able to hold a full encoded unit as negotiated with
  // the device.
  if (buffer.size() < output_buffer_byte_size_) {
    NOTIFY_ERROR(kInvalidArgumentError);
    return;
  }

  std::unique_ptr<SharedMemoryRegion> shm(
      new SharedMemoryRegion(buffer, false));
  if (!shm->Map()) {
    NOTIFY_ERROR(kPlatformFailureError);
    return;
  }

  std::unique_ptr<BitstreamBufferRef> buffer_ref(
      new BitstreamBufferRef(buffer.id(), std::move(shm)));
  // Ownership of the ref moves to the encoder thread.
  encoder_thread_.message_loop()->PostTask(
      FROM_HERE,
      base::Bind(&V4L2VideoEncodeAccelerator::UseOutputBitstreamBufferTask,
                 base::Unretained(this),
                 base::Passed(&buffer_ref)));
}
| 263 | |
// Forwards a bitrate/framerate change request from the child thread to the
// encoder thread, where the actual device controls are updated.
void V4L2VideoEncodeAccelerator::RequestEncodingParametersChange(
    uint32_t bitrate,
    uint32_t framerate) {
  DVLOG(3) << "RequestEncodingParametersChange(): bitrate=" << bitrate
           << ", framerate=" << framerate;
  DCHECK(child_task_runner_->BelongsToCurrentThread());

  encoder_thread_.message_loop()->PostTask(
      FROM_HERE,
      base::Bind(
          &V4L2VideoEncodeAccelerator::RequestEncodingParametersChangeTask,
          base::Unretained(this),
          bitrate,
          framerate));
}
| 279 | |
// Tears the encoder down and deletes |this|. Must be called on the child
// thread; callers must not touch the object afterwards.
void V4L2VideoEncodeAccelerator::Destroy() {
  DVLOG(3) << "Destroy()";
  DCHECK(child_task_runner_->BelongsToCurrentThread());

  // We're destroying; cancel all callbacks.
  client_ptr_factory_.reset();
  weak_this_ptr_factory_.InvalidateWeakPtrs();

  // Hand the image processor its own Destroy(); ownership is released here —
  // presumably the processor deletes itself as part of Destroy() (TODO:
  // confirm against V4L2ImageProcessor).
  if (image_processor_.get())
    image_processor_.release()->Destroy();

  // If the encoder thread is running, destroy using posted task.
  if (encoder_thread_.IsRunning()) {
    encoder_thread_.message_loop()->PostTask(
        FROM_HERE,
        base::Bind(&V4L2VideoEncodeAccelerator::DestroyTask,
                   base::Unretained(this)));
    // DestroyTask() will put the encoder into kError state and cause all tasks
    // to no-op.
    encoder_thread_.Stop();  // Blocks until DestroyTask() has run.
  } else {
    // Otherwise, call the destroy task directly.
    DestroyTask();
  }

  // Set to kError state just in case.
  encoder_state_ = kError;

  delete this;
}
| 310 | |
// Enumerates the device's compressed (CAPTURE) formats and maps each to the
// media profile(s) it supports, with per-format maximum resolution.
media::VideoEncodeAccelerator::SupportedProfiles
V4L2VideoEncodeAccelerator::GetSupportedProfiles() {
  SupportedProfiles profiles;
  SupportedProfile profile;
  profile.max_framerate_numerator = 30;
  profile.max_framerate_denominator = 1;

  // min_resolution is queried but only max_resolution is reported upward.
  gfx::Size min_resolution;
  v4l2_fmtdesc fmtdesc;
  memset(&fmtdesc, 0, sizeof(fmtdesc));
  fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  // VIDIOC_ENUM_FMT returns non-zero once the index runs past the last
  // supported format.
  for (; device_->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0; ++fmtdesc.index) {
    device_->GetSupportedResolution(fmtdesc.pixelformat,
                                    &min_resolution, &profile.max_resolution);
    switch (fmtdesc.pixelformat) {
      case V4L2_PIX_FMT_H264:
        profile.profile = media::H264PROFILE_MAIN;
        profiles.push_back(profile);
        break;
      case V4L2_PIX_FMT_VP8:
        profile.profile = media::VP8PROFILE_ANY;
        profiles.push_back(profile);
        break;
      case V4L2_PIX_FMT_VP9:
        // One V4L2 fourcc covers all four VP9 profiles; advertise each.
        profile.profile = media::VP9PROFILE_PROFILE0;
        profiles.push_back(profile);
        profile.profile = media::VP9PROFILE_PROFILE1;
        profiles.push_back(profile);
        profile.profile = media::VP9PROFILE_PROFILE2;
        profiles.push_back(profile);
        profile.profile = media::VP9PROFILE_PROFILE3;
        profiles.push_back(profile);
        break;
    }
  }

  return profiles;
}
| 349 | |
| 350 void V4L2VideoEncodeAccelerator::FrameProcessed(bool force_keyframe, | |
| 351 base::TimeDelta timestamp, | |
| 352 int output_buffer_index) { | |
| 353 DCHECK(child_task_runner_->BelongsToCurrentThread()); | |
| 354 DVLOG(3) << "FrameProcessed(): force_keyframe=" << force_keyframe | |
| 355 << ", output_buffer_index=" << output_buffer_index; | |
| 356 DCHECK_GE(output_buffer_index, 0); | |
| 357 DCHECK_LT(output_buffer_index, image_processor_output_buffer_map_.size()); | |
| 358 | |
| 359 std::vector<base::ScopedFD>& scoped_fds = | |
| 360 image_processor_output_buffer_map_[output_buffer_index]; | |
| 361 std::vector<int> fds; | |
| 362 for (auto& fd : scoped_fds) { | |
| 363 fds.push_back(fd.get()); | |
| 364 } | |
| 365 scoped_refptr<media::VideoFrame> output_frame = | |
| 366 media::VideoFrame::WrapExternalDmabufs( | |
| 367 device_input_format_, image_processor_->output_allocated_size(), | |
| 368 gfx::Rect(visible_size_), visible_size_, fds, timestamp); | |
| 369 if (!output_frame) { | |
| 370 NOTIFY_ERROR(kPlatformFailureError); | |
| 371 return; | |
| 372 } | |
| 373 output_frame->AddDestructionObserver(media::BindToCurrentLoop( | |
| 374 base::Bind(&V4L2VideoEncodeAccelerator::ReuseImageProcessorOutputBuffer, | |
| 375 weak_this_, output_buffer_index))); | |
| 376 | |
| 377 encoder_thread_.message_loop()->PostTask( | |
| 378 FROM_HERE, | |
| 379 base::Bind(&V4L2VideoEncodeAccelerator::EncodeTask, | |
| 380 base::Unretained(this), output_frame, force_keyframe)); | |
| 381 } | |
| 382 | |
// Called (child thread) when a converted frame has been released by the
// encoder: recycles the image-processor output buffer and, if a frame was
// waiting for one, re-submits it through Encode().
void V4L2VideoEncodeAccelerator::ReuseImageProcessorOutputBuffer(
    int output_buffer_index) {
  DCHECK(child_task_runner_->BelongsToCurrentThread());
  DVLOG(3) << __func__ << ": output_buffer_index=" << output_buffer_index;
  free_image_processor_output_buffers_.push_back(output_buffer_index);
  // One freed buffer can unblock at most one queued frame.
  if (!image_processor_input_queue_.empty()) {
    ImageProcessorInputRecord record = image_processor_input_queue_.front();
    image_processor_input_queue_.pop();
    Encode(record.frame, record.force_keyframe);
  }
}
| 394 | |
// Encoder-thread half of Encode(): queues the frame for the device and, if
// a keyframe was requested, asks the driver to force one via extended
// controls (with a fallback to the legacy MFC5.1 control).
void V4L2VideoEncodeAccelerator::EncodeTask(
    const scoped_refptr<media::VideoFrame>& frame,
    bool force_keyframe) {
  DVLOG(3) << "EncodeTask(): force_keyframe=" << force_keyframe;
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(encoder_state_, kUninitialized);

  if (encoder_state_ == kError) {
    DVLOG(2) << "EncodeTask(): early out: kError state";
    return;
  }

  encoder_input_queue_.push(frame);
  Enqueue();

  if (force_keyframe) {
    // TODO(posciak): this presently makes for slightly imprecise encoding
    // parameters updates. To precisely align the parameter updates with the
    // incoming input frame, we should queue the parameters together with the
    // frame onto encoder_input_queue_ and apply them when the input is about
    // to be queued to the codec.
    std::vector<struct v4l2_ext_control> ctrls;
    struct v4l2_ext_control ctrl;
    memset(&ctrl, 0, sizeof(ctrl));
    ctrl.id = V4L2_CID_MPEG_VIDEO_FORCE_KEY_FRAME;
    ctrls.push_back(ctrl);
    if (!SetExtCtrls(ctrls)) {
      // Some platforms still use the old control. Fallback before they are
      // updated.
      ctrls.clear();
      memset(&ctrl, 0, sizeof(ctrl));
      ctrl.id = V4L2_CID_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE;
      ctrl.value = V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_I_FRAME;
      ctrls.push_back(ctrl);
      if (!SetExtCtrls(ctrls)) {
        // Neither control worked: the keyframe cannot be forced.
        LOG(ERROR) << "Failed requesting keyframe";
        NOTIFY_ERROR(kPlatformFailureError);
        return;
      }
    }
  }
}
| 437 | |
// Encoder-thread half of UseOutputBitstreamBuffer(): queues the buffer ref
// and, on the first buffer after Initialize(), finishes device setup and
// starts polling.
void V4L2VideoEncodeAccelerator::UseOutputBitstreamBufferTask(
    std::unique_ptr<BitstreamBufferRef> buffer_ref) {
  DVLOG(3) << "UseOutputBitstreamBufferTask(): id=" << buffer_ref->id;
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());

  encoder_output_queue_.push_back(
      linked_ptr<BitstreamBufferRef>(buffer_ref.release()));
  Enqueue();

  if (encoder_state_ == kInitialized) {
    // Finish setting up our OUTPUT queue. See: Initialize().
    // VIDIOC_REQBUFS on OUTPUT queue.
    if (!CreateInputBuffers())
      return;
    if (!StartDevicePoll())
      return;
    encoder_state_ = kEncoding;
  }
}
| 457 | |
// Encoder-thread teardown: stops streaming/polling and flips the state to
// kError so every subsequently-run posted task early-outs.
void V4L2VideoEncodeAccelerator::DestroyTask() {
  DVLOG(3) << "DestroyTask()";

  // DestroyTask() should run regardless of encoder_state_.

  // Stop streaming and the device_poll_thread_.
  StopDevicePoll();

  // Set our state to kError, and early-out all tasks.
  encoder_state_ = kError;
}
| 469 | |
// Runs on the encoder thread whenever DevicePollTask() reports activity:
// drains completed buffers, refills the device queues, then re-arms the
// poll.
void V4L2VideoEncodeAccelerator::ServiceDeviceTask() {
  DVLOG(3) << "ServiceDeviceTask()";
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(encoder_state_, kUninitialized);
  DCHECK_NE(encoder_state_, kInitialized);

  if (encoder_state_ == kError) {
    DVLOG(2) << "ServiceDeviceTask(): early out: kError state";
    return;
  }

  Dequeue();
  Enqueue();

  // Clear the interrupt fd.
  if (!device_->ClearDevicePollInterrupt())
    return;

  // Device can be polled as soon as either input or output buffers are queued.
  bool poll_device =
      (input_buffer_queued_count_ + output_buffer_queued_count_ > 0);

  // ServiceDeviceTask() should only ever be scheduled from DevicePollTask(),
  // so either:
  // * device_poll_thread_ is running normally
  // * device_poll_thread_ scheduled us, but then a DestroyTask() shut it down,
  //   in which case we're in kError state, and we should have early-outed
  //   already.
  DCHECK(device_poll_thread_.message_loop());
  // Queue the DevicePollTask() now.
  device_poll_thread_.message_loop()->PostTask(
      FROM_HERE,
      base::Bind(&V4L2VideoEncodeAccelerator::DevicePollTask,
                 base::Unretained(this),
                 poll_device));

  // Snapshot of buffer accounting, for debugging pipeline stalls.
  DVLOG(2) << __func__ << ": buffer counts: ENC["
           << encoder_input_queue_.size() << "] => DEVICE["
           << free_input_buffers_.size() << "+"
           << input_buffer_queued_count_ << "/"
           << input_buffer_map_.size() << "->"
           << free_output_buffers_.size() << "+"
           << output_buffer_queued_count_ << "/"
           << output_buffer_map_.size() << "] => OUT["
           << encoder_output_queue_.size() << "]";
}
| 516 | |
| 517 void V4L2VideoEncodeAccelerator::Enqueue() { | |
| 518 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); | |
| 519 | |
| 520 DVLOG(3) << "Enqueue() " | |
| 521 << "free_input_buffers: " << free_input_buffers_.size() | |
| 522 << "input_queue: " << encoder_input_queue_.size(); | |
| 523 | |
| 524 // Enqueue all the inputs we can. | |
| 525 const int old_inputs_queued = input_buffer_queued_count_; | |
| 526 // while (!ready_input_buffers_.empty()) { | |
| 527 while (!encoder_input_queue_.empty() && !free_input_buffers_.empty()) { | |
| 528 if (!EnqueueInputRecord()) | |
| 529 return; | |
| 530 } | |
| 531 if (old_inputs_queued == 0 && input_buffer_queued_count_ != 0) { | |
| 532 // We just started up a previously empty queue. | |
| 533 // Queue state changed; signal interrupt. | |
| 534 if (!device_->SetDevicePollInterrupt()) | |
| 535 return; | |
| 536 // Start VIDIOC_STREAMON if we haven't yet. | |
| 537 if (!input_streamon_) { | |
| 538 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | |
| 539 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type); | |
| 540 input_streamon_ = true; | |
| 541 } | |
| 542 } | |
| 543 | |
| 544 // Enqueue all the outputs we can. | |
| 545 const int old_outputs_queued = output_buffer_queued_count_; | |
| 546 while (!free_output_buffers_.empty() && !encoder_output_queue_.empty()) { | |
| 547 if (!EnqueueOutputRecord()) | |
| 548 return; | |
| 549 } | |
| 550 if (old_outputs_queued == 0 && output_buffer_queued_count_ != 0) { | |
| 551 // We just started up a previously empty queue. | |
| 552 // Queue state changed; signal interrupt. | |
| 553 if (!device_->SetDevicePollInterrupt()) | |
| 554 return; | |
| 555 // Start VIDIOC_STREAMON if we haven't yet. | |
| 556 if (!output_streamon_) { | |
| 557 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
| 558 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type); | |
| 559 output_streamon_ = true; | |
| 560 } | |
| 561 } | |
| 562 } | |
| 563 | |
// Drains completed buffers from both device queues (encoder thread).
// Completed inputs are recycled; completed outputs are copied into the
// client's bitstream buffer (with H.264 stream-header insertion before
// keyframes) and reported via BitstreamBufferReady.
void V4L2VideoEncodeAccelerator::Dequeue() {
  DVLOG(3) << "Dequeue()";
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());

  // Dequeue completed input (VIDEO_OUTPUT) buffers, and recycle to the free
  // list.
  struct v4l2_buffer dqbuf;
  struct v4l2_plane planes[VIDEO_MAX_PLANES];
  while (input_buffer_queued_count_ > 0) {
    DVLOG(4) << "inputs queued: " << input_buffer_queued_count_;
    DCHECK(input_streamon_);
    memset(&dqbuf, 0, sizeof(dqbuf));
    memset(&planes, 0, sizeof(planes));
    dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    dqbuf.memory = input_memory_type_;
    dqbuf.m.planes = planes;
    dqbuf.length = input_planes_count_;
    if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
      if (errno == EAGAIN) {
        // EAGAIN if we're just out of buffers to dequeue.
        break;
      }
      PLOG(ERROR) << "Dequeue(): ioctl() failed: VIDIOC_DQBUF";
      NOTIFY_ERROR(kPlatformFailureError);
      return;
    }
    InputRecord& input_record = input_buffer_map_[dqbuf.index];
    DCHECK(input_record.at_device);
    input_record.at_device = false;

    // Drop the VideoFrame reference: the device is done reading from it.
    input_record.frame = NULL;
    free_input_buffers_.push_back(dqbuf.index);
    input_buffer_queued_count_--;
  }

  // Dequeue completed output (VIDEO_CAPTURE) buffers, and recycle to the
  // free list. Notify the client that an output buffer is complete.
  while (output_buffer_queued_count_ > 0) {
    DCHECK(output_streamon_);
    memset(&dqbuf, 0, sizeof(dqbuf));
    memset(planes, 0, sizeof(planes));
    dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    dqbuf.memory = V4L2_MEMORY_MMAP;
    dqbuf.m.planes = planes;
    dqbuf.length = 1;
    if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
      if (errno == EAGAIN) {
        // EAGAIN if we're just out of buffers to dequeue.
        break;
      }
      PLOG(ERROR) << "Dequeue(): ioctl() failed: VIDIOC_DQBUF";
      NOTIFY_ERROR(kPlatformFailureError);
      return;
    }
    const bool key_frame = ((dqbuf.flags & V4L2_BUF_FLAG_KEYFRAME) != 0);
    OutputRecord& output_record = output_buffer_map_[dqbuf.index];
    DCHECK(output_record.at_device);
    DCHECK(output_record.buffer_ref.get());

    void* output_data = output_record.address;
    size_t output_size = dqbuf.m.planes[0].bytesused;
    // This shouldn't happen, but just in case. We should be able to recover
    // after next keyframe after showing some corruption.
    DCHECK_LE(output_size, output_buffer_byte_size_);
    if (output_size > output_buffer_byte_size_)
      output_size = output_buffer_byte_size_;
    uint8_t* target_data =
        reinterpret_cast<uint8_t*>(output_record.buffer_ref->shm->memory());
    if (output_format_fourcc_ == V4L2_PIX_FMT_H264) {
      if (stream_header_size_ == 0) {
        // Assume that the first buffer dequeued is the stream header.
        stream_header_size_ = output_size;
        stream_header_.reset(new uint8_t[stream_header_size_]);
        memcpy(stream_header_.get(), output_data, stream_header_size_);
      }
      if (key_frame &&
          output_buffer_byte_size_ - stream_header_size_ >= output_size) {
        // Insert stream header before every keyframe.
        memcpy(target_data, stream_header_.get(), stream_header_size_);
        memcpy(target_data + stream_header_size_, output_data, output_size);
        output_size += stream_header_size_;
      } else {
        // Not a keyframe, or no room for the header: copy the payload as-is.
        memcpy(target_data, output_data, output_size);
      }
    } else {
      memcpy(target_data, output_data, output_size);
    }

    DVLOG(3) << "Dequeue(): returning "
                "bitstream_buffer_id=" << output_record.buffer_ref->id
             << ", size=" << output_size << ", key_frame=" << key_frame;
    // Report completion to the client on its own thread.
    child_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&Client::BitstreamBufferReady, client_,
                   output_record.buffer_ref->id, output_size, key_frame));
    output_record.at_device = false;
    output_record.buffer_ref.reset();
    free_output_buffers_.push_back(dqbuf.index);
    output_buffer_queued_count_--;
  }
}
| 665 | |
// Queues the front frame of encoder_input_queue_ into a free input
// (VIDEO_OUTPUT) V4L2 buffer, using either USERPTR or DMABUF planes
// depending on input_memory_type_. Returns false on ioctl failure.
bool V4L2VideoEncodeAccelerator::EnqueueInputRecord() {
  DVLOG(3) << "EnqueueInputRecord()";
  DCHECK(!free_input_buffers_.empty());
  DCHECK(!encoder_input_queue_.empty());

  // Enqueue an input (VIDEO_OUTPUT) buffer.
  scoped_refptr<media::VideoFrame> frame = encoder_input_queue_.front();
  const int index = free_input_buffers_.back();
  InputRecord& input_record = input_buffer_map_[index];
  DCHECK(!input_record.at_device);
  struct v4l2_buffer qbuf;
  struct v4l2_plane qbuf_planes[VIDEO_MAX_PLANES];
  memset(&qbuf, 0, sizeof(qbuf));
  memset(qbuf_planes, 0, sizeof(qbuf_planes));
  qbuf.index = index;
  qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  qbuf.m.planes = qbuf_planes;

  DCHECK_EQ(device_input_format_, frame->format());
  for (size_t i = 0; i < input_planes_count_; ++i) {
    // bytesused is the full allocated plane size, not just the visible area.
    qbuf.m.planes[i].bytesused =
        base::checked_cast<__u32>(media::VideoFrame::PlaneSize(
            frame->format(), i, input_allocated_size_).GetArea());

    switch (input_memory_type_) {
      case V4L2_MEMORY_USERPTR:
        qbuf.m.planes[i].length = qbuf.m.planes[i].bytesused;
        qbuf.m.planes[i].m.userptr =
            reinterpret_cast<unsigned long>(frame->data(i));
        DCHECK(qbuf.m.planes[i].m.userptr);
        break;

      case V4L2_MEMORY_DMABUF:
        qbuf.m.planes[i].m.fd = frame->dmabuf_fd(i);
        DCHECK_NE(qbuf.m.planes[i].m.fd, -1);
        break;

      default:
        NOTREACHED();
        return false;
    }
  }

  qbuf.memory = input_memory_type_;
  qbuf.length = input_planes_count_;

  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
  input_record.at_device = true;
  // Keep a reference so the frame memory outlives the device's use of it.
  input_record.frame = frame;
  encoder_input_queue_.pop();
  free_input_buffers_.pop_back();
  input_buffer_queued_count_++;
  return true;
}
| 720 | |
// Queues a free MMAP output (VIDEO_CAPTURE) buffer to the device and
// associates it with a pending client bitstream buffer ref. Returns false
// on ioctl failure.
bool V4L2VideoEncodeAccelerator::EnqueueOutputRecord() {
  DVLOG(3) << "EnqueueOutputRecord()";
  DCHECK(!free_output_buffers_.empty());
  DCHECK(!encoder_output_queue_.empty());

  // Enqueue an output (VIDEO_CAPTURE) buffer.
  // NOTE: takes from the back of encoder_output_queue_ (LIFO).
  linked_ptr<BitstreamBufferRef> output_buffer = encoder_output_queue_.back();
  const int index = free_output_buffers_.back();
  OutputRecord& output_record = output_buffer_map_[index];
  DCHECK(!output_record.at_device);
  DCHECK(!output_record.buffer_ref.get());
  struct v4l2_buffer qbuf;
  struct v4l2_plane qbuf_planes[1];
  memset(&qbuf, 0, sizeof(qbuf));
  memset(qbuf_planes, 0, sizeof(qbuf_planes));
  qbuf.index = index;
  qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  qbuf.memory = V4L2_MEMORY_MMAP;
  qbuf.m.planes = qbuf_planes;
  qbuf.length = 1;  // Compressed output is single-plane.
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
  output_record.at_device = true;
  output_record.buffer_ref = output_buffer;
  encoder_output_queue_.pop_back();
  free_output_buffers_.pop_back();
  output_buffer_queued_count_++;
  return true;
}
| 749 | |
// Starts the device poll thread and schedules its first DevicePollTask()
// (encoder thread). Returns false if the thread fails to start.
bool V4L2VideoEncodeAccelerator::StartDevicePoll() {
  DVLOG(3) << "StartDevicePoll()";
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK(!device_poll_thread_.IsRunning());

  // Start up the device poll thread and schedule its first DevicePollTask().
  if (!device_poll_thread_.Start()) {
    LOG(ERROR) << "StartDevicePoll(): Device thread failed to start";
    NOTIFY_ERROR(kPlatformFailureError);
    return false;
  }
  // Enqueue a poll task with no devices to poll on -- it will wait only on the
  // interrupt fd.
  device_poll_thread_.message_loop()->PostTask(
      FROM_HERE,
      base::Bind(&V4L2VideoEncodeAccelerator::DevicePollTask,
                 base::Unretained(this),
                 false));

  return true;
}
| 771 | |
// Stops the poll thread, turns off streaming on both queues, and resets all
// buffer accounting to its idle state. Returns false if the poll interrupt
// could not be signalled or cleared.
bool V4L2VideoEncodeAccelerator::StopDevicePoll() {
  DVLOG(3) << "StopDevicePoll()";

  // Signal the DevicePollTask() to stop, and stop the device poll thread.
  if (!device_->SetDevicePollInterrupt())
    return false;
  device_poll_thread_.Stop();
  // Clear the interrupt now, to be sure.
  if (!device_->ClearDevicePollInterrupt())
    return false;

  // STREAMOFF also implicitly returns all queued buffers to us.
  if (input_streamon_) {
    __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
  }
  input_streamon_ = false;

  if (output_streamon_) {
    __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
  }
  output_streamon_ = false;

  // Reset all our accounting info.
  while (!encoder_input_queue_.empty())
    encoder_input_queue_.pop();
  free_input_buffers_.clear();
  for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
    InputRecord& input_record = input_buffer_map_[i];
    input_record.at_device = false;
    input_record.frame = NULL;
    free_input_buffers_.push_back(i);
  }
  input_buffer_queued_count_ = 0;

  free_output_buffers_.clear();
  for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
    OutputRecord& output_record = output_buffer_map_[i];
    output_record.at_device = false;
    output_record.buffer_ref.reset();
    free_output_buffers_.push_back(i);
  }
  output_buffer_queued_count_ = 0;

  // Drop any client bitstream buffers still pending; the client is expected
  // to provide new ones if encoding resumes.
  encoder_output_queue_.clear();

  DVLOG(3) << "StopDevicePoll(): device poll stopped";
  return true;
}
| 821 | |
| 822 void V4L2VideoEncodeAccelerator::DevicePollTask(bool poll_device) { | |
| 823 DVLOG(3) << "DevicePollTask()"; | |
| 824 DCHECK_EQ(device_poll_thread_.message_loop(), base::MessageLoop::current()); | |
| 825 | |
| 826 bool event_pending; | |
| 827 if (!device_->Poll(poll_device, &event_pending)) { | |
| 828 NOTIFY_ERROR(kPlatformFailureError); | |
| 829 return; | |
| 830 } | |
| 831 | |
| 832 // All processing should happen on ServiceDeviceTask(), since we shouldn't | |
| 833 // touch encoder state from this thread. | |
| 834 encoder_thread_.message_loop()->PostTask( | |
| 835 FROM_HERE, | |
| 836 base::Bind(&V4L2VideoEncodeAccelerator::ServiceDeviceTask, | |
| 837 base::Unretained(this))); | |
| 838 } | |
| 839 | |
| 840 void V4L2VideoEncodeAccelerator::NotifyError(Error error) { | |
| 841 DVLOG(1) << "NotifyError(): error=" << error; | |
| 842 | |
| 843 if (!child_task_runner_->BelongsToCurrentThread()) { | |
| 844 child_task_runner_->PostTask( | |
| 845 FROM_HERE, base::Bind(&V4L2VideoEncodeAccelerator::NotifyError, | |
| 846 weak_this_, error)); | |
| 847 return; | |
| 848 } | |
| 849 | |
| 850 if (client_) { | |
| 851 client_->NotifyError(error); | |
| 852 client_ptr_factory_.reset(); | |
| 853 } | |
| 854 } | |
| 855 | |
| 856 void V4L2VideoEncodeAccelerator::SetErrorState(Error error) { | |
| 857 // We can touch encoder_state_ only if this is the encoder thread or the | |
| 858 // encoder thread isn't running. | |
| 859 if (encoder_thread_.message_loop() != NULL && | |
| 860 encoder_thread_.message_loop() != base::MessageLoop::current()) { | |
| 861 encoder_thread_.message_loop()->PostTask( | |
| 862 FROM_HERE, base::Bind(&V4L2VideoEncodeAccelerator::SetErrorState, | |
| 863 base::Unretained(this), error)); | |
| 864 return; | |
| 865 } | |
| 866 | |
| 867 // Post NotifyError only if we are already initialized, as the API does | |
| 868 // not allow doing so before that. | |
| 869 if (encoder_state_ != kError && encoder_state_ != kUninitialized) | |
| 870 NotifyError(error); | |
| 871 | |
| 872 encoder_state_ = kError; | |
| 873 } | |
| 874 | |
| 875 void V4L2VideoEncodeAccelerator::RequestEncodingParametersChangeTask( | |
| 876 uint32_t bitrate, | |
| 877 uint32_t framerate) { | |
| 878 DVLOG(3) << "RequestEncodingParametersChangeTask(): bitrate=" << bitrate | |
| 879 << ", framerate=" << framerate; | |
| 880 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); | |
| 881 | |
| 882 if (bitrate < 1) | |
| 883 bitrate = 1; | |
| 884 if (framerate < 1) | |
| 885 framerate = 1; | |
| 886 | |
| 887 std::vector<struct v4l2_ext_control> ctrls; | |
| 888 struct v4l2_ext_control ctrl; | |
| 889 memset(&ctrl, 0, sizeof(ctrl)); | |
| 890 ctrl.id = V4L2_CID_MPEG_VIDEO_BITRATE; | |
| 891 ctrl.value = bitrate; | |
| 892 ctrls.push_back(ctrl); | |
| 893 if (!SetExtCtrls(ctrls)) { | |
| 894 LOG(ERROR) << "Failed changing bitrate"; | |
| 895 NOTIFY_ERROR(kPlatformFailureError); | |
| 896 return; | |
| 897 } | |
| 898 | |
| 899 struct v4l2_streamparm parms; | |
| 900 memset(&parms, 0, sizeof(parms)); | |
| 901 parms.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | |
| 902 // Note that we are provided "frames per second" but V4L2 expects "time per | |
| 903 // frame"; hence we provide the reciprocal of the framerate here. | |
| 904 parms.parm.output.timeperframe.numerator = 1; | |
| 905 parms.parm.output.timeperframe.denominator = framerate; | |
| 906 IOCTL_OR_ERROR_RETURN(VIDIOC_S_PARM, &parms); | |
| 907 } | |
| 908 | |
| 909 bool V4L2VideoEncodeAccelerator::SetOutputFormat( | |
| 910 media::VideoCodecProfile output_profile) { | |
| 911 DCHECK(child_task_runner_->BelongsToCurrentThread()); | |
| 912 DCHECK(!input_streamon_); | |
| 913 DCHECK(!output_streamon_); | |
| 914 | |
| 915 output_format_fourcc_ = | |
| 916 V4L2Device::VideoCodecProfileToV4L2PixFmt(output_profile, false); | |
| 917 if (!output_format_fourcc_) { | |
| 918 LOG(ERROR) << "Initialize(): invalid output_profile=" << output_profile; | |
| 919 return false; | |
| 920 } | |
| 921 | |
| 922 output_buffer_byte_size_ = kOutputBufferSize; | |
| 923 | |
| 924 struct v4l2_format format; | |
| 925 memset(&format, 0, sizeof(format)); | |
| 926 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
| 927 format.fmt.pix_mp.width = visible_size_.width(); | |
| 928 format.fmt.pix_mp.height = visible_size_.height(); | |
| 929 format.fmt.pix_mp.pixelformat = output_format_fourcc_; | |
| 930 format.fmt.pix_mp.plane_fmt[0].sizeimage = | |
| 931 base::checked_cast<__u32>(output_buffer_byte_size_); | |
| 932 format.fmt.pix_mp.num_planes = 1; | |
| 933 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format); | |
| 934 | |
| 935 // Device might have adjusted the required output size. | |
| 936 size_t adjusted_output_buffer_size = | |
| 937 base::checked_cast<size_t>(format.fmt.pix_mp.plane_fmt[0].sizeimage); | |
| 938 output_buffer_byte_size_ = adjusted_output_buffer_size; | |
| 939 | |
| 940 return true; | |
| 941 } | |
| 942 | |
bool V4L2VideoEncodeAccelerator::NegotiateInputFormat(
    media::VideoPixelFormat input_format) {
  DVLOG(3) << "NegotiateInputFormat()";
  DCHECK(child_task_runner_->BelongsToCurrentThread());
  DCHECK(!input_streamon_);
  DCHECK(!output_streamon_);

  // Cleared up front so a failed negotiation leaves us in a known state.
  device_input_format_ = media::PIXEL_FORMAT_UNKNOWN;
  input_planes_count_ = 0;

  uint32_t input_format_fourcc =
      V4L2Device::VideoPixelFormatToV4L2PixFmt(input_format);
  if (!input_format_fourcc) {
    LOG(ERROR) << "Unsupported input format" << input_format_fourcc;
    return false;
  }

  size_t input_planes_count = media::VideoFrame::NumPlanes(input_format);
  DCHECK_LE(input_planes_count, static_cast<size_t>(VIDEO_MAX_PLANES));

  // First see if we the device can use the provided input_format directly.
  struct v4l2_format format;
  memset(&format, 0, sizeof(format));
  format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  format.fmt.pix_mp.width = visible_size_.width();
  format.fmt.pix_mp.height = visible_size_.height();
  format.fmt.pix_mp.pixelformat = input_format_fourcc;
  format.fmt.pix_mp.num_planes = input_planes_count;
  if (device_->Ioctl(VIDIOC_S_FMT, &format) != 0) {
    // Error or format unsupported by device, try to negotiate a fallback.
    // The fallback is whatever single format the device says it prefers.
    input_format_fourcc = device_->PreferredInputFormat();
    input_format =
        V4L2Device::V4L2PixFmtToVideoPixelFormat(input_format_fourcc);
    if (input_format == media::PIXEL_FORMAT_UNKNOWN) {
      LOG(ERROR) << "Unsupported input format" << input_format_fourcc;
      return false;
    }

    input_planes_count = media::VideoFrame::NumPlanes(input_format);
    DCHECK_LE(input_planes_count, static_cast<size_t>(VIDEO_MAX_PLANES));

    // Device might have adjusted parameters, reset them along with the format.
    // The first failed S_FMT may have left arbitrary values in |format|.
    memset(&format, 0, sizeof(format));
    format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    format.fmt.pix_mp.width = visible_size_.width();
    format.fmt.pix_mp.height = visible_size_.height();
    format.fmt.pix_mp.pixelformat = input_format_fourcc;
    format.fmt.pix_mp.num_planes = input_planes_count;
    IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
    DCHECK_EQ(format.fmt.pix_mp.num_planes, input_planes_count);
  }

  // Take device-adjusted sizes for allocated size. If the size is adjusted
  // down, it means the input is too big and the hardware does not support it.
  input_allocated_size_ = V4L2Device::CodedSizeFromV4L2Format(format);
  if (!gfx::Rect(input_allocated_size_).Contains(gfx::Rect(visible_size_))) {
    DVLOG(1) << "Input size too big " << visible_size_.ToString()
             << ", adjusted to " << input_allocated_size_.ToString();
    return false;
  }

  // Success: record the format/plane count actually accepted by the device,
  // which may differ from what the caller requested.
  device_input_format_ = input_format;
  input_planes_count_ = input_planes_count;
  return true;
}
| 1008 | |
bool V4L2VideoEncodeAccelerator::SetFormats(
    media::VideoPixelFormat input_format,
    media::VideoCodecProfile output_profile) {
  DVLOG(3) << "SetFormats()";
  DCHECK(child_task_runner_->BelongsToCurrentThread());
  DCHECK(!input_streamon_);
  DCHECK(!output_streamon_);

  // Output (bitstream) format must be set before negotiating the input
  // (raw frame) format.
  if (!SetOutputFormat(output_profile))
    return false;

  if (!NegotiateInputFormat(input_format))
    return false;

  // Restrict encoding to the visible region of the (possibly larger)
  // device-allocated coded frame.
  struct v4l2_crop crop;
  memset(&crop, 0, sizeof(crop));
  crop.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  crop.c.left = 0;
  crop.c.top = 0;
  crop.c.width = visible_size_.width();
  crop.c.height = visible_size_.height();
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_CROP, &crop);

  // The width and height might be adjusted by driver.
  // Need to read it back and set to visible_size_.
  if (device_->Ioctl(VIDIOC_G_CROP, &crop) != 0) {
    // Some devices haven't supported G_CROP yet, so treat the failure
    // non-fatal for now.
    // TODO(kcwu): NOTIFY_ERROR and return false after all devices support it.
    PLOG(WARNING) << "SetFormats(): ioctl() VIDIOC_G_CROP failed";
    return true;
  }
  visible_size_.SetSize(crop.c.width, crop.c.height);
  DVLOG(3) << "After adjusted by driver, visible_size_="
           << visible_size_.ToString();

  return true;
}
| 1047 | |
| 1048 bool V4L2VideoEncodeAccelerator::SetExtCtrls( | |
| 1049 std::vector<struct v4l2_ext_control> ctrls) { | |
| 1050 struct v4l2_ext_controls ext_ctrls; | |
| 1051 memset(&ext_ctrls, 0, sizeof(ext_ctrls)); | |
| 1052 ext_ctrls.ctrl_class = V4L2_CTRL_CLASS_MPEG; | |
| 1053 ext_ctrls.count = ctrls.size(); | |
| 1054 ext_ctrls.controls = &ctrls[0]; | |
| 1055 return device_->Ioctl(VIDIOC_S_EXT_CTRLS, &ext_ctrls) == 0; | |
| 1056 } | |
| 1057 | |
| 1058 bool V4L2VideoEncodeAccelerator::InitControls() { | |
| 1059 std::vector<struct v4l2_ext_control> ctrls; | |
| 1060 struct v4l2_ext_control ctrl; | |
| 1061 | |
| 1062 // Enable frame-level bitrate control. This is the only mandatory control. | |
| 1063 memset(&ctrl, 0, sizeof(ctrl)); | |
| 1064 ctrl.id = V4L2_CID_MPEG_VIDEO_FRAME_RC_ENABLE; | |
| 1065 ctrl.value = 1; | |
| 1066 ctrls.push_back(ctrl); | |
| 1067 if (!SetExtCtrls(ctrls)) { | |
| 1068 LOG(ERROR) << "Failed enabling bitrate control"; | |
| 1069 NOTIFY_ERROR(kPlatformFailureError); | |
| 1070 return false; | |
| 1071 } | |
| 1072 | |
| 1073 // Optional controls. | |
| 1074 ctrls.clear(); | |
| 1075 if (output_format_fourcc_ == V4L2_PIX_FMT_H264) { | |
| 1076 // No B-frames, for lowest decoding latency. | |
| 1077 memset(&ctrl, 0, sizeof(ctrl)); | |
| 1078 ctrl.id = V4L2_CID_MPEG_VIDEO_B_FRAMES; | |
| 1079 ctrl.value = 0; | |
| 1080 ctrls.push_back(ctrl); | |
| 1081 | |
| 1082 // Quantization parameter maximum value (for variable bitrate control). | |
| 1083 memset(&ctrl, 0, sizeof(ctrl)); | |
| 1084 ctrl.id = V4L2_CID_MPEG_VIDEO_H264_MAX_QP; | |
| 1085 ctrl.value = 51; | |
| 1086 ctrls.push_back(ctrl); | |
| 1087 | |
| 1088 // Use H.264 level 4.0 to match the supported max resolution. | |
| 1089 memset(&ctrl, 0, sizeof(ctrl)); | |
| 1090 ctrl.id = V4L2_CID_MPEG_VIDEO_H264_LEVEL; | |
| 1091 ctrl.value = V4L2_MPEG_VIDEO_H264_LEVEL_4_0; | |
| 1092 ctrls.push_back(ctrl); | |
| 1093 | |
| 1094 // Separate stream header so we can cache it and insert into the stream. | |
| 1095 memset(&ctrl, 0, sizeof(ctrl)); | |
| 1096 ctrl.id = V4L2_CID_MPEG_VIDEO_HEADER_MODE; | |
| 1097 ctrl.value = V4L2_MPEG_VIDEO_HEADER_MODE_SEPARATE; | |
| 1098 ctrls.push_back(ctrl); | |
| 1099 } | |
| 1100 | |
| 1101 // Enable macroblock-level bitrate control. | |
| 1102 memset(&ctrl, 0, sizeof(ctrl)); | |
| 1103 ctrl.id = V4L2_CID_MPEG_VIDEO_MB_RC_ENABLE; | |
| 1104 ctrl.value = 1; | |
| 1105 ctrls.push_back(ctrl); | |
| 1106 | |
| 1107 // Disable periodic key frames. | |
| 1108 memset(&ctrl, 0, sizeof(ctrl)); | |
| 1109 ctrl.id = V4L2_CID_MPEG_VIDEO_GOP_SIZE; | |
| 1110 ctrl.value = 0; | |
| 1111 ctrls.push_back(ctrl); | |
| 1112 | |
| 1113 // Ignore return value as these controls are optional. | |
| 1114 SetExtCtrls(ctrls); | |
| 1115 | |
| 1116 // Optional Exynos specific controls. | |
| 1117 ctrls.clear(); | |
| 1118 // Enable "tight" bitrate mode. For this to work properly, frame- and mb-level | |
| 1119 // bitrate controls have to be enabled as well. | |
| 1120 memset(&ctrl, 0, sizeof(ctrl)); | |
| 1121 ctrl.id = V4L2_CID_MPEG_MFC51_VIDEO_RC_REACTION_COEFF; | |
| 1122 ctrl.value = 1; | |
| 1123 ctrls.push_back(ctrl); | |
| 1124 | |
| 1125 // Force bitrate control to average over a GOP (for tight bitrate | |
| 1126 // tolerance). | |
| 1127 memset(&ctrl, 0, sizeof(ctrl)); | |
| 1128 ctrl.id = V4L2_CID_MPEG_MFC51_VIDEO_RC_FIXED_TARGET_BIT; | |
| 1129 ctrl.value = 1; | |
| 1130 ctrls.push_back(ctrl); | |
| 1131 | |
| 1132 // Ignore return value as these controls are optional. | |
| 1133 SetExtCtrls(ctrls); | |
| 1134 | |
| 1135 return true; | |
| 1136 } | |
| 1137 | |
| 1138 bool V4L2VideoEncodeAccelerator::CreateInputBuffers() { | |
| 1139 DVLOG(3) << "CreateInputBuffers()"; | |
| 1140 // This function runs on encoder_thread_ after output buffers have been | |
| 1141 // provided by the client. | |
| 1142 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); | |
| 1143 DCHECK(!input_streamon_); | |
| 1144 | |
| 1145 struct v4l2_requestbuffers reqbufs; | |
| 1146 memset(&reqbufs, 0, sizeof(reqbufs)); | |
| 1147 // Driver will modify to the appropriate number of buffers. | |
| 1148 reqbufs.count = 1; | |
| 1149 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | |
| 1150 // TODO(posciak): Once we start doing zero-copy, we should decide based on | |
| 1151 // the current pipeline setup which memory type to use. This should probably | |
| 1152 // be decided based on an argument to Initialize(). | |
| 1153 if (image_processor_.get()) | |
| 1154 input_memory_type_ = V4L2_MEMORY_DMABUF; | |
| 1155 else | |
| 1156 input_memory_type_ = V4L2_MEMORY_USERPTR; | |
| 1157 | |
| 1158 reqbufs.memory = input_memory_type_; | |
| 1159 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs); | |
| 1160 | |
| 1161 DCHECK(input_buffer_map_.empty()); | |
| 1162 input_buffer_map_.resize(reqbufs.count); | |
| 1163 for (size_t i = 0; i < input_buffer_map_.size(); ++i) | |
| 1164 free_input_buffers_.push_back(i); | |
| 1165 | |
| 1166 return true; | |
| 1167 } | |
| 1168 | |
bool V4L2VideoEncodeAccelerator::CreateOutputBuffers() {
  DVLOG(3) << "CreateOutputBuffers()";
  DCHECK(child_task_runner_->BelongsToCurrentThread());
  DCHECK(!output_streamon_);

  // Allocate MMAP buffers on the CAPTURE (encoded bitstream) queue.
  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = kOutputBufferCount;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);

  DCHECK(output_buffer_map_.empty());
  // The driver may have granted fewer/more buffers than requested.
  output_buffer_map_.resize(reqbufs.count);
  for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
    // Query each buffer's plane offset/length, then map it into our address
    // space. Single-plane output is assumed (length = 1 plane).
    struct v4l2_plane planes[1];
    struct v4l2_buffer buffer;
    memset(&buffer, 0, sizeof(buffer));
    memset(planes, 0, sizeof(planes));
    buffer.index = i;
    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    buffer.memory = V4L2_MEMORY_MMAP;
    buffer.m.planes = planes;
    buffer.length = arraysize(planes);
    IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
    void* address = device_->Mmap(NULL,
                                  buffer.m.planes[0].length,
                                  PROT_READ | PROT_WRITE,
                                  MAP_SHARED,
                                  buffer.m.planes[0].m.mem_offset);
    if (address == MAP_FAILED) {
      // Buffers mapped in earlier iterations are cleaned up later by
      // DestroyOutputBuffers(), which skips records with a NULL address.
      PLOG(ERROR) << "CreateOutputBuffers(): mmap() failed";
      return false;
    }
    output_buffer_map_[i].address = address;
    output_buffer_map_[i].length = buffer.m.planes[0].length;
    free_output_buffers_.push_back(i);
  }

  return true;
}
| 1210 | |
| 1211 void V4L2VideoEncodeAccelerator::DestroyInputBuffers() { | |
| 1212 DVLOG(3) << "DestroyInputBuffers()"; | |
| 1213 DCHECK(child_task_runner_->BelongsToCurrentThread()); | |
| 1214 DCHECK(!input_streamon_); | |
| 1215 | |
| 1216 struct v4l2_requestbuffers reqbufs; | |
| 1217 memset(&reqbufs, 0, sizeof(reqbufs)); | |
| 1218 reqbufs.count = 0; | |
| 1219 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | |
| 1220 reqbufs.memory = input_memory_type_; | |
| 1221 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs); | |
| 1222 | |
| 1223 input_buffer_map_.clear(); | |
| 1224 free_input_buffers_.clear(); | |
| 1225 } | |
| 1226 | |
| 1227 void V4L2VideoEncodeAccelerator::DestroyOutputBuffers() { | |
| 1228 DVLOG(3) << "DestroyOutputBuffers()"; | |
| 1229 DCHECK(child_task_runner_->BelongsToCurrentThread()); | |
| 1230 DCHECK(!output_streamon_); | |
| 1231 | |
| 1232 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { | |
| 1233 if (output_buffer_map_[i].address != NULL) | |
| 1234 device_->Munmap(output_buffer_map_[i].address, | |
| 1235 output_buffer_map_[i].length); | |
| 1236 } | |
| 1237 | |
| 1238 struct v4l2_requestbuffers reqbufs; | |
| 1239 memset(&reqbufs, 0, sizeof(reqbufs)); | |
| 1240 reqbufs.count = 0; | |
| 1241 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
| 1242 reqbufs.memory = V4L2_MEMORY_MMAP; | |
| 1243 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs); | |
| 1244 | |
| 1245 output_buffer_map_.clear(); | |
| 1246 free_output_buffers_.clear(); | |
| 1247 } | |
| 1248 | |
| 1249 } // namespace content | |
| OLD | NEW |