Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "media/gpu/v4l2_jpeg_decode_accelerator.h" | 5 #include "media/gpu/v4l2_jpeg_decode_accelerator.h" |
| 6 | 6 |
| 7 #include <errno.h> | 7 #include <errno.h> |
| 8 #include <linux/videodev2.h> | 8 #include <linux/videodev2.h> |
| 9 #include <string.h> | 9 #include <string.h> |
| 10 #include <sys/mman.h> | 10 #include <sys/mman.h> |
| (...skipping 89 matching lines...) | |
| 100 0x37, 0x38, 0x39, 0x3A, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4A, | 100 0x37, 0x38, 0x39, 0x3A, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4A, |
| 101 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5A, 0x63, 0x64, 0x65, 0x66, | 101 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5A, 0x63, 0x64, 0x65, 0x66, |
| 102 0x67, 0x68, 0x69, 0x6A, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7A, | 102 0x67, 0x68, 0x69, 0x6A, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7A, |
| 103 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8A, 0x92, 0x93, 0x94, | 103 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8A, 0x92, 0x93, 0x94, |
| 104 0x95, 0x96, 0x97, 0x98, 0x99, 0x9A, 0xA2, 0xA3, 0xA4, 0xA5, 0xA6, 0xA7, | 104 0x95, 0x96, 0x97, 0x98, 0x99, 0x9A, 0xA2, 0xA3, 0xA4, 0xA5, 0xA6, 0xA7, |
| 105 0xA8, 0xA9, 0xAA, 0xB2, 0xB3, 0xB4, 0xB5, 0xB6, 0xB7, 0xB8, 0xB9, 0xBA, | 105 0xA8, 0xA9, 0xAA, 0xB2, 0xB3, 0xB4, 0xB5, 0xB6, 0xB7, 0xB8, 0xB9, 0xBA, |
| 106 0xC2, 0xC3, 0xC4, 0xC5, 0xC6, 0xC7, 0xC8, 0xC9, 0xCA, 0xD2, 0xD3, 0xD4, | 106 0xC2, 0xC3, 0xC4, 0xC5, 0xC6, 0xC7, 0xC8, 0xC9, 0xCA, 0xD2, 0xD3, 0xD4, |
| 107 0xD5, 0xD6, 0xD7, 0xD8, 0xD9, 0xDA, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7, | 107 0xD5, 0xD6, 0xD7, 0xD8, 0xD9, 0xDA, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7, |
| 108 0xE8, 0xE9, 0xEA, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0xF7, 0xF8, 0xF9, 0xFA}; | 108 0xE8, 0xE9, 0xEA, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0xF7, 0xF8, 0xF9, 0xFA}; |
| 109 | 109 |
| 110 V4L2JpegDecodeAccelerator::BufferRecord::BufferRecord() | 110 V4L2JpegDecodeAccelerator::BufferRecord::BufferRecord() : at_device(false) { |
| 111 : address(nullptr), length(0), at_device(false) {} | 111 memset(address, 0, sizeof(address)); |
| 112 memset(length, 0, sizeof(length)); | |
| 113 } | |
| 112 | 114 |
| 113 V4L2JpegDecodeAccelerator::BufferRecord::~BufferRecord() {} | 115 V4L2JpegDecodeAccelerator::BufferRecord::~BufferRecord() {} |
| 114 | 116 |
| 115 V4L2JpegDecodeAccelerator::JobRecord::JobRecord( | 117 V4L2JpegDecodeAccelerator::JobRecord::JobRecord( |
| 116 const BitstreamBuffer& bitstream_buffer, | 118 const BitstreamBuffer& bitstream_buffer, |
| 117 scoped_refptr<VideoFrame> video_frame) | 119 scoped_refptr<VideoFrame> video_frame) |
| 118 : bitstream_buffer_id(bitstream_buffer.id()), | 120 : bitstream_buffer_id(bitstream_buffer.id()), |
| 119 shm(bitstream_buffer, true), | 121 shm(bitstream_buffer, true), |
| 120 out_frame(video_frame) {} | 122 out_frame(video_frame) {} |
| 121 | 123 |
| 122 V4L2JpegDecodeAccelerator::JobRecord::~JobRecord() {} | 124 V4L2JpegDecodeAccelerator::JobRecord::~JobRecord() {} |
| 123 | 125 |
| 124 V4L2JpegDecodeAccelerator::V4L2JpegDecodeAccelerator( | 126 V4L2JpegDecodeAccelerator::V4L2JpegDecodeAccelerator( |
| 125 const scoped_refptr<V4L2Device>& device, | 127 const scoped_refptr<V4L2Device>& device, |
| 126 const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner) | 128 const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner) |
| 127 : output_buffer_pixelformat_(0), | 129 : output_buffer_pixelformat_(0), |
| 130 output_buffer_num_planes_(0), | |
| 128 child_task_runner_(base::ThreadTaskRunnerHandle::Get()), | 131 child_task_runner_(base::ThreadTaskRunnerHandle::Get()), |
| 129 io_task_runner_(io_task_runner), | 132 io_task_runner_(io_task_runner), |
| 130 client_(nullptr), | 133 client_(nullptr), |
| 131 device_(device), | 134 device_(device), |
| 132 decoder_thread_("V4L2JpegDecodeThread"), | 135 decoder_thread_("V4L2JpegDecodeThread"), |
| 133 device_poll_thread_("V4L2JpegDecodeDevicePollThread"), | 136 device_poll_thread_("V4L2JpegDecodeDevicePollThread"), |
| 134 input_streamon_(false), | 137 input_streamon_(false), |
| 135 output_streamon_(false), | 138 output_streamon_(false), |
| 136 weak_factory_(this) { | 139 weak_factory_(this) { |
| 140 // This class can only handle V4L2_PIX_FMT_JPEG as input, so kMaxInputPlanes | |
| 141 // can only be 1. | |
| 142 static_assert(V4L2JpegDecodeAccelerator::kMaxInputPlanes == 1, | |
| 143 "kMaxInputPlanes must be 1 as input must be V4L2_PIX_FMT_JPEG"); | |
| 137 weak_ptr_ = weak_factory_.GetWeakPtr(); | 144 weak_ptr_ = weak_factory_.GetWeakPtr(); |
| 138 } | 145 } |
| 139 | 146 |
| 140 V4L2JpegDecodeAccelerator::~V4L2JpegDecodeAccelerator() { | 147 V4L2JpegDecodeAccelerator::~V4L2JpegDecodeAccelerator() { |
| 141 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 148 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
| 142 | 149 |
| 143 if (decoder_thread_.IsRunning()) { | 150 if (decoder_thread_.IsRunning()) { |
| 144 decoder_task_runner_->PostTask( | 151 decoder_task_runner_->PostTask( |
| 145 FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::DestroyTask, | 152 FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::DestroyTask, |
| 146 base::Unretained(this))); | 153 base::Unretained(this))); |
| (...skipping 40 matching lines...) | |
| 187 bool V4L2JpegDecodeAccelerator::Initialize(Client* client) { | 194 bool V4L2JpegDecodeAccelerator::Initialize(Client* client) { |
| 188 DCHECK(child_task_runner_->BelongsToCurrentThread()); | 195 DCHECK(child_task_runner_->BelongsToCurrentThread()); |
| 189 | 196 |
| 190 if (!device_->Open(V4L2Device::Type::kJpegDecoder, V4L2_PIX_FMT_JPEG)) { | 197 if (!device_->Open(V4L2Device::Type::kJpegDecoder, V4L2_PIX_FMT_JPEG)) { |
| 191 LOG(ERROR) << "Failed to open device"; | 198 LOG(ERROR) << "Failed to open device"; |
| 192 return false; | 199 return false; |
| 193 } | 200 } |
| 194 | 201 |
| 195 // Capabilities check. | 202 // Capabilities check. |
| 196 struct v4l2_capability caps; | 203 struct v4l2_capability caps; |
| 197 const __u32 kCapsRequired = V4L2_CAP_STREAMING | V4L2_CAP_VIDEO_M2M; | 204 const __u32 kCapsRequired = V4L2_CAP_STREAMING | V4L2_CAP_VIDEO_M2M_MPLANE; |
| 198 memset(&caps, 0, sizeof(caps)); | 205 memset(&caps, 0, sizeof(caps)); |
| 199 if (device_->Ioctl(VIDIOC_QUERYCAP, &caps) != 0) { | 206 if (device_->Ioctl(VIDIOC_QUERYCAP, &caps) != 0) { |
| 200 PLOG(ERROR) << __func__ << ": ioctl() failed: VIDIOC_QUERYCAP"; | 207 PLOG(ERROR) << __func__ << ": ioctl() failed: VIDIOC_QUERYCAP"; |
| 201 return false; | 208 return false; |
| 202 } | 209 } |
| 203 if ((caps.capabilities & kCapsRequired) != kCapsRequired) { | 210 if ((caps.capabilities & kCapsRequired) != kCapsRequired) { |
| 204 LOG(ERROR) << __func__ << ": VIDIOC_QUERYCAP, caps check failed: 0x" | 211 LOG(ERROR) << __func__ << ": VIDIOC_QUERYCAP, caps check failed: 0x" |
| 205 << std::hex << caps.capabilities; | 212 << std::hex << caps.capabilities; |
| 206 return false; | 213 return false; |
| 207 } | 214 } |
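
The capability check above now requires multi-planar mem-to-mem support. As a standalone reference, a minimal sketch of that same `VIDIOC_QUERYCAP` check outside Chromium's `V4L2Device` wrapper is shown below; the device path and the helper name are illustrative assumptions, not taken from this CL.

```cpp
// Sketch only: verify a V4L2 node advertises streaming I/O plus multi-planar
// mem-to-mem support, mirroring the kCapsRequired check in the new code.
#include <fcntl.h>
#include <linux/videodev2.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <cstdio>

bool HasMplaneM2MCaps(int fd) {
  struct v4l2_capability caps;
  memset(&caps, 0, sizeof(caps));
  if (ioctl(fd, VIDIOC_QUERYCAP, &caps) != 0) {
    perror("VIDIOC_QUERYCAP");
    return false;
  }
  const __u32 kRequired = V4L2_CAP_STREAMING | V4L2_CAP_VIDEO_M2M_MPLANE;
  return (caps.capabilities & kRequired) == kRequired;
}

int main() {
  int fd = open("/dev/video0", O_RDWR);  // Hypothetical device node.
  if (fd < 0)
    return 1;
  std::printf("MPLANE M2M: %s\n", HasMplaneM2MCaps(fd) ? "yes" : "no");
  close(fd);
  return 0;
}
```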
| (...skipping 82 matching lines...) | |
| 290 | 297 |
| 291 bool V4L2JpegDecodeAccelerator::ShouldRecreateInputBuffers() { | 298 bool V4L2JpegDecodeAccelerator::ShouldRecreateInputBuffers() { |
| 292 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); | 299 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); |
| 293 if (input_jobs_.empty()) | 300 if (input_jobs_.empty()) |
| 294 return false; | 301 return false; |
| 295 | 302 |
| 296 linked_ptr<JobRecord> job_record = input_jobs_.front(); | 303 linked_ptr<JobRecord> job_record = input_jobs_.front(); |
| 297 // Check input buffer size is enough | 304 // Check input buffer size is enough |
| 298 return (input_buffer_map_.empty() || | 305 return (input_buffer_map_.empty() || |
| 299 (job_record->shm.size() + sizeof(kDefaultDhtSeg)) > | 306 (job_record->shm.size() + sizeof(kDefaultDhtSeg)) > |
| 300 input_buffer_map_.front().length); | 307 input_buffer_map_.front().length[0]); |
| 301 } | 308 } |
| 302 | 309 |
| 303 bool V4L2JpegDecodeAccelerator::RecreateInputBuffers() { | 310 bool V4L2JpegDecodeAccelerator::RecreateInputBuffers() { |
| 304 DVLOG(3) << __func__; | 311 DVLOG(3) << __func__; |
| 305 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); | 312 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); |
| 306 | 313 |
| 307 // If running queue is not empty, we should wait until pending frames finish. | 314 // If running queue is not empty, we should wait until pending frames finish. |
| 308 if (!running_jobs_.empty()) | 315 if (!running_jobs_.empty()) |
| 309 return true; | 316 return true; |
| 310 | 317 |
| (...skipping 26 matching lines...) | |
| 337 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); | 344 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); |
| 338 DCHECK(!input_streamon_); | 345 DCHECK(!input_streamon_); |
| 339 DCHECK(!input_jobs_.empty()); | 346 DCHECK(!input_jobs_.empty()); |
| 340 linked_ptr<JobRecord> job_record = input_jobs_.front(); | 347 linked_ptr<JobRecord> job_record = input_jobs_.front(); |
| 341 // The input image may miss huffman table. We didn't parse the image before, | 348 // The input image may miss huffman table. We didn't parse the image before, |
| 342 // so we create more to avoid the situation of not enough memory. | 349 // so we create more to avoid the situation of not enough memory. |
| 343 // Reserve twice size to avoid recreating input buffer frequently. | 350 // Reserve twice size to avoid recreating input buffer frequently. |
| 344 size_t reserve_size = (job_record->shm.size() + sizeof(kDefaultDhtSeg)) * 2; | 351 size_t reserve_size = (job_record->shm.size() + sizeof(kDefaultDhtSeg)) * 2; |
| 345 struct v4l2_format format; | 352 struct v4l2_format format; |
| 346 memset(&format, 0, sizeof(format)); | 353 memset(&format, 0, sizeof(format)); |
| 347 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; | 354 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 348 format.fmt.pix.pixelformat = V4L2_PIX_FMT_JPEG; | 355 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_JPEG; |
| 349 format.fmt.pix.sizeimage = reserve_size; | 356 format.fmt.pix_mp.plane_fmt[0].sizeimage = reserve_size; |
| 350 format.fmt.pix.field = V4L2_FIELD_ANY; | 357 format.fmt.pix_mp.field = V4L2_FIELD_ANY; |
| 358 format.fmt.pix_mp.num_planes = kMaxInputPlanes; | |
| 351 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format); | 359 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format); |
| 352 | 360 |
| 353 struct v4l2_requestbuffers reqbufs; | 361 struct v4l2_requestbuffers reqbufs; |
| 354 memset(&reqbufs, 0, sizeof(reqbufs)); | 362 memset(&reqbufs, 0, sizeof(reqbufs)); |
| 355 reqbufs.count = kBufferCount; | 363 reqbufs.count = kBufferCount; |
| 356 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; | 364 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 357 reqbufs.memory = V4L2_MEMORY_MMAP; | 365 reqbufs.memory = V4L2_MEMORY_MMAP; |
| 358 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs); | 366 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs); |
| 359 | 367 |
| 360 DCHECK(input_buffer_map_.empty()); | 368 DCHECK(input_buffer_map_.empty()); |
| 361 input_buffer_map_.resize(reqbufs.count); | 369 input_buffer_map_.resize(reqbufs.count); |
| 362 | 370 |
| 363 for (size_t i = 0; i < input_buffer_map_.size(); ++i) { | 371 for (size_t i = 0; i < input_buffer_map_.size(); ++i) { |
| 364 free_input_buffers_.push_back(i); | 372 free_input_buffers_.push_back(i); |
| 365 | 373 |
| 366 struct v4l2_buffer buffer; | 374 struct v4l2_buffer buffer; |
| 375 struct v4l2_plane plane; | |
|
Pawel Osciak
2016/12/20 05:19:12
Perhaps s/plane/planes[kMaxInputPlanes]/ for consistency
jcliang
2016/12/20 08:33:21
I'm replacing all the v4l2_plane arrays with plane
| |
| 367 memset(&buffer, 0, sizeof(buffer)); | 376 memset(&buffer, 0, sizeof(buffer)); |
| 377 memset(&plane, 0, sizeof(plane)); | |
| 368 buffer.index = i; | 378 buffer.index = i; |
| 369 buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; | 379 buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 380 buffer.m.planes = &plane; | |
| 381 buffer.length = kMaxInputPlanes; | |
|
Pawel Osciak
2016/12/20 05:19:12
arraysize(planes)
jcliang
2016/12/20 08:33:21
Done.
| |
| 370 buffer.memory = V4L2_MEMORY_MMAP; | 382 buffer.memory = V4L2_MEMORY_MMAP; |
| 371 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer); | 383 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer); |
| 372 void* address = device_->Mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, | 384 DCHECK_EQ(1u, buffer.length); |
|
Pawel Osciak
2016/12/20 05:19:12
s/1u/kMaxInputPlanes/
s/DCHECK_EQ(...)/if (!...)
jcliang
2016/12/20 08:33:21
Done.
| |
| 373 MAP_SHARED, buffer.m.offset); | 385 void* address = device_->Mmap(NULL, plane.length, PROT_READ | PROT_WRITE, |
|
Pawel Osciak
2016/12/20 05:19:12
for (i=0;i<kMaxInputPlanes;++i)
jcliang
2016/12/20 08:33:21
Done.
| |
| 374 if (address == MAP_FAILED) { | 386 MAP_SHARED, plane.m.mem_offset); |
| 375 PLOG(ERROR) << __func__ << ": mmap() failed"; | 387 input_buffer_map_[i].address[0] = address; |
|
Pawel Osciak
2016/12/20 05:19:12
s/0/i/, etc.
jcliang
2016/12/20 08:33:21
Done.
| |
| 376 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); | 388 input_buffer_map_[i].length[0] = plane.length; |
| 377 return false; | |
| 378 } | |
| 379 input_buffer_map_[i].address = address; | |
| 380 input_buffer_map_[i].length = buffer.length; | |
| 381 } | 389 } |
| 382 | 390 |
| 383 return true; | 391 return true; |
| 384 } | 392 } |
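
The reviewers above suggest a fixed-size plane array rather than a single `v4l2_plane` (or a variable-length array). A minimal sketch of the MPLANE `VIDIOC_QUERYBUF`-plus-`mmap` pattern using `VIDEO_MAX_PLANES` is shown below; the raw `fd`, the output arrays, and the function name are assumptions for illustration, not code from this CL (which goes through `device_->Mmap()`).

```cpp
// Sketch only: map every plane of one MMAP buffer on an MPLANE OUTPUT queue,
// using a fixed-size plane array as suggested in the review. Error handling
// is trimmed to the essentials.
#include <linux/videodev2.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/mman.h>

bool MapOutputQueueBuffer(int fd, unsigned int index,
                          void* addresses[VIDEO_MAX_PLANES],
                          size_t lengths[VIDEO_MAX_PLANES]) {
  struct v4l2_plane planes[VIDEO_MAX_PLANES];
  struct v4l2_buffer buffer;
  memset(&buffer, 0, sizeof(buffer));
  memset(planes, 0, sizeof(planes));
  buffer.index = index;
  buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  buffer.memory = V4L2_MEMORY_MMAP;
  buffer.m.planes = planes;
  buffer.length = VIDEO_MAX_PLANES;  // Drivers report the real plane count back.
  if (ioctl(fd, VIDIOC_QUERYBUF, &buffer) != 0)
    return false;
  for (unsigned int i = 0; i < buffer.length; ++i) {
    void* address = mmap(NULL, planes[i].length, PROT_READ | PROT_WRITE,
                         MAP_SHARED, fd, planes[i].m.mem_offset);
    if (address == MAP_FAILED)
      return false;
    addresses[i] = address;
    lengths[i] = planes[i].length;
  }
  return true;
}
```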
| 385 | 393 |
| 386 bool V4L2JpegDecodeAccelerator::CreateOutputBuffers() { | 394 bool V4L2JpegDecodeAccelerator::CreateOutputBuffers() { |
| 387 DVLOG(3) << __func__; | 395 DVLOG(3) << __func__; |
| 388 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); | 396 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); |
| 389 DCHECK(!output_streamon_); | 397 DCHECK(!output_streamon_); |
| 390 DCHECK(!running_jobs_.empty()); | 398 DCHECK(!running_jobs_.empty()); |
| 391 linked_ptr<JobRecord> job_record = running_jobs_.front(); | 399 linked_ptr<JobRecord> job_record = running_jobs_.front(); |
| 392 | 400 |
| 393 size_t frame_size = VideoFrame::AllocationSize( | 401 size_t frame_size = VideoFrame::AllocationSize( |
| 394 PIXEL_FORMAT_I420, job_record->out_frame->coded_size()); | 402 PIXEL_FORMAT_I420, job_record->out_frame->coded_size()); |
| 395 struct v4l2_format format; | 403 struct v4l2_format format; |
| 396 memset(&format, 0, sizeof(format)); | 404 memset(&format, 0, sizeof(format)); |
| 397 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 405 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 398 format.fmt.pix.width = job_record->out_frame->coded_size().width(); | 406 format.fmt.pix_mp.width = job_record->out_frame->coded_size().width(); |
| 399 format.fmt.pix.height = job_record->out_frame->coded_size().height(); | 407 format.fmt.pix_mp.height = job_record->out_frame->coded_size().height(); |
| 400 format.fmt.pix.sizeimage = frame_size; | 408 format.fmt.pix_mp.num_planes = 1; |
|
Pawel Osciak
2016/12/20 05:19:12
Should we be using kMaxOutputPlanes?
jcliang
2016/12/20 08:33:22
I thought we should set a num_planes matching the
| |
| 401 format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420; | 409 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_YUV420; |
| 402 format.fmt.pix.field = V4L2_FIELD_ANY; | 410 format.fmt.pix_mp.plane_fmt[0].sizeimage = frame_size; |
| 411 format.fmt.pix_mp.field = V4L2_FIELD_ANY; | |
| 403 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format); | 412 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format); |
| 404 output_buffer_pixelformat_ = format.fmt.pix.pixelformat; | 413 output_buffer_pixelformat_ = format.fmt.pix_mp.pixelformat; |
| 405 output_buffer_coded_size_.SetSize(format.fmt.pix.width, | 414 output_buffer_coded_size_.SetSize(format.fmt.pix_mp.width, |
| 406 format.fmt.pix.height); | 415 format.fmt.pix_mp.height); |
| 416 output_buffer_num_planes_ = format.fmt.pix_mp.num_planes; | |
| 407 | 417 |
| 408 struct v4l2_requestbuffers reqbufs; | 418 struct v4l2_requestbuffers reqbufs; |
| 409 memset(&reqbufs, 0, sizeof(reqbufs)); | 419 memset(&reqbufs, 0, sizeof(reqbufs)); |
| 410 reqbufs.count = kBufferCount; | 420 reqbufs.count = kBufferCount; |
| 411 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 421 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 412 reqbufs.memory = V4L2_MEMORY_MMAP; | 422 reqbufs.memory = V4L2_MEMORY_MMAP; |
| 413 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs); | 423 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs); |
| 414 | 424 |
| 415 DCHECK(output_buffer_map_.empty()); | 425 DCHECK(output_buffer_map_.empty()); |
| 416 output_buffer_map_.resize(reqbufs.count); | 426 output_buffer_map_.resize(reqbufs.count); |
| 417 | 427 |
| 418 VideoPixelFormat output_format = | 428 VideoPixelFormat output_format = |
| 419 V4L2Device::V4L2PixFmtToVideoPixelFormat(output_buffer_pixelformat_); | 429 V4L2Device::V4L2PixFmtToVideoPixelFormat(output_buffer_pixelformat_); |
| 420 | 430 |
| 421 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { | 431 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { |
| 422 free_output_buffers_.push_back(i); | 432 free_output_buffers_.push_back(i); |
| 423 | 433 |
| 424 struct v4l2_buffer buffer; | 434 struct v4l2_buffer buffer; |
| 435 struct v4l2_plane planes[output_buffer_num_planes_]; | |
|
Pawel Osciak
2016/12/20 05:19:12
It might be good to not use a non-const here, perh
jcliang
2016/12/20 08:33:21
Done.
| |
| 425 memset(&buffer, 0, sizeof(buffer)); | 436 memset(&buffer, 0, sizeof(buffer)); |
| 437 memset(planes, 0, sizeof(planes)); | |
| 426 buffer.index = i; | 438 buffer.index = i; |
| 427 buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 439 buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 428 buffer.memory = V4L2_MEMORY_MMAP; | 440 buffer.memory = V4L2_MEMORY_MMAP; |
| 441 buffer.m.planes = planes; | |
| 442 buffer.length = output_buffer_num_planes_; | |
| 429 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer); | 443 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer); |
| 430 | 444 |
| 431 DCHECK_GE(buffer.length, | 445 DCHECK_EQ(output_buffer_num_planes_, buffer.length); |
|
Pawel Osciak
2016/12/20 05:19:12
This should preferably be an if() also.
jcliang
2016/12/20 08:33:21
Done.
| |
| 432 VideoFrame::AllocationSize( | 446 for (size_t j = 0; j < buffer.length; ++j) { |
| 433 output_format, | 447 DCHECK_GE(static_cast<int64_t>(planes[j].length), |
| 434 gfx::Size(format.fmt.pix.width, format.fmt.pix.height))); | 448 VideoFrame::PlaneSize(output_format, j, |
| 435 | 449 gfx::Size(format.fmt.pix_mp.width, |
| 436 void* address = device_->Mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, | 450 format.fmt.pix_mp.height)) |
| 437 MAP_SHARED, buffer.m.offset); | 451 .GetArea()); |
| 438 if (address == MAP_FAILED) { | 452 void* address = |
| 439 PLOG(ERROR) << __func__ << ": mmap() failed"; | 453 device_->Mmap(NULL, planes[j].length, PROT_READ | PROT_WRITE, |
| 440 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); | 454 MAP_SHARED, planes[j].m.mem_offset); |
| 441 return false; | 455 if (address == MAP_FAILED) { |
| 456 PLOG(ERROR) << __func__ << ": mmap() failed"; | |
| 457 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); | |
| 458 return false; | |
| 459 } | |
| 460 output_buffer_map_[i].address[j] = address; | |
| 461 output_buffer_map_[i].length[j] = planes[j].length; | |
| 442 } | 462 } |
| 443 output_buffer_map_[i].address = address; | |
| 444 output_buffer_map_[i].length = buffer.length; | |
| 445 } | 463 } |
| 446 | 464 |
| 447 return true; | 465 return true; |
| 448 } | 466 } |
| 449 | 467 |
| 450 void V4L2JpegDecodeAccelerator::DestroyInputBuffers() { | 468 void V4L2JpegDecodeAccelerator::DestroyInputBuffers() { |
| 451 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); | 469 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); |
| 452 | 470 |
| 453 free_input_buffers_.clear(); | 471 free_input_buffers_.clear(); |
| 454 | 472 |
| 455 if (input_buffer_map_.empty()) | 473 if (input_buffer_map_.empty()) |
| 456 return; | 474 return; |
| 457 | 475 |
| 458 if (input_streamon_) { | 476 if (input_streamon_) { |
| 459 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT; | 477 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 460 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMOFF, &type); | 478 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMOFF, &type); |
| 461 input_streamon_ = false; | 479 input_streamon_ = false; |
| 462 } | 480 } |
| 463 | 481 |
| 464 for (size_t i = 0; i < input_buffer_map_.size(); ++i) { | 482 for (const auto& input_record : input_buffer_map_) { |
|
Pawel Osciak
2016/12/20 05:19:12
kMaxInputPlanes for consistency?
jcliang
2016/12/20 08:33:21
Done.
| |
| 465 BufferRecord& input_record = input_buffer_map_[i]; | 483 device_->Munmap(input_record.address[0], input_record.length[0]); |
| 466 device_->Munmap(input_record.address, input_record.length); | |
| 467 } | 484 } |
| 468 | 485 |
| 469 struct v4l2_requestbuffers reqbufs; | 486 struct v4l2_requestbuffers reqbufs; |
| 470 memset(&reqbufs, 0, sizeof(reqbufs)); | 487 memset(&reqbufs, 0, sizeof(reqbufs)); |
| 471 reqbufs.count = 0; | 488 reqbufs.count = 0; |
| 472 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; | 489 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 473 reqbufs.memory = V4L2_MEMORY_MMAP; | 490 reqbufs.memory = V4L2_MEMORY_MMAP; |
| 474 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs); | 491 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs); |
| 475 | 492 |
| 476 input_buffer_map_.clear(); | 493 input_buffer_map_.clear(); |
| 477 } | 494 } |
| 478 | 495 |
| 479 void V4L2JpegDecodeAccelerator::DestroyOutputBuffers() { | 496 void V4L2JpegDecodeAccelerator::DestroyOutputBuffers() { |
| 480 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); | 497 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); |
| 481 | 498 |
| 482 free_output_buffers_.clear(); | 499 free_output_buffers_.clear(); |
| 483 | 500 |
| 484 if (output_buffer_map_.empty()) | 501 if (output_buffer_map_.empty()) |
| 485 return; | 502 return; |
| 486 | 503 |
| 487 if (output_streamon_) { | 504 if (output_streamon_) { |
| 488 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 505 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 489 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMOFF, &type); | 506 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMOFF, &type); |
| 490 output_streamon_ = false; | 507 output_streamon_ = false; |
| 491 } | 508 } |
| 492 | 509 |
| 493 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { | 510 for (const auto& output_record : output_buffer_map_) { |
| 494 BufferRecord& output_record = output_buffer_map_[i]; | 511 for (size_t i = 0; i < output_buffer_num_planes_; ++i) { |
| 495 device_->Munmap(output_record.address, output_record.length); | 512 device_->Munmap(output_record.address[i], output_record.length[i]); |
| 513 } | |
| 496 } | 514 } |
| 497 | 515 |
| 498 struct v4l2_requestbuffers reqbufs; | 516 struct v4l2_requestbuffers reqbufs; |
| 499 memset(&reqbufs, 0, sizeof(reqbufs)); | 517 memset(&reqbufs, 0, sizeof(reqbufs)); |
| 500 reqbufs.count = 0; | 518 reqbufs.count = 0; |
| 501 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 519 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 502 reqbufs.memory = V4L2_MEMORY_MMAP; | 520 reqbufs.memory = V4L2_MEMORY_MMAP; |
| 503 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs); | 521 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs); |
| 504 | 522 |
| 505 output_buffer_map_.clear(); | 523 output_buffer_map_.clear(); |
| 524 output_buffer_num_planes_ = 0; | |
| 506 } | 525 } |
| 507 | 526 |
| 508 void V4L2JpegDecodeAccelerator::DevicePollTask() { | 527 void V4L2JpegDecodeAccelerator::DevicePollTask() { |
| 509 DCHECK(device_poll_task_runner_->BelongsToCurrentThread()); | 528 DCHECK(device_poll_task_runner_->BelongsToCurrentThread()); |
| 510 | 529 |
| 511 bool event_pending; | 530 bool event_pending; |
| 512 if (!device_->Poll(true, &event_pending)) { | 531 if (!device_->Poll(true, &event_pending)) { |
| 513 PLOG(ERROR) << __func__ << ": Poll device error."; | 532 PLOG(ERROR) << __func__ << ": Poll device error."; |
| 514 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); | 533 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); |
| 515 return; | 534 return; |
| (...skipping 74 matching lines...) | |
| 590 // If input buffers are required to re-create, do not enqueue input record | 609 // If input buffers are required to re-create, do not enqueue input record |
| 591 // until all pending frames are handled by device. | 610 // until all pending frames are handled by device. |
| 592 if (ShouldRecreateInputBuffers()) | 611 if (ShouldRecreateInputBuffers()) |
| 593 break; | 612 break; |
| 594 if (!EnqueueInputRecord()) | 613 if (!EnqueueInputRecord()) |
| 595 return; | 614 return; |
| 596 } | 615 } |
| 597 // Check here because we cannot STREAMON before QBUF in earlier kernel. | 616 // Check here because we cannot STREAMON before QBUF in earlier kernel. |
| 598 // (kernel version < 3.14) | 617 // (kernel version < 3.14) |
| 599 if (!input_streamon_ && InputBufferQueuedCount()) { | 618 if (!input_streamon_ && InputBufferQueuedCount()) { |
| 600 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT; | 619 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 601 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type); | 620 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type); |
| 602 input_streamon_ = true; | 621 input_streamon_ = true; |
| 603 } | 622 } |
| 604 } | 623 } |
| 605 | 624 |
| 606 void V4L2JpegDecodeAccelerator::EnqueueOutput() { | 625 void V4L2JpegDecodeAccelerator::EnqueueOutput() { |
| 607 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); | 626 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); |
| 608 // Output record can be enqueued because the output coded sizes of the frames | 627 // Output record can be enqueued because the output coded sizes of the frames |
| 609 // currently in the pipeline are all the same. | 628 // currently in the pipeline are all the same. |
| 610 while (running_jobs_.size() > OutputBufferQueuedCount() && | 629 while (running_jobs_.size() > OutputBufferQueuedCount() && |
| 611 !free_output_buffers_.empty()) { | 630 !free_output_buffers_.empty()) { |
| 612 if (!EnqueueOutputRecord()) | 631 if (!EnqueueOutputRecord()) |
| 613 return; | 632 return; |
| 614 } | 633 } |
| 615 // Check here because we cannot STREAMON before QBUF in earlier kernel. | 634 // Check here because we cannot STREAMON before QBUF in earlier kernel. |
| 616 // (kernel version < 3.14) | 635 // (kernel version < 3.14) |
| 617 if (!output_streamon_ && OutputBufferQueuedCount()) { | 636 if (!output_streamon_ && OutputBufferQueuedCount()) { |
| 618 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 637 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 619 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type); | 638 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type); |
| 620 output_streamon_ = true; | 639 output_streamon_ = true; |
| 621 } | 640 } |
| 622 } | 641 } |
| 623 | 642 |
| 624 static bool CopyOutputImage(const uint32_t src_pixelformat, | 643 bool V4L2JpegDecodeAccelerator::ConvertOutputImage( |
| 625 const void* src_addr, | 644 const BufferRecord& output_buffer, |
| 626 const gfx::Size& src_coded_size, | 645 const scoped_refptr<VideoFrame>& dst_frame) { |
| 627 const scoped_refptr<VideoFrame>& dst_frame) { | |
| 628 VideoPixelFormat format = | |
| 629 V4L2Device::V4L2PixFmtToVideoPixelFormat(src_pixelformat); | |
| 630 size_t src_size = VideoFrame::AllocationSize(format, src_coded_size); | |
| 631 uint8_t* dst_y = dst_frame->data(VideoFrame::kYPlane); | 646 uint8_t* dst_y = dst_frame->data(VideoFrame::kYPlane); |
| 632 uint8_t* dst_u = dst_frame->data(VideoFrame::kUPlane); | 647 uint8_t* dst_u = dst_frame->data(VideoFrame::kUPlane); |
| 633 uint8_t* dst_v = dst_frame->data(VideoFrame::kVPlane); | 648 uint8_t* dst_v = dst_frame->data(VideoFrame::kVPlane); |
| 634 size_t dst_y_stride = dst_frame->stride(VideoFrame::kYPlane); | 649 size_t dst_y_stride = dst_frame->stride(VideoFrame::kYPlane); |
| 635 size_t dst_u_stride = dst_frame->stride(VideoFrame::kUPlane); | 650 size_t dst_u_stride = dst_frame->stride(VideoFrame::kUPlane); |
| 636 size_t dst_v_stride = dst_frame->stride(VideoFrame::kVPlane); | 651 size_t dst_v_stride = dst_frame->stride(VideoFrame::kVPlane); |
| 637 | 652 |
| 638 // If the source format is I420, ConvertToI420 will simply copy the frame. | 653 if (output_buffer_num_planes_ == 1) { |
| 639 if (libyuv::ConvertToI420(static_cast<uint8_t*>(const_cast<void*>(src_addr)), | 654 // Use ConvertToI420 to convert all splane buffers. |
| 640 src_size, | 655 // If the source format is I420, ConvertToI420 will simply copy the frame. |
| 641 dst_y, dst_y_stride, | 656 VideoPixelFormat format = |
| 642 dst_u, dst_u_stride, | 657 V4L2Device::V4L2PixFmtToVideoPixelFormat(output_buffer_pixelformat_); |
| 643 dst_v, dst_v_stride, | 658 size_t src_size = |
| 644 0, 0, | 659 VideoFrame::AllocationSize(format, output_buffer_coded_size_); |
| 645 src_coded_size.width(), | 660 if (libyuv::ConvertToI420( |
| 646 src_coded_size.height(), | 661 static_cast<uint8_t*>(output_buffer.address[0]), src_size, dst_y, |
| 647 dst_frame->coded_size().width(), | 662 dst_y_stride, dst_u, dst_u_stride, dst_v, dst_v_stride, 0, 0, |
| 648 dst_frame->coded_size().height(), | 663 output_buffer_coded_size_.width(), |
| 649 libyuv::kRotate0, | 664 output_buffer_coded_size_.height(), dst_frame->coded_size().width(), |
| 650 src_pixelformat)) { | 665 dst_frame->coded_size().height(), libyuv::kRotate0, |
| 651 LOG(ERROR) << "ConvertToI420 failed. Source format: " << src_pixelformat; | 666 output_buffer_pixelformat_)) { |
| 667 LOG(ERROR) << "ConvertToI420 failed. Source format: " | |
| 668 << output_buffer_pixelformat_; | |
| 669 return false; | |
| 670 } | |
| 671 } else if (output_buffer_pixelformat_ == V4L2_PIX_FMT_YUV420M || | |
|
Pawel Osciak
2016/12/20 05:19:12
We are not setting the format to V4L2_PIX_FMT_YUV4
jcliang
2016/12/20 08:33:22
The mtk-jpeg decoder only supports V4L2_PIX_FMT_YU
| |
| 672 output_buffer_pixelformat_ == V4L2_PIX_FMT_YUV422M) { | |
| 673 uint8_t* src_y = static_cast<uint8_t*>(output_buffer.address[0]); | |
| 674 uint8_t* src_u = static_cast<uint8_t*>(output_buffer.address[1]); | |
| 675 uint8_t* src_v = static_cast<uint8_t*>(output_buffer.address[2]); | |
| 676 size_t src_y_stride = output_buffer_coded_size_.width(); | |
| 677 size_t src_u_stride = output_buffer_coded_size_.width() / 2; | |
| 678 size_t src_v_stride = output_buffer_coded_size_.width() / 2; | |
| 679 if (output_buffer_pixelformat_ == V4L2_PIX_FMT_YUV420M) { | |
| 680 if (libyuv::I420Copy(src_y, src_y_stride, src_u, src_u_stride, src_v, | |
| 681 src_v_stride, dst_y, dst_y_stride, dst_u, | |
| 682 dst_u_stride, dst_v, dst_v_stride, | |
| 683 output_buffer_coded_size_.width(), | |
| 684 output_buffer_coded_size_.height())) { | |
| 685 LOG(ERROR) << "I420Copy failed"; | |
| 686 return false; | |
| 687 } | |
| 688 } else { // output_buffer_pixelformat_ == V4L2_PIX_FMT_YUV422M | |
| 689 if (libyuv::I422ToI420(src_y, src_y_stride, src_u, src_u_stride, src_v, | |
| 690 src_v_stride, dst_y, dst_y_stride, dst_u, | |
| 691 dst_u_stride, dst_v, dst_v_stride, | |
| 692 output_buffer_coded_size_.width(), | |
| 693 output_buffer_coded_size_.height())) { | |
| 694 LOG(ERROR) << "I422ToI420 failed"; | |
| 695 return false; | |
| 696 } | |
| 697 } | |
| 698 } else { | |
| 699 LOG(ERROR) << "Unsupported source buffer format: " | |
| 700 << output_buffer_pixelformat_; | |
| 652 return false; | 701 return false; |
| 653 } | 702 } |
| 654 return true; | 703 return true; |
| 655 } | 704 } |
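
The `V4L2_PIX_FMT_YUV422M` branch above leans on libyuv's planar converter. A minimal standalone sketch of that call follows; the wrapper name, stride choices, and include path are assumptions for illustration only (Chromium vendors libyuv under `third_party/`), and the pointers are placeholders the caller must supply.

```cpp
// Sketch only: convert a tri-planar YUV 4:2:2 image into I420 with the same
// libyuv call the YUV422M branch above uses. libyuv returns 0 on success.
#include <cstdint>
#include "libyuv/convert.h"

bool Yuv422PlanarToI420(const uint8_t* src_y, const uint8_t* src_u,
                        const uint8_t* src_v, uint8_t* dst_y, uint8_t* dst_u,
                        uint8_t* dst_v, int width, int height) {
  const int src_y_stride = width;
  const int src_uv_stride = width / 2;  // 4:2:2: chroma subsampled horizontally.
  const int dst_y_stride = width;
  const int dst_uv_stride = width / 2;  // 4:2:0 output.
  return libyuv::I422ToI420(src_y, src_y_stride, src_u, src_uv_stride, src_v,
                            src_uv_stride, dst_y, dst_y_stride, dst_u,
                            dst_uv_stride, dst_v, dst_uv_stride, width,
                            height) == 0;
}
```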
| 656 | 705 |
| 657 void V4L2JpegDecodeAccelerator::Dequeue() { | 706 void V4L2JpegDecodeAccelerator::Dequeue() { |
| 658 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); | 707 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); |
| 708 DCHECK_GE(output_buffer_num_planes_, 0u); | |
| 659 | 709 |
| 660 // Dequeue completed input (VIDEO_OUTPUT) buffers, | 710 // Dequeue completed input (VIDEO_OUTPUT) buffers, |
| 661 // and recycle to the free list. | 711 // and recycle to the free list. |
| 662 struct v4l2_buffer dqbuf; | 712 struct v4l2_buffer dqbuf; |
| 713 struct v4l2_plane input_plane; | |
|
Pawel Osciak
2016/12/20 05:19:12
We could use VIDEO_MAX_PLANES everywhere for v4l2_
jcliang
2016/12/20 08:33:22
Done.
| |
| 663 while (InputBufferQueuedCount() > 0) { | 714 while (InputBufferQueuedCount() > 0) { |
| 664 DCHECK(input_streamon_); | 715 DCHECK(input_streamon_); |
| 665 memset(&dqbuf, 0, sizeof(dqbuf)); | 716 memset(&dqbuf, 0, sizeof(dqbuf)); |
| 666 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; | 717 memset(&input_plane, 0, sizeof(input_plane)); |
| 718 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | |
| 667 dqbuf.memory = V4L2_MEMORY_MMAP; | 719 dqbuf.memory = V4L2_MEMORY_MMAP; |
| 720 dqbuf.length = kMaxInputPlanes; | |
| 721 dqbuf.m.planes = &input_plane; | |
| 668 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) { | 722 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) { |
| 669 if (errno == EAGAIN) { | 723 if (errno == EAGAIN) { |
| 670 // EAGAIN if we're just out of buffers to dequeue. | 724 // EAGAIN if we're just out of buffers to dequeue. |
| 671 break; | 725 break; |
| 672 } | 726 } |
| 673 PLOG(ERROR) << "ioctl() failed: input buffer VIDIOC_DQBUF failed."; | 727 PLOG(ERROR) << "ioctl() failed: input buffer VIDIOC_DQBUF failed."; |
| 674 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); | 728 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); |
| 675 return; | 729 return; |
| 676 } | 730 } |
| 677 BufferRecord& input_record = input_buffer_map_[dqbuf.index]; | 731 BufferRecord& input_record = input_buffer_map_[dqbuf.index]; |
| 678 DCHECK(input_record.at_device); | 732 DCHECK(input_record.at_device); |
| 679 input_record.at_device = false; | 733 input_record.at_device = false; |
| 680 free_input_buffers_.push_back(dqbuf.index); | 734 free_input_buffers_.push_back(dqbuf.index); |
| 681 | 735 |
| 682 if (dqbuf.flags & V4L2_BUF_FLAG_ERROR) { | 736 if (dqbuf.flags & V4L2_BUF_FLAG_ERROR) { |
| 683 DVLOG(1) << "Dequeue input buffer error."; | 737 DVLOG(1) << "Dequeue input buffer error."; |
| 684 PostNotifyError(kInvalidBitstreamBufferId, UNSUPPORTED_JPEG); | 738 PostNotifyError(kInvalidBitstreamBufferId, UNSUPPORTED_JPEG); |
| 685 running_jobs_.pop(); | 739 running_jobs_.pop(); |
| 686 } | 740 } |
| 687 } | 741 } |
| 688 | 742 |
| 689 // Dequeue completed output (VIDEO_CAPTURE) buffers, recycle to the free list. | 743 // Dequeue completed output (VIDEO_CAPTURE) buffers, recycle to the free list. |
| 690 // Return the finished buffer to the client via the job ready callback. | 744 // Return the finished buffer to the client via the job ready callback. |
| 691 // If dequeued input buffer has an error, the error frame has removed from | 745 // If dequeued input buffer has an error, the error frame has removed from |
| 692 // |running_jobs_|. We only have to dequeue output buffer when we actually | 746 // |running_jobs_|. We only have to dequeue output buffer when we actually |
| 693 // have pending frames in |running_jobs_| and also enqueued output buffers. | 747 // have pending frames in |running_jobs_| and also enqueued output buffers. |
| 748 struct v4l2_plane output_planes[output_buffer_num_planes_]; | |
| 694 while (!running_jobs_.empty() && OutputBufferQueuedCount() > 0) { | 749 while (!running_jobs_.empty() && OutputBufferQueuedCount() > 0) { |
| 695 DCHECK(output_streamon_); | 750 DCHECK(output_streamon_); |
| 696 memset(&dqbuf, 0, sizeof(dqbuf)); | 751 memset(&dqbuf, 0, sizeof(dqbuf)); |
| 697 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 752 memset(output_planes, 0, sizeof(output_planes)); |
| 753 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
| 698 // From experiments, using MMAP and memory copy is still faster than | 754 // From experiments, using MMAP and memory copy is still faster than |
| 699 // USERPTR. Also, client doesn't need to consider the buffer alignment and | 755 // USERPTR. Also, client doesn't need to consider the buffer alignment and |
| 700 // JpegDecodeAccelerator API will be simpler. | 756 // JpegDecodeAccelerator API will be simpler. |
| 701 dqbuf.memory = V4L2_MEMORY_MMAP; | 757 dqbuf.memory = V4L2_MEMORY_MMAP; |
| 758 dqbuf.length = output_buffer_num_planes_; | |
| 759 dqbuf.m.planes = output_planes; | |
| 702 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) { | 760 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) { |
| 703 if (errno == EAGAIN) { | 761 if (errno == EAGAIN) { |
| 704 // EAGAIN if we're just out of buffers to dequeue. | 762 // EAGAIN if we're just out of buffers to dequeue. |
| 705 break; | 763 break; |
| 706 } | 764 } |
| 707 PLOG(ERROR) << "ioctl() failed: output buffer VIDIOC_DQBUF failed."; | 765 PLOG(ERROR) << "ioctl() failed: output buffer VIDIOC_DQBUF failed."; |
| 708 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); | 766 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); |
| 709 return; | 767 return; |
| 710 } | 768 } |
| 711 BufferRecord& output_record = output_buffer_map_[dqbuf.index]; | 769 BufferRecord& output_record = output_buffer_map_[dqbuf.index]; |
| 712 DCHECK(output_record.at_device); | 770 DCHECK(output_record.at_device); |
| 713 output_record.at_device = false; | 771 output_record.at_device = false; |
| 714 free_output_buffers_.push_back(dqbuf.index); | 772 free_output_buffers_.push_back(dqbuf.index); |
| 715 | 773 |
| 716 // Jobs are always processed in FIFO order. | 774 // Jobs are always processed in FIFO order. |
| 717 linked_ptr<JobRecord> job_record = running_jobs_.front(); | 775 linked_ptr<JobRecord> job_record = running_jobs_.front(); |
| 718 running_jobs_.pop(); | 776 running_jobs_.pop(); |
| 719 | 777 |
| 720 if (dqbuf.flags & V4L2_BUF_FLAG_ERROR) { | 778 if (dqbuf.flags & V4L2_BUF_FLAG_ERROR) { |
| 721 DVLOG(1) << "Dequeue output buffer error."; | 779 DVLOG(1) << "Dequeue output buffer error."; |
| 722 PostNotifyError(kInvalidBitstreamBufferId, UNSUPPORTED_JPEG); | 780 PostNotifyError(kInvalidBitstreamBufferId, UNSUPPORTED_JPEG); |
| 723 } else { | 781 } else { |
| 724 // Copy the decoded data from output buffer to the buffer provided by the | 782 // Copy the decoded data from output buffer to the buffer provided by the |
| 725 // client. Do format conversion when output format is not | 783 // client. Do format conversion when output format is not |
| 726 // V4L2_PIX_FMT_YUV420. | 784 // V4L2_PIX_FMT_YUV420. |
| 727 if (!CopyOutputImage(output_buffer_pixelformat_, output_record.address, | 785 if (!ConvertOutputImage(output_record, job_record->out_frame)) { |
| 728 output_buffer_coded_size_, job_record->out_frame)) { | |
| 729 PostNotifyError(job_record->bitstream_buffer_id, PLATFORM_FAILURE); | 786 PostNotifyError(job_record->bitstream_buffer_id, PLATFORM_FAILURE); |
| 730 return; | 787 return; |
| 731 } | 788 } |
| 732 | |
| 733 DVLOG(3) << "Decoding finished, returning bitstream buffer, id=" | 789 DVLOG(3) << "Decoding finished, returning bitstream buffer, id=" |
| 734 << job_record->bitstream_buffer_id; | 790 << job_record->bitstream_buffer_id; |
| 735 | 791 |
| 736 child_task_runner_->PostTask( | 792 child_task_runner_->PostTask( |
| 737 FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::VideoFrameReady, | 793 FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::VideoFrameReady, |
| 738 weak_ptr_, job_record->bitstream_buffer_id)); | 794 weak_ptr_, job_record->bitstream_buffer_id)); |
| 739 } | 795 } |
| 740 } | 796 } |
| 741 } | 797 } |
| 742 | 798 |
| (...skipping 84 matching lines...) | |
| 827 | 883 |
| 828 // Enqueue an input (VIDEO_OUTPUT) buffer for an input video frame. | 884 // Enqueue an input (VIDEO_OUTPUT) buffer for an input video frame. |
| 829 linked_ptr<JobRecord> job_record = input_jobs_.front(); | 885 linked_ptr<JobRecord> job_record = input_jobs_.front(); |
| 830 input_jobs_.pop(); | 886 input_jobs_.pop(); |
| 831 const int index = free_input_buffers_.back(); | 887 const int index = free_input_buffers_.back(); |
| 832 BufferRecord& input_record = input_buffer_map_[index]; | 888 BufferRecord& input_record = input_buffer_map_[index]; |
| 833 DCHECK(!input_record.at_device); | 889 DCHECK(!input_record.at_device); |
| 834 | 890 |
| 835 // It will add default huffman segment if it's missing. | 891 // It will add default huffman segment if it's missing. |
| 836 if (!AddHuffmanTable(job_record->shm.memory(), job_record->shm.size(), | 892 if (!AddHuffmanTable(job_record->shm.memory(), job_record->shm.size(), |
| 837 input_record.address, input_record.length)) { | 893 input_record.address[0], input_record.length[0])) { |
| 838 PostNotifyError(job_record->bitstream_buffer_id, PARSE_JPEG_FAILED); | 894 PostNotifyError(job_record->bitstream_buffer_id, PARSE_JPEG_FAILED); |
| 839 return false; | 895 return false; |
| 840 } | 896 } |
| 841 | 897 |
| 842 struct v4l2_buffer qbuf; | 898 struct v4l2_buffer qbuf; |
| 899 struct v4l2_plane plane; | |
| 843 memset(&qbuf, 0, sizeof(qbuf)); | 900 memset(&qbuf, 0, sizeof(qbuf)); |
| 901 memset(&plane, 0, sizeof(plane)); | |
| 844 qbuf.index = index; | 902 qbuf.index = index; |
| 845 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; | 903 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
| 846 qbuf.memory = V4L2_MEMORY_MMAP; | 904 qbuf.memory = V4L2_MEMORY_MMAP; |
| 905 qbuf.length = kMaxInputPlanes; | |
| 906 plane.bytesused = input_record.length[0]; | |
| 907 qbuf.m.planes = &plane; | |
| 847 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf); | 908 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf); |
| 848 input_record.at_device = true; | 909 input_record.at_device = true; |
| 849 running_jobs_.push(job_record); | 910 running_jobs_.push(job_record); |
| 850 free_input_buffers_.pop_back(); | 911 free_input_buffers_.pop_back(); |
| 851 | 912 |
| 852 DVLOG(3) << __func__ | 913 DVLOG(3) << __func__ |
| 853 << ": enqueued frame id=" << job_record->bitstream_buffer_id | 914 << ": enqueued frame id=" << job_record->bitstream_buffer_id |
| 854 << " to device."; | 915 << " to device."; |
| 855 return true; | 916 return true; |
| 856 } | 917 } |
| 857 | 918 |
| 858 bool V4L2JpegDecodeAccelerator::EnqueueOutputRecord() { | 919 bool V4L2JpegDecodeAccelerator::EnqueueOutputRecord() { |
| 859 DCHECK(!free_output_buffers_.empty()); | 920 DCHECK(!free_output_buffers_.empty()); |
| 921 DCHECK_GE(output_buffer_num_planes_, 0u); | |
| 860 | 922 |
| 861 // Enqueue an output (VIDEO_CAPTURE) buffer. | 923 // Enqueue an output (VIDEO_CAPTURE) buffer. |
| 862 const int index = free_output_buffers_.back(); | 924 const int index = free_output_buffers_.back(); |
| 863 BufferRecord& output_record = output_buffer_map_[index]; | 925 BufferRecord& output_record = output_buffer_map_[index]; |
| 864 DCHECK(!output_record.at_device); | 926 DCHECK(!output_record.at_device); |
| 865 struct v4l2_buffer qbuf; | 927 struct v4l2_buffer qbuf; |
| 928 struct v4l2_plane planes[output_buffer_num_planes_]; | |
| 866 memset(&qbuf, 0, sizeof(qbuf)); | 929 memset(&qbuf, 0, sizeof(qbuf)); |
| 930 memset(&planes, 0, sizeof(planes)); | |
| 867 qbuf.index = index; | 931 qbuf.index = index; |
| 868 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 932 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
| 869 qbuf.memory = V4L2_MEMORY_MMAP; | 933 qbuf.memory = V4L2_MEMORY_MMAP; |
| 934 qbuf.length = output_buffer_num_planes_; | |
| 935 qbuf.m.planes = planes; | |
| 870 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf); | 936 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf); |
| 871 output_record.at_device = true; | 937 output_record.at_device = true; |
| 872 free_output_buffers_.pop_back(); | 938 free_output_buffers_.pop_back(); |
| 873 return true; | 939 return true; |
| 874 } | 940 } |
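
For the enqueue side, the later review thread agrees to use `VIDEO_MAX_PLANES` for the `v4l2_plane` arrays instead of a runtime-sized array. A minimal sketch of an MPLANE `VIDIOC_QBUF` for an MMAP capture buffer in that style is shown below; `fd`, `index`, `num_planes`, and the function name are assumed inputs, not code from this CL.

```cpp
// Sketch only: queue one MMAP buffer on an MPLANE CAPTURE queue using a
// fixed-size plane array, as suggested in the review.
#include <linux/videodev2.h>
#include <string.h>
#include <sys/ioctl.h>

bool QueueCaptureBuffer(int fd, unsigned int index, unsigned int num_planes) {
  struct v4l2_plane planes[VIDEO_MAX_PLANES];
  struct v4l2_buffer qbuf;
  memset(&qbuf, 0, sizeof(qbuf));
  memset(planes, 0, sizeof(planes));
  qbuf.index = index;
  qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  qbuf.memory = V4L2_MEMORY_MMAP;
  qbuf.m.planes = planes;
  qbuf.length = num_planes;  // Number of valid entries in |planes|.
  return ioctl(fd, VIDIOC_QBUF, &qbuf) == 0;
}
```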
| 875 | 941 |
| 876 void V4L2JpegDecodeAccelerator::StartDevicePoll() { | 942 void V4L2JpegDecodeAccelerator::StartDevicePoll() { |
| 877 DVLOG(3) << __func__ << ": starting device poll"; | 943 DVLOG(3) << __func__ << ": starting device poll"; |
| 878 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); | 944 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); |
| 879 DCHECK(!device_poll_thread_.IsRunning()); | 945 DCHECK(!device_poll_thread_.IsRunning()); |
| (...skipping 18 matching lines...) | |
| 898 device_poll_thread_.Stop(); | 964 device_poll_thread_.Stop(); |
| 899 | 965 |
| 900 // Clear the interrupt now, to be sure. | 966 // Clear the interrupt now, to be sure. |
| 901 if (!device_->ClearDevicePollInterrupt()) | 967 if (!device_->ClearDevicePollInterrupt()) |
| 902 return false; | 968 return false; |
| 903 | 969 |
| 904 return true; | 970 return true; |
| 905 } | 971 } |
| 906 | 972 |
| 907 } // namespace media | 973 } // namespace media |