Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(264)

Side by Side Diff: media/gpu/v4l2_jpeg_decode_accelerator.cc

Issue 2559423002: media/gpu: switch v4l2_jpeg_decode_accelerator to use multi-planar APIs (Closed)
Patch Set: address the review comments Created 4 years ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « media/gpu/v4l2_jpeg_decode_accelerator.h ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2015 The Chromium Authors. All rights reserved. 1 // Copyright 2015 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
#include "media/gpu/v4l2_jpeg_decode_accelerator.h"

#include <errno.h>
#include <linux/videodev2.h>
#include <string.h>
#include <sys/mman.h>

#include <memory>
#include <utility>

#include "base/big_endian.h"
#include "base/bind.h"
#include "base/numerics/safe_conversions.h"
#include "base/threading/thread_task_runner_handle.h"
#include "media/filters/jpeg_parser.h"
#include "media/gpu/v4l2_jpeg_decode_accelerator.h"
#include "third_party/libyuv/include/libyuv.h"
20 21
21 #define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value, type_name) \ 22 #define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value, type_name) \
22 do { \ 23 do { \
23 if (device_->Ioctl(type, arg) != 0) { \ 24 if (device_->Ioctl(type, arg) != 0) { \
24 PLOG(ERROR) << __func__ << "(): ioctl() failed: " << type_name; \ 25 PLOG(ERROR) << __func__ << "(): ioctl() failed: " << type_name; \
25 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); \ 26 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); \
(...skipping 30 matching lines...) Expand all
56 do { \ 57 do { \
57 uint16_t _out; \ 58 uint16_t _out; \
58 if (!reader.ReadU16(&_out)) { \ 59 if (!reader.ReadU16(&_out)) { \
59 DVLOG(1) \ 60 DVLOG(1) \
60 << "Error in stream: unexpected EOS while trying to read " #out; \ 61 << "Error in stream: unexpected EOS while trying to read " #out; \
61 return false; \ 62 return false; \
62 } \ 63 } \
63 *(out) = _out; \ 64 *(out) = _out; \
64 } while (0) 65 } while (0)
65 66
namespace {

// Input pixel format (i.e. V4L2_PIX_FMT_JPEG) has only one physical plane.
constexpr size_t kMaxInputPlanes = 1;

// This class can only handle V4L2_PIX_FMT_JPEG as input, so kMaxInputPlanes
// can only be 1.
static_assert(kMaxInputPlanes == 1,
              "kMaxInputPlanes must be 1 as input must be V4L2_PIX_FMT_JPEG");

}  // namespace
77
66 namespace media { 78 namespace media {
67 79
68 // This is default huffman segment for 8-bit precision luminance and 80 // This is default huffman segment for 8-bit precision luminance and
69 // chrominance. The default huffman segment is constructed with the tables from 81 // chrominance. The default huffman segment is constructed with the tables from
70 // JPEG standard section K.3. Actually there are no default tables. They are 82 // JPEG standard section K.3. Actually there are no default tables. They are
71 // typical tables. These tables are useful for many applications. Lots of 83 // typical tables. These tables are useful for many applications. Lots of
72 // softwares use them as standard tables such as ffmpeg. 84 // softwares use them as standard tables such as ffmpeg.
73 const uint8_t kDefaultDhtSeg[] = { 85 const uint8_t kDefaultDhtSeg[] = {
74 0xFF, 0xC4, 0x01, 0xA2, 0x00, 0x00, 0x01, 0x05, 0x01, 0x01, 0x01, 0x01, 86 0xFF, 0xC4, 0x01, 0xA2, 0x00, 0x00, 0x01, 0x05, 0x01, 0x01, 0x01, 0x01,
75 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 87 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02,
(...skipping 24 matching lines...) Expand all
100 0x37, 0x38, 0x39, 0x3A, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4A, 112 0x37, 0x38, 0x39, 0x3A, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4A,
101 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5A, 0x63, 0x64, 0x65, 0x66, 113 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5A, 0x63, 0x64, 0x65, 0x66,
102 0x67, 0x68, 0x69, 0x6A, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7A, 114 0x67, 0x68, 0x69, 0x6A, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7A,
103 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8A, 0x92, 0x93, 0x94, 115 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8A, 0x92, 0x93, 0x94,
104 0x95, 0x96, 0x97, 0x98, 0x99, 0x9A, 0xA2, 0xA3, 0xA4, 0xA5, 0xA6, 0xA7, 116 0x95, 0x96, 0x97, 0x98, 0x99, 0x9A, 0xA2, 0xA3, 0xA4, 0xA5, 0xA6, 0xA7,
105 0xA8, 0xA9, 0xAA, 0xB2, 0xB3, 0xB4, 0xB5, 0xB6, 0xB7, 0xB8, 0xB9, 0xBA, 117 0xA8, 0xA9, 0xAA, 0xB2, 0xB3, 0xB4, 0xB5, 0xB6, 0xB7, 0xB8, 0xB9, 0xBA,
106 0xC2, 0xC3, 0xC4, 0xC5, 0xC6, 0xC7, 0xC8, 0xC9, 0xCA, 0xD2, 0xD3, 0xD4, 118 0xC2, 0xC3, 0xC4, 0xC5, 0xC6, 0xC7, 0xC8, 0xC9, 0xCA, 0xD2, 0xD3, 0xD4,
107 0xD5, 0xD6, 0xD7, 0xD8, 0xD9, 0xDA, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7, 119 0xD5, 0xD6, 0xD7, 0xD8, 0xD9, 0xDA, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7,
108 0xE8, 0xE9, 0xEA, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0xF7, 0xF8, 0xF9, 0xFA}; 120 0xE8, 0xE9, 0xEA, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0xF7, 0xF8, 0xF9, 0xFA};
109 121
110 V4L2JpegDecodeAccelerator::BufferRecord::BufferRecord() 122 V4L2JpegDecodeAccelerator::BufferRecord::BufferRecord() : at_device(false) {
111 : address(nullptr), length(0), at_device(false) {} 123 memset(address, 0, sizeof(address));
124 memset(length, 0, sizeof(length));
125 }
112 126
113 V4L2JpegDecodeAccelerator::BufferRecord::~BufferRecord() {} 127 V4L2JpegDecodeAccelerator::BufferRecord::~BufferRecord() {}
114 128
115 V4L2JpegDecodeAccelerator::JobRecord::JobRecord( 129 V4L2JpegDecodeAccelerator::JobRecord::JobRecord(
116 const BitstreamBuffer& bitstream_buffer, 130 const BitstreamBuffer& bitstream_buffer,
117 scoped_refptr<VideoFrame> video_frame) 131 scoped_refptr<VideoFrame> video_frame)
118 : bitstream_buffer_id(bitstream_buffer.id()), 132 : bitstream_buffer_id(bitstream_buffer.id()),
119 shm(bitstream_buffer, true), 133 shm(bitstream_buffer, true),
120 out_frame(video_frame) {} 134 out_frame(video_frame) {}
121 135
122 V4L2JpegDecodeAccelerator::JobRecord::~JobRecord() {} 136 V4L2JpegDecodeAccelerator::JobRecord::~JobRecord() {}
123 137
124 V4L2JpegDecodeAccelerator::V4L2JpegDecodeAccelerator( 138 V4L2JpegDecodeAccelerator::V4L2JpegDecodeAccelerator(
125 const scoped_refptr<V4L2Device>& device, 139 const scoped_refptr<V4L2Device>& device,
126 const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner) 140 const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner)
127 : output_buffer_pixelformat_(0), 141 : output_buffer_pixelformat_(0),
142 output_buffer_num_planes_(0),
128 child_task_runner_(base::ThreadTaskRunnerHandle::Get()), 143 child_task_runner_(base::ThreadTaskRunnerHandle::Get()),
129 io_task_runner_(io_task_runner), 144 io_task_runner_(io_task_runner),
130 client_(nullptr), 145 client_(nullptr),
131 device_(device), 146 device_(device),
132 decoder_thread_("V4L2JpegDecodeThread"), 147 decoder_thread_("V4L2JpegDecodeThread"),
133 device_poll_thread_("V4L2JpegDecodeDevicePollThread"), 148 device_poll_thread_("V4L2JpegDecodeDevicePollThread"),
134 input_streamon_(false), 149 input_streamon_(false),
135 output_streamon_(false), 150 output_streamon_(false),
136 weak_factory_(this) { 151 weak_factory_(this) {
137 weak_ptr_ = weak_factory_.GetWeakPtr(); 152 weak_ptr_ = weak_factory_.GetWeakPtr();
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after
187 bool V4L2JpegDecodeAccelerator::Initialize(Client* client) { 202 bool V4L2JpegDecodeAccelerator::Initialize(Client* client) {
188 DCHECK(child_task_runner_->BelongsToCurrentThread()); 203 DCHECK(child_task_runner_->BelongsToCurrentThread());
189 204
190 if (!device_->Open(V4L2Device::Type::kJpegDecoder, V4L2_PIX_FMT_JPEG)) { 205 if (!device_->Open(V4L2Device::Type::kJpegDecoder, V4L2_PIX_FMT_JPEG)) {
191 LOG(ERROR) << "Failed to open device"; 206 LOG(ERROR) << "Failed to open device";
192 return false; 207 return false;
193 } 208 }
194 209
195 // Capabilities check. 210 // Capabilities check.
196 struct v4l2_capability caps; 211 struct v4l2_capability caps;
197 const __u32 kCapsRequired = V4L2_CAP_STREAMING | V4L2_CAP_VIDEO_M2M; 212 const __u32 kCapsRequired = V4L2_CAP_STREAMING | V4L2_CAP_VIDEO_M2M_MPLANE;
198 memset(&caps, 0, sizeof(caps)); 213 memset(&caps, 0, sizeof(caps));
199 if (device_->Ioctl(VIDIOC_QUERYCAP, &caps) != 0) { 214 if (device_->Ioctl(VIDIOC_QUERYCAP, &caps) != 0) {
200 PLOG(ERROR) << __func__ << ": ioctl() failed: VIDIOC_QUERYCAP"; 215 PLOG(ERROR) << __func__ << ": ioctl() failed: VIDIOC_QUERYCAP";
201 return false; 216 return false;
202 } 217 }
203 if ((caps.capabilities & kCapsRequired) != kCapsRequired) { 218 if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
204 LOG(ERROR) << __func__ << ": VIDIOC_QUERYCAP, caps check failed: 0x" 219 LOG(ERROR) << __func__ << ": VIDIOC_QUERYCAP, caps check failed: 0x"
205 << std::hex << caps.capabilities; 220 << std::hex << caps.capabilities;
206 return false; 221 return false;
207 } 222 }
(...skipping 82 matching lines...) Expand 10 before | Expand all | Expand 10 after
290 305
291 bool V4L2JpegDecodeAccelerator::ShouldRecreateInputBuffers() { 306 bool V4L2JpegDecodeAccelerator::ShouldRecreateInputBuffers() {
292 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); 307 DCHECK(decoder_task_runner_->BelongsToCurrentThread());
293 if (input_jobs_.empty()) 308 if (input_jobs_.empty())
294 return false; 309 return false;
295 310
296 linked_ptr<JobRecord> job_record = input_jobs_.front(); 311 linked_ptr<JobRecord> job_record = input_jobs_.front();
297 // Check input buffer size is enough 312 // Check input buffer size is enough
298 return (input_buffer_map_.empty() || 313 return (input_buffer_map_.empty() ||
299 (job_record->shm.size() + sizeof(kDefaultDhtSeg)) > 314 (job_record->shm.size() + sizeof(kDefaultDhtSeg)) >
300 input_buffer_map_.front().length); 315 input_buffer_map_.front().length[0]);
301 } 316 }
302 317
303 bool V4L2JpegDecodeAccelerator::RecreateInputBuffers() { 318 bool V4L2JpegDecodeAccelerator::RecreateInputBuffers() {
304 DVLOG(3) << __func__; 319 DVLOG(3) << __func__;
305 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); 320 DCHECK(decoder_task_runner_->BelongsToCurrentThread());
306 321
307 // If running queue is not empty, we should wait until pending frames finish. 322 // If running queue is not empty, we should wait until pending frames finish.
308 if (!running_jobs_.empty()) 323 if (!running_jobs_.empty())
309 return true; 324 return true;
310 325
(...skipping 26 matching lines...) Expand all
337 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); 352 DCHECK(decoder_task_runner_->BelongsToCurrentThread());
338 DCHECK(!input_streamon_); 353 DCHECK(!input_streamon_);
339 DCHECK(!input_jobs_.empty()); 354 DCHECK(!input_jobs_.empty());
340 linked_ptr<JobRecord> job_record = input_jobs_.front(); 355 linked_ptr<JobRecord> job_record = input_jobs_.front();
341 // The input image may miss huffman table. We didn't parse the image before, 356 // The input image may miss huffman table. We didn't parse the image before,
342 // so we create more to avoid the situation of not enough memory. 357 // so we create more to avoid the situation of not enough memory.
343 // Reserve twice size to avoid recreating input buffer frequently. 358 // Reserve twice size to avoid recreating input buffer frequently.
344 size_t reserve_size = (job_record->shm.size() + sizeof(kDefaultDhtSeg)) * 2; 359 size_t reserve_size = (job_record->shm.size() + sizeof(kDefaultDhtSeg)) * 2;
345 struct v4l2_format format; 360 struct v4l2_format format;
346 memset(&format, 0, sizeof(format)); 361 memset(&format, 0, sizeof(format));
347 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; 362 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
348 format.fmt.pix.pixelformat = V4L2_PIX_FMT_JPEG; 363 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_JPEG;
349 format.fmt.pix.sizeimage = reserve_size; 364 format.fmt.pix_mp.plane_fmt[0].sizeimage = reserve_size;
350 format.fmt.pix.field = V4L2_FIELD_ANY; 365 format.fmt.pix_mp.field = V4L2_FIELD_ANY;
366 format.fmt.pix_mp.num_planes = kMaxInputPlanes;
351 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format); 367 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
352 368
353 struct v4l2_requestbuffers reqbufs; 369 struct v4l2_requestbuffers reqbufs;
354 memset(&reqbufs, 0, sizeof(reqbufs)); 370 memset(&reqbufs, 0, sizeof(reqbufs));
355 reqbufs.count = kBufferCount; 371 reqbufs.count = kBufferCount;
356 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; 372 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
357 reqbufs.memory = V4L2_MEMORY_MMAP; 373 reqbufs.memory = V4L2_MEMORY_MMAP;
358 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs); 374 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
359 375
360 DCHECK(input_buffer_map_.empty()); 376 DCHECK(input_buffer_map_.empty());
361 input_buffer_map_.resize(reqbufs.count); 377 input_buffer_map_.resize(reqbufs.count);
362 378
363 for (size_t i = 0; i < input_buffer_map_.size(); ++i) { 379 for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
364 free_input_buffers_.push_back(i); 380 free_input_buffers_.push_back(i);
365 381
366 struct v4l2_buffer buffer; 382 struct v4l2_buffer buffer;
383 struct v4l2_plane planes[VIDEO_MAX_PLANES];
367 memset(&buffer, 0, sizeof(buffer)); 384 memset(&buffer, 0, sizeof(buffer));
385 memset(planes, 0, sizeof(planes));
368 buffer.index = i; 386 buffer.index = i;
369 buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; 387 buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
388 buffer.m.planes = planes;
389 buffer.length = arraysize(planes);
370 buffer.memory = V4L2_MEMORY_MMAP; 390 buffer.memory = V4L2_MEMORY_MMAP;
371 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer); 391 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
372 void* address = device_->Mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, 392 if (buffer.length != kMaxInputPlanes) {
373 MAP_SHARED, buffer.m.offset);
374 if (address == MAP_FAILED) {
375 PLOG(ERROR) << __func__ << ": mmap() failed";
376 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
377 return false; 393 return false;
378 } 394 }
379 input_buffer_map_[i].address = address; 395 for (size_t j = 0; j < buffer.length; ++j) {
380 input_buffer_map_[i].length = buffer.length; 396 void* address =
397 device_->Mmap(NULL, planes[j].length, PROT_READ | PROT_WRITE,
398 MAP_SHARED, planes[j].m.mem_offset);
399 if (address == MAP_FAILED) {
400 PLOG(ERROR) << __func__ << ": mmap() failed";
401 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
402 return false;
403 }
404 input_buffer_map_[i].address[j] = address;
405 input_buffer_map_[i].length[j] = planes[j].length;
406 }
381 } 407 }
382 408
383 return true; 409 return true;
384 } 410 }
385 411
386 bool V4L2JpegDecodeAccelerator::CreateOutputBuffers() { 412 bool V4L2JpegDecodeAccelerator::CreateOutputBuffers() {
387 DVLOG(3) << __func__; 413 DVLOG(3) << __func__;
388 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); 414 DCHECK(decoder_task_runner_->BelongsToCurrentThread());
389 DCHECK(!output_streamon_); 415 DCHECK(!output_streamon_);
390 DCHECK(!running_jobs_.empty()); 416 DCHECK(!running_jobs_.empty());
391 linked_ptr<JobRecord> job_record = running_jobs_.front(); 417 linked_ptr<JobRecord> job_record = running_jobs_.front();
392 418
393 size_t frame_size = VideoFrame::AllocationSize( 419 size_t frame_size = VideoFrame::AllocationSize(
394 PIXEL_FORMAT_I420, job_record->out_frame->coded_size()); 420 PIXEL_FORMAT_I420, job_record->out_frame->coded_size());
395 struct v4l2_format format; 421 struct v4l2_format format;
396 memset(&format, 0, sizeof(format)); 422 memset(&format, 0, sizeof(format));
397 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 423 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
398 format.fmt.pix.width = job_record->out_frame->coded_size().width(); 424 format.fmt.pix_mp.width = job_record->out_frame->coded_size().width();
399 format.fmt.pix.height = job_record->out_frame->coded_size().height(); 425 format.fmt.pix_mp.height = job_record->out_frame->coded_size().height();
400 format.fmt.pix.sizeimage = frame_size; 426 format.fmt.pix_mp.num_planes = 1;
401 format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420; 427 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_YUV420;
402 format.fmt.pix.field = V4L2_FIELD_ANY; 428 format.fmt.pix_mp.plane_fmt[0].sizeimage = frame_size;
429 format.fmt.pix_mp.field = V4L2_FIELD_ANY;
403 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format); 430 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
404 output_buffer_pixelformat_ = format.fmt.pix.pixelformat; 431 output_buffer_pixelformat_ = format.fmt.pix_mp.pixelformat;
405 output_buffer_coded_size_.SetSize(format.fmt.pix.width, 432 output_buffer_coded_size_.SetSize(format.fmt.pix_mp.width,
406 format.fmt.pix.height); 433 format.fmt.pix_mp.height);
434 output_buffer_num_planes_ = format.fmt.pix_mp.num_planes;
435
436 VideoPixelFormat output_format =
437 V4L2Device::V4L2PixFmtToVideoPixelFormat(output_buffer_pixelformat_);
438 if (output_format == PIXEL_FORMAT_UNKNOWN) {
439 PLOG(ERROR) << __func__ << ": unknown V4L2 pixel format: "
440 << output_buffer_pixelformat_;
441 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
442 return false;
443 }
407 444
408 struct v4l2_requestbuffers reqbufs; 445 struct v4l2_requestbuffers reqbufs;
409 memset(&reqbufs, 0, sizeof(reqbufs)); 446 memset(&reqbufs, 0, sizeof(reqbufs));
410 reqbufs.count = kBufferCount; 447 reqbufs.count = kBufferCount;
411 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 448 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
412 reqbufs.memory = V4L2_MEMORY_MMAP; 449 reqbufs.memory = V4L2_MEMORY_MMAP;
413 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs); 450 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
414 451
415 DCHECK(output_buffer_map_.empty()); 452 DCHECK(output_buffer_map_.empty());
416 output_buffer_map_.resize(reqbufs.count); 453 output_buffer_map_.resize(reqbufs.count);
417 454
418 VideoPixelFormat output_format =
419 V4L2Device::V4L2PixFmtToVideoPixelFormat(output_buffer_pixelformat_);
420
421 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { 455 for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
422 free_output_buffers_.push_back(i); 456 free_output_buffers_.push_back(i);
423 457
424 struct v4l2_buffer buffer; 458 struct v4l2_buffer buffer;
459 struct v4l2_plane planes[VIDEO_MAX_PLANES];
425 memset(&buffer, 0, sizeof(buffer)); 460 memset(&buffer, 0, sizeof(buffer));
461 memset(planes, 0, sizeof(planes));
426 buffer.index = i; 462 buffer.index = i;
427 buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 463 buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
428 buffer.memory = V4L2_MEMORY_MMAP; 464 buffer.memory = V4L2_MEMORY_MMAP;
465 buffer.m.planes = planes;
466 buffer.length = arraysize(planes);
429 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer); 467 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
430 468
431 DCHECK_GE(buffer.length, 469 if (output_buffer_num_planes_ != buffer.length) {
432 VideoFrame::AllocationSize(
433 output_format,
434 gfx::Size(format.fmt.pix.width, format.fmt.pix.height)));
435
436 void* address = device_->Mmap(NULL, buffer.length, PROT_READ | PROT_WRITE,
437 MAP_SHARED, buffer.m.offset);
438 if (address == MAP_FAILED) {
439 PLOG(ERROR) << __func__ << ": mmap() failed";
440 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
441 return false; 470 return false;
442 } 471 }
443 output_buffer_map_[i].address = address; 472 for (size_t j = 0; j < buffer.length; ++j) {
444 output_buffer_map_[i].length = buffer.length; 473 if (base::checked_cast<int64_t>(planes[j].length) <
474 VideoFrame::PlaneSize(
475 output_format, j,
476 gfx::Size(format.fmt.pix_mp.width, format.fmt.pix_mp.height))
477 .GetArea()) {
478 return false;
479 }
480 void* address =
481 device_->Mmap(NULL, planes[j].length, PROT_READ | PROT_WRITE,
482 MAP_SHARED, planes[j].m.mem_offset);
483 if (address == MAP_FAILED) {
484 PLOG(ERROR) << __func__ << ": mmap() failed";
485 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
486 return false;
487 }
488 output_buffer_map_[i].address[j] = address;
489 output_buffer_map_[i].length[j] = planes[j].length;
490 }
445 } 491 }
446 492
447 return true; 493 return true;
448 } 494 }
449 495
450 void V4L2JpegDecodeAccelerator::DestroyInputBuffers() { 496 void V4L2JpegDecodeAccelerator::DestroyInputBuffers() {
451 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); 497 DCHECK(decoder_task_runner_->BelongsToCurrentThread());
452 498
453 free_input_buffers_.clear(); 499 free_input_buffers_.clear();
454 500
455 if (input_buffer_map_.empty()) 501 if (input_buffer_map_.empty())
456 return; 502 return;
457 503
458 if (input_streamon_) { 504 if (input_streamon_) {
459 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT; 505 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
460 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMOFF, &type); 506 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMOFF, &type);
461 input_streamon_ = false; 507 input_streamon_ = false;
462 } 508 }
463 509
464 for (size_t i = 0; i < input_buffer_map_.size(); ++i) { 510 for (const auto& input_record : input_buffer_map_) {
465 BufferRecord& input_record = input_buffer_map_[i]; 511 for (size_t i = 0; i < kMaxInputPlanes; ++i) {
466 device_->Munmap(input_record.address, input_record.length); 512 device_->Munmap(input_record.address[i], input_record.length[i]);
513 }
467 } 514 }
468 515
469 struct v4l2_requestbuffers reqbufs; 516 struct v4l2_requestbuffers reqbufs;
470 memset(&reqbufs, 0, sizeof(reqbufs)); 517 memset(&reqbufs, 0, sizeof(reqbufs));
471 reqbufs.count = 0; 518 reqbufs.count = 0;
472 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; 519 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
473 reqbufs.memory = V4L2_MEMORY_MMAP; 520 reqbufs.memory = V4L2_MEMORY_MMAP;
474 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs); 521 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);
475 522
476 input_buffer_map_.clear(); 523 input_buffer_map_.clear();
477 } 524 }
478 525
479 void V4L2JpegDecodeAccelerator::DestroyOutputBuffers() { 526 void V4L2JpegDecodeAccelerator::DestroyOutputBuffers() {
480 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); 527 DCHECK(decoder_task_runner_->BelongsToCurrentThread());
481 528
482 free_output_buffers_.clear(); 529 free_output_buffers_.clear();
483 530
484 if (output_buffer_map_.empty()) 531 if (output_buffer_map_.empty())
485 return; 532 return;
486 533
487 if (output_streamon_) { 534 if (output_streamon_) {
488 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 535 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
489 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMOFF, &type); 536 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMOFF, &type);
490 output_streamon_ = false; 537 output_streamon_ = false;
491 } 538 }
492 539
493 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { 540 for (const auto& output_record : output_buffer_map_) {
494 BufferRecord& output_record = output_buffer_map_[i]; 541 for (size_t i = 0; i < output_buffer_num_planes_; ++i) {
495 device_->Munmap(output_record.address, output_record.length); 542 device_->Munmap(output_record.address[i], output_record.length[i]);
543 }
496 } 544 }
497 545
498 struct v4l2_requestbuffers reqbufs; 546 struct v4l2_requestbuffers reqbufs;
499 memset(&reqbufs, 0, sizeof(reqbufs)); 547 memset(&reqbufs, 0, sizeof(reqbufs));
500 reqbufs.count = 0; 548 reqbufs.count = 0;
501 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 549 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
502 reqbufs.memory = V4L2_MEMORY_MMAP; 550 reqbufs.memory = V4L2_MEMORY_MMAP;
503 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs); 551 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);
504 552
505 output_buffer_map_.clear(); 553 output_buffer_map_.clear();
554 output_buffer_num_planes_ = 0;
506 } 555 }
507 556
508 void V4L2JpegDecodeAccelerator::DevicePollTask() { 557 void V4L2JpegDecodeAccelerator::DevicePollTask() {
509 DCHECK(device_poll_task_runner_->BelongsToCurrentThread()); 558 DCHECK(device_poll_task_runner_->BelongsToCurrentThread());
510 559
511 bool event_pending; 560 bool event_pending;
512 if (!device_->Poll(true, &event_pending)) { 561 if (!device_->Poll(true, &event_pending)) {
513 PLOG(ERROR) << __func__ << ": Poll device error."; 562 PLOG(ERROR) << __func__ << ": Poll device error.";
514 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); 563 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
515 return; 564 return;
(...skipping 74 matching lines...) Expand 10 before | Expand all | Expand 10 after
590 // If input buffers are required to re-create, do not enqueue input record 639 // If input buffers are required to re-create, do not enqueue input record
591 // until all pending frames are handled by device. 640 // until all pending frames are handled by device.
592 if (ShouldRecreateInputBuffers()) 641 if (ShouldRecreateInputBuffers())
593 break; 642 break;
594 if (!EnqueueInputRecord()) 643 if (!EnqueueInputRecord())
595 return; 644 return;
596 } 645 }
597 // Check here because we cannot STREAMON before QBUF in earlier kernel. 646 // Check here because we cannot STREAMON before QBUF in earlier kernel.
598 // (kernel version < 3.14) 647 // (kernel version < 3.14)
599 if (!input_streamon_ && InputBufferQueuedCount()) { 648 if (!input_streamon_ && InputBufferQueuedCount()) {
600 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT; 649 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
601 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type); 650 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
602 input_streamon_ = true; 651 input_streamon_ = true;
603 } 652 }
604 } 653 }
605 654
606 void V4L2JpegDecodeAccelerator::EnqueueOutput() { 655 void V4L2JpegDecodeAccelerator::EnqueueOutput() {
607 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); 656 DCHECK(decoder_task_runner_->BelongsToCurrentThread());
608 // Output record can be enqueued because the output coded sizes of the frames 657 // Output record can be enqueued because the output coded sizes of the frames
609 // currently in the pipeline are all the same. 658 // currently in the pipeline are all the same.
610 while (running_jobs_.size() > OutputBufferQueuedCount() && 659 while (running_jobs_.size() > OutputBufferQueuedCount() &&
611 !free_output_buffers_.empty()) { 660 !free_output_buffers_.empty()) {
612 if (!EnqueueOutputRecord()) 661 if (!EnqueueOutputRecord())
613 return; 662 return;
614 } 663 }
615 // Check here because we cannot STREAMON before QBUF in earlier kernel. 664 // Check here because we cannot STREAMON before QBUF in earlier kernel.
616 // (kernel version < 3.14) 665 // (kernel version < 3.14)
617 if (!output_streamon_ && OutputBufferQueuedCount()) { 666 if (!output_streamon_ && OutputBufferQueuedCount()) {
618 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 667 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
619 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type); 668 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
620 output_streamon_ = true; 669 output_streamon_ = true;
621 } 670 }
622 } 671 }
623 672
624 static bool CopyOutputImage(const uint32_t src_pixelformat, 673 bool V4L2JpegDecodeAccelerator::ConvertOutputImage(
625 const void* src_addr, 674 const BufferRecord& output_buffer,
626 const gfx::Size& src_coded_size, 675 const scoped_refptr<VideoFrame>& dst_frame) {
627 const scoped_refptr<VideoFrame>& dst_frame) {
628 VideoPixelFormat format =
629 V4L2Device::V4L2PixFmtToVideoPixelFormat(src_pixelformat);
630 size_t src_size = VideoFrame::AllocationSize(format, src_coded_size);
631 uint8_t* dst_y = dst_frame->data(VideoFrame::kYPlane); 676 uint8_t* dst_y = dst_frame->data(VideoFrame::kYPlane);
632 uint8_t* dst_u = dst_frame->data(VideoFrame::kUPlane); 677 uint8_t* dst_u = dst_frame->data(VideoFrame::kUPlane);
633 uint8_t* dst_v = dst_frame->data(VideoFrame::kVPlane); 678 uint8_t* dst_v = dst_frame->data(VideoFrame::kVPlane);
634 size_t dst_y_stride = dst_frame->stride(VideoFrame::kYPlane); 679 size_t dst_y_stride = dst_frame->stride(VideoFrame::kYPlane);
635 size_t dst_u_stride = dst_frame->stride(VideoFrame::kUPlane); 680 size_t dst_u_stride = dst_frame->stride(VideoFrame::kUPlane);
636 size_t dst_v_stride = dst_frame->stride(VideoFrame::kVPlane); 681 size_t dst_v_stride = dst_frame->stride(VideoFrame::kVPlane);
637 682
638 // If the source format is I420, ConvertToI420 will simply copy the frame. 683 if (output_buffer_num_planes_ == 1) {
639 if (libyuv::ConvertToI420(static_cast<uint8_t*>(const_cast<void*>(src_addr)), 684 // Use ConvertToI420 to convert all splane buffers.
640 src_size, 685 // If the source format is I420, ConvertToI420 will simply copy the frame.
641 dst_y, dst_y_stride, 686 VideoPixelFormat format =
642 dst_u, dst_u_stride, 687 V4L2Device::V4L2PixFmtToVideoPixelFormat(output_buffer_pixelformat_);
643 dst_v, dst_v_stride, 688 size_t src_size =
644 0, 0, 689 VideoFrame::AllocationSize(format, output_buffer_coded_size_);
645 src_coded_size.width(), 690 if (libyuv::ConvertToI420(
646 src_coded_size.height(), 691 static_cast<uint8_t*>(output_buffer.address[0]), src_size, dst_y,
647 dst_frame->coded_size().width(), 692 dst_y_stride, dst_u, dst_u_stride, dst_v, dst_v_stride, 0, 0,
648 dst_frame->coded_size().height(), 693 output_buffer_coded_size_.width(),
649 libyuv::kRotate0, 694 output_buffer_coded_size_.height(), dst_frame->coded_size().width(),
650 src_pixelformat)) { 695 dst_frame->coded_size().height(), libyuv::kRotate0,
651 LOG(ERROR) << "ConvertToI420 failed. Source format: " << src_pixelformat; 696 output_buffer_pixelformat_)) {
697 LOG(ERROR) << "ConvertToI420 failed. Source format: "
698 << output_buffer_pixelformat_;
699 return false;
700 }
701 } else if (output_buffer_pixelformat_ == V4L2_PIX_FMT_YUV420M ||
702 output_buffer_pixelformat_ == V4L2_PIX_FMT_YUV422M) {
703 uint8_t* src_y = static_cast<uint8_t*>(output_buffer.address[0]);
704 uint8_t* src_u = static_cast<uint8_t*>(output_buffer.address[1]);
705 uint8_t* src_v = static_cast<uint8_t*>(output_buffer.address[2]);
706 size_t src_y_stride = output_buffer_coded_size_.width();
707 size_t src_u_stride = output_buffer_coded_size_.width() / 2;
708 size_t src_v_stride = output_buffer_coded_size_.width() / 2;
709 if (output_buffer_pixelformat_ == V4L2_PIX_FMT_YUV420M) {
710 if (libyuv::I420Copy(src_y, src_y_stride, src_u, src_u_stride, src_v,
711 src_v_stride, dst_y, dst_y_stride, dst_u,
712 dst_u_stride, dst_v, dst_v_stride,
713 output_buffer_coded_size_.width(),
714 output_buffer_coded_size_.height())) {
715 LOG(ERROR) << "I420Copy failed";
716 return false;
717 }
718 } else { // output_buffer_pixelformat_ == V4L2_PIX_FMT_YUV422M
719 if (libyuv::I422ToI420(src_y, src_y_stride, src_u, src_u_stride, src_v,
720 src_v_stride, dst_y, dst_y_stride, dst_u,
721 dst_u_stride, dst_v, dst_v_stride,
722 output_buffer_coded_size_.width(),
723 output_buffer_coded_size_.height())) {
724 LOG(ERROR) << "I422ToI420 failed";
725 return false;
726 }
727 }
728 } else {
729 LOG(ERROR) << "Unsupported source buffer format: "
730 << output_buffer_pixelformat_;
652 return false; 731 return false;
653 } 732 }
654 return true; 733 return true;
655 } 734 }
656 735
657 void V4L2JpegDecodeAccelerator::Dequeue() { 736 void V4L2JpegDecodeAccelerator::Dequeue() {
658 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); 737 DCHECK(decoder_task_runner_->BelongsToCurrentThread());
659 738
660 // Dequeue completed input (VIDEO_OUTPUT) buffers, 739 // Dequeue completed input (VIDEO_OUTPUT) buffers,
661 // and recycle to the free list. 740 // and recycle to the free list.
662 struct v4l2_buffer dqbuf; 741 struct v4l2_buffer dqbuf;
742 struct v4l2_plane planes[VIDEO_MAX_PLANES];
663 while (InputBufferQueuedCount() > 0) { 743 while (InputBufferQueuedCount() > 0) {
664 DCHECK(input_streamon_); 744 DCHECK(input_streamon_);
665 memset(&dqbuf, 0, sizeof(dqbuf)); 745 memset(&dqbuf, 0, sizeof(dqbuf));
666 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; 746 memset(planes, 0, sizeof(planes));
747 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
667 dqbuf.memory = V4L2_MEMORY_MMAP; 748 dqbuf.memory = V4L2_MEMORY_MMAP;
749 dqbuf.length = arraysize(planes);
750 dqbuf.m.planes = planes;
668 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) { 751 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
669 if (errno == EAGAIN) { 752 if (errno == EAGAIN) {
670 // EAGAIN if we're just out of buffers to dequeue. 753 // EAGAIN if we're just out of buffers to dequeue.
671 break; 754 break;
672 } 755 }
673 PLOG(ERROR) << "ioctl() failed: input buffer VIDIOC_DQBUF failed."; 756 PLOG(ERROR) << "ioctl() failed: input buffer VIDIOC_DQBUF failed.";
674 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); 757 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
675 return; 758 return;
676 } 759 }
677 BufferRecord& input_record = input_buffer_map_[dqbuf.index]; 760 BufferRecord& input_record = input_buffer_map_[dqbuf.index];
678 DCHECK(input_record.at_device); 761 DCHECK(input_record.at_device);
679 input_record.at_device = false; 762 input_record.at_device = false;
680 free_input_buffers_.push_back(dqbuf.index); 763 free_input_buffers_.push_back(dqbuf.index);
681 764
682 if (dqbuf.flags & V4L2_BUF_FLAG_ERROR) { 765 if (dqbuf.flags & V4L2_BUF_FLAG_ERROR) {
683 DVLOG(1) << "Dequeue input buffer error."; 766 DVLOG(1) << "Dequeue input buffer error.";
684 PostNotifyError(kInvalidBitstreamBufferId, UNSUPPORTED_JPEG); 767 PostNotifyError(kInvalidBitstreamBufferId, UNSUPPORTED_JPEG);
685 running_jobs_.pop(); 768 running_jobs_.pop();
686 } 769 }
687 } 770 }
688 771
689 // Dequeue completed output (VIDEO_CAPTURE) buffers, recycle to the free list. 772 // Dequeue completed output (VIDEO_CAPTURE) buffers, recycle to the free list.
690 // Return the finished buffer to the client via the job ready callback. 773 // Return the finished buffer to the client via the job ready callback.
691 // If dequeued input buffer has an error, the error frame has removed from 774 // If dequeued input buffer has an error, the error frame has removed from
692 // |running_jobs_|. We only have to dequeue output buffer when we actually 775 // |running_jobs_|. We only have to dequeue output buffer when we actually
693 // have pending frames in |running_jobs_| and also enqueued output buffers. 776 // have pending frames in |running_jobs_| and also enqueued output buffers.
694 while (!running_jobs_.empty() && OutputBufferQueuedCount() > 0) { 777 while (!running_jobs_.empty() && OutputBufferQueuedCount() > 0) {
695 DCHECK(output_streamon_); 778 DCHECK(output_streamon_);
696 memset(&dqbuf, 0, sizeof(dqbuf)); 779 memset(&dqbuf, 0, sizeof(dqbuf));
697 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 780 memset(planes, 0, sizeof(planes));
781 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
698 // From experiments, using MMAP and memory copy is still faster than 782 // From experiments, using MMAP and memory copy is still faster than
699 // USERPTR. Also, client doesn't need to consider the buffer alignment and 783 // USERPTR. Also, client doesn't need to consider the buffer alignment and
700 // JpegDecodeAccelerator API will be simpler. 784 // JpegDecodeAccelerator API will be simpler.
701 dqbuf.memory = V4L2_MEMORY_MMAP; 785 dqbuf.memory = V4L2_MEMORY_MMAP;
786 dqbuf.length = arraysize(planes);
787 dqbuf.m.planes = planes;
702 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) { 788 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
703 if (errno == EAGAIN) { 789 if (errno == EAGAIN) {
704 // EAGAIN if we're just out of buffers to dequeue. 790 // EAGAIN if we're just out of buffers to dequeue.
705 break; 791 break;
706 } 792 }
707 PLOG(ERROR) << "ioctl() failed: output buffer VIDIOC_DQBUF failed."; 793 PLOG(ERROR) << "ioctl() failed: output buffer VIDIOC_DQBUF failed.";
708 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); 794 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
709 return; 795 return;
710 } 796 }
711 BufferRecord& output_record = output_buffer_map_[dqbuf.index]; 797 BufferRecord& output_record = output_buffer_map_[dqbuf.index];
712 DCHECK(output_record.at_device); 798 DCHECK(output_record.at_device);
713 output_record.at_device = false; 799 output_record.at_device = false;
714 free_output_buffers_.push_back(dqbuf.index); 800 free_output_buffers_.push_back(dqbuf.index);
715 801
716 // Jobs are always processed in FIFO order. 802 // Jobs are always processed in FIFO order.
717 linked_ptr<JobRecord> job_record = running_jobs_.front(); 803 linked_ptr<JobRecord> job_record = running_jobs_.front();
718 running_jobs_.pop(); 804 running_jobs_.pop();
719 805
720 if (dqbuf.flags & V4L2_BUF_FLAG_ERROR) { 806 if (dqbuf.flags & V4L2_BUF_FLAG_ERROR) {
721 DVLOG(1) << "Dequeue output buffer error."; 807 DVLOG(1) << "Dequeue output buffer error.";
722 PostNotifyError(kInvalidBitstreamBufferId, UNSUPPORTED_JPEG); 808 PostNotifyError(kInvalidBitstreamBufferId, UNSUPPORTED_JPEG);
723 } else { 809 } else {
724 // Copy the decoded data from output buffer to the buffer provided by the 810 // Copy the decoded data from output buffer to the buffer provided by the
725 // client. Do format conversion when output format is not 811 // client. Do format conversion when output format is not
726 // V4L2_PIX_FMT_YUV420. 812 // V4L2_PIX_FMT_YUV420.
727 if (!CopyOutputImage(output_buffer_pixelformat_, output_record.address, 813 if (!ConvertOutputImage(output_record, job_record->out_frame)) {
728 output_buffer_coded_size_, job_record->out_frame)) {
729 PostNotifyError(job_record->bitstream_buffer_id, PLATFORM_FAILURE); 814 PostNotifyError(job_record->bitstream_buffer_id, PLATFORM_FAILURE);
730 return; 815 return;
731 } 816 }
732
733 DVLOG(3) << "Decoding finished, returning bitstream buffer, id=" 817 DVLOG(3) << "Decoding finished, returning bitstream buffer, id="
734 << job_record->bitstream_buffer_id; 818 << job_record->bitstream_buffer_id;
735 819
736 child_task_runner_->PostTask( 820 child_task_runner_->PostTask(
737 FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::VideoFrameReady, 821 FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::VideoFrameReady,
738 weak_ptr_, job_record->bitstream_buffer_id)); 822 weak_ptr_, job_record->bitstream_buffer_id));
739 } 823 }
740 } 824 }
741 } 825 }
742 826
(...skipping 84 matching lines...) Expand 10 before | Expand all | Expand 10 after
827 911
828 // Enqueue an input (VIDEO_OUTPUT) buffer for an input video frame. 912 // Enqueue an input (VIDEO_OUTPUT) buffer for an input video frame.
829 linked_ptr<JobRecord> job_record = input_jobs_.front(); 913 linked_ptr<JobRecord> job_record = input_jobs_.front();
830 input_jobs_.pop(); 914 input_jobs_.pop();
831 const int index = free_input_buffers_.back(); 915 const int index = free_input_buffers_.back();
832 BufferRecord& input_record = input_buffer_map_[index]; 916 BufferRecord& input_record = input_buffer_map_[index];
833 DCHECK(!input_record.at_device); 917 DCHECK(!input_record.at_device);
834 918
835 // It will add default huffman segment if it's missing. 919 // It will add default huffman segment if it's missing.
836 if (!AddHuffmanTable(job_record->shm.memory(), job_record->shm.size(), 920 if (!AddHuffmanTable(job_record->shm.memory(), job_record->shm.size(),
837 input_record.address, input_record.length)) { 921 input_record.address[0], input_record.length[0])) {
838 PostNotifyError(job_record->bitstream_buffer_id, PARSE_JPEG_FAILED); 922 PostNotifyError(job_record->bitstream_buffer_id, PARSE_JPEG_FAILED);
839 return false; 923 return false;
840 } 924 }
841 925
842 struct v4l2_buffer qbuf; 926 struct v4l2_buffer qbuf;
927 struct v4l2_plane planes[VIDEO_MAX_PLANES];
843 memset(&qbuf, 0, sizeof(qbuf)); 928 memset(&qbuf, 0, sizeof(qbuf));
929 memset(planes, 0, sizeof(planes));
844 qbuf.index = index; 930 qbuf.index = index;
845 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; 931 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
846 qbuf.memory = V4L2_MEMORY_MMAP; 932 qbuf.memory = V4L2_MEMORY_MMAP;
933 qbuf.length = arraysize(planes);
934 // There is only one plane for V4L2_PIX_FMT_JPEG.
935 planes[0].bytesused = input_record.length[0];
936 qbuf.m.planes = planes;
847 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf); 937 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
848 input_record.at_device = true; 938 input_record.at_device = true;
849 running_jobs_.push(job_record); 939 running_jobs_.push(job_record);
850 free_input_buffers_.pop_back(); 940 free_input_buffers_.pop_back();
851 941
852 DVLOG(3) << __func__ 942 DVLOG(3) << __func__
853 << ": enqueued frame id=" << job_record->bitstream_buffer_id 943 << ": enqueued frame id=" << job_record->bitstream_buffer_id
854 << " to device."; 944 << " to device.";
855 return true; 945 return true;
856 } 946 }
857 947
858 bool V4L2JpegDecodeAccelerator::EnqueueOutputRecord() { 948 bool V4L2JpegDecodeAccelerator::EnqueueOutputRecord() {
859 DCHECK(!free_output_buffers_.empty()); 949 DCHECK(!free_output_buffers_.empty());
950 DCHECK_GT(output_buffer_num_planes_, 0u);
860 951
861 // Enqueue an output (VIDEO_CAPTURE) buffer. 952 // Enqueue an output (VIDEO_CAPTURE) buffer.
862 const int index = free_output_buffers_.back(); 953 const int index = free_output_buffers_.back();
863 BufferRecord& output_record = output_buffer_map_[index]; 954 BufferRecord& output_record = output_buffer_map_[index];
864 DCHECK(!output_record.at_device); 955 DCHECK(!output_record.at_device);
865 struct v4l2_buffer qbuf; 956 struct v4l2_buffer qbuf;
957 struct v4l2_plane planes[VIDEO_MAX_PLANES];
866 memset(&qbuf, 0, sizeof(qbuf)); 958 memset(&qbuf, 0, sizeof(qbuf));
959 memset(planes, 0, sizeof(planes));
867 qbuf.index = index; 960 qbuf.index = index;
868 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 961 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
869 qbuf.memory = V4L2_MEMORY_MMAP; 962 qbuf.memory = V4L2_MEMORY_MMAP;
963 qbuf.length = arraysize(planes);
964 qbuf.m.planes = planes;
870 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf); 965 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
871 output_record.at_device = true; 966 output_record.at_device = true;
872 free_output_buffers_.pop_back(); 967 free_output_buffers_.pop_back();
873 return true; 968 return true;
874 } 969 }
875 970
876 void V4L2JpegDecodeAccelerator::StartDevicePoll() { 971 void V4L2JpegDecodeAccelerator::StartDevicePoll() {
877 DVLOG(3) << __func__ << ": starting device poll"; 972 DVLOG(3) << __func__ << ": starting device poll";
878 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); 973 DCHECK(decoder_task_runner_->BelongsToCurrentThread());
879 DCHECK(!device_poll_thread_.IsRunning()); 974 DCHECK(!device_poll_thread_.IsRunning());
(...skipping 18 matching lines...) Expand all
898 device_poll_thread_.Stop(); 993 device_poll_thread_.Stop();
899 994
900 // Clear the interrupt now, to be sure. 995 // Clear the interrupt now, to be sure.
901 if (!device_->ClearDevicePollInterrupt()) 996 if (!device_->ClearDevicePollInterrupt())
902 return false; 997 return false;
903 998
904 return true; 999 return true;
905 } 1000 }
906 1001
907 } // namespace media 1002 } // namespace media
OLDNEW
« no previous file with comments | « media/gpu/v4l2_jpeg_decode_accelerator.h ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698