Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(780)

Side by Side Diff: media/gpu/v4l2_jpeg_decode_accelerator.cc

Issue 2559423002: media/gpu: switch v4l2_jpeg_decode_accelerator to use multi-planar APIs (Closed)
Patch Set: address the review comments from round 3 Created 3 years, 12 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright 2015 The Chromium Authors. All rights reserved. 1 // Copyright 2015 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "media/gpu/v4l2_jpeg_decode_accelerator.h" 5 #include "media/gpu/v4l2_jpeg_decode_accelerator.h"
6 6
7 #include <errno.h> 7 #include <errno.h>
8 #include <linux/videodev2.h> 8 #include <linux/videodev2.h>
9 #include <string.h> 9 #include <string.h>
10 #include <sys/mman.h> 10 #include <sys/mman.h>
(...skipping 89 matching lines...) Expand 10 before | Expand all | Expand 10 after
100 0x37, 0x38, 0x39, 0x3A, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4A, 100 0x37, 0x38, 0x39, 0x3A, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4A,
101 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5A, 0x63, 0x64, 0x65, 0x66, 101 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5A, 0x63, 0x64, 0x65, 0x66,
102 0x67, 0x68, 0x69, 0x6A, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7A, 102 0x67, 0x68, 0x69, 0x6A, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7A,
103 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8A, 0x92, 0x93, 0x94, 103 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8A, 0x92, 0x93, 0x94,
104 0x95, 0x96, 0x97, 0x98, 0x99, 0x9A, 0xA2, 0xA3, 0xA4, 0xA5, 0xA6, 0xA7, 104 0x95, 0x96, 0x97, 0x98, 0x99, 0x9A, 0xA2, 0xA3, 0xA4, 0xA5, 0xA6, 0xA7,
105 0xA8, 0xA9, 0xAA, 0xB2, 0xB3, 0xB4, 0xB5, 0xB6, 0xB7, 0xB8, 0xB9, 0xBA, 105 0xA8, 0xA9, 0xAA, 0xB2, 0xB3, 0xB4, 0xB5, 0xB6, 0xB7, 0xB8, 0xB9, 0xBA,
106 0xC2, 0xC3, 0xC4, 0xC5, 0xC6, 0xC7, 0xC8, 0xC9, 0xCA, 0xD2, 0xD3, 0xD4, 106 0xC2, 0xC3, 0xC4, 0xC5, 0xC6, 0xC7, 0xC8, 0xC9, 0xCA, 0xD2, 0xD3, 0xD4,
107 0xD5, 0xD6, 0xD7, 0xD8, 0xD9, 0xDA, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7, 107 0xD5, 0xD6, 0xD7, 0xD8, 0xD9, 0xDA, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7,
108 0xE8, 0xE9, 0xEA, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0xF7, 0xF8, 0xF9, 0xFA}; 108 0xE8, 0xE9, 0xEA, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0xF7, 0xF8, 0xF9, 0xFA};
109 109
110 V4L2JpegDecodeAccelerator::BufferRecord::BufferRecord() 110 V4L2JpegDecodeAccelerator::BufferRecord::BufferRecord() : at_device(false) {
111 : address(nullptr), length(0), at_device(false) {} 111 memset(address, 0, sizeof(address));
112 memset(length, 0, sizeof(length));
113 }
112 114
113 V4L2JpegDecodeAccelerator::BufferRecord::~BufferRecord() {} 115 V4L2JpegDecodeAccelerator::BufferRecord::~BufferRecord() {}
114 116
115 V4L2JpegDecodeAccelerator::JobRecord::JobRecord( 117 V4L2JpegDecodeAccelerator::JobRecord::JobRecord(
116 const BitstreamBuffer& bitstream_buffer, 118 const BitstreamBuffer& bitstream_buffer,
117 scoped_refptr<VideoFrame> video_frame) 119 scoped_refptr<VideoFrame> video_frame)
118 : bitstream_buffer_id(bitstream_buffer.id()), 120 : bitstream_buffer_id(bitstream_buffer.id()),
119 shm(bitstream_buffer, true), 121 shm(bitstream_buffer, true),
120 out_frame(video_frame) {} 122 out_frame(video_frame) {}
121 123
122 V4L2JpegDecodeAccelerator::JobRecord::~JobRecord() {} 124 V4L2JpegDecodeAccelerator::JobRecord::~JobRecord() {}
123 125
124 V4L2JpegDecodeAccelerator::V4L2JpegDecodeAccelerator( 126 V4L2JpegDecodeAccelerator::V4L2JpegDecodeAccelerator(
125 const scoped_refptr<V4L2Device>& device, 127 const scoped_refptr<V4L2Device>& device,
126 const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner) 128 const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner)
127 : output_buffer_pixelformat_(0), 129 : output_buffer_pixelformat_(0),
130 output_buffer_num_planes_(0),
128 child_task_runner_(base::ThreadTaskRunnerHandle::Get()), 131 child_task_runner_(base::ThreadTaskRunnerHandle::Get()),
129 io_task_runner_(io_task_runner), 132 io_task_runner_(io_task_runner),
130 client_(nullptr), 133 client_(nullptr),
131 device_(device), 134 device_(device),
132 decoder_thread_("V4L2JpegDecodeThread"), 135 decoder_thread_("V4L2JpegDecodeThread"),
133 device_poll_thread_("V4L2JpegDecodeDevicePollThread"), 136 device_poll_thread_("V4L2JpegDecodeDevicePollThread"),
134 input_streamon_(false), 137 input_streamon_(false),
135 output_streamon_(false), 138 output_streamon_(false),
136 weak_factory_(this) { 139 weak_factory_(this) {
140 // This class can only handle V4L2_PIX_FMT_JPEG as input, so kMaxInputPlanes
141 // can only be 1.
142 static_assert(V4L2JpegDecodeAccelerator::kMaxInputPlanes == 1,
143 "kMaxInputPlanes must be 1 as input must be V4L2_PIX_FMT_JPEG");
137 weak_ptr_ = weak_factory_.GetWeakPtr(); 144 weak_ptr_ = weak_factory_.GetWeakPtr();
138 } 145 }
139 146
140 V4L2JpegDecodeAccelerator::~V4L2JpegDecodeAccelerator() { 147 V4L2JpegDecodeAccelerator::~V4L2JpegDecodeAccelerator() {
141 DCHECK(child_task_runner_->BelongsToCurrentThread()); 148 DCHECK(child_task_runner_->BelongsToCurrentThread());
142 149
143 if (decoder_thread_.IsRunning()) { 150 if (decoder_thread_.IsRunning()) {
144 decoder_task_runner_->PostTask( 151 decoder_task_runner_->PostTask(
145 FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::DestroyTask, 152 FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::DestroyTask,
146 base::Unretained(this))); 153 base::Unretained(this)));
(...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after
187 bool V4L2JpegDecodeAccelerator::Initialize(Client* client) { 194 bool V4L2JpegDecodeAccelerator::Initialize(Client* client) {
188 DCHECK(child_task_runner_->BelongsToCurrentThread()); 195 DCHECK(child_task_runner_->BelongsToCurrentThread());
189 196
190 if (!device_->Open(V4L2Device::Type::kJpegDecoder, V4L2_PIX_FMT_JPEG)) { 197 if (!device_->Open(V4L2Device::Type::kJpegDecoder, V4L2_PIX_FMT_JPEG)) {
191 LOG(ERROR) << "Failed to open device"; 198 LOG(ERROR) << "Failed to open device";
192 return false; 199 return false;
193 } 200 }
194 201
195 // Capabilities check. 202 // Capabilities check.
196 struct v4l2_capability caps; 203 struct v4l2_capability caps;
197 const __u32 kCapsRequired = V4L2_CAP_STREAMING | V4L2_CAP_VIDEO_M2M; 204 const __u32 kCapsRequired = V4L2_CAP_STREAMING | V4L2_CAP_VIDEO_M2M_MPLANE;
198 memset(&caps, 0, sizeof(caps)); 205 memset(&caps, 0, sizeof(caps));
199 if (device_->Ioctl(VIDIOC_QUERYCAP, &caps) != 0) { 206 if (device_->Ioctl(VIDIOC_QUERYCAP, &caps) != 0) {
200 PLOG(ERROR) << __func__ << ": ioctl() failed: VIDIOC_QUERYCAP"; 207 PLOG(ERROR) << __func__ << ": ioctl() failed: VIDIOC_QUERYCAP";
201 return false; 208 return false;
202 } 209 }
203 if ((caps.capabilities & kCapsRequired) != kCapsRequired) { 210 if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
204 LOG(ERROR) << __func__ << ": VIDIOC_QUERYCAP, caps check failed: 0x" 211 LOG(ERROR) << __func__ << ": VIDIOC_QUERYCAP, caps check failed: 0x"
205 << std::hex << caps.capabilities; 212 << std::hex << caps.capabilities;
206 return false; 213 return false;
207 } 214 }
(...skipping 82 matching lines...) Expand 10 before | Expand all | Expand 10 after
290 297
291 bool V4L2JpegDecodeAccelerator::ShouldRecreateInputBuffers() { 298 bool V4L2JpegDecodeAccelerator::ShouldRecreateInputBuffers() {
292 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); 299 DCHECK(decoder_task_runner_->BelongsToCurrentThread());
293 if (input_jobs_.empty()) 300 if (input_jobs_.empty())
294 return false; 301 return false;
295 302
296 linked_ptr<JobRecord> job_record = input_jobs_.front(); 303 linked_ptr<JobRecord> job_record = input_jobs_.front();
297 // Check input buffer size is enough 304 // Check input buffer size is enough
298 return (input_buffer_map_.empty() || 305 return (input_buffer_map_.empty() ||
299 (job_record->shm.size() + sizeof(kDefaultDhtSeg)) > 306 (job_record->shm.size() + sizeof(kDefaultDhtSeg)) >
300 input_buffer_map_.front().length); 307 input_buffer_map_.front().length[0]);
301 } 308 }
302 309
303 bool V4L2JpegDecodeAccelerator::RecreateInputBuffers() { 310 bool V4L2JpegDecodeAccelerator::RecreateInputBuffers() {
304 DVLOG(3) << __func__; 311 DVLOG(3) << __func__;
305 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); 312 DCHECK(decoder_task_runner_->BelongsToCurrentThread());
306 313
307 // If running queue is not empty, we should wait until pending frames finish. 314 // If running queue is not empty, we should wait until pending frames finish.
308 if (!running_jobs_.empty()) 315 if (!running_jobs_.empty())
309 return true; 316 return true;
310 317
(...skipping 26 matching lines...) Expand all
337 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); 344 DCHECK(decoder_task_runner_->BelongsToCurrentThread());
338 DCHECK(!input_streamon_); 345 DCHECK(!input_streamon_);
339 DCHECK(!input_jobs_.empty()); 346 DCHECK(!input_jobs_.empty());
340 linked_ptr<JobRecord> job_record = input_jobs_.front(); 347 linked_ptr<JobRecord> job_record = input_jobs_.front();
341 // The input image may miss huffman table. We didn't parse the image before, 348 // The input image may miss huffman table. We didn't parse the image before,
342 // so we create more to avoid the situation of not enough memory. 349 // so we create more to avoid the situation of not enough memory.
343 // Reserve twice size to avoid recreating input buffer frequently. 350 // Reserve twice size to avoid recreating input buffer frequently.
344 size_t reserve_size = (job_record->shm.size() + sizeof(kDefaultDhtSeg)) * 2; 351 size_t reserve_size = (job_record->shm.size() + sizeof(kDefaultDhtSeg)) * 2;
345 struct v4l2_format format; 352 struct v4l2_format format;
346 memset(&format, 0, sizeof(format)); 353 memset(&format, 0, sizeof(format));
347 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; 354 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
348 format.fmt.pix.pixelformat = V4L2_PIX_FMT_JPEG; 355 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_JPEG;
349 format.fmt.pix.sizeimage = reserve_size; 356 format.fmt.pix_mp.plane_fmt[0].sizeimage = reserve_size;
350 format.fmt.pix.field = V4L2_FIELD_ANY; 357 format.fmt.pix_mp.field = V4L2_FIELD_ANY;
358 format.fmt.pix_mp.num_planes = kMaxInputPlanes;
351 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format); 359 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
352 360
353 struct v4l2_requestbuffers reqbufs; 361 struct v4l2_requestbuffers reqbufs;
354 memset(&reqbufs, 0, sizeof(reqbufs)); 362 memset(&reqbufs, 0, sizeof(reqbufs));
355 reqbufs.count = kBufferCount; 363 reqbufs.count = kBufferCount;
356 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; 364 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
357 reqbufs.memory = V4L2_MEMORY_MMAP; 365 reqbufs.memory = V4L2_MEMORY_MMAP;
358 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs); 366 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
359 367
360 DCHECK(input_buffer_map_.empty()); 368 DCHECK(input_buffer_map_.empty());
361 input_buffer_map_.resize(reqbufs.count); 369 input_buffer_map_.resize(reqbufs.count);
362 370
363 for (size_t i = 0; i < input_buffer_map_.size(); ++i) { 371 for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
364 free_input_buffers_.push_back(i); 372 free_input_buffers_.push_back(i);
365 373
366 struct v4l2_buffer buffer; 374 struct v4l2_buffer buffer;
375 struct v4l2_plane planes[VIDEO_MAX_PLANES];
367 memset(&buffer, 0, sizeof(buffer)); 376 memset(&buffer, 0, sizeof(buffer));
377 memset(planes, 0, sizeof(planes));
368 buffer.index = i; 378 buffer.index = i;
369 buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; 379 buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
380 buffer.m.planes = planes;
381 buffer.length = arraysize(planes);
370 buffer.memory = V4L2_MEMORY_MMAP; 382 buffer.memory = V4L2_MEMORY_MMAP;
371 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer); 383 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
372 void* address = device_->Mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, 384 if (buffer.length != kMaxInputPlanes) {
373 MAP_SHARED, buffer.m.offset);
374 if (address == MAP_FAILED) {
375 PLOG(ERROR) << __func__ << ": mmap() failed";
376 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
377 return false; 385 return false;
378 } 386 }
379 input_buffer_map_[i].address = address; 387 for (size_t j = 0; j < buffer.length; ++j) {
380 input_buffer_map_[i].length = buffer.length; 388 void* address =
389 device_->Mmap(NULL, planes[j].length, PROT_READ | PROT_WRITE,
Pawel Osciak 2016/12/23 06:05:52 We should verify return value from Mmap().
jcliang 2016/12/23 07:50:10 Done.
390 MAP_SHARED, planes[j].m.mem_offset);
391 input_buffer_map_[i].address[j] = address;
392 input_buffer_map_[i].length[j] = planes[j].length;
393 }
381 } 394 }
382 395
383 return true; 396 return true;
384 } 397 }
385 398
386 bool V4L2JpegDecodeAccelerator::CreateOutputBuffers() { 399 bool V4L2JpegDecodeAccelerator::CreateOutputBuffers() {
387 DVLOG(3) << __func__; 400 DVLOG(3) << __func__;
388 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); 401 DCHECK(decoder_task_runner_->BelongsToCurrentThread());
389 DCHECK(!output_streamon_); 402 DCHECK(!output_streamon_);
390 DCHECK(!running_jobs_.empty()); 403 DCHECK(!running_jobs_.empty());
391 linked_ptr<JobRecord> job_record = running_jobs_.front(); 404 linked_ptr<JobRecord> job_record = running_jobs_.front();
392 405
393 size_t frame_size = VideoFrame::AllocationSize( 406 size_t frame_size = VideoFrame::AllocationSize(
394 PIXEL_FORMAT_I420, job_record->out_frame->coded_size()); 407 PIXEL_FORMAT_I420, job_record->out_frame->coded_size());
395 struct v4l2_format format; 408 struct v4l2_format format;
396 memset(&format, 0, sizeof(format)); 409 memset(&format, 0, sizeof(format));
397 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 410 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
398 format.fmt.pix.width = job_record->out_frame->coded_size().width(); 411 format.fmt.pix_mp.width = job_record->out_frame->coded_size().width();
399 format.fmt.pix.height = job_record->out_frame->coded_size().height(); 412 format.fmt.pix_mp.height = job_record->out_frame->coded_size().height();
400 format.fmt.pix.sizeimage = frame_size; 413 format.fmt.pix_mp.num_planes = 1;
401 format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420; 414 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_YUV420;
402 format.fmt.pix.field = V4L2_FIELD_ANY; 415 format.fmt.pix_mp.plane_fmt[0].sizeimage = frame_size;
416 format.fmt.pix_mp.field = V4L2_FIELD_ANY;
403 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format); 417 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
404 output_buffer_pixelformat_ = format.fmt.pix.pixelformat; 418 output_buffer_pixelformat_ = format.fmt.pix_mp.pixelformat;
405 output_buffer_coded_size_.SetSize(format.fmt.pix.width, 419 output_buffer_coded_size_.SetSize(format.fmt.pix_mp.width,
406 format.fmt.pix.height); 420 format.fmt.pix_mp.height);
421 output_buffer_num_planes_ = format.fmt.pix_mp.num_planes;
407 422
408 struct v4l2_requestbuffers reqbufs; 423 struct v4l2_requestbuffers reqbufs;
409 memset(&reqbufs, 0, sizeof(reqbufs)); 424 memset(&reqbufs, 0, sizeof(reqbufs));
410 reqbufs.count = kBufferCount; 425 reqbufs.count = kBufferCount;
411 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 426 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
412 reqbufs.memory = V4L2_MEMORY_MMAP; 427 reqbufs.memory = V4L2_MEMORY_MMAP;
413 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs); 428 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
414 429
415 DCHECK(output_buffer_map_.empty()); 430 DCHECK(output_buffer_map_.empty());
416 output_buffer_map_.resize(reqbufs.count); 431 output_buffer_map_.resize(reqbufs.count);
417 432
418 VideoPixelFormat output_format = 433 VideoPixelFormat output_format =
419 V4L2Device::V4L2PixFmtToVideoPixelFormat(output_buffer_pixelformat_); 434 V4L2Device::V4L2PixFmtToVideoPixelFormat(output_buffer_pixelformat_);
Pawel Osciak 2016/12/23 06:05:52 if (output_format == PIXEL_FORMAT_UNKNOWN) // er
jcliang 2016/12/23 07:50:10 Done.
420 435
421 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { 436 for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
422 free_output_buffers_.push_back(i); 437 free_output_buffers_.push_back(i);
423 438
424 struct v4l2_buffer buffer; 439 struct v4l2_buffer buffer;
440 struct v4l2_plane planes[VIDEO_MAX_PLANES];
425 memset(&buffer, 0, sizeof(buffer)); 441 memset(&buffer, 0, sizeof(buffer));
442 memset(planes, 0, sizeof(planes));
426 buffer.index = i; 443 buffer.index = i;
427 buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 444 buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
428 buffer.memory = V4L2_MEMORY_MMAP; 445 buffer.memory = V4L2_MEMORY_MMAP;
446 buffer.m.planes = planes;
447 buffer.length = arraysize(planes);
429 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer); 448 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
430 449
431 DCHECK_GE(buffer.length, 450 if (output_buffer_num_planes_ != buffer.length) {
432 VideoFrame::AllocationSize(
433 output_format,
434 gfx::Size(format.fmt.pix.width, format.fmt.pix.height)));
435
436 void* address = device_->Mmap(NULL, buffer.length, PROT_READ | PROT_WRITE,
437 MAP_SHARED, buffer.m.offset);
438 if (address == MAP_FAILED) {
439 PLOG(ERROR) << __func__ << ": mmap() failed";
440 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
441 return false; 451 return false;
442 } 452 }
443 output_buffer_map_[i].address = address; 453 for (size_t j = 0; j < buffer.length; ++j) {
444 output_buffer_map_[i].length = buffer.length; 454 if (static_cast<int64_t>(planes[j].length) <
Pawel Osciak 2016/12/23 06:05:52 checked_cast<size_t> perhaps...
jcliang 2016/12/23 07:50:10 Using base::checked_cast<> is a good idea, but Vid
455 VideoFrame::PlaneSize(
456 output_format, j,
457 gfx::Size(format.fmt.pix_mp.width, format.fmt.pix_mp.height))
458 .GetArea()) {
459 return false;
460 }
461 void* address =
462 device_->Mmap(NULL, planes[j].length, PROT_READ | PROT_WRITE,
463 MAP_SHARED, planes[j].m.mem_offset);
464 if (address == MAP_FAILED) {
465 PLOG(ERROR) << __func__ << ": mmap() failed";
466 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
467 return false;
468 }
469 output_buffer_map_[i].address[j] = address;
470 output_buffer_map_[i].length[j] = planes[j].length;
471 }
445 } 472 }
446 473
447 return true; 474 return true;
448 } 475 }
449 476
450 void V4L2JpegDecodeAccelerator::DestroyInputBuffers() { 477 void V4L2JpegDecodeAccelerator::DestroyInputBuffers() {
451 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); 478 DCHECK(decoder_task_runner_->BelongsToCurrentThread());
452 479
453 free_input_buffers_.clear(); 480 free_input_buffers_.clear();
454 481
455 if (input_buffer_map_.empty()) 482 if (input_buffer_map_.empty())
456 return; 483 return;
457 484
458 if (input_streamon_) { 485 if (input_streamon_) {
459 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT; 486 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
460 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMOFF, &type); 487 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMOFF, &type);
461 input_streamon_ = false; 488 input_streamon_ = false;
462 } 489 }
463 490
464 for (size_t i = 0; i < input_buffer_map_.size(); ++i) { 491 for (const auto& input_record : input_buffer_map_) {
465 BufferRecord& input_record = input_buffer_map_[i]; 492 for (size_t i = 0; i < kMaxInputPlanes; ++i) {
466 device_->Munmap(input_record.address, input_record.length); 493 device_->Munmap(input_record.address[i], input_record.length[i]);
494 }
467 } 495 }
468 496
469 struct v4l2_requestbuffers reqbufs; 497 struct v4l2_requestbuffers reqbufs;
470 memset(&reqbufs, 0, sizeof(reqbufs)); 498 memset(&reqbufs, 0, sizeof(reqbufs));
471 reqbufs.count = 0; 499 reqbufs.count = 0;
472 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; 500 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
473 reqbufs.memory = V4L2_MEMORY_MMAP; 501 reqbufs.memory = V4L2_MEMORY_MMAP;
474 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs); 502 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);
475 503
476 input_buffer_map_.clear(); 504 input_buffer_map_.clear();
477 } 505 }
478 506
479 void V4L2JpegDecodeAccelerator::DestroyOutputBuffers() { 507 void V4L2JpegDecodeAccelerator::DestroyOutputBuffers() {
480 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); 508 DCHECK(decoder_task_runner_->BelongsToCurrentThread());
481 509
482 free_output_buffers_.clear(); 510 free_output_buffers_.clear();
483 511
484 if (output_buffer_map_.empty()) 512 if (output_buffer_map_.empty())
485 return; 513 return;
486 514
487 if (output_streamon_) { 515 if (output_streamon_) {
488 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 516 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
489 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMOFF, &type); 517 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMOFF, &type);
490 output_streamon_ = false; 518 output_streamon_ = false;
491 } 519 }
492 520
493 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { 521 for (const auto& output_record : output_buffer_map_) {
494 BufferRecord& output_record = output_buffer_map_[i]; 522 for (size_t i = 0; i < output_buffer_num_planes_; ++i) {
495 device_->Munmap(output_record.address, output_record.length); 523 device_->Munmap(output_record.address[i], output_record.length[i]);
524 }
496 } 525 }
497 526
498 struct v4l2_requestbuffers reqbufs; 527 struct v4l2_requestbuffers reqbufs;
499 memset(&reqbufs, 0, sizeof(reqbufs)); 528 memset(&reqbufs, 0, sizeof(reqbufs));
500 reqbufs.count = 0; 529 reqbufs.count = 0;
501 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 530 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
502 reqbufs.memory = V4L2_MEMORY_MMAP; 531 reqbufs.memory = V4L2_MEMORY_MMAP;
503 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs); 532 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);
504 533
505 output_buffer_map_.clear(); 534 output_buffer_map_.clear();
535 output_buffer_num_planes_ = 0;
506 } 536 }
507 537
508 void V4L2JpegDecodeAccelerator::DevicePollTask() { 538 void V4L2JpegDecodeAccelerator::DevicePollTask() {
509 DCHECK(device_poll_task_runner_->BelongsToCurrentThread()); 539 DCHECK(device_poll_task_runner_->BelongsToCurrentThread());
510 540
511 bool event_pending; 541 bool event_pending;
512 if (!device_->Poll(true, &event_pending)) { 542 if (!device_->Poll(true, &event_pending)) {
513 PLOG(ERROR) << __func__ << ": Poll device error."; 543 PLOG(ERROR) << __func__ << ": Poll device error.";
514 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); 544 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
515 return; 545 return;
(...skipping 74 matching lines...) Expand 10 before | Expand all | Expand 10 after
590 // If input buffers are required to re-create, do not enqueue input record 620 // If input buffers are required to re-create, do not enqueue input record
591 // until all pending frames are handled by device. 621 // until all pending frames are handled by device.
592 if (ShouldRecreateInputBuffers()) 622 if (ShouldRecreateInputBuffers())
593 break; 623 break;
594 if (!EnqueueInputRecord()) 624 if (!EnqueueInputRecord())
595 return; 625 return;
596 } 626 }
597 // Check here because we cannot STREAMON before QBUF in earlier kernel. 627 // Check here because we cannot STREAMON before QBUF in earlier kernel.
598 // (kernel version < 3.14) 628 // (kernel version < 3.14)
599 if (!input_streamon_ && InputBufferQueuedCount()) { 629 if (!input_streamon_ && InputBufferQueuedCount()) {
600 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT; 630 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
601 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type); 631 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
602 input_streamon_ = true; 632 input_streamon_ = true;
603 } 633 }
604 } 634 }
605 635
606 void V4L2JpegDecodeAccelerator::EnqueueOutput() { 636 void V4L2JpegDecodeAccelerator::EnqueueOutput() {
607 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); 637 DCHECK(decoder_task_runner_->BelongsToCurrentThread());
608 // Output record can be enqueued because the output coded sizes of the frames 638 // Output record can be enqueued because the output coded sizes of the frames
609 // currently in the pipeline are all the same. 639 // currently in the pipeline are all the same.
610 while (running_jobs_.size() > OutputBufferQueuedCount() && 640 while (running_jobs_.size() > OutputBufferQueuedCount() &&
611 !free_output_buffers_.empty()) { 641 !free_output_buffers_.empty()) {
612 if (!EnqueueOutputRecord()) 642 if (!EnqueueOutputRecord())
613 return; 643 return;
614 } 644 }
615 // Check here because we cannot STREAMON before QBUF in earlier kernel. 645 // Check here because we cannot STREAMON before QBUF in earlier kernel.
616 // (kernel version < 3.14) 646 // (kernel version < 3.14)
617 if (!output_streamon_ && OutputBufferQueuedCount()) { 647 if (!output_streamon_ && OutputBufferQueuedCount()) {
618 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 648 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
619 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type); 649 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
620 output_streamon_ = true; 650 output_streamon_ = true;
621 } 651 }
622 } 652 }
623 653
624 static bool CopyOutputImage(const uint32_t src_pixelformat, 654 bool V4L2JpegDecodeAccelerator::ConvertOutputImage(
625 const void* src_addr, 655 const BufferRecord& output_buffer,
626 const gfx::Size& src_coded_size, 656 const scoped_refptr<VideoFrame>& dst_frame) {
627 const scoped_refptr<VideoFrame>& dst_frame) {
628 VideoPixelFormat format =
629 V4L2Device::V4L2PixFmtToVideoPixelFormat(src_pixelformat);
630 size_t src_size = VideoFrame::AllocationSize(format, src_coded_size);
631 uint8_t* dst_y = dst_frame->data(VideoFrame::kYPlane); 657 uint8_t* dst_y = dst_frame->data(VideoFrame::kYPlane);
632 uint8_t* dst_u = dst_frame->data(VideoFrame::kUPlane); 658 uint8_t* dst_u = dst_frame->data(VideoFrame::kUPlane);
633 uint8_t* dst_v = dst_frame->data(VideoFrame::kVPlane); 659 uint8_t* dst_v = dst_frame->data(VideoFrame::kVPlane);
634 size_t dst_y_stride = dst_frame->stride(VideoFrame::kYPlane); 660 size_t dst_y_stride = dst_frame->stride(VideoFrame::kYPlane);
635 size_t dst_u_stride = dst_frame->stride(VideoFrame::kUPlane); 661 size_t dst_u_stride = dst_frame->stride(VideoFrame::kUPlane);
636 size_t dst_v_stride = dst_frame->stride(VideoFrame::kVPlane); 662 size_t dst_v_stride = dst_frame->stride(VideoFrame::kVPlane);
637 663
638 // If the source format is I420, ConvertToI420 will simply copy the frame. 664 if (output_buffer_num_planes_ == 1) {
639 if (libyuv::ConvertToI420(static_cast<uint8_t*>(const_cast<void*>(src_addr)), 665 // Use ConvertToI420 to convert all splane buffers.
640 src_size, 666 // If the source format is I420, ConvertToI420 will simply copy the frame.
641 dst_y, dst_y_stride, 667 VideoPixelFormat format =
642 dst_u, dst_u_stride, 668 V4L2Device::V4L2PixFmtToVideoPixelFormat(output_buffer_pixelformat_);
643 dst_v, dst_v_stride, 669 size_t src_size =
644 0, 0, 670 VideoFrame::AllocationSize(format, output_buffer_coded_size_);
645 src_coded_size.width(), 671 if (libyuv::ConvertToI420(
646 src_coded_size.height(), 672 static_cast<uint8_t*>(output_buffer.address[0]), src_size, dst_y,
647 dst_frame->coded_size().width(), 673 dst_y_stride, dst_u, dst_u_stride, dst_v, dst_v_stride, 0, 0,
648 dst_frame->coded_size().height(), 674 output_buffer_coded_size_.width(),
649 libyuv::kRotate0, 675 output_buffer_coded_size_.height(), dst_frame->coded_size().width(),
650 src_pixelformat)) { 676 dst_frame->coded_size().height(), libyuv::kRotate0,
651 LOG(ERROR) << "ConvertToI420 failed. Source format: " << src_pixelformat; 677 output_buffer_pixelformat_)) {
678 LOG(ERROR) << "ConvertToI420 failed. Source format: "
679 << output_buffer_pixelformat_;
680 return false;
681 }
682 } else if (output_buffer_pixelformat_ == V4L2_PIX_FMT_YUV420M ||
683 output_buffer_pixelformat_ == V4L2_PIX_FMT_YUV422M) {
684 uint8_t* src_y = static_cast<uint8_t*>(output_buffer.address[0]);
685 uint8_t* src_u = static_cast<uint8_t*>(output_buffer.address[1]);
686 uint8_t* src_v = static_cast<uint8_t*>(output_buffer.address[2]);
687 size_t src_y_stride = output_buffer_coded_size_.width();
688 size_t src_u_stride = output_buffer_coded_size_.width() / 2;
689 size_t src_v_stride = output_buffer_coded_size_.width() / 2;
690 if (output_buffer_pixelformat_ == V4L2_PIX_FMT_YUV420M) {
691 if (libyuv::I420Copy(src_y, src_y_stride, src_u, src_u_stride, src_v,
692 src_v_stride, dst_y, dst_y_stride, dst_u,
693 dst_u_stride, dst_v, dst_v_stride,
694 output_buffer_coded_size_.width(),
695 output_buffer_coded_size_.height())) {
696 LOG(ERROR) << "I420Copy failed";
697 return false;
698 }
699 } else { // output_buffer_pixelformat_ == V4L2_PIX_FMT_YUV422M
700 if (libyuv::I422ToI420(src_y, src_y_stride, src_u, src_u_stride, src_v,
701 src_v_stride, dst_y, dst_y_stride, dst_u,
702 dst_u_stride, dst_v, dst_v_stride,
703 output_buffer_coded_size_.width(),
704 output_buffer_coded_size_.height())) {
705 LOG(ERROR) << "I422ToI420 failed";
706 return false;
707 }
708 }
709 } else {
710 LOG(ERROR) << "Unsupported source buffer format: "
711 << output_buffer_pixelformat_;
652 return false; 712 return false;
653 } 713 }
654 return true; 714 return true;
655 } 715 }
656 716
657 void V4L2JpegDecodeAccelerator::Dequeue() { 717 void V4L2JpegDecodeAccelerator::Dequeue() {
658 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); 718 DCHECK(decoder_task_runner_->BelongsToCurrentThread());
719 DCHECK_GE(output_buffer_num_planes_, 0u);
Pawel Osciak 2016/12/23 06:05:52 Should this be GT?
jcliang 2016/12/23 07:50:10 Hmm, this check should be removed. The first call
659 720
660 // Dequeue completed input (VIDEO_OUTPUT) buffers, 721 // Dequeue completed input (VIDEO_OUTPUT) buffers,
661 // and recycle to the free list. 722 // and recycle to the free list.
662 struct v4l2_buffer dqbuf; 723 struct v4l2_buffer dqbuf;
724 struct v4l2_plane planes[VIDEO_MAX_PLANES];
663 while (InputBufferQueuedCount() > 0) { 725 while (InputBufferQueuedCount() > 0) {
664 DCHECK(input_streamon_); 726 DCHECK(input_streamon_);
665 memset(&dqbuf, 0, sizeof(dqbuf)); 727 memset(&dqbuf, 0, sizeof(dqbuf));
666 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; 728 memset(planes, 0, sizeof(planes));
729 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
667 dqbuf.memory = V4L2_MEMORY_MMAP; 730 dqbuf.memory = V4L2_MEMORY_MMAP;
731 dqbuf.length = arraysize(planes);
732 dqbuf.m.planes = planes;
668 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) { 733 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
669 if (errno == EAGAIN) { 734 if (errno == EAGAIN) {
670 // EAGAIN if we're just out of buffers to dequeue. 735 // EAGAIN if we're just out of buffers to dequeue.
671 break; 736 break;
672 } 737 }
673 PLOG(ERROR) << "ioctl() failed: input buffer VIDIOC_DQBUF failed."; 738 PLOG(ERROR) << "ioctl() failed: input buffer VIDIOC_DQBUF failed.";
674 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); 739 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
675 return; 740 return;
676 } 741 }
677 BufferRecord& input_record = input_buffer_map_[dqbuf.index]; 742 BufferRecord& input_record = input_buffer_map_[dqbuf.index];
678 DCHECK(input_record.at_device); 743 DCHECK(input_record.at_device);
679 input_record.at_device = false; 744 input_record.at_device = false;
680 free_input_buffers_.push_back(dqbuf.index); 745 free_input_buffers_.push_back(dqbuf.index);
681 746
682 if (dqbuf.flags & V4L2_BUF_FLAG_ERROR) { 747 if (dqbuf.flags & V4L2_BUF_FLAG_ERROR) {
683 DVLOG(1) << "Dequeue input buffer error."; 748 DVLOG(1) << "Dequeue input buffer error.";
684 PostNotifyError(kInvalidBitstreamBufferId, UNSUPPORTED_JPEG); 749 PostNotifyError(kInvalidBitstreamBufferId, UNSUPPORTED_JPEG);
685 running_jobs_.pop(); 750 running_jobs_.pop();
686 } 751 }
687 } 752 }
688 753
689 // Dequeue completed output (VIDEO_CAPTURE) buffers, recycle to the free list. 754 // Dequeue completed output (VIDEO_CAPTURE) buffers, recycle to the free list.
690 // Return the finished buffer to the client via the job ready callback. 755 // Return the finished buffer to the client via the job ready callback.
691 // If dequeued input buffer has an error, the error frame has been removed 756 // If dequeued input buffer has an error, the error frame has been removed
692 // |running_jobs_|. We only have to dequeue output buffer when we actually 757 // |running_jobs_|. We only have to dequeue output buffer when we actually
693 // have pending frames in |running_jobs_| and also enqueued output buffers. 758 // have pending frames in |running_jobs_| and also enqueued output buffers.
694 while (!running_jobs_.empty() && OutputBufferQueuedCount() > 0) { 759 while (!running_jobs_.empty() && OutputBufferQueuedCount() > 0) {
695 DCHECK(output_streamon_); 760 DCHECK(output_streamon_);
696 memset(&dqbuf, 0, sizeof(dqbuf)); 761 memset(&dqbuf, 0, sizeof(dqbuf));
697 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 762 memset(planes, 0, sizeof(planes));
763 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
698 // From experiments, using MMAP and memory copy is still faster than 764 // From experiments, using MMAP and memory copy is still faster than
699 // USERPTR. Also, the client doesn't need to consider the buffer alignment 765 // USERPTR. Also, the client doesn't need to consider the buffer alignment
700 // and the JpegDecodeAccelerator API will be simpler. 766 // and the JpegDecodeAccelerator API will be simpler.
701 dqbuf.memory = V4L2_MEMORY_MMAP; 767 dqbuf.memory = V4L2_MEMORY_MMAP;
768 dqbuf.length = arraysize(planes);
769 dqbuf.m.planes = planes;
702 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) { 770 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
703 if (errno == EAGAIN) { 771 if (errno == EAGAIN) {
704 // EAGAIN if we're just out of buffers to dequeue. 772 // EAGAIN if we're just out of buffers to dequeue.
705 break; 773 break;
706 } 774 }
707 PLOG(ERROR) << "ioctl() failed: output buffer VIDIOC_DQBUF failed."; 775 PLOG(ERROR) << "ioctl() failed: output buffer VIDIOC_DQBUF failed.";
708 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); 776 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
709 return; 777 return;
710 } 778 }
711 BufferRecord& output_record = output_buffer_map_[dqbuf.index]; 779 BufferRecord& output_record = output_buffer_map_[dqbuf.index];
712 DCHECK(output_record.at_device); 780 DCHECK(output_record.at_device);
713 output_record.at_device = false; 781 output_record.at_device = false;
714 free_output_buffers_.push_back(dqbuf.index); 782 free_output_buffers_.push_back(dqbuf.index);
715 783
716 // Jobs are always processed in FIFO order. 784 // Jobs are always processed in FIFO order.
717 linked_ptr<JobRecord> job_record = running_jobs_.front(); 785 linked_ptr<JobRecord> job_record = running_jobs_.front();
718 running_jobs_.pop(); 786 running_jobs_.pop();
719 787
720 if (dqbuf.flags & V4L2_BUF_FLAG_ERROR) { 788 if (dqbuf.flags & V4L2_BUF_FLAG_ERROR) {
721 DVLOG(1) << "Dequeue output buffer error."; 789 DVLOG(1) << "Dequeue output buffer error.";
722 PostNotifyError(kInvalidBitstreamBufferId, UNSUPPORTED_JPEG); 790 PostNotifyError(kInvalidBitstreamBufferId, UNSUPPORTED_JPEG);
723 } else { 791 } else {
724 // Copy the decoded data from output buffer to the buffer provided by the 792 // Copy the decoded data from output buffer to the buffer provided by the
725 // client. Do format conversion when output format is not 793 // client. Do format conversion when output format is not
726 // V4L2_PIX_FMT_YUV420. 794 // V4L2_PIX_FMT_YUV420.
727 if (!CopyOutputImage(output_buffer_pixelformat_, output_record.address, 795 if (!ConvertOutputImage(output_record, job_record->out_frame)) {
728 output_buffer_coded_size_, job_record->out_frame)) {
729 PostNotifyError(job_record->bitstream_buffer_id, PLATFORM_FAILURE); 796 PostNotifyError(job_record->bitstream_buffer_id, PLATFORM_FAILURE);
730 return; 797 return;
731 } 798 }
732
733 DVLOG(3) << "Decoding finished, returning bitstream buffer, id=" 799 DVLOG(3) << "Decoding finished, returning bitstream buffer, id="
734 << job_record->bitstream_buffer_id; 800 << job_record->bitstream_buffer_id;
735 801
736 child_task_runner_->PostTask( 802 child_task_runner_->PostTask(
737 FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::VideoFrameReady, 803 FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::VideoFrameReady,
738 weak_ptr_, job_record->bitstream_buffer_id)); 804 weak_ptr_, job_record->bitstream_buffer_id));
739 } 805 }
740 } 806 }
741 } 807 }
742 808
(...skipping 84 matching lines...) Expand 10 before | Expand all | Expand 10 after
827 893
828 // Enqueue an input (VIDEO_OUTPUT) buffer for an input video frame. 894 // Enqueue an input (VIDEO_OUTPUT) buffer for an input video frame.
829 linked_ptr<JobRecord> job_record = input_jobs_.front(); 895 linked_ptr<JobRecord> job_record = input_jobs_.front();
830 input_jobs_.pop(); 896 input_jobs_.pop();
831 const int index = free_input_buffers_.back(); 897 const int index = free_input_buffers_.back();
832 BufferRecord& input_record = input_buffer_map_[index]; 898 BufferRecord& input_record = input_buffer_map_[index];
833 DCHECK(!input_record.at_device); 899 DCHECK(!input_record.at_device);
834 900
835 // It will add a default Huffman segment if it is missing. 901 // It will add a default Huffman segment if it is missing.
836 if (!AddHuffmanTable(job_record->shm.memory(), job_record->shm.size(), 902 if (!AddHuffmanTable(job_record->shm.memory(), job_record->shm.size(),
837 input_record.address, input_record.length)) { 903 input_record.address[0], input_record.length[0])) {
838 PostNotifyError(job_record->bitstream_buffer_id, PARSE_JPEG_FAILED); 904 PostNotifyError(job_record->bitstream_buffer_id, PARSE_JPEG_FAILED);
839 return false; 905 return false;
840 } 906 }
841 907
842 struct v4l2_buffer qbuf; 908 struct v4l2_buffer qbuf;
909 struct v4l2_plane planes[VIDEO_MAX_PLANES];
843 memset(&qbuf, 0, sizeof(qbuf)); 910 memset(&qbuf, 0, sizeof(qbuf));
911 memset(planes, 0, sizeof(planes));
844 qbuf.index = index; 912 qbuf.index = index;
845 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; 913 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
846 qbuf.memory = V4L2_MEMORY_MMAP; 914 qbuf.memory = V4L2_MEMORY_MMAP;
915 qbuf.length = arraysize(planes);
916 // There is only one plane for V4L2_PIX_FMT_JPEG.
917 planes[0].bytesused = input_record.length[0];
918 qbuf.m.planes = planes;
847 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf); 919 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
848 input_record.at_device = true; 920 input_record.at_device = true;
849 running_jobs_.push(job_record); 921 running_jobs_.push(job_record);
850 free_input_buffers_.pop_back(); 922 free_input_buffers_.pop_back();
851 923
852 DVLOG(3) << __func__ 924 DVLOG(3) << __func__
853 << ": enqueued frame id=" << job_record->bitstream_buffer_id 925 << ": enqueued frame id=" << job_record->bitstream_buffer_id
854 << " to device."; 926 << " to device.";
855 return true; 927 return true;
856 } 928 }
857 929
858 bool V4L2JpegDecodeAccelerator::EnqueueOutputRecord() { 930 bool V4L2JpegDecodeAccelerator::EnqueueOutputRecord() {
859 DCHECK(!free_output_buffers_.empty()); 931 DCHECK(!free_output_buffers_.empty());
932 DCHECK_GE(output_buffer_num_planes_, 0u);
Pawel Osciak 2016/12/23 06:05:52 GT?
jcliang 2016/12/23 07:50:10 Done.
860 933
861 // Enqueue an output (VIDEO_CAPTURE) buffer. 934 // Enqueue an output (VIDEO_CAPTURE) buffer.
862 const int index = free_output_buffers_.back(); 935 const int index = free_output_buffers_.back();
863 BufferRecord& output_record = output_buffer_map_[index]; 936 BufferRecord& output_record = output_buffer_map_[index];
864 DCHECK(!output_record.at_device); 937 DCHECK(!output_record.at_device);
865 struct v4l2_buffer qbuf; 938 struct v4l2_buffer qbuf;
939 struct v4l2_plane planes[VIDEO_MAX_PLANES];
866 memset(&qbuf, 0, sizeof(qbuf)); 940 memset(&qbuf, 0, sizeof(qbuf));
941 memset(planes, 0, sizeof(planes));
867 qbuf.index = index; 942 qbuf.index = index;
868 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 943 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
869 qbuf.memory = V4L2_MEMORY_MMAP; 944 qbuf.memory = V4L2_MEMORY_MMAP;
945 qbuf.length = arraysize(planes);
946 qbuf.m.planes = planes;
870 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf); 947 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
871 output_record.at_device = true; 948 output_record.at_device = true;
872 free_output_buffers_.pop_back(); 949 free_output_buffers_.pop_back();
873 return true; 950 return true;
874 } 951 }
875 952
876 void V4L2JpegDecodeAccelerator::StartDevicePoll() { 953 void V4L2JpegDecodeAccelerator::StartDevicePoll() {
877 DVLOG(3) << __func__ << ": starting device poll"; 954 DVLOG(3) << __func__ << ": starting device poll";
878 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); 955 DCHECK(decoder_task_runner_->BelongsToCurrentThread());
879 DCHECK(!device_poll_thread_.IsRunning()); 956 DCHECK(!device_poll_thread_.IsRunning());
(...skipping 18 matching lines...) Expand all
898 device_poll_thread_.Stop(); 975 device_poll_thread_.Stop();
899 976
900 // Clear the interrupt now, to be sure. 977 // Clear the interrupt now, to be sure.
901 if (!device_->ClearDevicePollInterrupt()) 978 if (!device_->ClearDevicePollInterrupt())
902 return false; 979 return false;
903 980
904 return true; 981 return true;
905 } 982 }
906 983
907 } // namespace media 984 } // namespace media
OLDNEW
« media/gpu/v4l2_jpeg_decode_accelerator.h ('K') | « media/gpu/v4l2_jpeg_decode_accelerator.h ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698