Chromium Code Reviews

Side by Side Diff: media/gpu/v4l2_jpeg_decode_accelerator.cc

Issue 2559423002: media/gpu: switch v4l2_jpeg_decode_accelerator to use multi-planar APIs (Closed)
Patch Set: remove PIXEL_FORMAT_I422 (created 4 years ago)
1 // Copyright 2015 The Chromium Authors. All rights reserved. 1 // Copyright 2015 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "media/gpu/v4l2_jpeg_decode_accelerator.h" 5 #include "media/gpu/v4l2_jpeg_decode_accelerator.h"
6 6
7 #include <errno.h> 7 #include <errno.h>
8 #include <linux/videodev2.h> 8 #include <linux/videodev2.h>
9 #include <string.h> 9 #include <string.h>
10 #include <sys/mman.h> 10 #include <sys/mman.h>
(...skipping 90 matching lines...)
101 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5A, 0x63, 0x64, 0x65, 0x66, 101 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5A, 0x63, 0x64, 0x65, 0x66,
102 0x67, 0x68, 0x69, 0x6A, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7A, 102 0x67, 0x68, 0x69, 0x6A, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7A,
103 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8A, 0x92, 0x93, 0x94, 103 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8A, 0x92, 0x93, 0x94,
104 0x95, 0x96, 0x97, 0x98, 0x99, 0x9A, 0xA2, 0xA3, 0xA4, 0xA5, 0xA6, 0xA7, 104 0x95, 0x96, 0x97, 0x98, 0x99, 0x9A, 0xA2, 0xA3, 0xA4, 0xA5, 0xA6, 0xA7,
105 0xA8, 0xA9, 0xAA, 0xB2, 0xB3, 0xB4, 0xB5, 0xB6, 0xB7, 0xB8, 0xB9, 0xBA, 105 0xA8, 0xA9, 0xAA, 0xB2, 0xB3, 0xB4, 0xB5, 0xB6, 0xB7, 0xB8, 0xB9, 0xBA,
106 0xC2, 0xC3, 0xC4, 0xC5, 0xC6, 0xC7, 0xC8, 0xC9, 0xCA, 0xD2, 0xD3, 0xD4, 106 0xC2, 0xC3, 0xC4, 0xC5, 0xC6, 0xC7, 0xC8, 0xC9, 0xCA, 0xD2, 0xD3, 0xD4,
107 0xD5, 0xD6, 0xD7, 0xD8, 0xD9, 0xDA, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7, 107 0xD5, 0xD6, 0xD7, 0xD8, 0xD9, 0xDA, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7,
108 0xE8, 0xE9, 0xEA, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0xF7, 0xF8, 0xF9, 0xFA}; 108 0xE8, 0xE9, 0xEA, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0xF7, 0xF8, 0xF9, 0xFA};
109 109
110 V4L2JpegDecodeAccelerator::BufferRecord::BufferRecord() 110 V4L2JpegDecodeAccelerator::BufferRecord::BufferRecord()
111 : address(nullptr), length(0), at_device(false) {} 111 : num_planes(0), at_device(false) {
112 memset(address, 0, sizeof(address));
113 memset(length, 0, sizeof(length));
114 }
112 115
113 V4L2JpegDecodeAccelerator::BufferRecord::~BufferRecord() {} 116 V4L2JpegDecodeAccelerator::BufferRecord::~BufferRecord() {}
114 117
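Note on the BufferRecord change above: the constructor now zeroes per-plane address and length arrays instead of a single pointer/length pair, and the rest of the patch indexes them as address[0], length[0], and so on. The new member layout lives in media/gpu/v4l2_jpeg_decode_accelerator.h, which is not part of this file's diff; the following is a hypothetical sketch of what it amounts to, with the array size (VIDEO_MAX_PLANES from <linux/videodev2.h>) being an assumption rather than the header's actual choice.

// Hypothetical sketch only; see media/gpu/v4l2_jpeg_decode_accelerator.h for
// the real definition.
struct BufferRecord {
  void* address[VIDEO_MAX_PLANES];  // mmap()-ed address of each plane.
  size_t length[VIDEO_MAX_PLANES];  // mmap()-ed length of each plane.
  size_t num_planes;                // Number of valid entries above.
  bool at_device;                   // True while the buffer is queued to the driver.
};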
115 V4L2JpegDecodeAccelerator::JobRecord::JobRecord( 118 V4L2JpegDecodeAccelerator::JobRecord::JobRecord(
116 const BitstreamBuffer& bitstream_buffer, 119 const BitstreamBuffer& bitstream_buffer,
117 scoped_refptr<VideoFrame> video_frame) 120 scoped_refptr<VideoFrame> video_frame)
118 : bitstream_buffer_id(bitstream_buffer.id()), 121 : bitstream_buffer_id(bitstream_buffer.id()),
119 shm(bitstream_buffer, true), 122 shm(bitstream_buffer, true),
120 out_frame(video_frame) {} 123 out_frame(video_frame) {}
121 124
(...skipping 65 matching lines...)
187 bool V4L2JpegDecodeAccelerator::Initialize(Client* client) { 190 bool V4L2JpegDecodeAccelerator::Initialize(Client* client) {
188 DCHECK(child_task_runner_->BelongsToCurrentThread()); 191 DCHECK(child_task_runner_->BelongsToCurrentThread());
189 192
190 if (!device_->Open(V4L2Device::Type::kJpegDecoder, V4L2_PIX_FMT_JPEG)) { 193 if (!device_->Open(V4L2Device::Type::kJpegDecoder, V4L2_PIX_FMT_JPEG)) {
191 LOG(ERROR) << "Failed to open device"; 194 LOG(ERROR) << "Failed to open device";
192 return false; 195 return false;
193 } 196 }
194 197
195 // Capabilities check. 198 // Capabilities check.
196 struct v4l2_capability caps; 199 struct v4l2_capability caps;
197 const __u32 kCapsRequired = V4L2_CAP_STREAMING | V4L2_CAP_VIDEO_M2M; 200 const __u32 kCapsRequiredMplane =
201 V4L2_CAP_STREAMING | V4L2_CAP_VIDEO_M2M_MPLANE;
202 const __u32 kCapsRequiredSplane = V4L2_CAP_STREAMING | V4L2_CAP_VIDEO_M2M;
198 memset(&caps, 0, sizeof(caps)); 203 memset(&caps, 0, sizeof(caps));
199 if (device_->Ioctl(VIDIOC_QUERYCAP, &caps) != 0) { 204 if (device_->Ioctl(VIDIOC_QUERYCAP, &caps) != 0) {
200 PLOG(ERROR) << __func__ << ": ioctl() failed: VIDIOC_QUERYCAP"; 205 PLOG(ERROR) << __func__ << ": ioctl() failed: VIDIOC_QUERYCAP";
201 return false; 206 return false;
202 } 207 }
203 if ((caps.capabilities & kCapsRequired) != kCapsRequired) { 208 if ((caps.capabilities & kCapsRequiredMplane) == kCapsRequiredMplane) {
209 input_buf_type_ = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
210 output_buf_type_ = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
211 } else if ((caps.capabilities & kCapsRequiredSplane) == kCapsRequiredSplane) {
212 input_buf_type_ = V4L2_BUF_TYPE_VIDEO_OUTPUT;
213 output_buf_type_ = V4L2_BUF_TYPE_VIDEO_CAPTURE;
214 } else {
204 LOG(ERROR) << __func__ << ": VIDIOC_QUERYCAP, caps check failed: 0x" 215 LOG(ERROR) << __func__ << ": VIDIOC_QUERYCAP, caps check failed: 0x"
205 << std::hex << caps.capabilities; 216 << std::hex << caps.capabilities;
206 return false; 217 return false;
207 } 218 }
208 219
209 // Subscribe to the source change event. 220 // Subscribe to the source change event.
210 struct v4l2_event_subscription sub; 221 struct v4l2_event_subscription sub;
211 memset(&sub, 0, sizeof(sub)); 222 memset(&sub, 0, sizeof(sub));
212 sub.type = V4L2_EVENT_SOURCE_CHANGE; 223 sub.type = V4L2_EVENT_SOURCE_CHANGE;
213 if (device_->Ioctl(VIDIOC_SUBSCRIBE_EVENT, &sub) != 0) { 224 if (device_->Ioctl(VIDIOC_SUBSCRIBE_EVENT, &sub) != 0) {
(...skipping 76 matching lines...)
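The capability check above prefers the multi-planar memory-to-memory capability and falls back to the single-planar one, recording the buffer types to use for the OUTPUT (input JPEG) and CAPTURE (decoded frame) queues. Below is a minimal standalone sketch of the same probe against a raw V4L2 file descriptor; the function name and the use of a plain ioctl() instead of the V4L2Device wrapper are illustrative assumptions, not code from this patch.

#include <linux/videodev2.h>
#include <string.h>
#include <sys/ioctl.h>

// Returns true if |fd| supports memory-to-memory streaming, and reports via
// |*multiplanar| whether the *_MPLANE buffer types should be used.
bool ProbeM2MCaps(int fd, bool* multiplanar) {
  struct v4l2_capability caps;
  memset(&caps, 0, sizeof(caps));
  if (ioctl(fd, VIDIOC_QUERYCAP, &caps) != 0)
    return false;  // QUERYCAP itself failed.
  if ((caps.capabilities & V4L2_CAP_STREAMING) == 0)
    return false;  // Streaming I/O is required in either case.
  if (caps.capabilities & V4L2_CAP_VIDEO_M2M_MPLANE) {
    *multiplanar = true;   // Use V4L2_BUF_TYPE_VIDEO_{OUTPUT,CAPTURE}_MPLANE.
    return true;
  }
  if (caps.capabilities & V4L2_CAP_VIDEO_M2M) {
    *multiplanar = false;  // Use V4L2_BUF_TYPE_VIDEO_{OUTPUT,CAPTURE}.
    return true;
  }
  return false;
}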
290 301
291 bool V4L2JpegDecodeAccelerator::ShouldRecreateInputBuffers() { 302 bool V4L2JpegDecodeAccelerator::ShouldRecreateInputBuffers() {
292 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); 303 DCHECK(decoder_task_runner_->BelongsToCurrentThread());
293 if (input_jobs_.empty()) 304 if (input_jobs_.empty())
294 return false; 305 return false;
295 306
296 linked_ptr<JobRecord> job_record = input_jobs_.front(); 307 linked_ptr<JobRecord> job_record = input_jobs_.front();
297 // Check input buffer size is enough 308 // Check input buffer size is enough
298 return (input_buffer_map_.empty() || 309 return (input_buffer_map_.empty() ||
299 (job_record->shm.size() + sizeof(kDefaultDhtSeg)) > 310 (job_record->shm.size() + sizeof(kDefaultDhtSeg)) >
300 input_buffer_map_.front().length); 311 input_buffer_map_.front().length[0]);
301 } 312 }
302 313
303 bool V4L2JpegDecodeAccelerator::RecreateInputBuffers() { 314 bool V4L2JpegDecodeAccelerator::RecreateInputBuffers() {
304 DVLOG(3) << __func__; 315 DVLOG(3) << __func__;
305 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); 316 DCHECK(decoder_task_runner_->BelongsToCurrentThread());
306 317
307 // If running queue is not empty, we should wait until pending frames finish. 318 // If running queue is not empty, we should wait until pending frames finish.
308 if (!running_jobs_.empty()) 319 if (!running_jobs_.empty())
309 return true; 320 return true;
310 321
(...skipping 26 matching lines...)
337 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); 348 DCHECK(decoder_task_runner_->BelongsToCurrentThread());
338 DCHECK(!input_streamon_); 349 DCHECK(!input_streamon_);
339 DCHECK(!input_jobs_.empty()); 350 DCHECK(!input_jobs_.empty());
340 linked_ptr<JobRecord> job_record = input_jobs_.front(); 351 linked_ptr<JobRecord> job_record = input_jobs_.front();
341 // The input image may miss huffman table. We didn't parse the image before, 352 // The input image may miss huffman table. We didn't parse the image before,
342 // so we create more to avoid the situation of not enough memory. 353 // so we create more to avoid the situation of not enough memory.
343 // Reserve twice size to avoid recreating input buffer frequently. 354 // Reserve twice size to avoid recreating input buffer frequently.
344 size_t reserve_size = (job_record->shm.size() + sizeof(kDefaultDhtSeg)) * 2; 355 size_t reserve_size = (job_record->shm.size() + sizeof(kDefaultDhtSeg)) * 2;
345 struct v4l2_format format; 356 struct v4l2_format format;
346 memset(&format, 0, sizeof(format)); 357 memset(&format, 0, sizeof(format));
347 format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; 358 format.type = input_buf_type_;
348 format.fmt.pix.pixelformat = V4L2_PIX_FMT_JPEG; 359 if (V4L2_TYPE_IS_MULTIPLANAR(input_buf_type_)) {
349 format.fmt.pix.sizeimage = reserve_size; 360 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_JPEG;
350 format.fmt.pix.field = V4L2_FIELD_ANY; 361 format.fmt.pix_mp.plane_fmt[0].sizeimage = reserve_size;
362 format.fmt.pix_mp.field = V4L2_FIELD_ANY;
363 } else {
364 format.fmt.pix.pixelformat = V4L2_PIX_FMT_JPEG;
365 format.fmt.pix.sizeimage = reserve_size;
366 format.fmt.pix.field = V4L2_FIELD_ANY;
367 }
351 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format); 368 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
352 369
353 struct v4l2_requestbuffers reqbufs; 370 struct v4l2_requestbuffers reqbufs;
354 memset(&reqbufs, 0, sizeof(reqbufs)); 371 memset(&reqbufs, 0, sizeof(reqbufs));
355 reqbufs.count = kBufferCount; 372 reqbufs.count = kBufferCount;
356 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; 373 reqbufs.type = input_buf_type_;
357 reqbufs.memory = V4L2_MEMORY_MMAP; 374 reqbufs.memory = V4L2_MEMORY_MMAP;
358 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs); 375 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
359 376
360 DCHECK(input_buffer_map_.empty()); 377 DCHECK(input_buffer_map_.empty());
361 input_buffer_map_.resize(reqbufs.count); 378 input_buffer_map_.resize(reqbufs.count);
362 379
363 for (size_t i = 0; i < input_buffer_map_.size(); ++i) { 380 for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
364 free_input_buffers_.push_back(i); 381 free_input_buffers_.push_back(i);
365 382
366 struct v4l2_buffer buffer; 383 struct v4l2_buffer buffer;
384 struct v4l2_plane plane;
367 memset(&buffer, 0, sizeof(buffer)); 385 memset(&buffer, 0, sizeof(buffer));
386 memset(&plane, 0, sizeof(plane));
368 buffer.index = i; 387 buffer.index = i;
369 buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; 388 buffer.type = input_buf_type_;
389 if (V4L2_TYPE_IS_MULTIPLANAR(input_buf_type_)) {
390 buffer.m.planes = &plane;
391 buffer.length = kInputPlanes;
392 }
370 buffer.memory = V4L2_MEMORY_MMAP; 393 buffer.memory = V4L2_MEMORY_MMAP;
371 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer); 394 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
372 void* address = device_->Mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, 395 input_buffer_map_[i].num_planes = kInputPlanes;
373 MAP_SHARED, buffer.m.offset); 396 uint32_t length, offset;
397 if (V4L2_TYPE_IS_MULTIPLANAR(input_buf_type_)) {
398 length = plane.length;
399 offset = plane.m.mem_offset;
400 } else {
401 length = buffer.length;
402 offset = buffer.m.offset;
403 }
404 void* address =
405 device_->Mmap(NULL, length, PROT_READ | PROT_WRITE, MAP_SHARED, offset);
374 if (address == MAP_FAILED) { 406 if (address == MAP_FAILED) {
375 PLOG(ERROR) << __func__ << ": mmap() failed"; 407 PLOG(ERROR) << __func__ << ": mmap() failed";
376 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); 408 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
377 return false; 409 return false;
378 } 410 }
379 input_buffer_map_[i].address = address; 411 input_buffer_map_[i].address[0] = address;
380 input_buffer_map_[i].length = buffer.length; 412 input_buffer_map_[i].length[0] = length;
381 } 413 }
382 414
383 return true; 415 return true;
384 } 416 }
385 417
386 bool V4L2JpegDecodeAccelerator::CreateOutputBuffers() { 418 bool V4L2JpegDecodeAccelerator::CreateOutputBuffers() {
387 DVLOG(3) << __func__; 419 DVLOG(3) << __func__;
388 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); 420 DCHECK(decoder_task_runner_->BelongsToCurrentThread());
389 DCHECK(!output_streamon_); 421 DCHECK(!output_streamon_);
390 DCHECK(!running_jobs_.empty()); 422 DCHECK(!running_jobs_.empty());
391 linked_ptr<JobRecord> job_record = running_jobs_.front(); 423 linked_ptr<JobRecord> job_record = running_jobs_.front();
392 424
393 size_t frame_size = VideoFrame::AllocationSize( 425 size_t frame_size = VideoFrame::AllocationSize(
394 PIXEL_FORMAT_I420, job_record->out_frame->coded_size()); 426 PIXEL_FORMAT_I420, job_record->out_frame->coded_size());
395 struct v4l2_format format; 427 struct v4l2_format format;
396 memset(&format, 0, sizeof(format)); 428 memset(&format, 0, sizeof(format));
397 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 429 format.type = output_buf_type_;
398 format.fmt.pix.width = job_record->out_frame->coded_size().width(); 430 if (V4L2_TYPE_IS_MULTIPLANAR(output_buf_type_)) {
399 format.fmt.pix.height = job_record->out_frame->coded_size().height(); 431 format.fmt.pix_mp.width = job_record->out_frame->coded_size().width();
400 format.fmt.pix.sizeimage = frame_size; 432 format.fmt.pix_mp.height = job_record->out_frame->coded_size().height();
401 format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420; 433 format.fmt.pix_mp.num_planes = 1;
henryhsu 2016/12/15 09:56:18 s/1/kInputPlanes/
jcliang 2016/12/15 14:55:47 Technically the 1 here means differently from kInp
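As jcliang's reply indicates, format.fmt.pix_mp.num_planes describes how many memory planes the requested pixel format uses, which is not the same quantity as the kInputPlanes/kOutputPlanes constants used to size v4l2_buffer plane arrays. A short illustration with standard V4L2 formats follows (a fragment for comparison only, not code from this patch, assuming a zeroed struct v4l2_format named format).

// V4L2_PIX_FMT_YUV420 is a single-memory-plane layout even through the
// MPLANE API: Y, U and V share one contiguous plane.
format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_YUV420;
format.fmt.pix_mp.num_planes = 1;

// V4L2_PIX_FMT_YUV420M ("M" for multi) splits Y, U and V into three
// separate memory planes, so num_planes must be 3.
format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_YUV420M;
format.fmt.pix_mp.num_planes = 3;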
402 format.fmt.pix.field = V4L2_FIELD_ANY; 434 format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_YUV420;
435 format.fmt.pix_mp.plane_fmt[0].sizeimage = frame_size;
436 format.fmt.pix_mp.field = V4L2_FIELD_ANY;
437 } else {
438 format.fmt.pix.width = job_record->out_frame->coded_size().width();
439 format.fmt.pix.height = job_record->out_frame->coded_size().height();
440 format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420;
441 format.fmt.pix.sizeimage = frame_size;
442 format.fmt.pix.field = V4L2_FIELD_ANY;
443 }
403 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format); 444 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
404 output_buffer_pixelformat_ = format.fmt.pix.pixelformat; 445 if (V4L2_TYPE_IS_MULTIPLANAR(output_buf_type_)) {
405 output_buffer_coded_size_.SetSize(format.fmt.pix.width, 446 output_buffer_pixelformat_ = format.fmt.pix_mp.pixelformat;
406 format.fmt.pix.height); 447 output_buffer_coded_size_.SetSize(format.fmt.pix_mp.width,
448 format.fmt.pix_mp.height);
449 } else {
450 output_buffer_pixelformat_ = format.fmt.pix.pixelformat;
451 output_buffer_coded_size_.SetSize(format.fmt.pix.width,
452 format.fmt.pix.height);
453 }
407 454
408 struct v4l2_requestbuffers reqbufs; 455 struct v4l2_requestbuffers reqbufs;
409 memset(&reqbufs, 0, sizeof(reqbufs)); 456 memset(&reqbufs, 0, sizeof(reqbufs));
410 reqbufs.count = kBufferCount; 457 reqbufs.count = kBufferCount;
411 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 458 reqbufs.type = output_buf_type_;
412 reqbufs.memory = V4L2_MEMORY_MMAP; 459 reqbufs.memory = V4L2_MEMORY_MMAP;
413 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs); 460 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
414 461
415 DCHECK(output_buffer_map_.empty()); 462 DCHECK(output_buffer_map_.empty());
416 output_buffer_map_.resize(reqbufs.count); 463 output_buffer_map_.resize(reqbufs.count);
417 464
418 VideoPixelFormat output_format = 465 VideoPixelFormat output_format =
419 V4L2Device::V4L2PixFmtToVideoPixelFormat(output_buffer_pixelformat_); 466 V4L2Device::V4L2PixFmtToVideoPixelFormat(output_buffer_pixelformat_);
420 467
421 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { 468 for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
422 free_output_buffers_.push_back(i); 469 free_output_buffers_.push_back(i);
423 470
424 struct v4l2_buffer buffer; 471 struct v4l2_buffer buffer;
472 struct v4l2_plane planes[kOutputPlanes];
425 memset(&buffer, 0, sizeof(buffer)); 473 memset(&buffer, 0, sizeof(buffer));
474 memset(planes, 0, sizeof(planes));
426 buffer.index = i; 475 buffer.index = i;
427 buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 476 buffer.type = output_buf_type_;
428 buffer.memory = V4L2_MEMORY_MMAP; 477 buffer.memory = V4L2_MEMORY_MMAP;
478 if (V4L2_TYPE_IS_MULTIPLANAR(output_buf_type_)) {
479 buffer.m.planes = planes;
480 buffer.length = kOutputPlanes;
481 }
429 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer); 482 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
430 483
431 DCHECK_GE(buffer.length, 484 if (V4L2_TYPE_IS_MULTIPLANAR(output_buf_type_)) {
432 VideoFrame::AllocationSize( 485 uint32_t total_length = 0;
433 output_format, 486 for (uint32_t i = 0; i < kOutputPlanes; ++i) {
434 gfx::Size(format.fmt.pix.width, format.fmt.pix.height))); 487 total_length += planes[i].length;
488 }
489 DCHECK_GE(total_length,
490 VideoFrame::AllocationSize(
491 output_format,
492 gfx::Size(format.fmt.pix.width, format.fmt.pix.height)));
henryhsu 2016/12/15 09:56:17 format.fmt.pix_mp.width and format.fmt.pix_mp.heig
jcliang 2016/12/15 14:55:47 Done.
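The fix requested here is to read the dimensions from the pix_mp union member when the capture queue is multi-planar; the later patch set is not shown in this diff, but the change presumably amounts to:

DCHECK_GE(total_length,
          VideoFrame::AllocationSize(
              output_format,
              gfx::Size(format.fmt.pix_mp.width, format.fmt.pix_mp.height)));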
493 } else {
494 DCHECK_GE(buffer.length,
495 VideoFrame::AllocationSize(
496 output_format,
497 gfx::Size(format.fmt.pix.width, format.fmt.pix.height)));
498 }
435 499
436 void* address = device_->Mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, 500 if (V4L2_TYPE_IS_MULTIPLANAR(output_buf_type_)) {
437 MAP_SHARED, buffer.m.offset); 501 output_buffer_map_[i].num_planes = buffer.length;
438 if (address == MAP_FAILED) { 502 for (size_t j = 0; j < buffer.length; ++j) {
439 PLOG(ERROR) << __func__ << ": mmap() failed"; 503 void* address =
440 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); 504 device_->Mmap(NULL, planes[j].length, PROT_READ | PROT_WRITE,
441 return false; 505 MAP_SHARED, planes[j].m.mem_offset);
506 if (address == MAP_FAILED) {
507 PLOG(ERROR) << __func__ << ": mmap() failed";
508 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
509 return false;
510 }
511 output_buffer_map_[i].address[j] = address;
512 output_buffer_map_[i].length[j] = planes[j].length;
513 }
514 } else {
515 output_buffer_map_[i].num_planes = 1;
516 void* address = device_->Mmap(NULL, buffer.length, PROT_READ | PROT_WRITE,
517 MAP_SHARED, buffer.m.offset);
518 if (address == MAP_FAILED) {
519 PLOG(ERROR) << __func__ << ": mmap() failed";
520 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
521 return false;
522 }
523 output_buffer_map_[i].address[0] = address;
524 output_buffer_map_[i].length[0] = buffer.length;
442 } 525 }
443 output_buffer_map_[i].address = address;
444 output_buffer_map_[i].length = buffer.length;
445 } 526 }
446 527
447 return true; 528 return true;
448 } 529 }
449 530
450 void V4L2JpegDecodeAccelerator::DestroyInputBuffers() { 531 void V4L2JpegDecodeAccelerator::DestroyInputBuffers() {
451 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); 532 DCHECK(decoder_task_runner_->BelongsToCurrentThread());
452 533
453 free_input_buffers_.clear(); 534 free_input_buffers_.clear();
454 535
455 if (input_buffer_map_.empty()) 536 if (input_buffer_map_.empty())
456 return; 537 return;
457 538
458 if (input_streamon_) { 539 if (input_streamon_) {
459 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT; 540 __u32 type = input_buf_type_;
460 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMOFF, &type); 541 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMOFF, &type);
461 input_streamon_ = false; 542 input_streamon_ = false;
462 } 543 }
463 544
464 for (size_t i = 0; i < input_buffer_map_.size(); ++i) { 545 for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
465 BufferRecord& input_record = input_buffer_map_[i]; 546 BufferRecord& input_record = input_buffer_map_[i];
466 device_->Munmap(input_record.address, input_record.length); 547 device_->Munmap(input_record.address[0], input_record.length[0]);
467 } 548 }
468 549
469 struct v4l2_requestbuffers reqbufs; 550 struct v4l2_requestbuffers reqbufs;
470 memset(&reqbufs, 0, sizeof(reqbufs)); 551 memset(&reqbufs, 0, sizeof(reqbufs));
471 reqbufs.count = 0; 552 reqbufs.count = 0;
472 reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; 553 reqbufs.type = input_buf_type_;
473 reqbufs.memory = V4L2_MEMORY_MMAP; 554 reqbufs.memory = V4L2_MEMORY_MMAP;
474 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs); 555 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);
475 556
476 input_buffer_map_.clear(); 557 input_buffer_map_.clear();
477 } 558 }
478 559
479 void V4L2JpegDecodeAccelerator::DestroyOutputBuffers() { 560 void V4L2JpegDecodeAccelerator::DestroyOutputBuffers() {
480 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); 561 DCHECK(decoder_task_runner_->BelongsToCurrentThread());
481 562
482 free_output_buffers_.clear(); 563 free_output_buffers_.clear();
483 564
484 if (output_buffer_map_.empty()) 565 if (output_buffer_map_.empty())
485 return; 566 return;
486 567
487 if (output_streamon_) { 568 if (output_streamon_) {
488 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 569 __u32 type = output_buf_type_;
489 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMOFF, &type); 570 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMOFF, &type);
490 output_streamon_ = false; 571 output_streamon_ = false;
491 } 572 }
492 573
493 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { 574 for (const auto& output_record : output_buffer_map_) {
494 BufferRecord& output_record = output_buffer_map_[i]; 575 for (size_t i = 0; i < output_record.num_planes; ++i) {
495 device_->Munmap(output_record.address, output_record.length); 576 device_->Munmap(output_record.address[i], output_record.length[i]);
577 }
496 } 578 }
497 579
498 struct v4l2_requestbuffers reqbufs; 580 struct v4l2_requestbuffers reqbufs;
499 memset(&reqbufs, 0, sizeof(reqbufs)); 581 memset(&reqbufs, 0, sizeof(reqbufs));
500 reqbufs.count = 0; 582 reqbufs.count = 0;
501 reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 583 reqbufs.type = output_buf_type_;
502 reqbufs.memory = V4L2_MEMORY_MMAP; 584 reqbufs.memory = V4L2_MEMORY_MMAP;
503 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs); 585 IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);
504 586
505 output_buffer_map_.clear(); 587 output_buffer_map_.clear();
506 } 588 }
507 589
508 void V4L2JpegDecodeAccelerator::DevicePollTask() { 590 void V4L2JpegDecodeAccelerator::DevicePollTask() {
509 DCHECK(device_poll_task_runner_->BelongsToCurrentThread()); 591 DCHECK(device_poll_task_runner_->BelongsToCurrentThread());
510 592
511 bool event_pending; 593 bool event_pending;
(...skipping 78 matching lines...)
590 // If input buffers are required to re-create, do not enqueue input record 672 // If input buffers are required to re-create, do not enqueue input record
591 // until all pending frames are handled by device. 673 // until all pending frames are handled by device.
592 if (ShouldRecreateInputBuffers()) 674 if (ShouldRecreateInputBuffers())
593 break; 675 break;
594 if (!EnqueueInputRecord()) 676 if (!EnqueueInputRecord())
595 return; 677 return;
596 } 678 }
597 // Check here because we cannot STREAMON before QBUF in earlier kernel. 679 // Check here because we cannot STREAMON before QBUF in earlier kernel.
598 // (kernel version < 3.14) 680 // (kernel version < 3.14)
599 if (!input_streamon_ && InputBufferQueuedCount()) { 681 if (!input_streamon_ && InputBufferQueuedCount()) {
600 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT; 682 __u32 type = input_buf_type_;
601 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type); 683 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
602 input_streamon_ = true; 684 input_streamon_ = true;
603 } 685 }
604 } 686 }
605 687
606 void V4L2JpegDecodeAccelerator::EnqueueOutput() { 688 void V4L2JpegDecodeAccelerator::EnqueueOutput() {
607 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); 689 DCHECK(decoder_task_runner_->BelongsToCurrentThread());
608 // Output record can be enqueued because the output coded sizes of the frames 690 // Output record can be enqueued because the output coded sizes of the frames
609 // currently in the pipeline are all the same. 691 // currently in the pipeline are all the same.
610 while (running_jobs_.size() > OutputBufferQueuedCount() && 692 while (running_jobs_.size() > OutputBufferQueuedCount() &&
611 !free_output_buffers_.empty()) { 693 !free_output_buffers_.empty()) {
612 if (!EnqueueOutputRecord()) 694 if (!EnqueueOutputRecord())
613 return; 695 return;
614 } 696 }
615 // Check here because we cannot STREAMON before QBUF in earlier kernel. 697 // Check here because we cannot STREAMON before QBUF in earlier kernel.
616 // (kernel version < 3.14) 698 // (kernel version < 3.14)
617 if (!output_streamon_ && OutputBufferQueuedCount()) { 699 if (!output_streamon_ && OutputBufferQueuedCount()) {
618 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 700 __u32 type = output_buf_type_;
619 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type); 701 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
620 output_streamon_ = true; 702 output_streamon_ = true;
621 } 703 }
622 } 704 }
623 705
624 static bool CopyOutputImage(const uint32_t src_pixelformat, 706 bool V4L2JpegDecodeAccelerator::CopyOutputImage(
625 const void* src_addr, 707 const uint32_t src_pixelformat,
626 const gfx::Size& src_coded_size, 708 const BufferRecord& src_buffer,
627 const scoped_refptr<VideoFrame>& dst_frame) { 709 const gfx::Size& src_coded_size,
628 VideoPixelFormat format = 710 const scoped_refptr<VideoFrame>& dst_frame) {
629 V4L2Device::V4L2PixFmtToVideoPixelFormat(src_pixelformat);
630 size_t src_size = VideoFrame::AllocationSize(format, src_coded_size);
631 uint8_t* dst_y = dst_frame->data(VideoFrame::kYPlane); 711 uint8_t* dst_y = dst_frame->data(VideoFrame::kYPlane);
632 uint8_t* dst_u = dst_frame->data(VideoFrame::kUPlane); 712 uint8_t* dst_u = dst_frame->data(VideoFrame::kUPlane);
633 uint8_t* dst_v = dst_frame->data(VideoFrame::kVPlane); 713 uint8_t* dst_v = dst_frame->data(VideoFrame::kVPlane);
634 size_t dst_y_stride = dst_frame->stride(VideoFrame::kYPlane); 714 size_t dst_y_stride = dst_frame->stride(VideoFrame::kYPlane);
635 size_t dst_u_stride = dst_frame->stride(VideoFrame::kUPlane); 715 size_t dst_u_stride = dst_frame->stride(VideoFrame::kUPlane);
636 size_t dst_v_stride = dst_frame->stride(VideoFrame::kVPlane); 716 size_t dst_v_stride = dst_frame->stride(VideoFrame::kVPlane);
637 717
638 // If the source format is I420, ConvertToI420 will simply copy the frame. 718 if (src_buffer.num_planes == 1) {
wuchengli 2016/12/15 09:18:25 Can we remove num_planes == 1 case and use mplane
jcliang 2016/12/15 14:55:47 We have to keep the num_planes == 1 case to handle
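For reference on the V4L2_PIX_FMT_YUV422M branch added below, here is a self-contained sketch of an I422-to-I420 conversion with libyuv, using Chromium's third_party include path; the helper name and the tightly packed plane strides are assumptions for illustration, not code from the patch.

#include <stdint.h>
#include <vector>
#include "third_party/libyuv/include/libyuv.h"

// Convert tightly packed I422 planes (chroma subsampled horizontally only)
// into a single I420 buffer. Assumes even width and height; libyuv's
// I422ToI420 returns 0 on success.
bool I422ToI420Tight(const uint8_t* src_y, const uint8_t* src_u,
                     const uint8_t* src_v, int width, int height,
                     std::vector<uint8_t>* i420_out) {
  i420_out->resize(width * height * 3 / 2);
  uint8_t* dst_y = i420_out->data();
  uint8_t* dst_u = dst_y + width * height;
  uint8_t* dst_v = dst_u + (width / 2) * (height / 2);
  return libyuv::I422ToI420(src_y, width,      // Y stride = width.
                            src_u, width / 2,  // U stride = width / 2.
                            src_v, width / 2,  // V stride = width / 2.
                            dst_y, width, dst_u, width / 2, dst_v, width / 2,
                            width, height) == 0;
}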
639 if (libyuv::ConvertToI420(static_cast<uint8_t*>(const_cast<void*>(src_addr)), 719 // Use ConvertToI420 to convert all splane buffers.
640 src_size, 720 // If the source format is I420, ConvertToI420 will simply copy the frame.
641 dst_y, dst_y_stride, 721 VideoPixelFormat format =
642 dst_u, dst_u_stride, 722 V4L2Device::V4L2PixFmtToVideoPixelFormat(src_pixelformat);
643 dst_v, dst_v_stride, 723 size_t src_size = VideoFrame::AllocationSize(format, src_coded_size);
644 0, 0, 724 if (libyuv::ConvertToI420(
645 src_coded_size.width(), 725 static_cast<uint8_t*>(src_buffer.address[0]), src_size, dst_y,
646 src_coded_size.height(), 726 dst_y_stride, dst_u, dst_u_stride, dst_v, dst_v_stride, 0, 0,
647 dst_frame->coded_size().width(), 727 src_coded_size.width(), src_coded_size.height(),
648 dst_frame->coded_size().height(), 728 dst_frame->coded_size().width(), dst_frame->coded_size().height(),
649 libyuv::kRotate0, 729 libyuv::kRotate0, src_pixelformat)) {
650 src_pixelformat)) { 730 LOG(ERROR) << "ConvertToI420 failed. Source format: " << src_pixelformat;
651 LOG(ERROR) << "ConvertToI420 failed. Source format: " << src_pixelformat; 731 return false;
732 }
733 } else if (src_pixelformat == V4L2_PIX_FMT_YUV420M ||
734 src_pixelformat == V4L2_PIX_FMT_YUV422M) {
735 uint8_t* src_y = static_cast<uint8_t*>(src_buffer.address[0]);
736 uint8_t* src_u = static_cast<uint8_t*>(src_buffer.address[1]);
737 uint8_t* src_v = static_cast<uint8_t*>(src_buffer.address[2]);
738 size_t src_y_stride = output_buffer_coded_size_.width();
739 size_t src_u_stride = output_buffer_coded_size_.width() / 2;
740 size_t src_v_stride = output_buffer_coded_size_.width() / 2;
741 if (output_buffer_pixelformat_ == V4L2_PIX_FMT_YUV420M) {
742 if (libyuv::I420Copy(src_y, src_y_stride, src_u, src_u_stride, src_v,
743 src_v_stride, dst_y, dst_y_stride, dst_u,
744 dst_u_stride, dst_v, dst_v_stride,
745 output_buffer_coded_size_.width(),
746 output_buffer_coded_size_.height())) {
747 LOG(ERROR) << "I420Copy failed";
748 return false;
749 }
750 } else { // output_buffer_pixelformat_ == V4L2_PIX_FMT_YUV422M
751 if (libyuv::I422ToI420(src_y, src_y_stride, src_u, src_u_stride, src_v,
752 src_v_stride, dst_y, dst_y_stride, dst_u,
753 dst_u_stride, dst_v, dst_v_stride,
754 output_buffer_coded_size_.width(),
755 output_buffer_coded_size_.height())) {
756 LOG(ERROR) << "I422ToI420 failed";
757 return false;
758 }
759 }
760 } else {
761 LOG(ERROR) << "Unsupported source buffer format: " << src_pixelformat;
652 return false; 762 return false;
653 } 763 }
654 return true; 764 return true;
655 } 765 }
656 766
657 void V4L2JpegDecodeAccelerator::Dequeue() { 767 void V4L2JpegDecodeAccelerator::Dequeue() {
658 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); 768 DCHECK(decoder_task_runner_->BelongsToCurrentThread());
659 769
660 // Dequeue completed input (VIDEO_OUTPUT) buffers, 770 // Dequeue completed input (VIDEO_OUTPUT) buffers,
661 // and recycle to the free list. 771 // and recycle to the free list.
662 struct v4l2_buffer dqbuf; 772 struct v4l2_buffer dqbuf;
773 struct v4l2_plane planes[kOutputPlanes];
663 while (InputBufferQueuedCount() > 0) { 774 while (InputBufferQueuedCount() > 0) {
664 DCHECK(input_streamon_); 775 DCHECK(input_streamon_);
665 memset(&dqbuf, 0, sizeof(dqbuf)); 776 memset(&dqbuf, 0, sizeof(dqbuf));
666 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; 777 memset(planes, 0, sizeof(planes));
778 dqbuf.type = input_buf_type_;
667 dqbuf.memory = V4L2_MEMORY_MMAP; 779 dqbuf.memory = V4L2_MEMORY_MMAP;
780 if (V4L2_TYPE_IS_MULTIPLANAR(input_buf_type_)) {
781 dqbuf.length = 1;
henryhsu 2016/12/15 09:56:17 s/1/kInputPlanes/
jcliang 2016/12/15 14:55:47 Done.
782 dqbuf.m.planes = planes;
henryhsu 2016/12/15 09:56:18 |planes| has kOutputPlanes elements. It assumes kO
jcliang 2016/12/15 14:55:47 Done.
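One way to address the point henryhsu raises (the local |planes| array is sized by kOutputPlanes but reused for the input queue) is to size the scratch array with V4L2's own upper bound, VIDEO_MAX_PLANES from <linux/videodev2.h>; a sketch of that alternative, not necessarily what the later patch set does:

// Scratch plane array valid for either queue, regardless of how
// kInputPlanes and kOutputPlanes compare.
struct v4l2_plane planes[VIDEO_MAX_PLANES];
memset(planes, 0, sizeof(planes));
if (V4L2_TYPE_IS_MULTIPLANAR(input_buf_type_)) {
  dqbuf.m.planes = planes;
  dqbuf.length = kInputPlanes;  // Number of entries the driver may fill.
}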
783 }
668 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) { 784 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
669 if (errno == EAGAIN) { 785 if (errno == EAGAIN) {
670 // EAGAIN if we're just out of buffers to dequeue. 786 // EAGAIN if we're just out of buffers to dequeue.
671 break; 787 break;
672 } 788 }
673 PLOG(ERROR) << "ioctl() failed: input buffer VIDIOC_DQBUF failed."; 789 PLOG(ERROR) << "ioctl() failed: input buffer VIDIOC_DQBUF failed.";
674 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); 790 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
675 return; 791 return;
676 } 792 }
677 BufferRecord& input_record = input_buffer_map_[dqbuf.index]; 793 BufferRecord& input_record = input_buffer_map_[dqbuf.index];
678 DCHECK(input_record.at_device); 794 DCHECK(input_record.at_device);
679 input_record.at_device = false; 795 input_record.at_device = false;
680 free_input_buffers_.push_back(dqbuf.index); 796 free_input_buffers_.push_back(dqbuf.index);
681 797
682 if (dqbuf.flags & V4L2_BUF_FLAG_ERROR) { 798 if (dqbuf.flags & V4L2_BUF_FLAG_ERROR) {
683 DVLOG(1) << "Dequeue input buffer error."; 799 DVLOG(1) << "Dequeue input buffer error.";
684 PostNotifyError(kInvalidBitstreamBufferId, UNSUPPORTED_JPEG); 800 PostNotifyError(kInvalidBitstreamBufferId, UNSUPPORTED_JPEG);
685 running_jobs_.pop(); 801 running_jobs_.pop();
686 } 802 }
687 } 803 }
688 804
689 // Dequeue completed output (VIDEO_CAPTURE) buffers, recycle to the free list. 805 // Dequeue completed output (VIDEO_CAPTURE) buffers, recycle to the free list.
690 // Return the finished buffer to the client via the job ready callback. 806 // Return the finished buffer to the client via the job ready callback.
691 // If dequeued input buffer has an error, the error frame has removed from 807 // If dequeued input buffer has an error, the error frame has removed from
692 // |running_jobs_|. We only have to dequeue output buffer when we actually 808 // |running_jobs_|. We only have to dequeue output buffer when we actually
693 // have pending frames in |running_jobs_| and also enqueued output buffers. 809 // have pending frames in |running_jobs_| and also enqueued output buffers.
694 while (!running_jobs_.empty() && OutputBufferQueuedCount() > 0) { 810 while (!running_jobs_.empty() && OutputBufferQueuedCount() > 0) {
695 DCHECK(output_streamon_); 811 DCHECK(output_streamon_);
696 memset(&dqbuf, 0, sizeof(dqbuf)); 812 memset(&dqbuf, 0, sizeof(dqbuf));
697 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 813 memset(planes, 0, sizeof(planes));
814 dqbuf.type = output_buf_type_;
698 // From experiments, using MMAP and memory copy is still faster than 815 // From experiments, using MMAP and memory copy is still faster than
699 // USERPTR. Also, client doesn't need to consider the buffer alignment and 816 // USERPTR. Also, client doesn't need to consider the buffer alignment and
700 // JpegDecodeAccelerator API will be simpler. 817 // JpegDecodeAccelerator API will be simpler.
701 dqbuf.memory = V4L2_MEMORY_MMAP; 818 dqbuf.memory = V4L2_MEMORY_MMAP;
819 if (V4L2_TYPE_IS_MULTIPLANAR(output_buf_type_)) {
820 dqbuf.length = kOutputPlanes;
821 dqbuf.m.planes = planes;
822 }
702 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) { 823 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
703 if (errno == EAGAIN) { 824 if (errno == EAGAIN) {
704 // EAGAIN if we're just out of buffers to dequeue. 825 // EAGAIN if we're just out of buffers to dequeue.
705 break; 826 break;
706 } 827 }
707 PLOG(ERROR) << "ioctl() failed: output buffer VIDIOC_DQBUF failed."; 828 PLOG(ERROR) << "ioctl() failed: output buffer VIDIOC_DQBUF failed.";
708 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); 829 PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
709 return; 830 return;
710 } 831 }
711 BufferRecord& output_record = output_buffer_map_[dqbuf.index]; 832 BufferRecord& output_record = output_buffer_map_[dqbuf.index];
712 DCHECK(output_record.at_device); 833 DCHECK(output_record.at_device);
713 output_record.at_device = false; 834 output_record.at_device = false;
714 free_output_buffers_.push_back(dqbuf.index); 835 free_output_buffers_.push_back(dqbuf.index);
715 836
716 // Jobs are always processed in FIFO order. 837 // Jobs are always processed in FIFO order.
717 linked_ptr<JobRecord> job_record = running_jobs_.front(); 838 linked_ptr<JobRecord> job_record = running_jobs_.front();
718 running_jobs_.pop(); 839 running_jobs_.pop();
719 840
720 if (dqbuf.flags & V4L2_BUF_FLAG_ERROR) { 841 if (dqbuf.flags & V4L2_BUF_FLAG_ERROR) {
721 DVLOG(1) << "Dequeue output buffer error."; 842 DVLOG(1) << "Dequeue output buffer error.";
722 PostNotifyError(kInvalidBitstreamBufferId, UNSUPPORTED_JPEG); 843 PostNotifyError(kInvalidBitstreamBufferId, UNSUPPORTED_JPEG);
723 } else { 844 } else {
724 // Copy the decoded data from output buffer to the buffer provided by the 845 // Copy the decoded data from output buffer to the buffer provided by the
725 // client. Do format conversion when output format is not 846 // client. Do format conversion when output format is not
726 // V4L2_PIX_FMT_YUV420. 847 // V4L2_PIX_FMT_YUV420.
727 if (!CopyOutputImage(output_buffer_pixelformat_, output_record.address, 848 if (!CopyOutputImage(output_buffer_pixelformat_, output_record,
728 output_buffer_coded_size_, job_record->out_frame)) { 849 output_buffer_coded_size_, job_record->out_frame)) {
729 PostNotifyError(job_record->bitstream_buffer_id, PLATFORM_FAILURE); 850 PostNotifyError(job_record->bitstream_buffer_id, PLATFORM_FAILURE);
730 return; 851 return;
731 } 852 }
732
733 DVLOG(3) << "Decoding finished, returning bitstream buffer, id=" 853 DVLOG(3) << "Decoding finished, returning bitstream buffer, id="
734 << job_record->bitstream_buffer_id; 854 << job_record->bitstream_buffer_id;
735 855
736 child_task_runner_->PostTask( 856 child_task_runner_->PostTask(
737 FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::VideoFrameReady, 857 FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::VideoFrameReady,
738 weak_ptr_, job_record->bitstream_buffer_id)); 858 weak_ptr_, job_record->bitstream_buffer_id));
739 } 859 }
740 } 860 }
741 } 861 }
742 862
(...skipping 84 matching lines...)
827 947
828 // Enqueue an input (VIDEO_OUTPUT) buffer for an input video frame. 948 // Enqueue an input (VIDEO_OUTPUT) buffer for an input video frame.
829 linked_ptr<JobRecord> job_record = input_jobs_.front(); 949 linked_ptr<JobRecord> job_record = input_jobs_.front();
830 input_jobs_.pop(); 950 input_jobs_.pop();
831 const int index = free_input_buffers_.back(); 951 const int index = free_input_buffers_.back();
832 BufferRecord& input_record = input_buffer_map_[index]; 952 BufferRecord& input_record = input_buffer_map_[index];
833 DCHECK(!input_record.at_device); 953 DCHECK(!input_record.at_device);
834 954
835 // It will add default huffman segment if it's missing. 955 // It will add default huffman segment if it's missing.
836 if (!AddHuffmanTable(job_record->shm.memory(), job_record->shm.size(), 956 if (!AddHuffmanTable(job_record->shm.memory(), job_record->shm.size(),
837 input_record.address, input_record.length)) { 957 input_record.address[0], input_record.length[0])) {
838 PostNotifyError(job_record->bitstream_buffer_id, PARSE_JPEG_FAILED); 958 PostNotifyError(job_record->bitstream_buffer_id, PARSE_JPEG_FAILED);
839 return false; 959 return false;
840 } 960 }
841 961
842 struct v4l2_buffer qbuf; 962 struct v4l2_buffer qbuf;
963 struct v4l2_plane plane;
843 memset(&qbuf, 0, sizeof(qbuf)); 964 memset(&qbuf, 0, sizeof(qbuf));
965 memset(&plane, 0, sizeof(plane));
844 qbuf.index = index; 966 qbuf.index = index;
845 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; 967 qbuf.type = input_buf_type_;
846 qbuf.memory = V4L2_MEMORY_MMAP; 968 qbuf.memory = V4L2_MEMORY_MMAP;
969 if (V4L2_TYPE_IS_MULTIPLANAR(input_buf_type_)) {
970 qbuf.length = 1;
971 plane.bytesused = input_record.length[0];
972 qbuf.m.planes = &plane;
973 }
847 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf); 974 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
848 input_record.at_device = true; 975 input_record.at_device = true;
849 running_jobs_.push(job_record); 976 running_jobs_.push(job_record);
850 free_input_buffers_.pop_back(); 977 free_input_buffers_.pop_back();
851 978
852 DVLOG(3) << __func__ 979 DVLOG(3) << __func__
853 << ": enqueued frame id=" << job_record->bitstream_buffer_id 980 << ": enqueued frame id=" << job_record->bitstream_buffer_id
854 << " to device."; 981 << " to device.";
855 return true; 982 return true;
856 } 983 }
857 984
858 bool V4L2JpegDecodeAccelerator::EnqueueOutputRecord() { 985 bool V4L2JpegDecodeAccelerator::EnqueueOutputRecord() {
859 DCHECK(!free_output_buffers_.empty()); 986 DCHECK(!free_output_buffers_.empty());
860 987
861 // Enqueue an output (VIDEO_CAPTURE) buffer. 988 // Enqueue an output (VIDEO_CAPTURE) buffer.
862 const int index = free_output_buffers_.back(); 989 const int index = free_output_buffers_.back();
863 BufferRecord& output_record = output_buffer_map_[index]; 990 BufferRecord& output_record = output_buffer_map_[index];
864 DCHECK(!output_record.at_device); 991 DCHECK(!output_record.at_device);
865 struct v4l2_buffer qbuf; 992 struct v4l2_buffer qbuf;
993 struct v4l2_plane planes[kOutputPlanes];
866 memset(&qbuf, 0, sizeof(qbuf)); 994 memset(&qbuf, 0, sizeof(qbuf));
995 memset(&planes, 0, sizeof(planes));
867 qbuf.index = index; 996 qbuf.index = index;
868 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 997 qbuf.type = output_buf_type_;
869 qbuf.memory = V4L2_MEMORY_MMAP; 998 qbuf.memory = V4L2_MEMORY_MMAP;
999 if (V4L2_TYPE_IS_MULTIPLANAR(output_buf_type_)) {
1000 qbuf.length = output_record.num_planes;
1001 qbuf.m.planes = planes;
1002 }
870 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf); 1003 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
871 output_record.at_device = true; 1004 output_record.at_device = true;
872 free_output_buffers_.pop_back(); 1005 free_output_buffers_.pop_back();
873 return true; 1006 return true;
874 } 1007 }
875 1008
876 void V4L2JpegDecodeAccelerator::StartDevicePoll() { 1009 void V4L2JpegDecodeAccelerator::StartDevicePoll() {
877 DVLOG(3) << __func__ << ": starting device poll"; 1010 DVLOG(3) << __func__ << ": starting device poll";
878 DCHECK(decoder_task_runner_->BelongsToCurrentThread()); 1011 DCHECK(decoder_task_runner_->BelongsToCurrentThread());
879 DCHECK(!device_poll_thread_.IsRunning()); 1012 DCHECK(!device_poll_thread_.IsRunning());
(...skipping 18 matching lines...)
898 device_poll_thread_.Stop(); 1031 device_poll_thread_.Stop();
899 1032
900 // Clear the interrupt now, to be sure. 1033 // Clear the interrupt now, to be sure.
901 if (!device_->ClearDevicePollInterrupt()) 1034 if (!device_->ClearDevicePollInterrupt())
902 return false; 1035 return false;
903 1036
904 return true; 1037 return true;
905 } 1038 }
906 1039
907 } // namespace media 1040 } // namespace media