Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(580)

Unified Diff: content/common/gpu/media/v4l2_jpeg_decode_accelerator.cc

Issue 1125263005: MJPEG acceleration for V4L2 (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 5 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
Index: content/common/gpu/media/v4l2_jpeg_decode_accelerator.cc
diff --git a/content/common/gpu/media/v4l2_image_processor.cc b/content/common/gpu/media/v4l2_jpeg_decode_accelerator.cc
similarity index 51%
copy from content/common/gpu/media/v4l2_image_processor.cc
copy to content/common/gpu/media/v4l2_jpeg_decode_accelerator.cc
index 88946615877fff817e72e85e3b67f34f336b1146..41c28bb4b3e9bcf42e1595b0fdfa4fda89430f73 100644
--- a/content/common/gpu/media/v4l2_image_processor.cc
+++ b/content/common/gpu/media/v4l2_jpeg_decode_accelerator.cc
@@ -1,4 +1,4 @@
-// Copyright 2014 The Chromium Authors. All rights reserved.
+// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
@@ -14,14 +14,12 @@
#include "base/callback.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/numerics/safe_conversions.h"
-#include "content/common/gpu/media/v4l2_image_processor.h"
+#include "content/common/gpu/media/v4l2_jpeg_decode_accelerator.h"
#include "media/base/bind_to_current_loop.h"
+#include "media/base/video_frame.h"
-#define NOTIFY_ERROR() \
- do { \
- LOG(ERROR) << "calling NotifyError()"; \
- NotifyError(); \
- } while (0)
+//#undef DVLOG
+//#define DVLOG VLOG
#define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value) \
do { \
@@ -45,155 +43,131 @@
namespace content {
-V4L2ImageProcessor::InputRecord::InputRecord() : at_device(false) {
+V4L2JpegDecodeAccelerator::InputRecord::InputRecord() : at_device(false) {
}
-V4L2ImageProcessor::InputRecord::~InputRecord() {
+V4L2JpegDecodeAccelerator::InputRecord::~InputRecord() {
}
-V4L2ImageProcessor::OutputRecord::OutputRecord()
- : at_device(false), at_client(false) {
+V4L2JpegDecodeAccelerator::OutputRecord::OutputRecord()
+ : address(nullptr), length(0), at_device(false) {
}
-V4L2ImageProcessor::OutputRecord::~OutputRecord() {
+V4L2JpegDecodeAccelerator::OutputRecord::~OutputRecord() {
}
-V4L2ImageProcessor::JobRecord::JobRecord() {
+V4L2JpegDecodeAccelerator::JobRecord::JobRecord(
+ media::BitstreamBuffer bitstream_buffer,
+ scoped_refptr<media::VideoFrame> video_frame)
+ : bitstream_buffer(bitstream_buffer), frame(video_frame) {
}
-V4L2ImageProcessor::JobRecord::~JobRecord() {
+V4L2JpegDecodeAccelerator::JobRecord::~JobRecord() {
}
-V4L2ImageProcessor::V4L2ImageProcessor(const scoped_refptr<V4L2Device>& device)
- : input_format_(media::VideoFrame::UNKNOWN),
- output_format_(media::VideoFrame::UNKNOWN),
- input_format_fourcc_(0),
- output_format_fourcc_(0),
- input_planes_count_(0),
- output_planes_count_(0),
- child_message_loop_proxy_(base::MessageLoopProxy::current()),
+V4L2JpegDecodeAccelerator::V4L2JpegDecodeAccelerator(
+ const scoped_refptr<V4L2Device>& device,
+ const scoped_refptr<base::MessageLoopProxy>& io_message_loop_proxy)
+ : child_message_loop_proxy_(base::MessageLoopProxy::current()),
+ io_message_loop_proxy_(io_message_loop_proxy),
device_(device),
- device_thread_("V4L2ImageProcessorThread"),
- device_poll_thread_("V4L2ImageProcessorDevicePollThread"),
+ device_thread_("V4L2JpegDecodeThread"),
+ device_poll_thread_("V4L2JpegDecodeDevicePollThread"),
input_streamon_(false),
input_buffer_queued_count_(0),
output_streamon_(false),
output_buffer_queued_count_(0),
device_weak_factory_(this) {
+ device_weak_ = device_weak_factory_.GetWeakPtr();
}
-V4L2ImageProcessor::~V4L2ImageProcessor() {
- DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
+V4L2JpegDecodeAccelerator::~V4L2JpegDecodeAccelerator() {
DCHECK(!device_thread_.IsRunning());
DCHECK(!device_poll_thread_.IsRunning());
-
DestroyInputBuffers();
DestroyOutputBuffers();
}
-void V4L2ImageProcessor::NotifyError() {
- if (!child_message_loop_proxy_->BelongsToCurrentThread())
- child_message_loop_proxy_->PostTask(FROM_HERE, error_cb_);
- else
- error_cb_.Run();
-}
-
-bool V4L2ImageProcessor::Initialize(media::VideoFrame::Format input_format,
- media::VideoFrame::Format output_format,
- gfx::Size input_visible_size,
- gfx::Size output_visible_size,
- gfx::Size output_allocated_size,
- const base::Closure& error_cb) {
- DCHECK(!error_cb.is_null());
- error_cb_ = error_cb;
-
- // TODO(posciak): Replace Exynos-specific format/parameter hardcoding in this
- // class with proper capability enumeration.
- DCHECK_EQ(input_format, media::VideoFrame::I420);
- DCHECK_EQ(output_format, media::VideoFrame::NV12);
-
- input_format_ = input_format;
- output_format_ = output_format;
- input_format_fourcc_ = V4L2Device::VideoFrameFormatToV4L2PixFmt(input_format);
- output_format_fourcc_ =
- V4L2Device::VideoFrameFormatToV4L2PixFmt(output_format);
-
- if (!input_format_fourcc_ || !output_format_fourcc_) {
- LOG(ERROR) << "Unrecognized format(s)";
- return false;
+void V4L2JpegDecodeAccelerator::NotifyError(int32_t bitstream_buffer_id,
+ Error error) {
+ if (!child_message_loop_proxy_->BelongsToCurrentThread()) {
wuchengli 2015/05/25 10:29:23 Don't use this pattern. See piman's comment in htt
henryhsu 2015/06/05 03:28:56 Done.
+ child_message_loop_proxy_->PostTask(
+ FROM_HERE,
+ base::Bind(&V4L2JpegDecodeAccelerator::NotifyError,
+ device_weak_, bitstream_buffer_id, error));
+ return;
+ }
+ LOG(ERROR) << "Notifying of error " << error << " for buffer id "
+ << bitstream_buffer_id;
+ if (client_) {
+ client_->NotifyError(bitstream_buffer_id, error);
+ client_ptr_factory_.reset();
}
+}
- input_visible_size_ = input_visible_size;
- output_visible_size_ = output_visible_size;
- output_allocated_size_ = output_allocated_size;
+bool V4L2JpegDecodeAccelerator::Initialize(Client* client) {
+ DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
- input_planes_count_ = media::VideoFrame::NumPlanes(input_format);
- DCHECK_LE(input_planes_count_, static_cast<size_t>(VIDEO_MAX_PLANES));
- output_planes_count_ = media::VideoFrame::NumPlanes(output_format);
- DCHECK_LE(output_planes_count_, static_cast<size_t>(VIDEO_MAX_PLANES));
+ client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));
+ client_ = client_ptr_factory_->GetWeakPtr();
kcwu 2015/05/26 10:47:23 No need to use weak pointer for |client|. |client|
henryhsu 2015/06/05 03:28:56 Done.
// Capabilities check.
struct v4l2_capability caps;
- memset(&caps, 0, sizeof(caps));
- const __u32 kCapsRequired = V4L2_CAP_VIDEO_CAPTURE_MPLANE |
- V4L2_CAP_VIDEO_OUTPUT_MPLANE | V4L2_CAP_STREAMING;
+ // TODO(henryhsu): Do we need V4L2_CAP_STREAMING capability?
+ const __u32 kCapsRequired = V4L2_CAP_VIDEO_CAPTURE |
wuchengli 2015/05/25 10:29:23 We should only need V4L2_CAP_VIDEO_M2M_MPLANE | V4
henryhsu 2015/06/05 03:28:55 s5p-jpeg doesn't support multiple planar. I think
+ V4L2_CAP_VIDEO_OUTPUT;
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps);
if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
- LOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP: "
- "caps check failed: 0x" << std::hex << caps.capabilities;
+ LOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP"
+ ", caps check failed: 0x" << std::hex << caps.capabilities;
return false;
}
- if (!CreateInputBuffers() || !CreateOutputBuffers())
- return false;
-
if (!device_thread_.Start()) {
LOG(ERROR) << "Initialize(): encoder thread failed to start";
return false;
}
- // StartDevicePoll will NOTIFY_ERROR on failure, so IgnoreResult is fine here.
+ // StartDevicePoll will NotifyError on failure, so IgnoreResult is fine here.
kcwu 2015/05/26 10:47:22 Nobody use the return value of StartDevicePoll. Ho
henryhsu 2015/06/05 03:28:55 Done.
device_thread_.message_loop()->PostTask(
FROM_HERE,
- base::Bind(base::IgnoreResult(&V4L2ImageProcessor::StartDevicePoll),
- base::Unretained(this)));
+ base::Bind(
+ base::IgnoreResult(&V4L2JpegDecodeAccelerator::StartDevicePoll),
+ base::Unretained(this)));
- DVLOG(1) << "V4L2ImageProcessor initialized for "
- << " input_format:"
- << media::VideoFrame::FormatToString(input_format)
- << ", output_format:"
- << media::VideoFrame::FormatToString(output_format)
- << ", input_visible_size: " << input_visible_size.ToString()
- << ", input_allocated_size: " << input_allocated_size_.ToString()
- << ", output_visible_size: " << output_visible_size.ToString()
- << ", output_allocated_size: " << output_allocated_size.ToString();
+ DVLOG(1) << "V4L2JpegDecodeAccelerator initialized.";
return true;
}
-void V4L2ImageProcessor::Process(const scoped_refptr<media::VideoFrame>& frame,
- const FrameReadyCB& cb) {
- DVLOG(3) << __func__ << ": ts=" << frame->timestamp().InMilliseconds();
+void V4L2JpegDecodeAccelerator::Decode(
+ const media::BitstreamBuffer& bitstream_buffer,
+ const scoped_refptr<media::VideoFrame>& video_frame) {
+ DVLOG(1) << "Decode(): input_id=" << bitstream_buffer.id()
+ << ", size=" << bitstream_buffer.size();
+ DCHECK(io_message_loop_proxy_->BelongsToCurrentThread());
+  // TODO(henryhsu): decide between DCHECK and CHECK here
+ DCHECK_EQ(video_frame->format(), media::VideoFrame::I420);
- scoped_ptr<JobRecord> job_record(new JobRecord());
- job_record->frame = frame;
- job_record->ready_cb = cb;
+ scoped_ptr<JobRecord> job_record(
+ new JobRecord(bitstream_buffer, video_frame));
device_thread_.message_loop()->PostTask(
FROM_HERE,
- base::Bind(&V4L2ImageProcessor::ProcessTask,
+ base::Bind(&V4L2JpegDecodeAccelerator::DecodeTask,
base::Unretained(this),
base::Passed(&job_record)));
}
-void V4L2ImageProcessor::ProcessTask(scoped_ptr<JobRecord> job_record) {
+void V4L2JpegDecodeAccelerator::DecodeTask(scoped_ptr<JobRecord> job_record) {
DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current());
-
input_queue_.push(make_linked_ptr(job_record.release()));
+ if (!CheckBufferAttributes())
+ return;
Enqueue();
}
-void V4L2ImageProcessor::Destroy() {
+void V4L2JpegDecodeAccelerator::Destroy() {
DVLOG(3) << __func__;
DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
@@ -201,7 +175,8 @@ void V4L2ImageProcessor::Destroy() {
if (device_thread_.IsRunning()) {
device_thread_.message_loop()->PostTask(
FROM_HERE,
- base::Bind(&V4L2ImageProcessor::DestroyTask, base::Unretained(this)));
+ base::Bind(&V4L2JpegDecodeAccelerator::DestroyTask,
+ base::Unretained(this)));
// Wait for tasks to finish/early-exit.
device_thread_.Stop();
} else {
@@ -209,11 +184,10 @@ void V4L2ImageProcessor::Destroy() {
DCHECK(!device_poll_thread_.IsRunning());
DCHECK(!device_weak_factory_.HasWeakPtrs());
}
-
delete this;
}
-void V4L2ImageProcessor::DestroyTask() {
+void V4L2JpegDecodeAccelerator::DestroyTask() {
DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current());
device_weak_factory_.InvalidateWeakPtrs();
@@ -222,171 +196,179 @@ void V4L2ImageProcessor::DestroyTask() {
StopDevicePoll();
}
-bool V4L2ImageProcessor::CreateInputBuffers() {
+bool V4L2JpegDecodeAccelerator::CheckBufferAttributes() {
DVLOG(3) << __func__;
- DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
- DCHECK(!input_streamon_);
+ DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current());
+ DCHECK(!input_queue_.empty());
+ linked_ptr<JobRecord> job_record = input_queue_.front();
- struct v4l2_control control;
- memset(&control, 0, sizeof(control));
- control.id = V4L2_CID_ROTATE;
- control.value = 0;
- IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_CTRL, &control);
+ // Check image resolution and format are the same as previous.
+ if (job_record->frame->format() != output_format_ ||
+ job_record->frame->coded_size() != image_coded_size_) {
+ image_coded_size_ = job_record->frame->coded_size();
+ output_format_ = job_record->frame->format();
+ if (input_streamon_ || output_streamon_) {
+// ResetBuffers();
+// input_queue_.push(job_record);
+ } else if (!CreateInputBuffers() || !CreateOutputBuffers()) {
+ return false;
+ }
+ }
+ return true;
+}
- memset(&control, 0, sizeof(control));
- control.id = V4L2_CID_HFLIP;
- control.value = 0;
- IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_CTRL, &control);
+//void V4L2JpegDecodeAccelerator::ResetBuffers() {
+// DVLOG(3) << __func__;
+// if (!StopDevicePoll())
+// return;
+//}
- memset(&control, 0, sizeof(control));
- control.id = V4L2_CID_VFLIP;
- control.value = 0;
- IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_CTRL, &control);
+bool V4L2JpegDecodeAccelerator::CreateInputBuffers() {
+ DVLOG(3) << __func__;
+ DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current());
+ DCHECK(!input_streamon_);
- memset(&control, 0, sizeof(control));
- control.id = V4L2_CID_ALPHA_COMPONENT;
- control.value = 255;
- IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_CTRL, &control);
+ DCHECK(!input_queue_.empty());
+ linked_ptr<JobRecord> job_record = input_queue_.front();
+ size_t reserve_size = job_record->bitstream_buffer.size() * 2;
struct v4l2_format format;
memset(&format, 0, sizeof(format));
- format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
- format.fmt.pix_mp.width = input_visible_size_.width();
- format.fmt.pix_mp.height = input_visible_size_.height();
- format.fmt.pix_mp.pixelformat = input_format_fourcc_;
- format.fmt.pix_mp.num_planes = input_planes_count_;
- for (size_t i = 0; i < input_planes_count_; ++i) {
- format.fmt.pix_mp.plane_fmt[i].sizeimage =
- media::VideoFrame::PlaneAllocationSize(
- input_format_, i, input_allocated_size_);
- format.fmt.pix_mp.plane_fmt[i].bytesperline =
- base::checked_cast<__u32>(input_allocated_size_.width());
- }
+ format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
+ format.fmt.pix.width = job_record->frame->coded_size().width();
+ format.fmt.pix.height = job_record->frame->coded_size().height();
+ format.fmt.pix.pixelformat = V4L2_PIX_FMT_JPEG;
+ format.fmt.pix.sizeimage = reserve_size;
+ format.fmt.pix.field = V4L2_FIELD_ANY;
+ format.fmt.pix.bytesperline = 0;
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
- input_allocated_size_ = V4L2Device::CodedSizeFromV4L2Format(format);
- DCHECK(gfx::Rect(input_allocated_size_).Contains(
- gfx::Rect(input_visible_size_)));
-
- struct v4l2_crop crop;
- memset(&crop, 0, sizeof(crop));
- crop.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
- crop.c.left = 0;
- crop.c.top = 0;
- crop.c.width = base::checked_cast<__u32>(input_visible_size_.width());
- crop.c.height = base::checked_cast<__u32>(input_visible_size_.height());
- IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_CROP, &crop);
-
struct v4l2_requestbuffers reqbufs;
memset(&reqbufs, 0, sizeof(reqbufs));
reqbufs.count = kInputBufferCount;
- reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
- reqbufs.memory = V4L2_MEMORY_USERPTR;
+ reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
+ reqbufs.memory = V4L2_MEMORY_MMAP;
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
DCHECK(input_buffer_map_.empty());
input_buffer_map_.resize(reqbufs.count);
- for (size_t i = 0; i < input_buffer_map_.size(); ++i)
+ for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
free_input_buffers_.push_back(i);
+ struct v4l2_buffer buffer;
+ memset(&buffer, 0, sizeof(buffer));
+ buffer.index = i;
+ buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
+ buffer.memory = V4L2_MEMORY_MMAP;
+ IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
+ void* address = device_->Mmap(NULL,
+ buffer.length,
+ PROT_READ | PROT_WRITE,
+ MAP_SHARED,
+ buffer.m.offset);
+ if (address == MAP_FAILED) {
+ PLOG(ERROR) << "CreateInputBuffers(): mmap() failed";
+ return false;
+ }
+ input_buffer_map_[i].address = address;
+ input_buffer_map_[i].length = buffer.length;
+ }
+
return true;
}
-bool V4L2ImageProcessor::CreateOutputBuffers() {
+bool V4L2JpegDecodeAccelerator::CreateOutputBuffers() {
DVLOG(3) << __func__;
- DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
+ DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current());
DCHECK(!output_streamon_);
+ DCHECK(!input_queue_.empty());
+ linked_ptr<JobRecord> job_record = input_queue_.front();
+
+ size_t frame_size = media::VideoFrame::AllocationSize(
+      output_format_, job_record->frame->coded_size());
+  // TODO(henryhsu): YUV410M format is not supported yet.
+ uint32 output_format_fourcc_ = V4L2_PIX_FMT_YUV420;
+ // V4L2Device::VideoFrameFormatToV4L2PixFmt(output_format_);
struct v4l2_format format;
memset(&format, 0, sizeof(format));
- format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
- format.fmt.pix_mp.width = output_allocated_size_.width();
- format.fmt.pix_mp.height = output_allocated_size_.height();
- format.fmt.pix_mp.pixelformat = output_format_fourcc_;
- format.fmt.pix_mp.num_planes = output_planes_count_;
- for (size_t i = 0; i < output_planes_count_; ++i) {
- format.fmt.pix_mp.plane_fmt[i].sizeimage =
- media::VideoFrame::PlaneAllocationSize(
- output_format_, i, output_allocated_size_);
- format.fmt.pix_mp.plane_fmt[i].bytesperline =
- base::checked_cast<__u32>(output_allocated_size_.width());
- }
+ format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ format.fmt.pix.width = job_record->frame->coded_size().width();
+ format.fmt.pix.height = job_record->frame->coded_size().height();
+ format.fmt.pix.sizeimage = frame_size;
+ format.fmt.pix.pixelformat = output_format_fourcc_;
+ format.fmt.pix.field = V4L2_FIELD_ANY;
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
- gfx::Size adjusted_allocated_size =
- V4L2Device::CodedSizeFromV4L2Format(format);
- DCHECK(gfx::Rect(adjusted_allocated_size).Contains(
- gfx::Rect(output_allocated_size_)));
- output_allocated_size_ = adjusted_allocated_size;
-
- struct v4l2_crop crop;
- memset(&crop, 0, sizeof(crop));
- crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
- crop.c.left = 0;
- crop.c.top = 0;
- crop.c.width = base::checked_cast<__u32>(output_visible_size_.width());
- crop.c.height = base::checked_cast<__u32>(output_visible_size_.height());
- IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_CROP, &crop);
-
struct v4l2_requestbuffers reqbufs;
memset(&reqbufs, 0, sizeof(reqbufs));
reqbufs.count = kOutputBufferCount;
- reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
reqbufs.memory = V4L2_MEMORY_MMAP;
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
DCHECK(output_buffer_map_.empty());
output_buffer_map_.resize(reqbufs.count);
+
for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
- OutputRecord& output_record = output_buffer_map_[i];
- output_record.fds.resize(output_planes_count_);
- for (size_t j = 0; j < output_planes_count_; ++j) {
- struct v4l2_exportbuffer expbuf;
- memset(&expbuf, 0, sizeof(expbuf));
- expbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
- expbuf.index = i;
- expbuf.plane = j;
- expbuf.flags = O_CLOEXEC;
- IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_EXPBUF, &expbuf);
- output_record.fds[j] = expbuf.fd;
- }
free_output_buffers_.push_back(i);
+
+ struct v4l2_buffer buffer;
+ memset(&buffer, 0, sizeof(buffer));
+ buffer.index = i;
+ buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buffer.memory = V4L2_MEMORY_MMAP;
+ IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
+ void* address = device_->Mmap(NULL,
+ buffer.length,
+ PROT_READ | PROT_WRITE,
+ MAP_SHARED,
+ buffer.m.offset);
+ if (address == MAP_FAILED) {
+ PLOG(ERROR) << "CreateOutputBuffers(): mmap() failed";
+ return false;
+ }
+ output_buffer_map_[i].address = address;
+ output_buffer_map_[i].length = buffer.length;
}
return true;
}
-void V4L2ImageProcessor::DestroyInputBuffers() {
+void V4L2JpegDecodeAccelerator::DestroyInputBuffers() {
DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
DCHECK(!input_streamon_);
+ for (size_t buf = 0; buf < input_buffer_map_.size(); ++buf) {
+ InputRecord& input_record = input_buffer_map_[buf];
+ device_->Munmap(input_record.address, input_record.length);
+ }
+
struct v4l2_requestbuffers reqbufs;
memset(&reqbufs, 0, sizeof(reqbufs));
reqbufs.count = 0;
- reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
- reqbufs.memory = V4L2_MEMORY_USERPTR;
+ reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
+ reqbufs.memory = V4L2_MEMORY_MMAP;
IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);
input_buffer_map_.clear();
free_input_buffers_.clear();
}
-void V4L2ImageProcessor::DestroyOutputBuffers() {
+void V4L2JpegDecodeAccelerator::DestroyOutputBuffers() {
DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
DCHECK(!output_streamon_);
for (size_t buf = 0; buf < output_buffer_map_.size(); ++buf) {
OutputRecord& output_record = output_buffer_map_[buf];
- for (size_t plane = 0; plane < output_record.fds.size(); ++plane)
- close(output_record.fds[plane]);
- output_record.fds.clear();
+ device_->Munmap(output_record.address, output_record.length);
}
struct v4l2_requestbuffers reqbufs;
memset(&reqbufs, 0, sizeof(reqbufs));
reqbufs.count = 0;
- reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
reqbufs.memory = V4L2_MEMORY_MMAP;
IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);
@@ -394,12 +376,12 @@ void V4L2ImageProcessor::DestroyOutputBuffers() {
free_output_buffers_.clear();
}
-void V4L2ImageProcessor::DevicePollTask(bool poll_device) {
+void V4L2JpegDecodeAccelerator::DevicePollTask(bool poll_device) {
DCHECK_EQ(device_poll_thread_.message_loop(), base::MessageLoop::current());
bool event_pending;
if (!device_->Poll(poll_device, &event_pending)) {
- NOTIFY_ERROR();
+ NotifyError(-1, media::JpegDecodeAccelerator::PLATFORM_FAILURE);
return;
}
@@ -407,11 +389,11 @@ void V4L2ImageProcessor::DevicePollTask(bool poll_device) {
// touch encoder state from this thread.
device_thread_.message_loop()->PostTask(
FROM_HERE,
- base::Bind(&V4L2ImageProcessor::ServiceDeviceTask,
+ base::Bind(&V4L2JpegDecodeAccelerator::ServiceDeviceTask,
base::Unretained(this)));
}
-void V4L2ImageProcessor::ServiceDeviceTask() {
+void V4L2JpegDecodeAccelerator::ServiceDeviceTask() {
DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current());
// ServiceDeviceTask() should only ever be scheduled from DevicePollTask(),
// so either:
@@ -424,15 +406,16 @@ void V4L2ImageProcessor::ServiceDeviceTask() {
Dequeue();
Enqueue();
- if (!device_->ClearDevicePollInterrupt())
+ if (!device_->ClearDevicePollInterrupt()) {
return;
+ }
bool poll_device =
(input_buffer_queued_count_ > 0 && output_buffer_queued_count_ > 0);
device_poll_thread_.message_loop()->PostTask(
FROM_HERE,
- base::Bind(&V4L2ImageProcessor::DevicePollTask,
+ base::Bind(&V4L2JpegDecodeAccelerator::DevicePollTask,
base::Unretained(this),
poll_device));
@@ -448,7 +431,7 @@ void V4L2ImageProcessor::ServiceDeviceTask() {
free_output_buffers_.size() << "]";
}
-void V4L2ImageProcessor::Enqueue() {
+void V4L2JpegDecodeAccelerator::Enqueue() {
DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current());
const int old_inputs_queued = input_buffer_queued_count_;
@@ -459,11 +442,12 @@ void V4L2ImageProcessor::Enqueue() {
if (old_inputs_queued == 0 && input_buffer_queued_count_ != 0) {
// We started up a previously empty queue.
// Queue state changed; signal interrupt.
- if (!device_->SetDevicePollInterrupt())
+ if (!device_->SetDevicePollInterrupt()) {
kcwu 2015/05/26 10:47:23 Why do we need to SetDevicePollInterrupt? Assume t
henryhsu 2015/06/05 03:28:55 As discussed before, I removed this to save one tr
return;
+ }
// VIDIOC_STREAMON if we haven't yet.
if (!input_streamon_) {
- __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
+ __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
input_streamon_ = true;
}
@@ -479,11 +463,12 @@ void V4L2ImageProcessor::Enqueue() {
if (old_outputs_queued == 0 && output_buffer_queued_count_ != 0) {
// We just started up a previously empty queue.
// Queue state changed; signal interrupt.
- if (!device_->SetDevicePollInterrupt())
+ if (!device_->SetDevicePollInterrupt()) {
return;
+ }
// Start VIDIOC_STREAMON if we haven't yet.
if (!output_streamon_) {
- __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
output_streamon_ = true;
}
@@ -492,34 +477,29 @@ void V4L2ImageProcessor::Enqueue() {
DCHECK_LE(output_buffer_queued_count_, 1);
}
-void V4L2ImageProcessor::Dequeue() {
+void V4L2JpegDecodeAccelerator::Dequeue() {
DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current());
// Dequeue completed input (VIDEO_OUTPUT) buffers,
// and recycle to the free list.
struct v4l2_buffer dqbuf;
- struct v4l2_plane planes[VIDEO_MAX_PLANES];
while (input_buffer_queued_count_ > 0) {
DCHECK(input_streamon_);
memset(&dqbuf, 0, sizeof(dqbuf));
- memset(&planes, 0, sizeof(planes));
- dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
- dqbuf.memory = V4L2_MEMORY_USERPTR;
- dqbuf.m.planes = planes;
- dqbuf.length = input_planes_count_;
+ dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
+ dqbuf.memory = V4L2_MEMORY_MMAP;
if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
if (errno == EAGAIN) {
// EAGAIN if we're just out of buffers to dequeue.
break;
}
PLOG(ERROR) << "ioctl() failed: VIDIOC_DQBUF";
- NOTIFY_ERROR();
+ NotifyError(dqbuf.index, media::JpegDecodeAccelerator::PLATFORM_FAILURE);
return;
}
InputRecord& input_record = input_buffer_map_[dqbuf.index];
DCHECK(input_record.at_device);
input_record.at_device = false;
- input_record.frame = NULL;
free_input_buffers_.push_back(dqbuf.index);
input_buffer_queued_count_--;
}
@@ -529,24 +509,21 @@ void V4L2ImageProcessor::Dequeue() {
while (output_buffer_queued_count_ > 0) {
DCHECK(output_streamon_);
memset(&dqbuf, 0, sizeof(dqbuf));
- memset(&planes, 0, sizeof(planes));
- dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
dqbuf.memory = V4L2_MEMORY_MMAP;
- dqbuf.m.planes = planes;
- dqbuf.length = output_planes_count_;
if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
if (errno == EAGAIN) {
// EAGAIN if we're just out of buffers to dequeue.
break;
}
PLOG(ERROR) << "ioctl() failed: VIDIOC_DQBUF";
- NOTIFY_ERROR();
+ NotifyError(dqbuf.index, media::JpegDecodeAccelerator::PLATFORM_FAILURE);
return;
}
OutputRecord& output_record = output_buffer_map_[dqbuf.index];
DCHECK(output_record.at_device);
output_record.at_device = false;
- output_record.at_client = true;
+ free_output_buffers_.push_back(dqbuf.index);
output_buffer_queued_count_--;
// Jobs are always processed in FIFO order.
@@ -554,41 +531,19 @@ void V4L2ImageProcessor::Dequeue() {
linked_ptr<JobRecord> job_record = running_jobs_.front();
running_jobs_.pop();
- scoped_refptr<media::VideoFrame> output_frame =
- media::VideoFrame::WrapExternalDmabufs(
- output_format_,
- output_allocated_size_,
- gfx::Rect(output_visible_size_),
- output_visible_size_,
- output_record.fds,
- job_record->frame->timestamp(),
- media::BindToCurrentLoop(
- base::Bind(&V4L2ImageProcessor::ReuseOutputBuffer,
- device_weak_factory_.GetWeakPtr(),
- dqbuf.index)));
+ memcpy(job_record->frame->data(media::VideoFrame::kYPlane),
+ output_record.address,
+ output_record.length);
DVLOG(3) << "Processing finished, returning frame, ts="
- << output_frame->timestamp().InMilliseconds();
+ << job_record->frame->timestamp().InMilliseconds();
- child_message_loop_proxy_->PostTask(
- FROM_HERE, base::Bind(job_record->ready_cb, output_frame));
+ if (client_)
+ client_->VideoFrameReady(job_record->bitstream_buffer.id());
}
}
-void V4L2ImageProcessor::ReuseOutputBuffer(int index) {
- DVLOG(3) << "Reusing output buffer, index=" << index;
- DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current());
-
- OutputRecord& output_record = output_buffer_map_[index];
- DCHECK(output_record.at_client);
- DCHECK(!output_record.at_device);
- output_record.at_client = false;
- free_output_buffers_.push_back(index);
-
- Enqueue();
-}
-
-bool V4L2ImageProcessor::EnqueueInputRecord() {
+bool V4L2JpegDecodeAccelerator::EnqueueInputRecord() {
DCHECK(!input_queue_.empty());
DCHECK(!free_input_buffers_.empty());
@@ -598,23 +553,20 @@ bool V4L2ImageProcessor::EnqueueInputRecord() {
const int index = free_input_buffers_.back();
InputRecord& input_record = input_buffer_map_[index];
DCHECK(!input_record.at_device);
- input_record.frame = job_record->frame;
+
+ scoped_ptr<base::SharedMemory> shm(
+ new base::SharedMemory(job_record->bitstream_buffer.handle(), true));
+ if (!shm->Map(job_record->bitstream_buffer.size())) {
+ LOG(ERROR) << "Decode(): could not map bitstream_buffer";
+ NotifyError(job_record->bitstream_buffer.id(), UNREADABLE_INPUT);
+ return false;
+ }
struct v4l2_buffer qbuf;
- struct v4l2_plane qbuf_planes[VIDEO_MAX_PLANES];
memset(&qbuf, 0, sizeof(qbuf));
- memset(qbuf_planes, 0, sizeof(qbuf_planes));
+ memcpy(input_record.address, shm->memory(), input_record.length);
qbuf.index = index;
- qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
- qbuf.memory = V4L2_MEMORY_USERPTR;
- qbuf.m.planes = qbuf_planes;
- qbuf.length = input_planes_count_;
- for (size_t i = 0; i < input_planes_count_; ++i) {
- qbuf.m.planes[i].bytesused = media::VideoFrame::PlaneAllocationSize(
- input_record.frame->format(), i, input_allocated_size_);
- qbuf.m.planes[i].length = qbuf.m.planes[i].bytesused;
- qbuf.m.planes[i].m.userptr =
- reinterpret_cast<unsigned long>(input_record.frame->data(i));
- }
+ qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
+ qbuf.memory = V4L2_MEMORY_MMAP;
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
input_record.at_device = true;
running_jobs_.push(job_record);
@@ -627,7 +579,7 @@ bool V4L2ImageProcessor::EnqueueInputRecord() {
return true;
}
-bool V4L2ImageProcessor::EnqueueOutputRecord() {
+bool V4L2JpegDecodeAccelerator::EnqueueOutputRecord() {
DCHECK(!free_output_buffers_.empty());
// Enqueue an output (VIDEO_CAPTURE) buffer.
@@ -635,14 +587,10 @@ bool V4L2ImageProcessor::EnqueueOutputRecord() {
OutputRecord& output_record = output_buffer_map_[index];
DCHECK(!output_record.at_device);
struct v4l2_buffer qbuf;
- struct v4l2_plane qbuf_planes[VIDEO_MAX_PLANES];
memset(&qbuf, 0, sizeof(qbuf));
- memset(qbuf_planes, 0, sizeof(qbuf_planes));
qbuf.index = index;
- qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
qbuf.memory = V4L2_MEMORY_MMAP;
- qbuf.m.planes = qbuf_planes;
- qbuf.length = output_planes_count_;
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
output_record.at_device = true;
free_output_buffers_.pop_back();
@@ -650,7 +598,7 @@ bool V4L2ImageProcessor::EnqueueOutputRecord() {
return true;
}
-bool V4L2ImageProcessor::StartDevicePoll() {
+bool V4L2JpegDecodeAccelerator::StartDevicePoll() {
DVLOG(3) << __func__ << ": starting device poll";
DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current());
DCHECK(!device_poll_thread_.IsRunning());
@@ -658,7 +606,7 @@ bool V4L2ImageProcessor::StartDevicePoll() {
// Start up the device poll thread and schedule its first DevicePollTask().
if (!device_poll_thread_.Start()) {
LOG(ERROR) << "StartDevicePoll(): Device thread failed to start";
- NOTIFY_ERROR();
+ NotifyError(-1, media::JpegDecodeAccelerator::PLATFORM_FAILURE);
return false;
}
// Enqueue a poll task with no devices to poll on - will wait only for the
@@ -666,33 +614,36 @@ bool V4L2ImageProcessor::StartDevicePoll() {
device_poll_thread_.message_loop()->PostTask(
FROM_HERE,
base::Bind(
- &V4L2ImageProcessor::DevicePollTask, base::Unretained(this), false));
+ &V4L2JpegDecodeAccelerator::DevicePollTask,
+ base::Unretained(this), false));
return true;
}
-bool V4L2ImageProcessor::StopDevicePoll() {
+bool V4L2JpegDecodeAccelerator::StopDevicePoll() {
DVLOG(3) << __func__ << ": stopping device poll";
if (device_thread_.IsRunning())
DCHECK_EQ(device_thread_.message_loop(), base::MessageLoop::current());
// Signal the DevicePollTask() to stop, and stop the device poll thread.
- if (!device_->SetDevicePollInterrupt())
+ if (!device_->SetDevicePollInterrupt()) {
return false;
+ }
device_poll_thread_.Stop();
// Clear the interrupt now, to be sure.
- if (!device_->ClearDevicePollInterrupt())
+ if (!device_->ClearDevicePollInterrupt()) {
return false;
+ }
if (input_streamon_) {
- __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
+ __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
}
input_streamon_ = false;
if (output_streamon_) {
- __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
}
output_streamon_ = false;
@@ -708,7 +659,6 @@ bool V4L2ImageProcessor::StopDevicePoll() {
for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
InputRecord& input_record = input_buffer_map_[i];
input_record.at_device = false;
- input_record.frame = NULL;
free_input_buffers_.push_back(i);
}
input_buffer_queued_count_ = 0;
@@ -717,8 +667,7 @@ bool V4L2ImageProcessor::StopDevicePoll() {
for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
OutputRecord& output_record = output_buffer_map_[i];
output_record.at_device = false;
- if (!output_record.at_client)
- free_output_buffers_.push_back(i);
+ free_output_buffers_.push_back(i);
}
output_buffer_queued_count_ = 0;

Powered by Google App Engine
This is Rietveld 408576698