Index: content/common/gpu/media/v4l2_jpeg_decode_accelerator.cc |
diff --git a/content/common/gpu/media/v4l2_jpeg_decode_accelerator.cc b/content/common/gpu/media/v4l2_jpeg_decode_accelerator.cc |
new file mode 100644 |
index 0000000000000000000000000000000000000000..3cd2a28413d48b458397b4e14d287c4cfd4e4856 |
--- /dev/null |
+++ b/content/common/gpu/media/v4l2_jpeg_decode_accelerator.cc |
@@ -0,0 +1,716 @@ |
+// Copyright 2015 The Chromium Authors. All rights reserved. |
+// Use of this source code is governed by a BSD-style license that can be |
+// found in the LICENSE file. |
+ |
+#include <fcntl.h> |
+#include <linux/videodev2.h> |
+#include <poll.h> |
+#include <sys/eventfd.h> |
+#include <sys/ioctl.h> |
+#include <sys/mman.h> |
+ |
+#include "base/bind.h" |
+#include "base/bind_helpers.h" |
+#include "base/callback.h" |
+#include "base/thread_task_runner_handle.h" |
+#include "content/common/gpu/media/v4l2_jpeg_decode_accelerator.h" |
+#include "media/base/bind_to_current_loop.h" |
+#include "media/base/video_frame.h" |
+ |
+#define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value) \ |
+ do { \ |
+ if (device_->Ioctl(type, arg) != 0) { \ |
+ PLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \ |
+ return value; \ |
+ } \ |
+ } while (0) |
+ |
+#define IOCTL_OR_ERROR_RETURN(type, arg) \ |
+ IOCTL_OR_ERROR_RETURN_VALUE(type, arg, ((void)0)) |
+ |
+#define IOCTL_OR_ERROR_RETURN_FALSE(type, arg) \ |
+ IOCTL_OR_ERROR_RETURN_VALUE(type, arg, false) |
wuchengli
2015/06/11 11:19:46
|type| will be incorrectly expanded. Fix this by t
henryhsu
2015/06/12 05:48:00
Done.
|
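One way to address this (a sketch of the usual pattern in the other V4L2
accelerators, not necessarily the exact change that landed): stringify the
request in the outermost wrappers, where # suppresses macro expansion of the
argument, and pass the resulting string down. The extra type_str parameter
below is illustrative.

    #define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value, type_str)      \
      do {                                                               \
        if (device_->Ioctl(type, arg) != 0) {                            \
          PLOG(ERROR) << __func__ << "(): ioctl() failed: " << type_str; \
          return value;                                                  \
        }                                                                \
      } while (0)

    #define IOCTL_OR_ERROR_RETURN(type, arg) \
      IOCTL_OR_ERROR_RETURN_VALUE(type, arg, ((void)0), #type)

    #define IOCTL_OR_ERROR_RETURN_FALSE(type, arg) \
      IOCTL_OR_ERROR_RETURN_VALUE(type, arg, false, #type)

With this arrangement #type still yields the literal request name (e.g.
"VIDIOC_QUERYCAP"), whereas stringifying inside the inner macro would
stringify the already-expanded _IO*() value whenever the call goes through a
wrapper.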
+ |
+#define IOCTL_OR_LOG_ERROR(type, arg) \ |
+ do { \ |
+ if (device_->Ioctl(type, arg) != 0) \ |
+ PLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \ |
+ } while (0) |
+ |
+#define TIME_ENTER() \ |
wuchengli
2015/06/11 11:19:46
Remove or use TRACE_EVENT0.
henryhsu
2015/06/12 05:48:00
Done.
|
+ {} |
+ |
+#define TIME_LEAVE() \ |
wuchengli
2015/06/11 11:19:46
Remove all TIME_LEAVE. Expand a macro to a scoped
henryhsu
2015/06/12 05:48:00
Done.
|
+ {} |
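A sketch of the scoped-trace replacement suggested above. TRACE_EVENT0 is
Chromium's standard tracing macro; the "jpeg" category name is illustrative,
and the include path may differ in this branch.

    #include "base/trace_event/trace_event.h"

    bool V4L2JpegDecodeAccelerator::Initialize(Client* client) {
      // One scoped trace at the top of the function replaces the
      // TIME_ENTER()/TIME_LEAVE() pair; the event closes automatically when
      // the scope exits, including on early returns.
      TRACE_EVENT0("jpeg", "V4L2JpegDecodeAccelerator::Initialize");
      // ... rest of Initialize() as below ...
    }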
+ |
+ |
+namespace content { |
+ |
+V4L2JpegDecodeAccelerator::InputRecord::InputRecord() : at_device(false) { |
+} |
+ |
+V4L2JpegDecodeAccelerator::InputRecord::~InputRecord() { |
+} |
+ |
+V4L2JpegDecodeAccelerator::OutputRecord::OutputRecord() |
+ : address(nullptr), length(0), at_device(false) { |
+} |
+ |
+V4L2JpegDecodeAccelerator::OutputRecord::~OutputRecord() { |
+} |
+ |
+V4L2JpegDecodeAccelerator::JobRecord::JobRecord( |
+ media::BitstreamBuffer bitstream_buffer, |
+ scoped_refptr<media::VideoFrame> video_frame) |
+ : bitstream_buffer(bitstream_buffer), frame(video_frame) { |
+} |
+ |
+V4L2JpegDecodeAccelerator::JobRecord::~JobRecord() { |
+} |
+ |
+V4L2JpegDecodeAccelerator::V4L2JpegDecodeAccelerator( |
+ const scoped_refptr<V4L2Device>& device, |
+ const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner) |
+ : reset_buffer_flag_(false), |
+ child_task_runner_(base::ThreadTaskRunnerHandle::Get()), |
+ io_task_runner_(io_task_runner), |
+ device_(device), |
+ decoder_thread_("V4L2JpegDecodeThread"), |
+ device_poll_thread_("V4L2JpegDecodeDevicePollThread"), |
+ input_streamon_(false), |
+ input_buffer_queued_count_(0), |
+ output_streamon_(false), |
+ output_buffer_queued_count_(0), |
+ device_weak_factory_(this) { |
+ device_weak_ = device_weak_factory_.GetWeakPtr(); |
+} |
+ |
+V4L2JpegDecodeAccelerator::~V4L2JpegDecodeAccelerator() { |
+ DCHECK(child_task_runner_->BelongsToCurrentThread()); |
+ |
+ // If the device thread is running, destroy using posted task. |
+ if (decoder_thread_.IsRunning()) { |
+ decoder_task_runner_->PostTask( |
+ FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::DestroyTask, |
+ base::Unretained(this))); |
+ // Wait for tasks to finish/early-exit. |
+ decoder_thread_.Stop(); |
+ } |
+ DCHECK(!decoder_thread_.IsRunning()); |
+ DCHECK(!device_poll_thread_.IsRunning()); |
+ DCHECK(!device_weak_factory_.HasWeakPtrs()); |
+} |
+ |
+void V4L2JpegDecodeAccelerator::DestroyTask() { |
+ DCHECK(decoder_task_runner_->BelongsToCurrentThread()); |
+ |
+ device_weak_factory_.InvalidateWeakPtrs(); |
+ // Stop streaming and the device_poll_thread_. |
+ StopDevicePoll(false); |
+ |
+ DestroyInputBuffers(); |
+ DestroyOutputBuffers(); |
+} |
+ |
+void V4L2JpegDecodeAccelerator::NotifyError(int32_t bitstream_buffer_id, |
+ Error error) { |
+ DCHECK(child_task_runner_->BelongsToCurrentThread()); |
+ LOG(ERROR) << "Notifying of error " << error << " for buffer id " |
+ << bitstream_buffer_id; |
+ DCHECK(client_); |
+ client_->NotifyError(bitstream_buffer_id, error); |
+} |
+ |
+void V4L2JpegDecodeAccelerator::NotifyErrorFromDecoderThread( |
+ int32_t bitstream_buffer_id, |
+ Error error) { |
+ DCHECK(decoder_task_runner_->BelongsToCurrentThread()); |
+ child_task_runner_->PostTask( |
+ FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::NotifyError, |
+ device_weak_, bitstream_buffer_id, error)); |
+} |
+ |
+bool V4L2JpegDecodeAccelerator::Initialize(Client* client) { |
+ TIME_ENTER(); |
+ DCHECK(child_task_runner_->BelongsToCurrentThread()); |
+ |
+ client_ = client; |
+ |
+ // Capabilities check. |
+ struct v4l2_capability caps; |
+ const __u32 kCapsRequired = V4L2_CAP_STREAMING | V4L2_CAP_VIDEO_M2M; |
+ IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps); |
+ if ((caps.capabilities & kCapsRequired) != kCapsRequired) { |
+ LOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP" |
+ ", caps check failed: 0x" << std::hex << caps.capabilities; |
+ return false; |
+ } |
+ |
+ if (!decoder_thread_.Start()) { |
+ LOG(ERROR) << "Initialize(): encoder thread failed to start"; |
+ return false; |
+ } |
+ decoder_task_runner_ = decoder_thread_.task_runner(); |
+ |
+ decoder_task_runner_->PostTask( |
+ FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::StartDevicePoll, |
+ base::Unretained(this))); |
+ |
+ DVLOG(1) << "V4L2JpegDecodeAccelerator initialized."; |
+ |
+ TIME_LEAVE(); |
+ return true; |
+} |
+ |
+void V4L2JpegDecodeAccelerator::Decode( |
+ const media::BitstreamBuffer& bitstream_buffer, |
+ const scoped_refptr<media::VideoFrame>& video_frame) { |
+ TIME_ENTER(); |
+ DVLOG(1) << "Decode(): input_id=" << bitstream_buffer.id() |
+ << ", size=" << bitstream_buffer.size(); |
+ DCHECK(io_task_runner_->BelongsToCurrentThread()); |
+ DCHECK_EQ(video_frame->format(), media::VideoFrame::I420); |
+ |
+ scoped_ptr<JobRecord> job_record( |
+ new JobRecord(bitstream_buffer, video_frame)); |
+ |
+ decoder_task_runner_->PostTask( |
+ FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::DecodeTask, |
+ base::Unretained(this), base::Passed(&job_record))); |
+ TIME_LEAVE(); |
+} |
+ |
+void V4L2JpegDecodeAccelerator::DecodeTask(scoped_ptr<JobRecord> job_record) { |
+ TIME_ENTER(); |
+ DCHECK(decoder_task_runner_->BelongsToCurrentThread()); |
+ input_queue_.push(make_linked_ptr(job_record.release())); |
+ if (!CheckBufferAttributes()) |
+ return; |
+ if (!reset_buffer_flag_) |
+ Enqueue(); |
+ TIME_LEAVE(); |
+} |
+ |
+bool V4L2JpegDecodeAccelerator::CheckBufferAttributes() { |
+ TIME_ENTER(); |
+ DVLOG(3) << __func__; |
+ DCHECK(decoder_task_runner_->BelongsToCurrentThread()); |
+ DCHECK(!input_queue_.empty()); |
+ linked_ptr<JobRecord> job_record = input_queue_.front(); |
+ uint32_t reset_input_buffer = 0, reset_output_buffer = 0; |
+ |
+  // Check that the input buffer is large enough for this bitstream. |
+ if (input_buffer_map_.empty() || |
+ job_record->bitstream_buffer.size() > input_buffer_map_.front().length) { |
+ reset_input_buffer = kResetInputBuffer; |
+ } |
+ |
+  // Check whether the image resolution and format match the previous job. |
+ if (job_record->frame->format() != output_format_ || |
+ job_record->frame->coded_size() != image_coded_size_) { |
+ size_t frame_size = media::VideoFrame::AllocationSize( |
+ job_record->frame->format(), job_record->frame->coded_size()); |
+ if (output_buffer_map_.empty() || |
+ frame_size > output_buffer_map_.front().length) { |
+ reset_output_buffer = kResetOutputBuffer; |
+ } |
+ } |
+ |
+ if (reset_input_buffer || reset_output_buffer) { |
+ if (input_streamon_ || output_streamon_) { |
+ reset_buffer_flag_ = reset_input_buffer | reset_output_buffer; |
+ ResetBuffers(); |
+ } else { |
+ image_coded_size_ = job_record->frame->coded_size(); |
+ output_format_ = job_record->frame->format(); |
+ if (!CreateInputBuffers() || !CreateOutputBuffers()) { |
+ LOG(ERROR) << "Create Input/Output buffer failed."; |
+ return false; |
+ } |
+ } |
+ } |
+ TIME_LEAVE(); |
+ return true; |
+} |
+ |
+void V4L2JpegDecodeAccelerator::ResetBuffers() { |
+ DVLOG(3) << __func__; |
+ DCHECK(decoder_task_runner_->BelongsToCurrentThread()); |
+ if (input_buffer_queued_count_ || output_buffer_queued_count_) |
+ return; |
+ |
+ if (!StopDevicePoll(true)) { |
+ LOG(ERROR) << "Stop device poll thread failed when renew buffers."; |
+ } |
+ |
+ DCHECK(!input_queue_.empty()); |
+ linked_ptr<JobRecord> job_record = input_queue_.front(); |
+ |
+ if (reset_buffer_flag_ & kResetInputBuffer) { |
+ DestroyInputBuffers(); |
+ CreateInputBuffers(); |
+ } |
+ |
+ if (reset_buffer_flag_ & kResetOutputBuffer) { |
+ DestroyOutputBuffers(); |
+ |
+ image_coded_size_ = job_record->frame->coded_size(); |
+ output_format_ = job_record->frame->format(); |
+ CreateOutputBuffers(); |
+ } |
+ |
+ reset_buffer_flag_ = 0; |
+ Enqueue(); |
+ StartDevicePoll(); |
+} |
+ |
+bool V4L2JpegDecodeAccelerator::CreateInputBuffers() { |
+ TIME_ENTER(); |
+ DVLOG(3) << __func__; |
+ DCHECK(decoder_task_runner_->BelongsToCurrentThread()); |
+ DCHECK(!input_streamon_); |
+ |
+ DCHECK(!input_queue_.empty()); |
+ linked_ptr<JobRecord> job_record = input_queue_.front(); |
+  // Reserve twice the bitstream size so later, slightly larger JPEGs can |
+  // reuse the same input buffers without a reallocation. |
+  size_t reserve_size = job_record->bitstream_buffer.size() * 2; |
+ |
+ struct v4l2_format format; |
+ memset(&format, 0, sizeof(format)); |
+ format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; |
+ format.fmt.pix.width = job_record->frame->coded_size().width(); |
+ format.fmt.pix.height = job_record->frame->coded_size().height(); |
+ format.fmt.pix.pixelformat = V4L2_PIX_FMT_JPEG; |
+ format.fmt.pix.sizeimage = reserve_size; |
+ format.fmt.pix.field = V4L2_FIELD_ANY; |
+ format.fmt.pix.bytesperline = 0; |
+ IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format); |
+ |
+ struct v4l2_requestbuffers reqbufs; |
+ memset(&reqbufs, 0, sizeof(reqbufs)); |
+ reqbufs.count = kInputBufferCount; |
+ reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; |
+ reqbufs.memory = V4L2_MEMORY_MMAP; |
+ IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs); |
+ |
+ DCHECK(input_buffer_map_.empty()); |
+ input_buffer_map_.resize(reqbufs.count); |
+ |
+ for (size_t i = 0; i < input_buffer_map_.size(); ++i) { |
+ free_input_buffers_.push_back(i); |
+ |
+ struct v4l2_buffer buffer; |
+ memset(&buffer, 0, sizeof(buffer)); |
+ buffer.index = i; |
+ buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; |
+ buffer.memory = V4L2_MEMORY_MMAP; |
+ IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer); |
+ void* address = device_->Mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, |
+ MAP_SHARED, buffer.m.offset); |
+ if (address == MAP_FAILED) { |
+ PLOG(ERROR) << "CreateInputBuffers(): mmap() failed"; |
+ return false; |
+ } |
+ input_buffer_map_[i].address = address; |
+ input_buffer_map_[i].length = buffer.length; |
+ } |
+ |
+ TIME_LEAVE(); |
+ return true; |
+} |
+ |
+bool V4L2JpegDecodeAccelerator::CreateOutputBuffers() { |
+ TIME_ENTER(); |
+ DVLOG(3) << __func__; |
+ DCHECK(decoder_task_runner_->BelongsToCurrentThread()); |
+ DCHECK(!output_streamon_); |
+ |
+ DCHECK(!input_queue_.empty()); |
+ linked_ptr<JobRecord> job_record = input_queue_.front(); |
+ |
+ size_t frame_size = media::VideoFrame::AllocationSize( |
+ output_format_, job_record->frame->coded_size()); |
+  uint32_t output_format_fourcc = V4L2_PIX_FMT_YUV420; |
+  struct v4l2_format format; |
+  memset(&format, 0, sizeof(format)); |
+  format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
+  format.fmt.pix.width = job_record->frame->coded_size().width(); |
+  format.fmt.pix.height = job_record->frame->coded_size().height(); |
+  format.fmt.pix.sizeimage = frame_size; |
+  format.fmt.pix.pixelformat = output_format_fourcc; |
+ format.fmt.pix.field = V4L2_FIELD_ANY; |
+ IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format); |
+ |
+ struct v4l2_requestbuffers reqbufs; |
+ memset(&reqbufs, 0, sizeof(reqbufs)); |
+ reqbufs.count = kOutputBufferCount; |
+ reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
+ reqbufs.memory = V4L2_MEMORY_MMAP; |
+ IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs); |
+ |
+ DCHECK(output_buffer_map_.empty()); |
+ output_buffer_map_.resize(reqbufs.count); |
+ |
+ for (size_t i = 0; i < output_buffer_map_.size(); ++i) { |
+ free_output_buffers_.push_back(i); |
+ |
+ struct v4l2_buffer buffer; |
+ memset(&buffer, 0, sizeof(buffer)); |
+ buffer.index = i; |
+ buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
+ buffer.memory = V4L2_MEMORY_MMAP; |
+ IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer); |
+ void* address = device_->Mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, |
+ MAP_SHARED, buffer.m.offset); |
+ if (address == MAP_FAILED) { |
+ PLOG(ERROR) << "CreateOutputBuffers(): mmap() failed"; |
+ return false; |
+ } |
+ output_buffer_map_[i].address = address; |
+ output_buffer_map_[i].length = buffer.length; |
+ } |
+ |
+ TIME_LEAVE(); |
+ return true; |
+} |
+ |
+void V4L2JpegDecodeAccelerator::DestroyInputBuffers() { |
+ TIME_ENTER(); |
+ DCHECK(decoder_task_runner_->BelongsToCurrentThread()); |
+ DCHECK(!input_streamon_); |
+ |
+ for (size_t buf = 0; buf < input_buffer_map_.size(); ++buf) { |
+ InputRecord& input_record = input_buffer_map_[buf]; |
+ device_->Munmap(input_record.address, input_record.length); |
+ } |
+ |
+ struct v4l2_requestbuffers reqbufs; |
+ memset(&reqbufs, 0, sizeof(reqbufs)); |
+ reqbufs.count = 0; |
+ reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; |
+ reqbufs.memory = V4L2_MEMORY_MMAP; |
+ IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs); |
+ |
+ input_buffer_map_.clear(); |
+ free_input_buffers_.clear(); |
+ TIME_LEAVE(); |
+} |
+ |
+void V4L2JpegDecodeAccelerator::DestroyOutputBuffers() { |
+ TIME_ENTER(); |
+ DCHECK(decoder_task_runner_->BelongsToCurrentThread()); |
+ DCHECK(!output_streamon_); |
+ |
+ for (size_t buf = 0; buf < output_buffer_map_.size(); ++buf) { |
+ OutputRecord& output_record = output_buffer_map_[buf]; |
+ device_->Munmap(output_record.address, output_record.length); |
+ } |
+ |
+ struct v4l2_requestbuffers reqbufs; |
+ memset(&reqbufs, 0, sizeof(reqbufs)); |
+ reqbufs.count = 0; |
+ reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
+ reqbufs.memory = V4L2_MEMORY_MMAP; |
+ IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs); |
+ |
+ output_buffer_map_.clear(); |
+ free_output_buffers_.clear(); |
+ TIME_LEAVE(); |
+} |
+ |
+void V4L2JpegDecodeAccelerator::DevicePollTask() { |
+ DCHECK(device_poll_task_runner_->BelongsToCurrentThread()); |
+ |
+ bool event_pending; |
+ TIME_ENTER(); |
+ if (!device_->Poll(true, &event_pending)) { |
+    child_task_runner_->PostTask( |
+        FROM_HERE, |
+        base::Bind(&V4L2JpegDecodeAccelerator::NotifyError, device_weak_, -1, |
+                   media::JpegDecodeAccelerator::PLATFORM_FAILURE)); |
+ return; |
+ } |
+ TIME_LEAVE(); |
+ |
+ // All processing should happen on ServiceDeviceTask(), since we shouldn't |
+  // touch decoder state from this thread. |
+ decoder_task_runner_->PostTask( |
+ FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::ServiceDeviceTask, |
+ base::Unretained(this))); |
+} |
+ |
+void V4L2JpegDecodeAccelerator::ServiceDeviceTask() { |
+ TIME_ENTER(); |
+ DCHECK(decoder_task_runner_->BelongsToCurrentThread()); |
+ // ServiceDeviceTask() should only ever be scheduled from DevicePollTask(), |
+ // so either: |
+ // * device_poll_thread_ is running normally |
+ // * device_poll_thread_ scheduled us, but then a DestroyTask() shut it down, |
+ // in which case we should early-out. |
+ if (!device_poll_thread_.IsRunning()) |
+ return; |
+ |
+ Dequeue(); |
+ Enqueue(); |
+ |
+ if (!device_->ClearDevicePollInterrupt()) { |
+ return; |
+ } |
+ |
+ device_poll_task_runner_->PostTask( |
+ FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::DevicePollTask, |
+ base::Unretained(this))); |
+ |
+ DVLOG(2) << __func__ << ": buffer counts: INPUT[" |
+ << input_queue_.size() << "] => DEVICE[" |
+ << free_input_buffers_.size() << "+" |
+ << input_buffer_queued_count_ << "/" |
+ << input_buffer_map_.size() << "->" |
+ << free_output_buffers_.size() << "+" |
+ << output_buffer_queued_count_ << "/" |
+ << output_buffer_map_.size() << "] => CLIENT[" |
+ << output_buffer_map_.size() - output_buffer_queued_count_ - |
+ free_output_buffers_.size() << "]"; |
+ TIME_LEAVE(); |
+} |
+ |
+void V4L2JpegDecodeAccelerator::Enqueue() { |
+ TIME_ENTER(); |
+ DCHECK(decoder_task_runner_->BelongsToCurrentThread()); |
+ |
+ const int old_inputs_queued = input_buffer_queued_count_; |
+ while (!input_queue_.empty() && !free_input_buffers_.empty()) { |
+ if (!EnqueueInputRecord()) |
+ return; |
+ } |
+ if (old_inputs_queued == 0 && input_buffer_queued_count_ != 0) { |
+ // Start VIDIOC_STREAMON if we haven't yet. |
+ if (!input_streamon_) { |
+ __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT; |
+ IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type); |
+ input_streamon_ = true; |
+ } |
+ } |
+ |
+ const int old_outputs_queued = output_buffer_queued_count_; |
+ while (output_buffer_queued_count_ < input_buffer_queued_count_ && |
+ !free_output_buffers_.empty()) { |
+ if (!EnqueueOutputRecord()) |
+ return; |
+ } |
+ if (old_outputs_queued == 0 && output_buffer_queued_count_ != 0) { |
+ // Start VIDIOC_STREAMON if we haven't yet. |
+ if (!output_streamon_) { |
+ __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
+ IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type); |
+ output_streamon_ = true; |
+ } |
+ } |
+ TIME_LEAVE(); |
+} |
+ |
+void V4L2JpegDecodeAccelerator::Dequeue() { |
+ DCHECK(decoder_task_runner_->BelongsToCurrentThread()); |
+ |
+ // Dequeue completed input (VIDEO_OUTPUT) buffers, |
+ // and recycle to the free list. |
+ struct v4l2_buffer dqbuf; |
+ while (input_buffer_queued_count_ > 0) { |
+ DCHECK(input_streamon_); |
+ memset(&dqbuf, 0, sizeof(dqbuf)); |
+ dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; |
+ dqbuf.memory = V4L2_MEMORY_MMAP; |
+ if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) { |
+ if (errno == EAGAIN) { |
+ // EAGAIN if we're just out of buffers to dequeue. |
+ break; |
+ } |
+ PLOG(ERROR) << "ioctl() failed: VIDIOC_DQBUF"; |
+      NotifyErrorFromDecoderThread( |
+          dqbuf.index, media::JpegDecodeAccelerator::PLATFORM_FAILURE); |
+ return; |
+ } |
+ InputRecord& input_record = input_buffer_map_[dqbuf.index]; |
+ DCHECK(input_record.at_device); |
+ input_record.at_device = false; |
+ free_input_buffers_.push_back(dqbuf.index); |
+ input_buffer_queued_count_--; |
+ } |
+ |
+ // Dequeue completed output (VIDEO_CAPTURE) buffers, recycle to the free list. |
+ // Return the finished buffer to the client via the job ready callback. |
+ while (output_buffer_queued_count_ > 0) { |
+ DCHECK(output_streamon_); |
+ memset(&dqbuf, 0, sizeof(dqbuf)); |
+ dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
+ dqbuf.memory = V4L2_MEMORY_MMAP; |
+ if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) { |
+ if (errno == EAGAIN) { |
+ // EAGAIN if we're just out of buffers to dequeue. |
+ break; |
+ } |
+ PLOG(ERROR) << "ioctl() failed: VIDIOC_DQBUF"; |
+      NotifyErrorFromDecoderThread( |
+          dqbuf.index, media::JpegDecodeAccelerator::PLATFORM_FAILURE); |
+ return; |
+ } |
+ OutputRecord& output_record = output_buffer_map_[dqbuf.index]; |
+ DCHECK(output_record.at_device); |
+ output_record.at_device = false; |
+ free_output_buffers_.push_back(dqbuf.index); |
+ output_buffer_queued_count_--; |
+ |
+ // Jobs are always processed in FIFO order. |
+ DCHECK(!running_jobs_.empty()); |
+ linked_ptr<JobRecord> job_record = running_jobs_.front(); |
+ running_jobs_.pop(); |
+ |
+ TIME_ENTER(); |
+ memcpy(job_record->frame->data(media::VideoFrame::kYPlane), |
+ output_record.address, output_record.length); |
+ |
+ DVLOG(3) << "Processing finished, returning frame, ts=" |
+ << job_record->frame->timestamp().InMilliseconds(); |
+ |
+ TIME_LEAVE(); |
+ DCHECK(client_); |
+ client_->VideoFrameReady(job_record->bitstream_buffer.id()); |
+ if (reset_buffer_flag_) |
+ ResetBuffers(); |
+ } |
+} |
+ |
+bool V4L2JpegDecodeAccelerator::EnqueueInputRecord() { |
+ TIME_ENTER(); |
+ DCHECK(!input_queue_.empty()); |
+ DCHECK(!free_input_buffers_.empty()); |
+ |
+  // Enqueue an input (VIDEO_OUTPUT) buffer for the next JPEG bitstream. |
+ linked_ptr<JobRecord> job_record = input_queue_.front(); |
+ input_queue_.pop(); |
+ const int index = free_input_buffers_.back(); |
+ InputRecord& input_record = input_buffer_map_[index]; |
+ DCHECK(!input_record.at_device); |
+ |
+ scoped_ptr<base::SharedMemory> shm( |
+ new base::SharedMemory(job_record->bitstream_buffer.handle(), true)); |
+ if (!shm->Map(job_record->bitstream_buffer.size())) { |
+ LOG(ERROR) << "Decode(): could not map bitstream_buffer"; |
+ NotifyError(job_record->bitstream_buffer.id(), UNREADABLE_INPUT); |
+ return false; |
+ } |
+ struct v4l2_buffer qbuf; |
+ memset(&qbuf, 0, sizeof(qbuf)); |
+ memcpy(input_record.address, shm->memory(), |
+ job_record->bitstream_buffer.size()); |
+ qbuf.index = index; |
+ qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; |
+ qbuf.memory = V4L2_MEMORY_MMAP; |
+ IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf); |
+ input_record.at_device = true; |
+ running_jobs_.push(job_record); |
+ free_input_buffers_.pop_back(); |
+ input_buffer_queued_count_++; |
+ |
+ DVLOG(3) << __func__ << ": enqueued frame ts=" |
+ << job_record->frame->timestamp().InMilliseconds() << " to device."; |
+ |
+ TIME_LEAVE(); |
+ return true; |
+} |
+ |
+bool V4L2JpegDecodeAccelerator::EnqueueOutputRecord() { |
+ TIME_ENTER(); |
+ DCHECK(!free_output_buffers_.empty()); |
+ |
+ // Enqueue an output (VIDEO_CAPTURE) buffer. |
+ const int index = free_output_buffers_.back(); |
+ OutputRecord& output_record = output_buffer_map_[index]; |
+ DCHECK(!output_record.at_device); |
+ struct v4l2_buffer qbuf; |
+ memset(&qbuf, 0, sizeof(qbuf)); |
+ qbuf.index = index; |
+ qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
+ qbuf.memory = V4L2_MEMORY_MMAP; |
+ IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf); |
+ output_record.at_device = true; |
+ free_output_buffers_.pop_back(); |
+ output_buffer_queued_count_++; |
+ TIME_LEAVE(); |
+ return true; |
+} |
+ |
+void V4L2JpegDecodeAccelerator::StartDevicePoll() { |
+ TIME_ENTER(); |
+ DVLOG(3) << __func__ << ": starting device poll"; |
+ DCHECK(decoder_task_runner_->BelongsToCurrentThread()); |
+ DCHECK(!device_poll_thread_.IsRunning()); |
+ |
+ // Start up the device poll thread and schedule its first DevicePollTask(). |
+ if (!device_poll_thread_.Start()) { |
+ LOG(ERROR) << "StartDevicePoll(): Device thread failed to start"; |
+    NotifyErrorFromDecoderThread( |
+        -1, media::JpegDecodeAccelerator::PLATFORM_FAILURE); |
+ return; |
+ } |
+ device_poll_task_runner_ = device_poll_thread_.task_runner(); |
+ |
+ // Start a poll task and will wait only for the poll interrupt. |
kcwu
2015/06/09 12:17:44
We always poll with poll_device=true now, so it no
henryhsu
2015/06/12 05:48:00
Removed this from here
|
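For context: DevicePollTask() above calls device_->Poll(true, &event_pending),
i.e. it waits on the device fd in addition to the poll interrupt, so the
comment here no longer matches the behavior. The helper's shape, as inferred
from that call site (shown only for reference, not a new API):

    // In V4L2Device (inferred): waits on the device fd when |poll_device| is
    // true, and on the poll interrupt that SetDevicePollInterrupt() signals;
    // returns false on error.
    bool Poll(bool poll_device, bool* event_pending);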
+ device_poll_task_runner_->PostTask( |
+ FROM_HERE, base::Bind(&V4L2JpegDecodeAccelerator::DevicePollTask, |
+ base::Unretained(this))); |
+ |
+ TIME_LEAVE(); |
+} |
+ |
+bool V4L2JpegDecodeAccelerator::StopDevicePoll(bool keep_input_queue) { |
+ TIME_ENTER(); |
+ DVLOG(3) << __func__ << ": stopping device poll"; |
+ if (decoder_thread_.IsRunning()) |
+ DCHECK(decoder_task_runner_->BelongsToCurrentThread()); |
+ |
+ // Signal the DevicePollTask() to stop, and stop the device poll thread. |
+ if (!device_->SetDevicePollInterrupt()) |
+ return false; |
+ |
+ device_poll_thread_.Stop(); |
+ |
+ // Clear the interrupt now, to be sure. |
+ if (!device_->ClearDevicePollInterrupt()) |
+ return false; |
+ |
+ if (input_streamon_) { |
+ __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT; |
+ IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type); |
+ } |
+ input_streamon_ = false; |
+ |
+ if (output_streamon_) { |
+ __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
+ IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type); |
+ } |
+ output_streamon_ = false; |
+ |
+ // Reset all our accounting info. |
+ if (!keep_input_queue) { |
+ while (!input_queue_.empty()) |
+ input_queue_.pop(); |
+ } |
+ |
+ while (!running_jobs_.empty()) |
+ running_jobs_.pop(); |
+ |
+ free_input_buffers_.clear(); |
+ for (size_t i = 0; i < input_buffer_map_.size(); ++i) { |
+ InputRecord& input_record = input_buffer_map_[i]; |
+ input_record.at_device = false; |
+ free_input_buffers_.push_back(i); |
+ } |
+ input_buffer_queued_count_ = 0; |
+ |
+ free_output_buffers_.clear(); |
+ for (size_t i = 0; i < output_buffer_map_.size(); ++i) { |
+ OutputRecord& output_record = output_buffer_map_[i]; |
+ output_record.at_device = false; |
+ free_output_buffers_.push_back(i); |
+ } |
+ output_buffer_queued_count_ = 0; |
+ |
+ TIME_LEAVE(); |
+ return true; |
+} |
+ |
+} // namespace content |