Chromium Code Reviews
| Index: media/video/capture/linux/v4l2_video_capture_delegate.cc |
| diff --git a/media/video/capture/linux/v4l2_video_capture_delegate.cc b/media/video/capture/linux/v4l2_video_capture_delegate.cc |
| new file mode 100644 |
| index 0000000000000000000000000000000000000000..3ba1113d338cb6e69dddcb65bee4a97d58c68bdb |
| --- /dev/null |
| +++ b/media/video/capture/linux/v4l2_video_capture_delegate.cc |
| @@ -0,0 +1,605 @@ |
| +// Copyright 2015 The Chromium Authors. All rights reserved. |
| +// Use of this source code is governed by a BSD-style license that can be |
| +// found in the LICENSE file. |
| + |
| +#include "media/video/capture/linux/v4l2_video_capture_delegate.h" |
| + |
| +#include <poll.h> |
| +#include <sys/fcntl.h> |
| +#include <sys/ioctl.h> |
| +#include <sys/mman.h> |
| + |
| +#include "base/bind.h" |
| +#include "base/files/file_enumerator.h" |
| +#include "base/posix/eintr_wrapper.h" |
| +#include "base/strings/stringprintf.h" |
| +#include "media/base/bind_to_current_loop.h" |
| +#include "media/video/capture/linux/video_capture_device_linux.h" |
| + |
| +namespace media { |
| + |
| +// Max number of video buffers VideoCaptureDeviceLinux can allocate. |
| +const uint32_t kMaxVideoBuffers = 2; |
| +// Timeout in milliseconds that v4l2_thread_ blocks waiting for a frame from the hardware. |
| +const int kCaptureTimeoutMs = 200; |
| +// The number of consecutive timeouts tolerated before being treated as an error. |
| +const int kContinuousTimeoutLimit = 10; |
| +// MJPEG is preferred if the requested width or height is larger than this. |
| +const int kMjpegWidth = 640; |
| +const int kMjpegHeight = 480; |
| +// Typical framerate, in frames per second. |
| +const int kTypicalFramerate = 30; |
| + |
| +// V4L2 color formats supported by V4L2CaptureDelegateSinglePlane. This list is |
| +// ordered by precedence of use. |
| +static const uint32_t kSinglePlaneSupportedFormats[] = { |
| + V4L2_PIX_FMT_YUV420, |
| + V4L2_PIX_FMT_YUYV, |
| + V4L2_PIX_FMT_UYVY, |
| +    // According to field reports, JPEG works as MJPEG on some gspca webcams. |
| + V4L2_PIX_FMT_JPEG}; |
| + |
| +// List of supported formats and their respective number of sub-buffers |
| +// (planes) for V4L2CaptureDelegateMultiPlane. |
| +static const struct { |
| + uint32_t fourcc; |
| + size_t num_planes; |
| +} kMultiPlaneSupportedFormats[] = { |
| + {V4L2_PIX_FMT_YUV420M, 3} |
| + // TODO(mcasas): add V4L2_PIX_FMT_YVU420M when available in bots. |
| +}; |
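As a worked illustration of why V4L2_PIX_FMT_YUV420M maps to three sub-buffers (editorial sketch, not part of the CL): the Y plane is full resolution while U and V are subsampled 2x2 in both directions.

// Illustrative only: per-plane sizes for V4L2_PIX_FMT_YUV420M at 640x480,
// assuming tightly packed rows (no driver padding).
#include <cstdio>

int main() {
  const int width = 640, height = 480;
  const int y_bytes = width * height;              // 307200
  const int u_bytes = (width / 2) * (height / 2);  // 76800
  const int v_bytes = u_bytes;                     // 76800
  printf("Y=%d U=%d V=%d\n", y_bytes, u_bytes, v_bytes);
  return 0;
}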
| + |
| +// Returns the input fourcc as its four-character std::string representation. |
| +static std::string FourccToString(uint32_t fourcc) { |
| + return base::StringPrintf("%c%c%c%c", fourcc & 0xFF, (fourcc >> 8) & 0xFF, |
| + (fourcc >> 16) & 0xFF, (fourcc >> 24) & 0xFF); |
| +} |
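For reference, a standalone sketch (editorial, not part of the CL) of how a V4L2 fourcc unpacks byte by byte, matching FourccToString() above; V4L2 stores the first character in the least significant byte.

// Prints "YUYV" when compiled against the V4L2 headers.
#include <linux/videodev2.h>
#include <cstdint>
#include <cstdio>

int main() {
  const uint32_t fourcc = V4L2_PIX_FMT_YUYV;  // v4l2_fourcc('Y','U','Y','V')
  printf("%c%c%c%c\n",
         static_cast<int>(fourcc & 0xFF),
         static_cast<int>((fourcc >> 8) & 0xFF),
         static_cast<int>((fourcc >> 16) & 0xFF),
         static_cast<int>((fourcc >> 24) & 0xFF));
  return 0;
}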
| + |
| +static std::list<uint32_t> GetListOfUsableFourCcsSinglePlane() { |
| + return std::list<uint32_t>( |
| + kSinglePlaneSupportedFormats, |
| + kSinglePlaneSupportedFormats + arraysize(kSinglePlaneSupportedFormats)); |
| +} |
| + |
| +static size_t GetNumPlanesForFourCc(uint32_t fourcc) { |
| + for (const auto& fourcc_and_pixel_format : kMultiPlaneSupportedFormats) { |
| + if (fourcc_and_pixel_format.fourcc == fourcc) |
| + return fourcc_and_pixel_format.num_planes; |
| + } |
| + NOTREACHED() << "Unknown fourcc " << FourccToString(fourcc); |
| + return 0; |
| +} |
| + |
| +static std::list<uint32_t> GetListOfUsableFourCcsMultiPlane() { |
| + std::list<uint32_t> supported_formats; |
| + for (const auto& i : kMultiPlaneSupportedFormats) |
| + supported_formats.push_back(i.fourcc); |
| + return supported_formats; |
| +} |
| + |
| +// V4L2 specifics for SPLANE API. |
| +class V4L2CaptureDelegateSinglePlane final : public V4L2VideoCaptureDelegate { |
| + public: |
| + V4L2CaptureDelegateSinglePlane( |
| + const VideoCaptureDevice::Name& device_name, |
| + const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner, |
| + int power_line_frequency) |
| + : V4L2VideoCaptureDelegate(device_name, |
| + v4l2_task_runner, |
| + power_line_frequency) {} |
| + |
| + private: |
| + // BufferTracker derivation to implement construction semantics for SPLANE. |
| + class BufferTrackerSPlane final : public BufferTracker { |
| + public: |
| + BufferTrackerSPlane(int fd, const v4l2_buffer& buffer); |
|
Pawel Osciak
2015/03/06 10:43:54
We need Initialize() methods as I mention elsewher
mcasas
2015/03/09 21:23:56
Done.
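For context, a rough sketch of the two-phase construction being requested here (editorial; the class name and member layout are illustrative, not the follow-up patch): keep the constructor trivial and move the mmap() work into an Init() that can report failure.

#include <sys/mman.h>
#include <linux/videodev2.h>
#include <cstddef>

class BufferTrackerSketch {
 public:
  BufferTrackerSketch() {}
  // Returns false if mapping the buffer fails, so the caller can bail out
  // instead of keeping a half-constructed tracker around.
  bool Init(int fd, const v4l2_buffer& buffer) {
    void* const start = mmap(nullptr, buffer.length, PROT_READ | PROT_WRITE,
                             MAP_SHARED, fd, buffer.m.offset);
    if (start == MAP_FAILED)
      return false;
    start_ = start;
    length_ = buffer.length;
    return true;
  }

 private:
  void* start_ = nullptr;
  size_t length_ = 0;
};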
|
| + |
| + private: |
| +    ~BufferTrackerSPlane() override {} |
| + }; |
| + |
| +  ~V4L2CaptureDelegateSinglePlane() override {} |
| + |
| + // V4L2VideoCaptureDelegate virtual methods implementation. |
| + scoped_refptr<BufferTracker> CreateBufferTracker( |
| + int fd, |
| + const v4l2_buffer& buffer) override; |
| + void FillV4L2Format(v4l2_format* format, |
| + uint32_t width, |
| + uint32_t height, |
| + uint32_t pixelformat_fourcc) override; |
| + void FinishFillingV4L2Buffer(v4l2_buffer* buffer) override {} |
| + void SendBuffer(const v4l2_buffer& buffer) override; |
| +}; |
| + |
| +// V4L2 specifics for MPLANE API. |
| +class V4L2CaptureDelegateMultiPlane final : public V4L2VideoCaptureDelegate { |
| + public: |
| + V4L2CaptureDelegateMultiPlane( |
| + const VideoCaptureDevice::Name& device_name, |
| + const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner, |
| + int power_line_frequency) |
| + : V4L2VideoCaptureDelegate(device_name, |
| + v4l2_task_runner, |
| + power_line_frequency), |
| + fourcc_(0), |
| + num_planes_(0) {} |
| + |
| + private: |
| + // BufferTracker derivation to implement construction semantics for MPLANE. |
| + class BufferTrackerMPlane final : public BufferTracker { |
| + public: |
| + BufferTrackerMPlane(int fd, const v4l2_buffer& buffer); |
| + |
| + private: |
| +    ~BufferTrackerMPlane() override {} |
| + }; |
| + |
| +  ~V4L2CaptureDelegateMultiPlane() override {} |
| + |
| + // V4L2VideoCaptureDelegate virtual methods implementation. |
| + scoped_refptr<BufferTracker> CreateBufferTracker( |
| + int fd, |
| + const v4l2_buffer& buffer) override; |
| + void FillV4L2Format(v4l2_format* format, |
| + uint32_t width, |
| + uint32_t height, |
| + uint32_t pixelformat_fourcc) override; |
| + void FinishFillingV4L2Buffer(v4l2_buffer* buffer) override; |
| + void SendBuffer(const v4l2_buffer& buffer) override; |
| + |
| + // Actual pixel format and number of planes, known after FillV4L2Format(). |
| + uint32_t fourcc_; |
| + size_t num_planes_; |
| + |
| +  // scoped_ptr that allocates and tracks as many v4l2_plane structs as |
| +  // there are planes; needed inside v4l2_buffer. |
| + scoped_ptr<struct v4l2_plane[]> v4l2_plane_; |
| +}; |
| + |
| +// static |
| +scoped_refptr<V4L2VideoCaptureDelegate> |
| +V4L2VideoCaptureDelegate::CreateV4L2VideoCaptureDelegate( |
| + const VideoCaptureDevice::Name& device_name, |
| + const VideoPixelFormat pixel_format, |
| + const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner, |
| + int power_line_frequency) { |
| + switch (device_name.capture_api_type()) { |
| + case VideoCaptureDevice::Name::V4L2_SINGLE_PLANE: |
| + return make_scoped_refptr(new V4L2CaptureDelegateSinglePlane( |
| + device_name, v4l2_task_runner, power_line_frequency)); |
| + case VideoCaptureDevice::Name::V4L2_MULTI_PLANE: |
| + if (pixel_format != PIXEL_FORMAT_I420) |
|
Pawel Osciak
2015/03/06 10:43:54
This may bitrot. Please instead look up pixel_form
mcasas
2015/03/09 21:23:56
I think this early-bail-out is not really needed a
Pawel Osciak
2015/03/13 09:52:52
Acknowledged.
|
| + return scoped_refptr<V4L2VideoCaptureDelegate>(); |
| + return make_scoped_refptr(new V4L2CaptureDelegateMultiPlane( |
| + device_name, v4l2_task_runner, power_line_frequency)); |
| + default: |
| + NOTIMPLEMENTED() << "Unknown V4L2 capture API type"; |
| + return scoped_refptr<V4L2VideoCaptureDelegate>(); |
| + } |
| +} |
| + |
| +// static |
| +VideoPixelFormat V4L2VideoCaptureDelegate::V4l2FourCcToChromiumPixelFormat( |
| + uint32_t v4l2_fourcc) { |
| + const struct { |
| + uint32_t fourcc; |
| + VideoPixelFormat pixel_format; |
| + } kFourCcAndChromiumPixelFormats[] = { |
| + {V4L2_PIX_FMT_YUV420M, PIXEL_FORMAT_I420}, |
| + {V4L2_PIX_FMT_YUV420, PIXEL_FORMAT_I420}, |
| + {V4L2_PIX_FMT_YUYV, PIXEL_FORMAT_YUY2}, |
| + {V4L2_PIX_FMT_UYVY, PIXEL_FORMAT_UYVY}, |
| + {V4L2_PIX_FMT_MJPEG, PIXEL_FORMAT_MJPEG}, |
| + {V4L2_PIX_FMT_JPEG, PIXEL_FORMAT_MJPEG}, |
| + }; |
| + for (const auto& fourcc_and_pixel_format : kFourCcAndChromiumPixelFormats) { |
| + if (fourcc_and_pixel_format.fourcc == v4l2_fourcc) |
| + return fourcc_and_pixel_format.pixel_format; |
| + } |
| + DVLOG(1) << "Unsupported pixel format: " << FourccToString(v4l2_fourcc); |
| + return PIXEL_FORMAT_UNKNOWN; |
| +} |
| + |
| +// static |
| +std::list<uint32_t> V4L2VideoCaptureDelegate::GetListOfUsableFourCss( |
|
emircan
2015/03/04 02:47:47
/s/FourCss/FourCcs/
mcasas
2015/03/09 21:23:56
Done.
|
| + bool prefer_mjpeg) { |
| + std::list<uint32_t> singleplane_formats = GetListOfUsableFourCcsSinglePlane(); |
| + std::list<uint32_t> multiplane_formats = GetListOfUsableFourCcsMultiPlane(); |
| + multiplane_formats.insert(multiplane_formats.end(), |
| + singleplane_formats.begin(), |
| + singleplane_formats.end()); |
| + // Add MJPEG to the front or the back of the list depending on |prefer_mjpeg|. |
| + multiplane_formats.insert( |
|
Pawel Osciak
2015/03/06 10:43:54
if (prefer_mjpeg)
multiplane_formats.push_front(
mcasas
2015/03/09 21:23:56
Done.
|
| + (prefer_mjpeg ? multiplane_formats.begin() : multiplane_formats.end()), |
| + V4L2_PIX_FMT_MJPEG); |
|
Pawel Osciak
2015/03/06 10:43:55
This is different from existing code, which always
mcasas
2015/03/09 21:23:56
Done.
|
| + return multiplane_formats; |
| +} |
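For reference, a sketch of roughly what the push_front()/push_back() shape suggested in the review above could look like (editorial; it reuses the file-local helpers defined earlier, and the actual follow-up patch may differ):

// Same behaviour as the insert() above, spelled with push_front()/push_back().
std::list<uint32_t> GetListOfUsableFourCcsSketch(bool prefer_mjpeg) {
  std::list<uint32_t> formats = GetListOfUsableFourCcsMultiPlane();
  const std::list<uint32_t> singleplane = GetListOfUsableFourCcsSinglePlane();
  formats.insert(formats.end(), singleplane.begin(), singleplane.end());
  if (prefer_mjpeg)
    formats.push_front(V4L2_PIX_FMT_MJPEG);
  else
    formats.push_back(V4L2_PIX_FMT_MJPEG);
  return formats;
}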
| + |
| +V4L2VideoCaptureDelegate::BufferTracker::BufferTracker( |
|
Pawel Osciak
2015/03/06 10:43:55
This is not needed?
mcasas
2015/03/09 21:23:56
Needed due to:
[chromium-style] Complex class/str
|
| + int fd, |
| + const v4l2_buffer& buffer) { |
| +} |
| + |
| +V4L2VideoCaptureDelegate::BufferTracker::~BufferTracker() { |
| + for (const auto& plane : planes_) { |
| + if (plane->start == NULL) |
| + continue; |
| + const int result = munmap(plane->start, plane->length); |
| + PLOG_IF(ERROR, result < 0) << "Error munmap()ing V4L2 buffer"; |
| + } |
| +} |
| + |
| +V4L2VideoCaptureDelegate::V4L2VideoCaptureDelegate( |
| + const VideoCaptureDevice::Name& device_name, |
| + const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner, |
| + int power_line_frequency) |
| + : capture_type_((device_name.capture_api_type() == |
| + VideoCaptureDevice::Name::V4L2_SINGLE_PLANE) |
| + ? V4L2_BUF_TYPE_VIDEO_CAPTURE |
| + : V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE), |
| + v4l2_task_runner_(v4l2_task_runner), |
| + device_name_(device_name), |
| + power_line_frequency_(power_line_frequency), |
| + is_capturing_(false), |
| + timeout_count_(0), |
| + rotation_(0) { |
| +} |
| + |
| +void V4L2VideoCaptureDelegate::AllocateAndStart( |
| + int width, |
| + int height, |
| + float frame_rate, |
| + scoped_ptr<VideoCaptureDevice::Client> client) { |
| + DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); |
| + DCHECK(client); |
| + client_ = client.Pass(); |
| + |
| +  // Since Linux kernel 3.3, the camera must be opened with O_RDWR. |
| + device_fd_.reset(HANDLE_EINTR(open(device_name_.id().c_str(), O_RDWR))); |
| + if (!device_fd_.is_valid()) { |
| + SetErrorState("Failed to open V4L2 device driver file."); |
| + return; |
| + } |
| + |
| + v4l2_capability cap = {}; |
| + if (!((HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QUERYCAP, &cap)) == 0) && |
| + ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE || |
| + cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) && |
| + !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT) && |
| + !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT_MPLANE)))) { |
| + device_fd_.reset(); |
| + SetErrorState("This is not a V4L2 video capture device"); |
| + return; |
| + } |
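As a standalone illustration of the capability check above (editorial sketch with no Chromium dependencies; the /dev/video0 path is an assumption):

#include <fcntl.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/videodev2.h>
#include <cstdio>

int main() {
  const int fd = open("/dev/video0", O_RDWR);
  if (fd < 0) {
    perror("open");
    return 1;
  }
  v4l2_capability cap = {};
  if (ioctl(fd, VIDIOC_QUERYCAP, &cap) != 0) {
    perror("VIDIOC_QUERYCAP");
    close(fd);
    return 1;
  }
  // Same predicate as above: a (single- or multi-planar) capture device that
  // does not also expose video output.
  const bool is_capture =
      cap.capabilities & (V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_CAPTURE_MPLANE);
  const bool is_output =
      cap.capabilities & (V4L2_CAP_VIDEO_OUTPUT | V4L2_CAP_VIDEO_OUTPUT_MPLANE);
  printf("driver=%s card=%s usable=%d\n", cap.driver, cap.card,
         is_capture && !is_output);
  close(fd);
  return 0;
}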
| + |
| + // Get supported video formats in preferred order. |
| + // For large resolutions, favour mjpeg over raw formats. |
| + const std::list<uint32_t>& desired_v4l2_formats = |
| + GetListOfUsableFourCss(width > kMjpegWidth || height > kMjpegHeight); |
| + std::list<uint32_t>::const_iterator best = desired_v4l2_formats.end(); |
| + |
| + v4l2_fmtdesc fmtdesc = {}; |
| + fmtdesc.type = capture_type_; |
| + for (; HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_ENUM_FMT, &fmtdesc)) == 0; |
| + ++fmtdesc.index) { |
| + best = std::find(desired_v4l2_formats.begin(), best, fmtdesc.pixelformat); |
|
Pawel Osciak
2015/03/06 10:43:55
Maybe I'm missing something, but I think this will
mcasas
2015/03/09 21:23:56
According to the fact that it works and the list o
Pawel Osciak
2015/03/13 09:52:52
Acknowledged and true. Sorry.
|
| + } |
|
emircan
2015/03/04 02:47:47
You can end the iteration when (best == desired_v4
mcasas
2015/03/09 21:23:56
That would be an overoptimization, wouldn't it? M
|
| + if (best == desired_v4l2_formats.end()) { |
| + SetErrorState("Failed to find a supported camera format."); |
| + return; |
| + } |
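A small editorial sketch (plain ints, not the CL's types) of why the std::find() call above converges on the most preferred format: each iteration searches only up to the current |best|, so |best| can only move towards the front of the precedence list.

#include <algorithm>
#include <cassert>
#include <initializer_list>
#include <list>

void FindBestSketch() {
  const std::list<int> desired = {1, 2, 3};  // 1 = highest precedence.
  auto best = desired.end();
  for (const int enumerated : {3, 1, 2})     // Order the driver reports them.
    best = std::find(desired.begin(), best, enumerated);
  assert(*best == 1);  // The most preferred format the driver offers.
}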
| + |
| + DVLOG(1) << "chosen pixel format is " << FourccToString(*best); |
| + |
| + v4l2_format video_fmt = {}; |
| + video_fmt.type = capture_type_; |
| + FillV4L2Format(&video_fmt, width, height, *best); |
| + if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_FMT, &video_fmt)) < 0) { |
| + SetErrorState("Failed to set video capture format"); |
| + return; |
| + } |
| + |
| + // Set capture framerate in the form of capture interval. |
| + v4l2_streamparm streamparm = {}; |
| + streamparm.type = capture_type_; |
| + // The following line checks that the driver knows about framerate get/set. |
| + if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_G_PARM, &streamparm)) >= 0) { |
| + // Now check if the device is able to accept a capture framerate set. |
| + if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) { |
| + // |frame_rate| is float, approximate by a fraction. |
| + streamparm.parm.capture.timeperframe.numerator = |
| + media::kFrameRatePrecision; |
| + streamparm.parm.capture.timeperframe.denominator = |
| + (frame_rate) ? (frame_rate * media::kFrameRatePrecision) |
| + : (kTypicalFramerate * media::kFrameRatePrecision); |
| + |
| + if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_PARM, &streamparm)) < |
| + 0) { |
| + SetErrorState("Failed to set camera framerate"); |
| + return; |
| + } |
| + DVLOG(2) << "Actual camera driverframerate: " |
| + << streamparm.parm.capture.timeperframe.denominator << "/" |
| + << streamparm.parm.capture.timeperframe.numerator; |
| + } |
| + } |
| +  // TODO(mcasas): what should be done if the camera driver does not allow |
| +  // framerate configuration, or if the actual framerate differs from the |
| +  // desired one? |
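For intuition on the fraction above (editorial sketch; the precision constant here is a stand-in, not the real value of media::kFrameRatePrecision): V4L2 takes the frame *interval* as a numerator/denominator pair in seconds, so scaling both terms by the same factor approximates a fractional rate.

#include <linux/videodev2.h>
#include <cstdio>

int main() {
  const float frame_rate = 29.97f;
  const unsigned precision = 10000;  // Stand-in precision factor.
  v4l2_fract interval = {};
  interval.numerator = precision;
  interval.denominator =
      static_cast<__u32>(frame_rate * precision + 0.5f);  // ~299700
  printf("timeperframe = %u/%u s (= %.5f s per frame)\n", interval.numerator,
         interval.denominator,
         static_cast<double>(interval.numerator) / interval.denominator);
  return 0;
}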
| + |
| +  // Set anti-banding/anti-flicker to 50/60Hz. This may fail because the |
| +  // operation is not supported (|errno| == EINVAL in that case) or due to |
| +  // a plain failure. |
| + if ((power_line_frequency_ == V4L2_CID_POWER_LINE_FREQUENCY_50HZ) || |
| + (power_line_frequency_ == V4L2_CID_POWER_LINE_FREQUENCY_60HZ)) { |
| + struct v4l2_control control = {}; |
| + control.id = V4L2_CID_POWER_LINE_FREQUENCY; |
| + control.value = power_line_frequency_; |
| + HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_CTRL, &control)); |
| + } |
| + |
| + capture_format_.frame_size.SetSize(video_fmt.fmt.pix.width, |
| + video_fmt.fmt.pix.height); |
| + capture_format_.frame_rate = frame_rate; |
| + capture_format_.pixel_format = |
| + V4l2FourCcToChromiumPixelFormat(video_fmt.fmt.pix.pixelformat); |
|
Pawel Osciak
2015/03/06 10:43:55
This needs to be checked for success, we don't wan
mcasas
2015/03/09 21:23:56
Done, but moved a bit farther above.
|
| + |
| + if (!AllocateVideoBuffers()) { |
| + SetErrorState("Allocate buffer failed"); |
| + return; |
| + } |
| + |
| + if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMON, &capture_type_)) |
| + < 0) { |
| + SetErrorState("VIDIOC_STREAMON failed"); |
| + return; |
| + } |
| + |
| + is_capturing_ = true; |
| + // Post task to start fetching frames from v4l2. |
| + v4l2_task_runner_->PostTask( |
| + FROM_HERE, base::Bind(&V4L2VideoCaptureDelegate::DoCapture, this)); |
| +} |
| + |
| +void V4L2VideoCaptureDelegate::StopAndDeAllocate() { |
| + DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); |
| +  // The order is important: stop streaming, clear |buffer_tracker_pool_| |
| +  // (thus munmap()ing the v4l2_buffers), and then return them to the OS. |
| + if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMOFF, &capture_type_)) |
| + < 0) { |
| + SetErrorState("VIDIOC_STREAMOFF failed"); |
| + return; |
| + } |
| + |
| + buffer_tracker_pool_.clear(); |
| + |
| + v4l2_requestbuffers r_buffer = {}; |
| + r_buffer.type = capture_type_; |
| + r_buffer.memory = V4L2_MEMORY_MMAP; |
| + r_buffer.count = 0; |
| + if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) < 0) |
| + SetErrorState("Failed to VIDIOC_REQBUFS with count = 0"); |
| + |
| + // At this point we can close the device. |
| + // This is also needed for correctly changing settings later via VIDIOC_S_FMT. |
| + device_fd_.reset(); |
| + is_capturing_ = false; |
| + client_.reset(); |
| +} |
| + |
| +void V4L2VideoCaptureDelegate::SetRotation(int rotation) { |
| + DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); |
| + DCHECK(rotation >= 0 && rotation < 360 && rotation % 90 == 0); |
| + rotation_ = rotation; |
| +} |
| + |
| +bool V4L2VideoCaptureDelegate::AllocateVideoBuffers() { |
| + v4l2_requestbuffers r_buffer = {}; |
| + r_buffer.type = capture_type_; |
| + r_buffer.memory = V4L2_MEMORY_MMAP; |
| + r_buffer.count = kMaxVideoBuffers; |
| + if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) < 0) { |
| + DLOG(ERROR) << "Error requesting MMAP buffers from V4L2"; |
| + return false; |
| + } |
| + DCHECK_EQ(r_buffer.count, kMaxVideoBuffers); |
| + r_buffer.count = std::min(r_buffer.count, kMaxVideoBuffers); |
| + for (unsigned int i = 0; i < r_buffer.count; ++i) { |
| + v4l2_buffer buffer = {}; |
| + buffer.type = capture_type_; |
| + buffer.memory = V4L2_MEMORY_MMAP; |
| + buffer.index = i; |
| + FinishFillingV4L2Buffer(&buffer); |
| + |
| + if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QUERYBUF, &buffer)) < 0) { |
| + DLOG(ERROR) << "Error querying status of a MMAP V4L2 buffer"; |
| + return false; |
| + } |
|
Pawel Osciak
2015/03/06 10:43:54
As described in h, this all could go to CreateBuff
mcasas
2015/03/09 21:23:56
Acknowledged.
|
| + |
| + buffer_tracker_pool_.push_back(CreateBufferTracker(device_fd_.get(), |
| + buffer)); |
| + |
| +    // Enqueue the buffer in the driver's incoming queue. |
| + if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) < 0) { |
| + DLOG(ERROR) << "Error enqueuing a V4L2 buffer back into the driver"; |
| + return false; |
| + } |
| + } |
| + return true; |
| +} |
| + |
| +void V4L2VideoCaptureDelegate::DoCapture() { |
| + DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); |
| + if (!is_capturing_) |
| + return; |
| + |
| + pollfd device_pfd = {}; |
| + device_pfd.fd = device_fd_.get(); |
| + device_pfd.events = POLLIN; |
| + const int result = HANDLE_EINTR(poll(&device_pfd, 1, kCaptureTimeoutMs)); |
| + if (result < 0) { |
| + SetErrorState("Poll failed"); |
| + return; |
| + } |
| +  // Check if poll() timed out; track the number of consecutive timeouts |
| +  // and report an error if it times out too many times in a row. |
| + if (result == 0) { |
| + timeout_count_++; |
| + if (timeout_count_ >= kContinuousTimeoutLimit) { |
| + SetErrorState("Multiple continuous timeouts while read-polling."); |
| + timeout_count_ = 0; |
| + return; |
| + } |
| + } else { |
| + timeout_count_ = 0; |
| + } |
| + |
| +  // Dequeue, send and re-enqueue a buffer if the driver has filled one in. |
| + if (device_pfd.revents & POLLIN) { |
| + v4l2_buffer buffer = {}; |
| + buffer.type = capture_type_; |
| + buffer.memory = V4L2_MEMORY_MMAP; |
| + buffer.index = 0; |
| + FinishFillingV4L2Buffer(&buffer); |
|
Pawel Osciak
2015/03/06 10:43:54
This would just be FillV4L2Buffer().
mcasas
2015/03/09 21:23:56
Done.
|
| + |
| + if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_DQBUF, &buffer)) < 0) { |
| + SetErrorState("Failed to dequeue capture buffer"); |
| + return; |
| + } |
| + |
| + SendBuffer(buffer); |
| + |
| + if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) < 0) { |
| + SetErrorState("Failed to enqueue capture buffer"); |
| + return; |
| + } |
| + } |
| + |
| + v4l2_task_runner_->PostTask( |
| + FROM_HERE, base::Bind(&V4L2VideoCaptureDelegate::DoCapture, this)); |
| +} |
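A standalone summary of the poll() outcomes handled above (editorial sketch, no Chromium dependencies): a negative result is a failure, zero is a timeout that DoCapture() counts, and POLLIN means a filled buffer is ready for VIDIOC_DQBUF.

#include <poll.h>

// Returns -1 on poll() failure, 0 on timeout, 1 when a frame can be dequeued.
int WaitForFrameSketch(int device_fd, int timeout_ms) {
  pollfd device_pfd = {};
  device_pfd.fd = device_fd;
  device_pfd.events = POLLIN;
  const int result = poll(&device_pfd, 1, timeout_ms);
  if (result < 0)
    return -1;
  if (result == 0)
    return 0;
  return (device_pfd.revents & POLLIN) ? 1 : 0;
}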
| + |
| +void V4L2VideoCaptureDelegate::SetErrorState(const std::string& reason) { |
| + DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); |
| + is_capturing_ = false; |
| + client_->OnError(reason); |
| +} |
| + |
| +V4L2VideoCaptureDelegate::~V4L2VideoCaptureDelegate() { |
| +} |
| + |
| +scoped_refptr<V4L2VideoCaptureDelegate::BufferTracker> |
| +V4L2CaptureDelegateSinglePlane::CreateBufferTracker(int fd, |
| + const v4l2_buffer& buffer) { |
| + return make_scoped_refptr(new BufferTrackerSPlane(fd, buffer)); |
| +} |
| + |
| +void V4L2CaptureDelegateSinglePlane::FillV4L2Format( |
| + v4l2_format* format, |
| + uint32_t width, |
| + uint32_t height, |
| + uint32_t pixelformat_fourcc) { |
| + format->fmt.pix.width = width; |
| + format->fmt.pix.height = height; |
| + format->fmt.pix.pixelformat = pixelformat_fourcc; |
| +} |
| + |
| +void V4L2CaptureDelegateSinglePlane::SendBuffer(const v4l2_buffer& buffer) { |
| + BufferTrackerSPlane* const buffer_tracker = |
| + reinterpret_cast<BufferTrackerSPlane*>( |
| + buffer_tracker_pool()[buffer.index].get()); |
| + DCHECK_EQ(buffer_tracker->planes()[0]->length, buffer.length); |
| + client()->OnIncomingCapturedData( |
| + static_cast<uint8*>(buffer_tracker->planes()[0]->start), |
| + buffer_tracker->planes()[0]->length, |
| + capture_format(), |
| + rotation(), |
| + base::TimeTicks::Now()); |
| +} |
| + |
| +V4L2CaptureDelegateSinglePlane::BufferTrackerSPlane::BufferTrackerSPlane( |
| + int fd, |
| + const v4l2_buffer& buffer) |
| + : BufferTracker(fd, buffer) { |
| + scoped_ptr<Plane> plane(new Plane()); |
| + // Some devices require mmap() to be called with both READ and WRITE. |
| + // See http://crbug.com/178582. |
| + plane->start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, MAP_SHARED, |
| + fd, buffer.m.offset); |
| + if (plane->start == MAP_FAILED) { |
| + DLOG(ERROR) << "Error mmap()ing a V4L2 buffer into userspace"; |
| + return; |
| + } |
| + plane->length = buffer.length; |
| + planes().push_back(plane.Pass()); |
| +} |
| + |
| +scoped_refptr<V4L2VideoCaptureDelegate::BufferTracker> |
| +V4L2CaptureDelegateMultiPlane::CreateBufferTracker(int fd, |
| + const v4l2_buffer& buffer) { |
| + DCHECK_LE(buffer.length, num_planes_); |
| + return make_scoped_refptr(new BufferTrackerMPlane(fd, buffer)); |
| +} |
| + |
| +void V4L2CaptureDelegateMultiPlane::FillV4L2Format( |
| + v4l2_format* format, |
| + uint32_t width, |
| + uint32_t height, |
| + uint32_t pixelformat_fourcc) { |
| + format->fmt.pix_mp.width = width; |
| + format->fmt.pix_mp.height = height; |
| + |
| + fourcc_ = pixelformat_fourcc; |
| + format->fmt.pix_mp.pixelformat = fourcc_; |
| + |
| + num_planes_ = GetNumPlanesForFourCc(fourcc_); |
| + DCHECK_GT(num_planes_, 0u); |
| + DCHECK_LE(num_planes_, static_cast<unsigned long>(VIDEO_MAX_PLANES)); |
| + format->fmt.pix_mp.num_planes = num_planes_; |
| + |
| + v4l2_plane_.reset(new v4l2_plane[num_planes_]); |
|
Pawel Osciak
2015/03/06 10:43:55
We still don't want to call new foo[0] if GetNumPl
mcasas
2015/03/09 21:23:56
Done. I return a bool (false) if this goes south a
|
| +} |
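Sketching the direction agreed on in the thread above (editorial; a hypothetical free-function variant, not the real follow-up patch): report failure for an unknown fourcc so the caller can bail out before any v4l2_plane array is allocated.

// Hypothetical sketch: fills |format| and reports whether the fourcc is
// known, so the caller never reaches a new v4l2_plane[0] allocation.
bool FillV4L2FormatMultiPlaneSketch(v4l2_format* format,
                                    uint32_t width,
                                    uint32_t height,
                                    uint32_t pixelformat_fourcc,
                                    size_t* num_planes_out) {
  const size_t num_planes = GetNumPlanesForFourCc(pixelformat_fourcc);
  if (num_planes == 0)
    return false;  // Unknown fourcc: caller aborts instead of allocating.
  format->fmt.pix_mp.width = width;
  format->fmt.pix_mp.height = height;
  format->fmt.pix_mp.pixelformat = pixelformat_fourcc;
  format->fmt.pix_mp.num_planes = num_planes;
  *num_planes_out = num_planes;
  return true;
}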
| + |
| +void V4L2CaptureDelegateMultiPlane::FinishFillingV4L2Buffer( |
| + v4l2_buffer* buffer) { |
| + buffer->length = num_planes_; |
| + buffer->m.planes = v4l2_plane_.get(); |
| +} |
| + |
| +void V4L2CaptureDelegateMultiPlane::SendBuffer(const v4l2_buffer& buffer) { |
| + DCHECK_EQ(capture_format().pixel_format, PIXEL_FORMAT_I420); |
| + |
| + BufferTrackerMPlane* const buffer_tracker = |
| + reinterpret_cast<BufferTrackerMPlane*>( |
| + buffer_tracker_pool()[buffer.index].get()); |
| + |
| + client()->OnIncomingCapturedYuvData( |
| + static_cast<uint8*>(buffer_tracker->planes()[0]->start), |
| + static_cast<uint8*>(buffer_tracker->planes()[1]->start), |
| + static_cast<uint8*>(buffer_tracker->planes()[2]->start), |
| + buffer_tracker->planes()[0]->length, |
| + buffer_tracker->planes()[1]->length, |
| + buffer_tracker->planes()[2]->length, |
| + capture_format(), |
| + rotation(), |
| + base::TimeTicks::Now()); |
| +} |
| + |
| +V4L2CaptureDelegateMultiPlane::BufferTrackerMPlane::BufferTrackerMPlane( |
| + int fd, |
| + const v4l2_buffer& buffer) |
| + : BufferTracker(fd, buffer) { |
| + for (size_t p = 0; p < buffer.length; ++p) { |
| + scoped_ptr<Plane> plane(new Plane()); |
| + plane->start = mmap(NULL, buffer.m.planes[p].length, PROT_READ | PROT_WRITE, |
| + MAP_SHARED, fd, buffer.m.planes[p].m.mem_offset); |
| + if (plane->start == MAP_FAILED) { |
| + DLOG(ERROR) << "Error mmap()ing a V4L2 buffer into userspace"; |
| + return; |
|
Pawel Osciak
2015/03/06 10:43:55
This way you'd still use the plane as normal if th
mcasas
2015/03/09 21:23:56
Done.
|
| + } |
| + plane->length = buffer.m.planes[p].length; |
| + DVLOG(3) << "Mmap()ed plane #" << p << ", length " << plane->length << "B"; |
| + planes().push_back(plane.Pass()); |
| + } |
| +} |
| + |
| +} // namespace media |