Chromium Code Reviews| OLD | NEW |
|---|---|
| (Empty) | |
| 1 // Copyright 2015 The Chromium Authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 #include "media/video/capture/linux/v4l2_video_capture_delegate.h" | |
| 6 | |
| 7 #include <poll.h> | |
| 8 #include <sys/fcntl.h> | |
| 9 #include <sys/ioctl.h> | |
| 10 #include <sys/mman.h> | |
| 11 | |
| 12 #include "base/bind.h" | |
| 13 #include "base/files/file_enumerator.h" | |
| 14 #include "base/posix/eintr_wrapper.h" | |
| 15 #include "base/strings/stringprintf.h" | |
| 16 #include "media/base/bind_to_current_loop.h" | |
| 17 #include "media/video/capture/linux/video_capture_device_linux.h" | |
| 18 | |
| 19 namespace media { | |
| 20 | |
// Maximum number of video buffers this delegate requests from the driver.
const uint32_t kMaxVideoBuffers = 2;
// Timeout in milliseconds v4l2_thread_ blocks waiting for a frame from the hw.
const int kCaptureTimeoutMs = 200;
// How many consecutive poll() timeouts are tolerated before raising an error.
const int kContinuousTimeoutLimit = 10;
// MJPEG is preferred if the requested width or height is larger than this.
const int kMjpegWidth = 640;
const int kMjpegHeight = 480;
// Typical framerate, in fps, used when the caller requests a zero framerate.
const int kTypicalFramerate = 30;
| 32 | |
| 33 // V4L2 color formats supported by V4L2CaptureDelegate{Single,Multi}Plane | |
| 34 // (depending on |num_planes|). This list is ordered by precedence of use. | |
|
Pawel Osciak
2015/03/17 11:05:25
This suggests a little bit that Splane is for num_
mcasas
2015/03/17 22:01:57
Rewrote it. I don't want to say too much in this
l
| |
| 35 // MJPEG format might be duplicated on top of in GetListOfUsableFourCcs(). | |
| 36 static const struct{ | |
| 37 uint32_t fourcc; | |
| 38 size_t num_planes; | |
| 39 VideoPixelFormat pixel_format; | |
| 40 } kSupportedFormatsAndPlanarity[] = { | |
| 41 {V4L2_PIX_FMT_YUV420, 1, PIXEL_FORMAT_I420}, | |
| 42 {V4L2_PIX_FMT_YUYV, 1, PIXEL_FORMAT_YUY2}, | |
| 43 {V4L2_PIX_FMT_UYVY, 1, PIXEL_FORMAT_UYVY}, | |
| 44 // TODO(mcasas): add V4L2_PIX_FMT_YVU420M when available in bots. | |
| 45 {V4L2_PIX_FMT_YUV420M, 3, PIXEL_FORMAT_I420}, | |
| 46 // MJPEG is usually sitting fairly low since we don't want to have to decode. | |
|
Pawel Osciak
2015/03/17 11:05:25
This is slightly misleading. We *have* to choose i
mcasas
2015/03/17 22:01:57
Done.
| |
| 47 {V4L2_PIX_FMT_MJPEG, 1, PIXEL_FORMAT_MJPEG}, | |
| 48 // JPEG works as MJPEG on some gspca webcams from field reports, see | |
| 49 // https://code.google.com/p/webrtc/issues/detail?id=529, put it as the least | |
| 50 // preferred format. | |
| 51 {V4L2_PIX_FMT_JPEG, 1, PIXEL_FORMAT_MJPEG}, | |
| 52 }; | |
| 53 | |
// Returns the input |fourcc| as a four character std::string, emitting bytes
// from least to most significant to match V4L2's fourcc packing (e.g.
// V4L2_PIX_FMT_YUYV --> "YUYV"). Builds the string directly instead of going
// through printf-style "%c" varargs, which would pass unsigned promoted
// arguments where an int is expected.
static std::string FourccToString(uint32_t fourcc) {
  std::string result(4, '\0');
  for (size_t i = 0; i < 4; ++i)
    result[i] = static_cast<char>((fourcc >> (8 * i)) & 0xFF);
  return result;
}
| 59 | |
| 60 static size_t GetNumPlanesForFourCc(uint32_t fourcc) { | |
| 61 for (const auto& fourcc_and_pixel_format : kSupportedFormatsAndPlanarity) { | |
| 62 if (fourcc_and_pixel_format.fourcc == fourcc) | |
| 63 return fourcc_and_pixel_format.num_planes; | |
| 64 } | |
| 65 NOTREACHED() << "Unknown fourcc " << FourccToString(fourcc); | |
| 66 return 0; | |
| 67 } | |
| 68 | |
| 69 // Class keeping track of SPLANE/MPLANE V4L2 buffers, mmap()ed on construction | |
| 70 // and munmap()ed on destruction. Destruction is syntactically equal for | |
| 71 // S/MPLANE but not construction, so this is implemented in derived classes. | |
| 72 // Internally it has a vector of planes, which for SPLANE will contain only | |
| 73 // one element. | |
| 74 class V4L2VideoCaptureDelegate::BufferTracker | |
| 75 : public base::RefCounted<BufferTracker> { | |
| 76 public: | |
| 77 // Abstract method to mmap() given |fd| according to |buffer|, planarity | |
| 78 // specific. | |
| 79 virtual bool Init(int fd, const v4l2_buffer& buffer) = 0; | |
| 80 | |
| 81 uint8_t* const GetPlaneStart(size_t plane) const { | |
| 82 return static_cast<uint8_t* const>(planes_[plane].start); | |
| 83 } | |
| 84 size_t GetPlaneLength(size_t plane) const { return planes_[plane].length; } | |
| 85 | |
| 86 protected: | |
| 87 friend class base::RefCounted<BufferTracker>; | |
| 88 virtual ~BufferTracker(); | |
| 89 // Adds a given mmap()ed plane to internal tracking list. | |
|
Pawel Osciak
2015/03/17 11:05:25
s/internal tracking list/planes_/
mcasas
2015/03/17 22:01:57
Done.
| |
| 90 void AddMmapedPlane(void* const start, size_t length); | |
| 91 | |
| 92 private: | |
| 93 struct Plane { | |
| 94 void* start; | |
| 95 size_t length; | |
| 96 }; | |
| 97 std::vector<Plane> planes_; | |
| 98 }; | |
| 99 | |
| 100 // V4L2 specifics for SPLANE API. | |
| 101 class V4L2CaptureDelegateSinglePlane final : public V4L2VideoCaptureDelegate { | |
| 102 public: | |
| 103 V4L2CaptureDelegateSinglePlane( | |
| 104 const VideoCaptureDevice::Name& device_name, | |
| 105 const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner, | |
| 106 int power_line_frequency) | |
| 107 : V4L2VideoCaptureDelegate(device_name, | |
| 108 v4l2_task_runner, | |
| 109 power_line_frequency) {} | |
| 110 | |
| 111 private: | |
| 112 // BufferTracker derivation to implement construction semantics for SPLANE. | |
| 113 class BufferTrackerSPlane final : public BufferTracker { | |
| 114 public: | |
| 115 bool Init(int fd, const v4l2_buffer& buffer) override; | |
| 116 | |
| 117 private: | |
| 118 ~BufferTrackerSPlane() override {} | |
| 119 }; | |
| 120 | |
| 121 ~V4L2CaptureDelegateSinglePlane() override {} | |
| 122 | |
| 123 // V4L2VideoCaptureDelegate virtual methods implementation. | |
| 124 scoped_refptr<BufferTracker> CreateBufferTracker() override; | |
| 125 bool FillV4L2Format(v4l2_format* format, | |
| 126 uint32_t width, | |
| 127 uint32_t height, | |
| 128 uint32_t pixelformat_fourcc) override; | |
| 129 void FinishFillingV4L2Buffer(v4l2_buffer* buffer) const override; | |
| 130 void SendBuffer(const scoped_refptr<BufferTracker>& buffer_tracker) override; | |
| 131 }; | |
| 132 | |
| 133 // V4L2 specifics for MPLANE API. | |
| 134 class V4L2CaptureDelegateMultiPlane final : public V4L2VideoCaptureDelegate { | |
| 135 public: | |
| 136 V4L2CaptureDelegateMultiPlane( | |
| 137 const VideoCaptureDevice::Name& device_name, | |
| 138 const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner, | |
| 139 int power_line_frequency) | |
| 140 : V4L2VideoCaptureDelegate(device_name, | |
| 141 v4l2_task_runner, | |
| 142 power_line_frequency), | |
| 143 num_v4l2_planes_(0) {} | |
| 144 | |
| 145 private: | |
| 146 // BufferTracker derivation to implement construction semantics for MPLANE. | |
| 147 class BufferTrackerMPlane final : public BufferTracker { | |
| 148 public: | |
| 149 bool Init(int fd, const v4l2_buffer& buffer) override; | |
| 150 | |
| 151 private: | |
| 152 ~BufferTrackerMPlane() override {} | |
| 153 }; | |
| 154 | |
| 155 ~V4L2CaptureDelegateMultiPlane() override {} | |
| 156 | |
| 157 // V4L2VideoCaptureDelegate virtual methods implementation. | |
| 158 scoped_refptr<BufferTracker> CreateBufferTracker() override; | |
| 159 bool FillV4L2Format(v4l2_format* format, | |
| 160 uint32_t width, | |
| 161 uint32_t height, | |
| 162 uint32_t pixelformat_fourcc) override; | |
| 163 void FinishFillingV4L2Buffer(v4l2_buffer* buffer) const override; | |
| 164 void SendBuffer(const scoped_refptr<BufferTracker>& buffer_tracker) override; | |
| 165 | |
| 166 // Scoped_ptr to allocate and track as many v4l2_plane structs as planes, | |
| 167 // needed inside v4l2_buffer. | |
| 168 scoped_ptr<struct v4l2_plane[]> v4l2_planes_; | |
|
Pawel Osciak
2015/03/17 11:05:25
This could be a std::vector<struct v4l2_plane> v4l
mcasas
2015/03/17 22:01:57
Done.
| |
| 169 size_t num_v4l2_planes_; | |
| 170 }; | |
| 171 | |
| 172 // static | |
| 173 scoped_refptr<V4L2VideoCaptureDelegate> | |
| 174 V4L2VideoCaptureDelegate::CreateV4L2VideoCaptureDelegate( | |
| 175 const VideoCaptureDevice::Name& device_name, | |
| 176 const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner, | |
| 177 int power_line_frequency) { | |
| 178 switch (device_name.capture_api_type()) { | |
| 179 case VideoCaptureDevice::Name::V4L2_SINGLE_PLANE: | |
| 180 return make_scoped_refptr(new V4L2CaptureDelegateSinglePlane( | |
| 181 device_name, v4l2_task_runner, power_line_frequency)); | |
| 182 case VideoCaptureDevice::Name::V4L2_MULTI_PLANE: | |
| 183 return make_scoped_refptr(new V4L2CaptureDelegateMultiPlane( | |
| 184 device_name, v4l2_task_runner, power_line_frequency)); | |
| 185 default: | |
| 186 NOTIMPLEMENTED() << "Unknown V4L2 capture API type"; | |
| 187 return scoped_refptr<V4L2VideoCaptureDelegate>(); | |
| 188 } | |
| 189 } | |
| 190 | |
| 191 // static | |
| 192 VideoPixelFormat V4L2VideoCaptureDelegate::V4l2FourCcToChromiumPixelFormat( | |
| 193 uint32_t v4l2_fourcc) { | |
| 194 for (const auto& fourcc_and_pixel_format : kSupportedFormatsAndPlanarity) { | |
| 195 if (fourcc_and_pixel_format.fourcc == v4l2_fourcc) | |
| 196 return fourcc_and_pixel_format.pixel_format; | |
| 197 } | |
| 198 DVLOG(1) << "Unsupported pixel format: " << FourccToString(v4l2_fourcc); | |
|
Pawel Osciak
2015/03/17 11:05:25
We use NOTREACHED above inV4L2VideoCaptureDelegate
mcasas
2015/03/17 22:01:57
V4l2FourCcToChromiumPixelFormat() is used during f
| |
| 199 return PIXEL_FORMAT_UNKNOWN; | |
| 200 } | |
| 201 | |
| 202 // static | |
| 203 std::list<uint32_t> V4L2VideoCaptureDelegate::GetListOfUsableFourCcs( | |
| 204 bool prefer_mjpeg) { | |
| 205 std::list<uint32_t> supported_formats; | |
| 206 for (const auto& format : kSupportedFormatsAndPlanarity) | |
| 207 supported_formats.push_back(format.fourcc); | |
| 208 | |
| 209 // Duplicate MJPEG on top of the list depending on |prefer_mjpeg|. | |
| 210 if (prefer_mjpeg) | |
| 211 supported_formats.push_front(V4L2_PIX_FMT_MJPEG); | |
| 212 | |
| 213 return supported_formats; | |
| 214 } | |
| 215 | |
| 216 V4L2VideoCaptureDelegate::BufferTracker::~BufferTracker() { | |
| 217 for (const auto& plane : planes_) { | |
| 218 if (plane.start == nullptr) | |
| 219 continue; | |
| 220 const int result = munmap(plane.start, plane.length); | |
| 221 PLOG_IF(ERROR, result < 0) << "Error munmap()ing V4L2 buffer"; | |
| 222 } | |
| 223 } | |
| 224 | |
| 225 void V4L2VideoCaptureDelegate::BufferTracker::AddMmapedPlane(void* const start, | |
| 226 size_t length) { | |
| 227 Plane plane; | |
| 228 plane.start = start; | |
| 229 plane.length = length; | |
| 230 planes_.push_back(plane); | |
| 231 } | |
| 232 | |
| 233 V4L2VideoCaptureDelegate::V4L2VideoCaptureDelegate( | |
| 234 const VideoCaptureDevice::Name& device_name, | |
| 235 const scoped_refptr<base::SingleThreadTaskRunner>& v4l2_task_runner, | |
| 236 int power_line_frequency) | |
| 237 : capture_type_((device_name.capture_api_type() == | |
| 238 VideoCaptureDevice::Name::V4L2_SINGLE_PLANE) | |
| 239 ? V4L2_BUF_TYPE_VIDEO_CAPTURE | |
| 240 : V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE), | |
| 241 v4l2_task_runner_(v4l2_task_runner), | |
| 242 device_name_(device_name), | |
| 243 power_line_frequency_(power_line_frequency), | |
| 244 is_capturing_(false), | |
| 245 timeout_count_(0), | |
| 246 rotation_(0) { | |
| 247 } | |
| 248 | |
| 249 void V4L2VideoCaptureDelegate::AllocateAndStart( | |
| 250 int width, | |
| 251 int height, | |
| 252 float frame_rate, | |
| 253 scoped_ptr<VideoCaptureDevice::Client> client) { | |
| 254 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); | |
| 255 DCHECK(client); | |
| 256 client_ = client.Pass(); | |
| 257 | |
| 258 // Need to open camera with O_RDWR after Linux kernel 3.3. | |
| 259 device_fd_.reset(HANDLE_EINTR(open(device_name_.id().c_str(), O_RDWR))); | |
| 260 if (!device_fd_.is_valid()) { | |
| 261 SetErrorState("Failed to open V4L2 device driver file."); | |
| 262 return; | |
| 263 } | |
| 264 | |
| 265 v4l2_capability cap = {}; | |
| 266 if (!((HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QUERYCAP, &cap)) == 0) && | |
| 267 ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE || | |
| 268 cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) && | |
| 269 !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT) && | |
| 270 !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT_MPLANE)))) { | |
| 271 device_fd_.reset(); | |
| 272 SetErrorState("This is not a V4L2 video capture device"); | |
| 273 return; | |
| 274 } | |
| 275 | |
| 276 // Get supported video formats in preferred order. | |
| 277 // For large resolutions, favour mjpeg over raw formats. | |
| 278 const std::list<uint32_t>& desired_v4l2_formats = | |
| 279 GetListOfUsableFourCcs(width > kMjpegWidth || height > kMjpegHeight); | |
| 280 std::list<uint32_t>::const_iterator best = desired_v4l2_formats.end(); | |
| 281 | |
| 282 v4l2_fmtdesc fmtdesc = {}; | |
| 283 fmtdesc.type = capture_type_; | |
| 284 for (; HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_ENUM_FMT, &fmtdesc)) == 0; | |
| 285 ++fmtdesc.index) { | |
| 286 best = std::find(desired_v4l2_formats.begin(), best, fmtdesc.pixelformat); | |
| 287 } | |
| 288 if (best == desired_v4l2_formats.end()) { | |
| 289 SetErrorState("Failed to find a supported camera format."); | |
| 290 return; | |
| 291 } | |
| 292 | |
| 293 DVLOG(1) << "Chosen pixel format is " << FourccToString(*best); | |
| 294 | |
| 295 v4l2_format video_fmt = {}; | |
| 296 video_fmt.type = capture_type_; | |
| 297 if (!FillV4L2Format(&video_fmt, width, height, *best)) { | |
| 298 SetErrorState("Failed filling in V4L2 Format"); | |
| 299 return; | |
| 300 } | |
| 301 | |
| 302 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_FMT, &video_fmt)) < 0) { | |
| 303 SetErrorState("Failed to set video capture format"); | |
| 304 return; | |
| 305 } | |
| 306 const VideoPixelFormat pixel_format = | |
| 307 V4l2FourCcToChromiumPixelFormat(video_fmt.fmt.pix.pixelformat); | |
| 308 if (pixel_format == PIXEL_FORMAT_UNKNOWN) { | |
| 309 SetErrorState("Unsupported pixel format"); | |
| 310 return; | |
| 311 } | |
| 312 | |
| 313 // Set capture framerate in the form of capture interval. | |
| 314 v4l2_streamparm streamparm = {}; | |
| 315 streamparm.type = capture_type_; | |
| 316 // The following line checks that the driver knows about framerate get/set. | |
| 317 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_G_PARM, &streamparm)) >= 0) { | |
| 318 // Now check if the device is able to accept a capture framerate set. | |
| 319 if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) { | |
| 320 // |frame_rate| is float, approximate by a fraction. | |
| 321 streamparm.parm.capture.timeperframe.numerator = | |
| 322 media::kFrameRatePrecision; | |
| 323 streamparm.parm.capture.timeperframe.denominator = | |
| 324 (frame_rate) ? (frame_rate * media::kFrameRatePrecision) | |
| 325 : (kTypicalFramerate * media::kFrameRatePrecision); | |
| 326 | |
| 327 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_PARM, &streamparm)) < | |
| 328 0) { | |
| 329 SetErrorState("Failed to set camera framerate"); | |
| 330 return; | |
| 331 } | |
| 332 DVLOG(2) << "Actual camera driverframerate: " | |
| 333 << streamparm.parm.capture.timeperframe.denominator << "/" | |
| 334 << streamparm.parm.capture.timeperframe.numerator; | |
| 335 } | |
| 336 } | |
| 337 // TODO(mcasas): what should be done if the camera driver does not allow | |
| 338 // framerate configuration, or the actual one is different from the desired? | |
| 339 | |
| 340 // Set anti-banding/anti-flicker to 50/60Hz. May fail due to not supported | |
| 341 // operation (|errno| == EINVAL in this case) or plain failure. | |
| 342 if ((power_line_frequency_ == V4L2_CID_POWER_LINE_FREQUENCY_50HZ) || | |
| 343 (power_line_frequency_ == V4L2_CID_POWER_LINE_FREQUENCY_60HZ) || | |
| 344 (power_line_frequency_ == V4L2_CID_POWER_LINE_FREQUENCY_AUTO)) { | |
| 345 struct v4l2_control control = {}; | |
| 346 control.id = V4L2_CID_POWER_LINE_FREQUENCY; | |
| 347 control.value = power_line_frequency_; | |
| 348 const int retval = | |
| 349 HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_CTRL, &control)); | |
| 350 if (retval != 0) { | |
| 351 DVLOG_IF(1, retval == EINVAL) << "Error setting power line flicker " | |
| 352 << "removal, unsupported operation or frequency"; | |
|
Pawel Osciak
2015/03/17 11:05:25
s/flicker removal/frequency/
mcasas
2015/03/17 22:01:57
Done.
| |
| 353 DVLOG_IF(1, retval != EINVAL) << "Mysterious error while setting power " | |
|
Pawel Osciak
2015/03/17 11:05:25
I don't think this is needed.
mcasas
2015/03/17 22:01:57
Done.
| |
| 354 << "line flicker removal"; | |
| 355 } | |
| 356 } | |
| 357 | |
| 358 capture_format_.frame_size.SetSize(video_fmt.fmt.pix.width, | |
| 359 video_fmt.fmt.pix.height); | |
| 360 capture_format_.frame_rate = frame_rate; | |
| 361 capture_format_.pixel_format = pixel_format; | |
| 362 | |
| 363 v4l2_requestbuffers r_buffer = {}; | |
| 364 r_buffer.type = capture_type_; | |
| 365 r_buffer.memory = V4L2_MEMORY_MMAP; | |
| 366 r_buffer.count = kMaxVideoBuffers; | |
| 367 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) < 0) { | |
| 368 SetErrorState("Error requesting MMAP buffers from V4L2"); | |
| 369 return; | |
| 370 } | |
| 371 DCHECK_EQ(r_buffer.count, kMaxVideoBuffers); | |
| 372 for (unsigned int i = 0; i < r_buffer.count; ++i) { | |
| 373 if (!MapAndQueueBuffer(i)) { | |
| 374 SetErrorState("Allocate buffer failed"); | |
| 375 return; | |
| 376 } | |
| 377 } | |
| 378 | |
| 379 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMON, &capture_type_)) | |
| 380 < 0) { | |
| 381 SetErrorState("VIDIOC_STREAMON failed"); | |
| 382 return; | |
| 383 } | |
| 384 | |
| 385 is_capturing_ = true; | |
| 386 // Post task to start fetching frames from v4l2. | |
| 387 v4l2_task_runner_->PostTask( | |
| 388 FROM_HERE, base::Bind(&V4L2VideoCaptureDelegate::DoCapture, this)); | |
| 389 } | |
| 390 | |
| 391 void V4L2VideoCaptureDelegate::StopAndDeAllocate() { | |
| 392 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); | |
| 393 // The order is important: stop streaming, clear |buffer_pool_|, | |
| 394 // thus munmap()ing the v4l2_buffers, and then return them to the OS. | |
| 395 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMOFF, &capture_type_)) | |
| 396 < 0) { | |
| 397 SetErrorState("VIDIOC_STREAMOFF failed"); | |
| 398 return; | |
| 399 } | |
| 400 | |
| 401 buffer_tracker_pool_.clear(); | |
| 402 | |
| 403 v4l2_requestbuffers r_buffer = {}; | |
| 404 r_buffer.type = capture_type_; | |
| 405 r_buffer.memory = V4L2_MEMORY_MMAP; | |
| 406 r_buffer.count = 0; | |
| 407 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) < 0) | |
| 408 SetErrorState("Failed to VIDIOC_REQBUFS with count = 0"); | |
| 409 | |
| 410 // At this point we can close the device. | |
| 411 // This is also needed for correctly changing settings later via VIDIOC_S_FMT. | |
| 412 device_fd_.reset(); | |
| 413 is_capturing_ = false; | |
| 414 client_.reset(); | |
| 415 } | |
| 416 | |
| 417 void V4L2VideoCaptureDelegate::SetRotation(int rotation) { | |
| 418 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); | |
| 419 DCHECK(rotation >= 0 && rotation < 360 && rotation % 90 == 0); | |
| 420 rotation_ = rotation; | |
| 421 } | |
| 422 | |
| 423 bool V4L2VideoCaptureDelegate::MapAndQueueBuffer(int index) { | |
| 424 v4l2_buffer buffer; | |
| 425 FillV4L2Buffer(&buffer, index); | |
| 426 | |
| 427 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QUERYBUF, &buffer)) < 0) { | |
| 428 DLOG(ERROR) << "Error querying status of a MMAP V4L2 buffer"; | |
| 429 return false; | |
| 430 } | |
| 431 | |
| 432 const scoped_refptr<BufferTracker>& buffer_tracker = CreateBufferTracker(); | |
| 433 if (!buffer_tracker->Init(device_fd_.get(), buffer)) { | |
| 434 DLOG(ERROR) << "Error creating BufferTracker"; | |
| 435 return false; | |
| 436 } | |
| 437 buffer_tracker_pool_.push_back(buffer_tracker); | |
| 438 | |
| 439 // Enqueue the buffer in the drivers incoming queue. | |
| 440 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) < 0) { | |
| 441 DLOG(ERROR) << "Error enqueuing a V4L2 buffer back into the driver"; | |
| 442 return false; | |
| 443 } | |
| 444 return true; | |
| 445 } | |
| 446 | |
| 447 void V4L2VideoCaptureDelegate::FillV4L2Buffer(v4l2_buffer* buffer, | |
| 448 int i) const { | |
| 449 memset(buffer, 0, sizeof(*buffer)); | |
| 450 buffer->memory = V4L2_MEMORY_MMAP; | |
| 451 buffer->index = i; | |
| 452 FinishFillingV4L2Buffer(buffer); | |
| 453 } | |
| 454 | |
| 455 void V4L2VideoCaptureDelegate::DoCapture() { | |
| 456 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); | |
| 457 if (!is_capturing_) | |
| 458 return; | |
| 459 | |
| 460 pollfd device_pfd = {}; | |
| 461 device_pfd.fd = device_fd_.get(); | |
| 462 device_pfd.events = POLLIN; | |
| 463 const int result = HANDLE_EINTR(poll(&device_pfd, 1, kCaptureTimeoutMs)); | |
| 464 if (result < 0) { | |
| 465 SetErrorState("Poll failed"); | |
| 466 return; | |
| 467 } | |
| 468 // Check if poll() timed out; track the amount of times it did in a row and | |
| 469 // throw an error if it times out too many times. | |
| 470 if (result == 0) { | |
| 471 timeout_count_++; | |
| 472 if (timeout_count_ >= kContinuousTimeoutLimit) { | |
| 473 SetErrorState("Multiple continuous timeouts while read-polling."); | |
| 474 timeout_count_ = 0; | |
| 475 return; | |
| 476 } | |
| 477 } else { | |
| 478 timeout_count_ = 0; | |
| 479 } | |
| 480 | |
| 481 // Deenqueue, send and reenqueue a buffer if the driver has filled one in. | |
| 482 if (device_pfd.revents & POLLIN) { | |
| 483 v4l2_buffer buffer; | |
| 484 FillV4L2Buffer(&buffer, 0); | |
| 485 FinishFillingV4L2Buffer(&buffer); | |
|
Pawel Osciak
2015/03/17 11:05:25
FillV4L2Buffer() already calls this.
mcasas
2015/03/17 22:01:57
Done.
| |
| 486 | |
| 487 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_DQBUF, &buffer)) < 0) { | |
| 488 SetErrorState("Failed to dequeue capture buffer"); | |
| 489 return; | |
| 490 } | |
| 491 | |
| 492 SendBuffer(buffer_tracker_pool_[buffer.index]); | |
| 493 | |
| 494 if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) < 0) { | |
| 495 SetErrorState("Failed to enqueue capture buffer"); | |
| 496 return; | |
| 497 } | |
| 498 } | |
| 499 | |
| 500 v4l2_task_runner_->PostTask( | |
| 501 FROM_HERE, base::Bind(&V4L2VideoCaptureDelegate::DoCapture, this)); | |
| 502 } | |
| 503 | |
| 504 void V4L2VideoCaptureDelegate::SetErrorState(const std::string& reason) { | |
| 505 DCHECK(v4l2_task_runner_->BelongsToCurrentThread()); | |
| 506 is_capturing_ = false; | |
| 507 client_->OnError(reason); | |
| 508 } | |
| 509 | |
| 510 V4L2VideoCaptureDelegate::~V4L2VideoCaptureDelegate() { | |
| 511 } | |
| 512 | |
| 513 scoped_refptr<V4L2VideoCaptureDelegate::BufferTracker> | |
| 514 V4L2CaptureDelegateSinglePlane::CreateBufferTracker() { | |
| 515 return make_scoped_refptr(new BufferTrackerSPlane()); | |
| 516 } | |
| 517 | |
| 518 bool V4L2CaptureDelegateSinglePlane::FillV4L2Format( | |
| 519 v4l2_format* format, | |
| 520 uint32_t width, | |
| 521 uint32_t height, | |
| 522 uint32_t pixelformat_fourcc) { | |
| 523 format->fmt.pix.width = width; | |
| 524 format->fmt.pix.height = height; | |
| 525 format->fmt.pix.pixelformat = pixelformat_fourcc; | |
| 526 return true; | |
| 527 } | |
| 528 | |
| 529 void V4L2CaptureDelegateSinglePlane::FinishFillingV4L2Buffer( | |
| 530 v4l2_buffer* buffer) const { | |
| 531 buffer->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | |
| 532 } | |
| 533 | |
| 534 void V4L2CaptureDelegateSinglePlane::SendBuffer( | |
| 535 const scoped_refptr<BufferTracker>& buffer_tracker) { | |
| 536 client()->OnIncomingCapturedData( | |
| 537 buffer_tracker->GetPlaneStart(0), | |
| 538 buffer_tracker->GetPlaneLength(0), | |
| 539 capture_format(), | |
| 540 rotation(), | |
| 541 base::TimeTicks::Now()); | |
| 542 } | |
| 543 | |
| 544 bool V4L2CaptureDelegateSinglePlane::BufferTrackerSPlane::Init( | |
| 545 int fd, | |
| 546 const v4l2_buffer& buffer) { | |
| 547 // Some devices require mmap() to be called with both READ and WRITE. | |
| 548 // See http://crbug.com/178582. | |
| 549 void* const start =mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, | |
|
Pawel Osciak
2015/03/17 11:05:25
s/=mmap/= mmap/
mcasas
2015/03/17 22:01:57
Done. (Actually already OK on PS7).
| |
| 550 MAP_SHARED, fd, buffer.m.offset); | |
| 551 if (start == MAP_FAILED) { | |
| 552 DLOG(ERROR) << "Error mmap()ing a V4L2 buffer into userspace"; | |
| 553 return false; | |
| 554 } | |
| 555 const size_t length = buffer.length; | |
| 556 AddMmapedPlane(start, length); | |
|
Pawel Osciak
2015/03/17 11:05:25
Just AddMmappedPlane(start, buffer.length)
mcasas
2015/03/17 22:01:57
Done.
| |
| 557 return true; | |
| 558 } | |
| 559 | |
| 560 scoped_refptr<V4L2VideoCaptureDelegate::BufferTracker> | |
| 561 V4L2CaptureDelegateMultiPlane::CreateBufferTracker() { | |
| 562 return make_scoped_refptr(new BufferTrackerMPlane()); | |
| 563 } | |
| 564 | |
| 565 bool V4L2CaptureDelegateMultiPlane::FillV4L2Format( | |
| 566 v4l2_format* format, | |
| 567 uint32_t width, | |
| 568 uint32_t height, | |
| 569 uint32_t pixelformat_fourcc) { | |
| 570 format->fmt.pix_mp.width = width; | |
| 571 format->fmt.pix_mp.height = height; | |
| 572 format->fmt.pix_mp.pixelformat = pixelformat_fourcc; | |
| 573 | |
| 574 num_v4l2_planes_ = GetNumPlanesForFourCc(pixelformat_fourcc); | |
| 575 if (num_v4l2_planes_ == 0u) | |
| 576 return false; | |
| 577 DCHECK_LE(num_v4l2_planes_, static_cast<size_t>(VIDEO_MAX_PLANES)); | |
| 578 format->fmt.pix_mp.num_planes = num_v4l2_planes_; | |
| 579 | |
| 580 v4l2_planes_.reset(new v4l2_plane[num_v4l2_planes_]); | |
| 581 return true; | |
| 582 } | |
| 583 | |
| 584 void V4L2CaptureDelegateMultiPlane::FinishFillingV4L2Buffer( | |
| 585 v4l2_buffer* buffer) const { | |
| 586 buffer->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
| 587 buffer->length = num_v4l2_planes_; | |
| 588 buffer->m.planes = v4l2_planes_.get(); | |
| 589 } | |
| 590 | |
| 591 void V4L2CaptureDelegateMultiPlane::SendBuffer( | |
| 592 const scoped_refptr<BufferTracker>& buffer_tracker) { | |
| 593 DCHECK_EQ(capture_format().pixel_format, PIXEL_FORMAT_I420); | |
| 594 client()->OnIncomingCapturedYuvData(buffer_tracker->GetPlaneStart(0), | |
| 595 buffer_tracker->GetPlaneStart(1), | |
| 596 buffer_tracker->GetPlaneStart(2), | |
| 597 buffer_tracker->GetPlaneLength(0), | |
| 598 buffer_tracker->GetPlaneLength(1), | |
| 599 buffer_tracker->GetPlaneLength(2), | |
| 600 capture_format(), | |
| 601 rotation(), | |
| 602 base::TimeTicks::Now()); | |
| 603 } | |
| 604 | |
| 605 bool V4L2CaptureDelegateMultiPlane::BufferTrackerMPlane::Init( | |
| 606 int fd, | |
| 607 const v4l2_buffer& buffer) { | |
| 608 for (size_t p = 0; p < buffer.length; ++p) { | |
| 609 void* const start = | |
| 610 mmap(NULL, buffer.m.planes[p].length, PROT_READ | PROT_WRITE, | |
| 611 MAP_SHARED, fd, buffer.m.planes[p].m.mem_offset); | |
| 612 if (start == MAP_FAILED) { | |
| 613 DLOG(ERROR) << "Error mmap()ing a V4L2 buffer into userspace"; | |
| 614 return false; | |
| 615 } | |
| 616 const size_t length = buffer.m.planes[p].length; | |
| 617 DVLOG(3) << "Mmap()ed plane #" << p << ", length " << length << "B"; | |
| 618 AddMmapedPlane(start, length); | |
|
Pawel Osciak
2015/03/17 11:05:25
AddMmappedPlane(start, buffer.m.planes[p].length);
mcasas
2015/03/17 22:01:57
Done.
| |
| 619 } | |
| 620 return true; | |
| 621 } | |
| 622 | |
| 623 } // namespace media | |
| OLD | NEW |